From 7bbfcf559b8a917b964357abbecb8e9863dac86f Mon Sep 17 00:00:00 2001 From: harishwarrior Date: Tue, 22 Jun 2021 19:46:21 +0530 Subject: [PATCH 01/21] downgraded pycrypto --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index b7c4cfc..04a61e7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ -requests==2.11.1 +owngrequests==2.11.1 gcloud==0.17.0 oauth2client==3.0.0 requests-toolbelt==0.7.0 python-jwt==2.0.1 -pycrypto==2.6.1 \ No newline at end of file +pycryptodome==3.10.1 \ No newline at end of file From 5b34b82a7aa0ebfc94dd1a10e502277c3c5a3590 Mon Sep 17 00:00:00 2001 From: harishwarrior Date: Tue, 22 Jun 2021 20:34:26 +0530 Subject: [PATCH 02/21] fix 2 --- env/Lib/site-packages/Crypto/Cipher/AES.py | 250 + env/Lib/site-packages/Crypto/Cipher/AES.pyi | 47 + env/Lib/site-packages/Crypto/Cipher/ARC2.py | 175 + env/Lib/site-packages/Crypto/Cipher/ARC2.pyi | 35 + env/Lib/site-packages/Crypto/Cipher/ARC4.py | 137 + env/Lib/site-packages/Crypto/Cipher/ARC4.pyi | 16 + .../site-packages/Crypto/Cipher/Blowfish.py | 159 + .../site-packages/Crypto/Cipher/Blowfish.pyi | 35 + env/Lib/site-packages/Crypto/Cipher/CAST.py | 159 + env/Lib/site-packages/Crypto/Cipher/CAST.pyi | 35 + .../site-packages/Crypto/Cipher/ChaCha20.py | 287 + .../site-packages/Crypto/Cipher/ChaCha20.pyi | 25 + .../Crypto/Cipher/ChaCha20_Poly1305.py | 336 + .../Crypto/Cipher/ChaCha20_Poly1305.pyi | 28 + env/Lib/site-packages/Crypto/Cipher/DES.py | 158 + env/Lib/site-packages/Crypto/Cipher/DES.pyi | 35 + env/Lib/site-packages/Crypto/Cipher/DES3.py | 187 + env/Lib/site-packages/Crypto/Cipher/DES3.pyi | 37 + .../site-packages/Crypto/Cipher/PKCS1_OAEP.py | 239 + .../Crypto/Cipher/PKCS1_OAEP.pyi | 35 + .../site-packages/Crypto/Cipher/PKCS1_v1_5.py | 199 + .../Crypto/Cipher/PKCS1_v1_5.pyi | 17 + .../site-packages/Crypto/Cipher/Salsa20.py | 167 + .../site-packages/Crypto/Cipher/Salsa20.pyi | 27 + 
env/Lib/site-packages/Crypto/Cipher/_ARC4.pyd | Bin 0 -> 11264 bytes .../Crypto/Cipher/_EKSBlowfish.py | 131 + .../Crypto/Cipher/_EKSBlowfish.pyi | 15 + .../site-packages/Crypto/Cipher/_Salsa20.pyd | Bin 0 -> 13824 bytes .../site-packages/Crypto/Cipher/__init__.py | 79 + .../site-packages/Crypto/Cipher/__init__.pyi | 0 .../site-packages/Crypto/Cipher/_chacha20.pyd | Bin 0 -> 13312 bytes .../site-packages/Crypto/Cipher/_mode_cbc.py | 293 + .../site-packages/Crypto/Cipher/_mode_cbc.pyi | 25 + .../site-packages/Crypto/Cipher/_mode_ccm.py | 650 ++ .../site-packages/Crypto/Cipher/_mode_ccm.pyi | 47 + .../site-packages/Crypto/Cipher/_mode_cfb.py | 293 + .../site-packages/Crypto/Cipher/_mode_cfb.pyi | 26 + .../site-packages/Crypto/Cipher/_mode_ctr.py | 393 + .../site-packages/Crypto/Cipher/_mode_ctr.pyi | 27 + .../site-packages/Crypto/Cipher/_mode_eax.py | 408 + .../site-packages/Crypto/Cipher/_mode_eax.pyi | 45 + .../site-packages/Crypto/Cipher/_mode_ecb.py | 220 + .../site-packages/Crypto/Cipher/_mode_ecb.pyi | 19 + .../site-packages/Crypto/Cipher/_mode_gcm.py | 620 ++ .../site-packages/Crypto/Cipher/_mode_gcm.pyi | 45 + .../site-packages/Crypto/Cipher/_mode_ocb.py | 525 ++ .../site-packages/Crypto/Cipher/_mode_ocb.pyi | 36 + .../site-packages/Crypto/Cipher/_mode_ofb.py | 282 + .../site-packages/Crypto/Cipher/_mode_ofb.pyi | 25 + .../Crypto/Cipher/_mode_openpgp.py | 206 + .../Crypto/Cipher/_mode_openpgp.pyi | 20 + .../site-packages/Crypto/Cipher/_mode_siv.py | 392 + .../site-packages/Crypto/Cipher/_mode_siv.pyi | 38 + .../site-packages/Crypto/Cipher/_raw_aes.pyd | Bin 0 -> 35840 bytes .../Crypto/Cipher/_raw_aesni.pyd | Bin 0 -> 15360 bytes .../site-packages/Crypto/Cipher/_raw_arc2.pyd | Bin 0 -> 16384 bytes .../Crypto/Cipher/_raw_blowfish.pyd | Bin 0 -> 20480 bytes .../site-packages/Crypto/Cipher/_raw_cast.pyd | Bin 0 -> 24576 bytes .../site-packages/Crypto/Cipher/_raw_cbc.pyd | Bin 0 -> 12288 bytes .../site-packages/Crypto/Cipher/_raw_cfb.pyd | Bin 0 -> 13312 bytes 
.../site-packages/Crypto/Cipher/_raw_ctr.pyd | Bin 0 -> 14848 bytes .../site-packages/Crypto/Cipher/_raw_des.pyd | Bin 0 -> 56832 bytes .../site-packages/Crypto/Cipher/_raw_des3.pyd | Bin 0 -> 57344 bytes .../site-packages/Crypto/Cipher/_raw_ecb.pyd | Bin 0 -> 10240 bytes .../Crypto/Cipher/_raw_eksblowfish.pyd | Bin 0 -> 21504 bytes .../site-packages/Crypto/Cipher/_raw_ocb.pyd | Bin 0 -> 17920 bytes .../site-packages/Crypto/Cipher/_raw_ofb.pyd | Bin 0 -> 12288 bytes env/Lib/site-packages/Crypto/Hash/BLAKE2b.py | 247 + env/Lib/site-packages/Crypto/Hash/BLAKE2b.pyi | 31 + env/Lib/site-packages/Crypto/Hash/BLAKE2s.py | 247 + env/Lib/site-packages/Crypto/Hash/BLAKE2s.pyi | 26 + env/Lib/site-packages/Crypto/Hash/CMAC.py | 302 + env/Lib/site-packages/Crypto/Hash/CMAC.pyi | 30 + env/Lib/site-packages/Crypto/Hash/HMAC.py | 213 + env/Lib/site-packages/Crypto/Hash/HMAC.pyi | 25 + env/Lib/site-packages/Crypto/Hash/MD2.py | 166 + env/Lib/site-packages/Crypto/Hash/MD2.pyi | 19 + env/Lib/site-packages/Crypto/Hash/MD4.py | 185 + env/Lib/site-packages/Crypto/Hash/MD4.pyi | 19 + env/Lib/site-packages/Crypto/Hash/MD5.py | 184 + env/Lib/site-packages/Crypto/Hash/MD5.pyi | 19 + env/Lib/site-packages/Crypto/Hash/Poly1305.py | 217 + .../site-packages/Crypto/Hash/Poly1305.pyi | 24 + env/Lib/site-packages/Crypto/Hash/RIPEMD.py | 26 + env/Lib/site-packages/Crypto/Hash/RIPEMD.pyi | 3 + .../site-packages/Crypto/Hash/RIPEMD160.py | 169 + .../site-packages/Crypto/Hash/RIPEMD160.pyi | 19 + env/Lib/site-packages/Crypto/Hash/SHA.py | 24 + env/Lib/site-packages/Crypto/Hash/SHA.pyi | 4 + env/Lib/site-packages/Crypto/Hash/SHA1.py | 185 + env/Lib/site-packages/Crypto/Hash/SHA1.pyi | 19 + env/Lib/site-packages/Crypto/Hash/SHA224.py | 186 + env/Lib/site-packages/Crypto/Hash/SHA224.pyi | 19 + env/Lib/site-packages/Crypto/Hash/SHA256.py | 185 + env/Lib/site-packages/Crypto/Hash/SHA256.pyi | 18 + env/Lib/site-packages/Crypto/Hash/SHA384.py | 186 + env/Lib/site-packages/Crypto/Hash/SHA384.pyi | 19 + 
env/Lib/site-packages/Crypto/Hash/SHA3_224.py | 147 + .../site-packages/Crypto/Hash/SHA3_224.pyi | 16 + env/Lib/site-packages/Crypto/Hash/SHA3_256.py | 147 + .../site-packages/Crypto/Hash/SHA3_256.pyi | 16 + env/Lib/site-packages/Crypto/Hash/SHA3_384.py | 147 + .../site-packages/Crypto/Hash/SHA3_384.pyi | 16 + env/Lib/site-packages/Crypto/Hash/SHA3_512.py | 148 + .../site-packages/Crypto/Hash/SHA3_512.pyi | 16 + env/Lib/site-packages/Crypto/Hash/SHA512.py | 204 + env/Lib/site-packages/Crypto/Hash/SHA512.pyi | 22 + env/Lib/site-packages/Crypto/Hash/SHAKE128.py | 127 + .../site-packages/Crypto/Hash/SHAKE128.pyi | 13 + env/Lib/site-packages/Crypto/Hash/SHAKE256.py | 127 + .../site-packages/Crypto/Hash/SHAKE256.pyi | 13 + .../site-packages/Crypto/Hash/_BLAKE2b.pyd | Bin 0 -> 14336 bytes .../site-packages/Crypto/Hash/_BLAKE2s.pyd | Bin 0 -> 13824 bytes env/Lib/site-packages/Crypto/Hash/_MD2.pyd | Bin 0 -> 13824 bytes env/Lib/site-packages/Crypto/Hash/_MD4.pyd | Bin 0 -> 13824 bytes env/Lib/site-packages/Crypto/Hash/_MD5.pyd | Bin 0 -> 15360 bytes .../site-packages/Crypto/Hash/_RIPEMD160.pyd | Bin 0 -> 13824 bytes env/Lib/site-packages/Crypto/Hash/_SHA1.pyd | Bin 0 -> 17920 bytes env/Lib/site-packages/Crypto/Hash/_SHA224.pyd | Bin 0 -> 21504 bytes env/Lib/site-packages/Crypto/Hash/_SHA256.pyd | Bin 0 -> 21504 bytes env/Lib/site-packages/Crypto/Hash/_SHA384.pyd | Bin 0 -> 26624 bytes env/Lib/site-packages/Crypto/Hash/_SHA512.pyd | Bin 0 -> 26624 bytes env/Lib/site-packages/Crypto/Hash/__init__.py | 22 + .../site-packages/Crypto/Hash/__init__.pyi | 0 .../Crypto/Hash/_ghash_clmul.pyd | Bin 0 -> 12800 bytes .../Crypto/Hash/_ghash_portable.pyd | Bin 0 -> 13312 bytes env/Lib/site-packages/Crypto/Hash/_keccak.pyd | Bin 0 -> 15360 bytes .../site-packages/Crypto/Hash/_poly1305.pyd | Bin 0 -> 14848 bytes env/Lib/site-packages/Crypto/Hash/keccak.py | 173 + env/Lib/site-packages/Crypto/Hash/keccak.pyi | 23 + env/Lib/site-packages/Crypto/IO/PEM.py | 189 + 
env/Lib/site-packages/Crypto/IO/PEM.pyi | 10 + env/Lib/site-packages/Crypto/IO/PKCS8.py | 231 + env/Lib/site-packages/Crypto/IO/PKCS8.pyi | 14 + env/Lib/site-packages/Crypto/IO/_PBES.py | 435 + env/Lib/site-packages/Crypto/IO/_PBES.pyi | 19 + env/Lib/site-packages/Crypto/IO/__init__.py | 31 + env/Lib/site-packages/Crypto/Math/Numbers.py | 42 + env/Lib/site-packages/Crypto/Math/Numbers.pyi | 4 + .../site-packages/Crypto/Math/Primality.py | 369 + .../site-packages/Crypto/Math/Primality.pyi | 18 + .../site-packages/Crypto/Math/_IntegerBase.py | 392 + .../Crypto/Math/_IntegerBase.pyi | 61 + .../Crypto/Math/_IntegerCustom.py | 111 + .../Crypto/Math/_IntegerCustom.pyi | 8 + .../site-packages/Crypto/Math/_IntegerGMP.py | 708 ++ .../site-packages/Crypto/Math/_IntegerGMP.pyi | 3 + .../Crypto/Math/_IntegerNative.py | 380 + .../Crypto/Math/_IntegerNative.pyi | 3 + env/Lib/site-packages/Crypto/Math/__init__.py | 0 env/Lib/site-packages/Crypto/Math/_modexp.pyd | Bin 0 -> 32768 bytes env/Lib/site-packages/Crypto/Protocol/KDF.py | 574 ++ env/Lib/site-packages/Crypto/Protocol/KDF.pyi | 24 + .../Crypto/Protocol/SecretSharing.py | 278 + .../Crypto/Protocol/SecretSharing.pyi | 22 + .../site-packages/Crypto/Protocol/__init__.py | 31 + .../Crypto/Protocol/__init__.pyi | 1 + .../site-packages/Crypto/Protocol/_scrypt.pyd | Bin 0 -> 12288 bytes env/Lib/site-packages/Crypto/PublicKey/DSA.py | 682 ++ .../site-packages/Crypto/PublicKey/DSA.pyi | 31 + env/Lib/site-packages/Crypto/PublicKey/ECC.py | 1182 +++ .../site-packages/Crypto/PublicKey/ECC.pyi | 62 + .../site-packages/Crypto/PublicKey/ElGamal.py | 286 + .../Crypto/PublicKey/ElGamal.pyi | 18 + env/Lib/site-packages/Crypto/PublicKey/RSA.py | 799 ++ .../site-packages/Crypto/PublicKey/RSA.pyi | 51 + .../Crypto/PublicKey/__init__.py | 95 + .../Crypto/PublicKey/__init__.pyi | 0 .../site-packages/Crypto/PublicKey/_ec_ws.pyd | Bin 0 -> 747520 bytes .../Crypto/PublicKey/_openssh.py | 135 + .../Crypto/PublicKey/_openssh.pyi | 7 + 
.../site-packages/Crypto/Random/__init__.py | 57 + .../site-packages/Crypto/Random/__init__.pyi | 19 + env/Lib/site-packages/Crypto/Random/random.py | 138 + .../site-packages/Crypto/Random/random.pyi | 20 + .../Crypto/SelfTest/Cipher/__init__.py | 60 + .../Crypto/SelfTest/Cipher/common.py | 512 + .../Crypto/SelfTest/Cipher/test_AES.py | 1351 +++ .../Crypto/SelfTest/Cipher/test_ARC2.py | 167 + .../Crypto/SelfTest/Cipher/test_ARC4.py | 466 + .../Crypto/SelfTest/Cipher/test_Blowfish.py | 160 + .../Crypto/SelfTest/Cipher/test_CAST.py | 101 + .../Crypto/SelfTest/Cipher/test_CBC.py | 555 ++ .../Crypto/SelfTest/Cipher/test_CCM.py | 930 ++ .../Crypto/SelfTest/Cipher/test_CFB.py | 411 + .../Crypto/SelfTest/Cipher/test_CTR.py | 471 + .../Crypto/SelfTest/Cipher/test_ChaCha20.py | 529 ++ .../SelfTest/Cipher/test_ChaCha20_Poly1305.py | 770 ++ .../Crypto/SelfTest/Cipher/test_DES.py | 374 + .../Crypto/SelfTest/Cipher/test_DES3.py | 195 + .../Crypto/SelfTest/Cipher/test_EAX.py | 772 ++ .../Crypto/SelfTest/Cipher/test_GCM.py | 950 ++ .../Crypto/SelfTest/Cipher/test_OCB.py | 742 ++ .../Crypto/SelfTest/Cipher/test_OFB.py | 238 + .../Crypto/SelfTest/Cipher/test_OpenPGP.py | 218 + .../Crypto/SelfTest/Cipher/test_SIV.py | 551 ++ .../Crypto/SelfTest/Cipher/test_Salsa20.py | 367 + .../Crypto/SelfTest/Cipher/test_pkcs1_15.py | 252 + .../Crypto/SelfTest/Cipher/test_pkcs1_oaep.py | 506 + .../Crypto/SelfTest/Hash/__init__.py | 61 + .../Crypto/SelfTest/Hash/common.py | 290 + .../Crypto/SelfTest/Hash/test_BLAKE2.py | 482 + .../Crypto/SelfTest/Hash/test_CMAC.py | 448 + .../Crypto/SelfTest/Hash/test_HMAC.py | 402 + .../Crypto/SelfTest/Hash/test_MD2.py | 62 + .../Crypto/SelfTest/Hash/test_MD4.py | 64 + .../Crypto/SelfTest/Hash/test_MD5.py | 94 + .../Crypto/SelfTest/Hash/test_Poly1305.py | 542 ++ .../Crypto/SelfTest/Hash/test_RIPEMD160.py | 71 + .../Crypto/SelfTest/Hash/test_SHA1.py | 84 + .../Crypto/SelfTest/Hash/test_SHA224.py | 63 + .../Crypto/SelfTest/Hash/test_SHA256.py | 94 + 
.../Crypto/SelfTest/Hash/test_SHA384.py | 61 + .../Crypto/SelfTest/Hash/test_SHA3_224.py | 79 + .../Crypto/SelfTest/Hash/test_SHA3_256.py | 80 + .../Crypto/SelfTest/Hash/test_SHA3_384.py | 79 + .../Crypto/SelfTest/Hash/test_SHA3_512.py | 79 + .../Crypto/SelfTest/Hash/test_SHA512.py | 140 + .../Crypto/SelfTest/Hash/test_SHAKE.py | 143 + .../Crypto/SelfTest/Hash/test_keccak.py | 250 + .../Crypto/SelfTest/IO/__init__.py | 47 + .../Crypto/SelfTest/IO/test_PBES.py | 93 + .../Crypto/SelfTest/IO/test_PKCS8.py | 423 + .../Crypto/SelfTest/Math/__init__.py | 49 + .../Crypto/SelfTest/Math/test_Numbers.py | 774 ++ .../Crypto/SelfTest/Math/test_Primality.py | 118 + .../Crypto/SelfTest/Math/test_modexp.py | 201 + .../Crypto/SelfTest/Protocol/__init__.py | 44 + .../Crypto/SelfTest/Protocol/test_KDF.py | 732 ++ .../SelfTest/Protocol/test_SecretSharing.py | 267 + .../Crypto/SelfTest/Protocol/test_rfc1751.py | 62 + .../Crypto/SelfTest/PublicKey/__init__.py | 54 + .../Crypto/SelfTest/PublicKey/test_DSA.py | 247 + .../Crypto/SelfTest/PublicKey/test_ECC.py | 859 ++ .../Crypto/SelfTest/PublicKey/test_ElGamal.py | 217 + .../Crypto/SelfTest/PublicKey/test_RSA.py | 317 + .../SelfTest/PublicKey/test_import_DSA.py | 554 ++ .../SelfTest/PublicKey/test_import_ECC.py | 1346 +++ .../SelfTest/PublicKey/test_import_RSA.py | 585 ++ .../Crypto/SelfTest/Random/__init__.py | 39 + .../Crypto/SelfTest/Random/test_random.py | 167 + .../Crypto/SelfTest/Signature/__init__.py | 39 + .../Crypto/SelfTest/Signature/test_dss.py | 1132 +++ .../SelfTest/Signature/test_pkcs1_15.py | 348 + .../Crypto/SelfTest/Signature/test_pss.py | 377 + .../Crypto/SelfTest/Util/__init__.py | 46 + .../Crypto/SelfTest/Util/test_Counter.py | 67 + .../Crypto/SelfTest/Util/test_Padding.py | 154 + .../Crypto/SelfTest/Util/test_asn1.py | 784 ++ .../Crypto/SelfTest/Util/test_number.py | 144 + .../Crypto/SelfTest/Util/test_rfc1751.py | 38 + .../Crypto/SelfTest/Util/test_strxor.py | 280 + .../site-packages/Crypto/SelfTest/__init__.py | 97 
+ .../site-packages/Crypto/SelfTest/__main__.py | 38 + .../site-packages/Crypto/SelfTest/loader.py | 206 + .../Crypto/SelfTest/st_common.py | 55 + env/Lib/site-packages/Crypto/Signature/DSS.py | 416 + .../site-packages/Crypto/Signature/DSS.pyi | 27 + .../Crypto/Signature/PKCS1_PSS.py | 55 + .../Crypto/Signature/PKCS1_PSS.pyi | 7 + .../Crypto/Signature/PKCS1_v1_5.py | 53 + .../Crypto/Signature/PKCS1_v1_5.pyi | 6 + .../Crypto/Signature/__init__.py | 36 + .../Crypto/Signature/pkcs1_15.py | 222 + .../Crypto/Signature/pkcs1_15.pyi | 17 + env/Lib/site-packages/Crypto/Signature/pss.py | 386 + .../site-packages/Crypto/Signature/pss.pyi | 30 + env/Lib/site-packages/Crypto/Util/Counter.py | 77 + env/Lib/site-packages/Crypto/Util/Counter.pyi | 5 + env/Lib/site-packages/Crypto/Util/Padding.py | 108 + env/Lib/site-packages/Crypto/Util/Padding.pyi | 6 + env/Lib/site-packages/Crypto/Util/RFC1751.py | 386 + env/Lib/site-packages/Crypto/Util/RFC1751.pyi | 7 + env/Lib/site-packages/Crypto/Util/__init__.py | 41 + .../Crypto/Util/_cpu_features.py | 46 + .../Crypto/Util/_cpu_features.pyi | 2 + .../site-packages/Crypto/Util/_cpuid_c.pyd | Bin 0 -> 10240 bytes .../site-packages/Crypto/Util/_file_system.py | 54 + .../Crypto/Util/_file_system.pyi | 4 + env/Lib/site-packages/Crypto/Util/_raw_api.py | 307 + .../site-packages/Crypto/Util/_raw_api.pyi | 27 + env/Lib/site-packages/Crypto/Util/_strxor.pyd | Bin 0 -> 10240 bytes env/Lib/site-packages/Crypto/Util/asn1.py | 940 ++ env/Lib/site-packages/Crypto/Util/asn1.pyi | 74 + env/Lib/site-packages/Crypto/Util/number.py | 1469 +++ env/Lib/site-packages/Crypto/Util/number.pyi | 19 + .../site-packages/Crypto/Util/py3compat.py | 160 + .../site-packages/Crypto/Util/py3compat.pyi | 31 + env/Lib/site-packages/Crypto/Util/strxor.py | 137 + env/Lib/site-packages/Crypto/Util/strxor.pyi | 6 + env/Lib/site-packages/Crypto/__init__.py | 6 + env/Lib/site-packages/Crypto/__init__.pyi | 4 + env/Lib/site-packages/Crypto/py.typed | 0 
.../certifi-2021.5.30.dist-info/INSTALLER | 1 + .../certifi-2021.5.30.dist-info/LICENSE | 21 + .../certifi-2021.5.30.dist-info/METADATA | 83 + .../certifi-2021.5.30.dist-info/RECORD | 13 + .../certifi-2021.5.30.dist-info/WHEEL | 6 + .../certifi-2021.5.30.dist-info/top_level.txt | 1 + env/Lib/site-packages/certifi/__init__.py | 3 + env/Lib/site-packages/certifi/__main__.py | 12 + env/Lib/site-packages/certifi/cacert.pem | 4257 +++++++++ env/Lib/site-packages/certifi/core.py | 60 + .../chardet-3.0.4.dist-info/DESCRIPTION.rst | 70 + .../chardet-3.0.4.dist-info/INSTALLER | 1 + .../chardet-3.0.4.dist-info/METADATA | 96 + .../chardet-3.0.4.dist-info/RECORD | 91 + .../chardet-3.0.4.dist-info/WHEEL | 6 + .../chardet-3.0.4.dist-info/entry_points.txt | 3 + .../chardet-3.0.4.dist-info/metadata.json | 1 + .../chardet-3.0.4.dist-info/top_level.txt | 1 + env/Lib/site-packages/chardet/__init__.py | 39 + env/Lib/site-packages/chardet/big5freq.py | 386 + env/Lib/site-packages/chardet/big5prober.py | 47 + .../site-packages/chardet/chardistribution.py | 233 + .../chardet/charsetgroupprober.py | 106 + .../site-packages/chardet/charsetprober.py | 145 + env/Lib/site-packages/chardet/cli/__init__.py | 1 + .../site-packages/chardet/cli/chardetect.py | 85 + .../chardet/codingstatemachine.py | 88 + env/Lib/site-packages/chardet/compat.py | 34 + env/Lib/site-packages/chardet/cp949prober.py | 49 + env/Lib/site-packages/chardet/enums.py | 76 + env/Lib/site-packages/chardet/escprober.py | 101 + env/Lib/site-packages/chardet/escsm.py | 246 + env/Lib/site-packages/chardet/eucjpprober.py | 92 + env/Lib/site-packages/chardet/euckrfreq.py | 195 + env/Lib/site-packages/chardet/euckrprober.py | 47 + env/Lib/site-packages/chardet/euctwfreq.py | 387 + env/Lib/site-packages/chardet/euctwprober.py | 46 + env/Lib/site-packages/chardet/gb2312freq.py | 283 + env/Lib/site-packages/chardet/gb2312prober.py | 46 + env/Lib/site-packages/chardet/hebrewprober.py | 292 + env/Lib/site-packages/chardet/jisfreq.py | 
325 + env/Lib/site-packages/chardet/jpcntx.py | 233 + .../chardet/langbulgarianmodel.py | 228 + .../chardet/langcyrillicmodel.py | 333 + .../site-packages/chardet/langgreekmodel.py | 225 + .../site-packages/chardet/langhebrewmodel.py | 200 + .../chardet/langhungarianmodel.py | 225 + .../site-packages/chardet/langthaimodel.py | 199 + .../site-packages/chardet/langturkishmodel.py | 193 + env/Lib/site-packages/chardet/latin1prober.py | 145 + .../site-packages/chardet/mbcharsetprober.py | 91 + .../site-packages/chardet/mbcsgroupprober.py | 54 + env/Lib/site-packages/chardet/mbcssm.py | 572 ++ .../site-packages/chardet/sbcharsetprober.py | 132 + .../site-packages/chardet/sbcsgroupprober.py | 73 + env/Lib/site-packages/chardet/sjisprober.py | 92 + .../chardet/universaldetector.py | 286 + env/Lib/site-packages/chardet/utf8prober.py | 82 + env/Lib/site-packages/chardet/version.py | 9 + env/Lib/site-packages/easy_install.py | 5 + .../gcloud-0.17.0.dist-info/INSTALLER | 1 + .../gcloud-0.17.0.dist-info/METADATA | 327 + .../gcloud-0.17.0.dist-info/RECORD | 425 + .../gcloud-0.17.0.dist-info/REQUESTED | 0 .../gcloud-0.17.0.dist-info/WHEEL | 5 + .../gcloud-0.17.0.dist-info/top_level.txt | 1 + env/Lib/site-packages/gcloud/__init__.py | 19 + env/Lib/site-packages/gcloud/_helpers.py | 553 ++ env/Lib/site-packages/gcloud/_testing.py | 61 + .../site-packages/gcloud/bigquery/__init__.py | 32 + .../site-packages/gcloud/bigquery/_helpers.py | 166 + .../site-packages/gcloud/bigquery/client.py | 275 + .../gcloud/bigquery/connection.py | 34 + .../site-packages/gcloud/bigquery/dataset.py | 577 ++ env/Lib/site-packages/gcloud/bigquery/job.py | 1028 ++ .../site-packages/gcloud/bigquery/query.py | 349 + .../site-packages/gcloud/bigquery/table.py | 1020 ++ .../gcloud/bigquery/test__helpers.py | 116 + .../gcloud/bigquery/test_client.py | 431 + .../gcloud/bigquery/test_connection.py | 47 + .../gcloud/bigquery/test_dataset.py | 788 ++ .../site-packages/gcloud/bigquery/test_job.py | 1592 ++++ 
.../gcloud/bigquery/test_query.py | 330 + .../gcloud/bigquery/test_table.py | 1741 ++++ .../site-packages/gcloud/bigtable/__init__.py | 38 + .../gcloud/bigtable/_generated/__init__.py | 15 + .../_generated/_bigtable_cluster_data.proto | 93 + .../_bigtable_cluster_service.proto | 129 + .../_bigtable_cluster_service_messages.proto | 134 + .../bigtable/_generated/_bigtable_data.proto | 515 + .../_generated/_bigtable_service.proto | 73 + .../_bigtable_service_messages.proto | 214 + .../_generated/_bigtable_table_data.proto | 125 + .../_generated/_bigtable_table_service.proto | 74 + .../_bigtable_table_service_messages.proto | 101 + .../bigtable/_generated/_operations.proto | 144 + .../_generated/bigtable_cluster_data_pb2.py | 221 + .../bigtable_cluster_service_messages_pb2.py | 538 ++ .../bigtable_cluster_service_pb2.py | 187 + .../bigtable/_generated/bigtable_data_pb2.py | 1226 +++ .../bigtable_service_messages_pb2.py | 678 ++ .../_generated/bigtable_service_pb2.py | 167 + .../_generated/bigtable_table_data_pb2.py | 377 + .../bigtable_table_service_messages_pb2.py | 389 + .../_generated/bigtable_table_service_pb2.py | 203 + .../_generated/operations_grpc_pb2.py | 100 + .../gcloud/bigtable/_generated_v2/__init__.py | 15 + .../bigtable/_generated_v2/_bigtable.proto | 321 + .../_bigtable_instance_admin.proto | 232 + .../_generated_v2/_bigtable_table_admin.proto | 195 + .../bigtable/_generated_v2/_common.proto | 37 + .../gcloud/bigtable/_generated_v2/_data.proto | 532 ++ .../bigtable/_generated_v2/_instance.proto | 113 + .../bigtable/_generated_v2/_operations.proto | 144 + .../bigtable/_generated_v2/_table.proto | 115 + .../bigtable_instance_admin_pb2.py | 1061 +++ .../bigtable/_generated_v2/bigtable_pb2.py | 1100 +++ .../_generated_v2/bigtable_table_admin_pb2.py | 784 ++ .../bigtable/_generated_v2/common_pb2.py | 67 + .../gcloud/bigtable/_generated_v2/data_pb2.py | 1260 +++ .../bigtable/_generated_v2/instance_pb2.py | 222 + .../_generated_v2/operations_grpc_pb2.py | 264 
+ .../bigtable/_generated_v2/table_pb2.py | 393 + .../site-packages/gcloud/bigtable/_testing.py | 57 + .../site-packages/gcloud/bigtable/client.py | 480 + .../site-packages/gcloud/bigtable/cluster.py | 384 + .../gcloud/bigtable/column_family.py | 339 + .../gcloud/bigtable/happybase/__init__.py | 167 + .../gcloud/bigtable/happybase/batch.py | 326 + .../gcloud/bigtable/happybase/connection.py | 484 + .../gcloud/bigtable/happybase/pool.py | 153 + .../gcloud/bigtable/happybase/table.py | 980 ++ .../gcloud/bigtable/happybase/test_batch.py | 568 ++ .../bigtable/happybase/test_connection.py | 682 ++ .../gcloud/bigtable/happybase/test_pool.py | 264 + .../gcloud/bigtable/happybase/test_table.py | 1505 +++ .../site-packages/gcloud/bigtable/instance.py | 488 + .../bigtable/read-rows-acceptance-test.json | 1178 +++ env/Lib/site-packages/gcloud/bigtable/row.py | 889 ++ .../site-packages/gcloud/bigtable/row_data.py | 442 + .../gcloud/bigtable/row_filters.py | 768 ++ .../site-packages/gcloud/bigtable/table.py | 379 + .../gcloud/bigtable/test_client.py | 784 ++ .../gcloud/bigtable/test_cluster.py | 643 ++ .../gcloud/bigtable/test_column_family.py | 669 ++ .../gcloud/bigtable/test_instance.py | 866 ++ .../site-packages/gcloud/bigtable/test_row.py | 915 ++ .../gcloud/bigtable/test_row_data.py | 727 ++ .../gcloud/bigtable/test_row_filters.py | 1001 ++ .../gcloud/bigtable/test_table.py | 565 ++ env/Lib/site-packages/gcloud/client.py | 186 + env/Lib/site-packages/gcloud/connection.py | 355 + env/Lib/site-packages/gcloud/credentials.py | 234 + .../gcloud/datastore/__init__.py | 62 + .../gcloud/datastore/_generated/__init__.py | 15 + .../datastore/_generated/_datastore.proto | 289 + .../gcloud/datastore/_generated/_entity.proto | 196 + .../gcloud/datastore/_generated/_query.proto | 281 + .../_generated/datastore_grpc_pb2.py | 279 + .../datastore/_generated/datastore_pb2.py | 862 ++ .../gcloud/datastore/_generated/entity_pb2.py | 493 + .../gcloud/datastore/_generated/query_pb2.py | 917 ++ 
.../site-packages/gcloud/datastore/batch.py | 301 + .../site-packages/gcloud/datastore/client.py | 454 + .../gcloud/datastore/connection.py | 426 + .../site-packages/gcloud/datastore/entity.py | 142 + .../site-packages/gcloud/datastore/helpers.py | 468 + env/Lib/site-packages/gcloud/datastore/key.py | 404 + .../site-packages/gcloud/datastore/query.py | 531 ++ .../gcloud/datastore/test_batch.py | 400 + .../gcloud/datastore/test_client.py | 1006 ++ .../gcloud/datastore/test_connection.py | 873 ++ .../gcloud/datastore/test_entity.py | 211 + .../gcloud/datastore/test_helpers.py | 926 ++ .../gcloud/datastore/test_key.py | 431 + .../gcloud/datastore/test_query.py | 759 ++ .../gcloud/datastore/test_transaction.py | 223 + .../gcloud/datastore/transaction.py | 162 + env/Lib/site-packages/gcloud/dns/__init__.py | 33 + env/Lib/site-packages/gcloud/dns/changes.py | 256 + env/Lib/site-packages/gcloud/dns/client.py | 116 + .../site-packages/gcloud/dns/connection.py | 33 + .../gcloud/dns/resource_record_set.py | 66 + .../site-packages/gcloud/dns/test_changes.py | 344 + .../site-packages/gcloud/dns/test_client.py | 252 + .../gcloud/dns/test_connection.py | 47 + .../gcloud/dns/test_resource_record_set.py | 94 + env/Lib/site-packages/gcloud/dns/test_zone.py | 692 ++ env/Lib/site-packages/gcloud/dns/zone.py | 395 + .../site-packages/gcloud/environment_vars.py | 37 + env/Lib/site-packages/gcloud/exceptions.py | 224 + env/Lib/site-packages/gcloud/iterator.py | 187 + .../site-packages/gcloud/logging/__init__.py | 23 + env/Lib/site-packages/gcloud/logging/_gax.py | 575 ++ .../site-packages/gcloud/logging/client.py | 300 + .../gcloud/logging/connection.py | 435 + .../site-packages/gcloud/logging/entries.py | 157 + .../site-packages/gcloud/logging/logger.py | 443 + .../site-packages/gcloud/logging/metric.py | 174 + env/Lib/site-packages/gcloud/logging/sink.py | 178 + .../site-packages/gcloud/logging/test__gax.py | 1012 ++ .../gcloud/logging/test_client.py | 495 + 
.../gcloud/logging/test_connection.py | 633 ++ .../gcloud/logging/test_entries.py | 235 + .../gcloud/logging/test_logger.py | 704 ++ .../gcloud/logging/test_metric.py | 251 + .../site-packages/gcloud/logging/test_sink.py | 262 + .../gcloud/monitoring/__init__.py | 45 + .../gcloud/monitoring/_dataframe.py | 116 + .../site-packages/gcloud/monitoring/client.py | 280 + .../gcloud/monitoring/connection.py | 47 + .../site-packages/gcloud/monitoring/label.py | 99 + .../site-packages/gcloud/monitoring/metric.py | 345 + .../site-packages/gcloud/monitoring/query.py | 673 ++ .../gcloud/monitoring/resource.py | 183 + .../gcloud/monitoring/test__dataframe.py | 226 + .../gcloud/monitoring/test_client.py | 358 + .../gcloud/monitoring/test_connection.py | 44 + .../gcloud/monitoring/test_label.py | 112 + .../gcloud/monitoring/test_metric.py | 553 ++ .../gcloud/monitoring/test_query.py | 638 ++ .../gcloud/monitoring/test_resource.py | 339 + .../gcloud/monitoring/test_timeseries.py | 198 + .../gcloud/monitoring/timeseries.py | 152 + .../site-packages/gcloud/pubsub/__init__.py | 32 + env/Lib/site-packages/gcloud/pubsub/_gax.py | 502 + .../site-packages/gcloud/pubsub/_helpers.py | 73 + env/Lib/site-packages/gcloud/pubsub/client.py | 186 + .../site-packages/gcloud/pubsub/connection.py | 539 ++ env/Lib/site-packages/gcloud/pubsub/iam.py | 259 + .../site-packages/gcloud/pubsub/message.py | 90 + .../gcloud/pubsub/subscription.py | 422 + .../site-packages/gcloud/pubsub/test__gax.py | 944 ++ .../gcloud/pubsub/test__helpers.py | 57 + .../gcloud/pubsub/test_client.py | 299 + .../gcloud/pubsub/test_connection.py | 749 ++ .../site-packages/gcloud/pubsub/test_iam.py | 188 + .../gcloud/pubsub/test_message.py | 126 + .../gcloud/pubsub/test_subscription.py | 679 ++ .../site-packages/gcloud/pubsub/test_topic.py | 783 ++ env/Lib/site-packages/gcloud/pubsub/topic.py | 451 + .../gcloud/resource_manager/__init__.py | 22 + .../gcloud/resource_manager/client.py | 185 + 
.../gcloud/resource_manager/connection.py | 42 + .../gcloud/resource_manager/project.py | 266 + .../gcloud/resource_manager/test_client.py | 296 + .../resource_manager/test_connection.py | 46 + .../gcloud/resource_manager/test_project.py | 340 + .../site-packages/gcloud/storage/__init__.py | 49 + .../site-packages/gcloud/storage/_helpers.py | 173 + env/Lib/site-packages/gcloud/storage/acl.py | 560 ++ env/Lib/site-packages/gcloud/storage/batch.py | 327 + env/Lib/site-packages/gcloud/storage/blob.py | 937 ++ .../site-packages/gcloud/storage/bucket.py | 829 ++ .../site-packages/gcloud/storage/client.py | 306 + .../gcloud/storage/connection.py | 43 + .../gcloud/storage/test__helpers.py | 223 + .../site-packages/gcloud/storage/test_acl.py | 812 ++ .../gcloud/storage/test_batch.py | 612 ++ .../site-packages/gcloud/storage/test_blob.py | 1398 +++ .../gcloud/storage/test_bucket.py | 1039 ++ .../gcloud/storage/test_client.py | 441 + .../gcloud/storage/test_connection.py | 47 + .../gcloud/streaming/__init__.py | 1 + .../gcloud/streaming/buffered_stream.py | 79 + .../gcloud/streaming/exceptions.py | 106 + .../gcloud/streaming/http_wrapper.py | 446 + .../gcloud/streaming/stream_slice.py | 70 + .../gcloud/streaming/test_buffered_stream.py | 103 + .../gcloud/streaming/test_exceptions.py | 87 + .../gcloud/streaming/test_http_wrapper.py | 556 ++ .../gcloud/streaming/test_stream_slice.py | 68 + .../gcloud/streaming/test_transfer.py | 1900 ++++ .../gcloud/streaming/test_util.py | 48 + .../gcloud/streaming/transfer.py | 1152 +++ .../site-packages/gcloud/streaming/util.py | 61 + env/Lib/site-packages/gcloud/test__helpers.py | 846 ++ env/Lib/site-packages/gcloud/test_client.py | 230 + .../site-packages/gcloud/test_connection.py | 390 + .../site-packages/gcloud/test_credentials.py | 240 + .../site-packages/gcloud/test_exceptions.py | 93 + env/Lib/site-packages/gcloud/test_iterator.py | 278 + .../gcloud/translate/__init__.py | 18 + .../site-packages/gcloud/translate/client.py | 224 + 
.../gcloud/translate/connection.py | 30 + .../gcloud/translate/test_client.py | 378 + .../gcloud/translate/test_connection.py | 51 + env/Lib/site-packages/google/api/__init__.py | 0 .../google/api/annotations.proto | 31 + .../google/api/annotations_pb2.py | 76 + env/Lib/site-packages/google/api/auth.proto | 181 + env/Lib/site-packages/google/api/auth_pb2.py | 487 + .../site-packages/google/api/backend.proto | 51 + .../site-packages/google/api/backend_pb2.py | 207 + .../site-packages/google/api/billing.proto | 67 + .../site-packages/google/api/billing_pb2.py | 173 + env/Lib/site-packages/google/api/client.proto | 99 + .../site-packages/google/api/client_pb2.py | 117 + .../google/api/config_change.proto | 85 + .../google/api/config_change_pb2.py | 282 + .../site-packages/google/api/consumer.proto | 83 + .../site-packages/google/api/consumer_pb2.py | 246 + .../site-packages/google/api/context.proto | 63 + .../site-packages/google/api/context_pb2.py | 188 + .../site-packages/google/api/control.proto | 33 + .../site-packages/google/api/control_pb2.py | 96 + .../google/api/distribution.proto | 213 + .../google/api/distribution_pb2.py | 750 ++ .../google/api/documentation.proto | 157 + .../google/api/documentation_pb2.py | 357 + .../site-packages/google/api/endpoint.proto | 71 + .../site-packages/google/api/endpoint_pb2.py | 176 + .../google/api/field_behavior.proto | 84 + .../google/api/field_behavior_pb2.py | 153 + env/Lib/site-packages/google/api/http.proto | 318 + env/Lib/site-packages/google/api/http_pb2.py | 439 + .../site-packages/google/api/httpbody.proto | 76 + .../site-packages/google/api/httpbody_pb2.py | 141 + env/Lib/site-packages/google/api/label.proto | 49 + env/Lib/site-packages/google/api/label_pb2.py | 176 + .../google/api/launch_stage.proto | 67 + .../google/api/launch_stage_pb2.py | 117 + env/Lib/site-packages/google/api/log.proto | 55 + env/Lib/site-packages/google/api/log_pb2.py | 160 + .../site-packages/google/api/logging.proto | 83 + 
.../site-packages/google/api/logging_pb2.py | 195 + env/Lib/site-packages/google/api/metric.proto | 192 + .../site-packages/google/api/metric_pb2.py | 503 + .../google/api/monitored_resource.proto | 116 + .../google/api/monitored_resource_pb2.py | 482 + .../site-packages/google/api/monitoring.proto | 89 + .../google/api/monitoring_pb2.py | 195 + env/Lib/site-packages/google/api/quota.proto | 259 + env/Lib/site-packages/google/api/quota_pb2.py | 561 ++ .../site-packages/google/api/resource.proto | 299 + .../site-packages/google/api/resource_pb2.py | 435 + .../site-packages/google/api/service.proto | 175 + .../site-packages/google/api/service_pb2.py | 653 ++ .../google/api/source_info.proto | 32 + .../google/api/source_info_pb2.py | 103 + .../google/api/system_parameter.proto | 96 + .../google/api/system_parameter_pb2.py | 261 + env/Lib/site-packages/google/api/usage.proto | 92 + env/Lib/site-packages/google/api/usage_pb2.py | 230 + .../google/gapic/metadata/__init__.py | 0 .../gapic/metadata/gapic_metadata.proto | 92 + .../gapic/metadata/gapic_metadata_pb2.py | 601 ++ .../google/logging/type/__init__.py | 0 .../google/logging/type/http_request.proto | 92 + .../google/logging/type/http_request_pb2.py | 373 + .../google/logging/type/log_severity.proto | 72 + .../google/logging/type/log_severity_pb2.py | 148 + .../google/longrunning/__init__.py | 0 .../google/longrunning/operations.proto | 247 + .../google/longrunning/operations_grpc.py | 19 + .../google/longrunning/operations_grpc_pb2.py | 6 + .../google/longrunning/operations_pb2.py | 27 + .../google/longrunning/operations_pb2_grpc.py | 341 + .../google/longrunning/operations_proto.py | 5 + .../longrunning/operations_proto_pb2.py | 795 ++ .../site-packages/google/protobuf/__init__.py | 33 + .../site-packages/google/protobuf/any_pb2.py | 78 + .../site-packages/google/protobuf/api_pb2.py | 252 + .../google/protobuf/compiler/__init__.py | 0 .../google/protobuf/compiler/plugin_pb2.py | 301 + 
.../google/protobuf/descriptor.py | 1183 +++ .../google/protobuf/descriptor_database.py | 177 + .../google/protobuf/descriptor_pb2.py | 2106 +++++ .../google/protobuf/descriptor_pool.py | 1271 +++ .../google/protobuf/duration_pb2.py | 78 + .../google/protobuf/empty_pb2.py | 64 + .../google/protobuf/field_mask_pb2.py | 71 + .../google/protobuf/internal/__init__.py | 30 + .../protobuf/internal/api_implementation.py | 159 + .../google/protobuf/internal/containers.py | 785 ++ .../google/protobuf/internal/decoder.py | 1057 +++ .../google/protobuf/internal/encoder.py | 830 ++ .../protobuf/internal/enum_type_wrapper.py | 117 + .../protobuf/internal/extension_dict.py | 213 + .../protobuf/internal/message_listener.py | 78 + .../protobuf/internal/python_message.py | 1541 +++ .../google/protobuf/internal/type_checkers.py | 426 + .../protobuf/internal/well_known_types.py | 863 ++ .../google/protobuf/internal/wire_format.py | 268 + .../google/protobuf/json_format.py | 865 ++ .../site-packages/google/protobuf/message.py | 413 + .../google/protobuf/message_factory.py | 187 + .../google/protobuf/proto_builder.py | 137 + .../google/protobuf/pyext/__init__.py | 0 .../google/protobuf/pyext/cpp_message.py | 65 + .../google/protobuf/reflection.py | 95 + .../site-packages/google/protobuf/service.py | 228 + .../google/protobuf/service_reflection.py | 287 + .../google/protobuf/source_context_pb2.py | 71 + .../google/protobuf/struct_pb2.py | 287 + .../google/protobuf/symbol_database.py | 194 + .../google/protobuf/text_encoding.py | 117 + .../google/protobuf/text_format.py | 1826 ++++ .../google/protobuf/timestamp_pb2.py | 78 + .../site-packages/google/protobuf/type_pb2.py | 573 ++ .../google/protobuf/util/__init__.py | 0 .../google/protobuf/util/json_format_pb2.py | 983 ++ .../protobuf/util/json_format_proto3_pb2.py | 2031 ++++ .../google/protobuf/wrappers_pb2.py | 391 + env/Lib/site-packages/google/rpc/__init__.py | 24 + env/Lib/site-packages/google/rpc/code.proto | 186 + 
env/Lib/site-packages/google/rpc/code_pb2.py | 216 + .../google/rpc/context/__init__.py | 0 .../rpc/context/attribute_context.proto | 287 + .../rpc/context/attribute_context_pb2.py | 1376 +++ .../google/rpc/error_details.proto | 246 + .../google/rpc/error_details_pb2.py | 1109 +++ env/Lib/site-packages/google/rpc/status.proto | 47 + .../site-packages/google/rpc/status_pb2.py | 139 + env/Lib/site-packages/google/type/__init__.py | 0 .../google/type/calendar_period.proto | 57 + .../google/type/calendar_period_pb2.py | 135 + env/Lib/site-packages/google/type/color.proto | 170 + .../site-packages/google/type/color_pb2.py | 160 + env/Lib/site-packages/google/type/date.proto | 50 + env/Lib/site-packages/google/type/date_pb2.py | 134 + .../site-packages/google/type/datetime.proto | 97 + .../site-packages/google/type/datetime_pb2.py | 349 + .../site-packages/google/type/dayofweek.proto | 51 + .../google/type/dayofweek_pb2.py | 135 + env/Lib/site-packages/google/type/expr.proto | 51 + env/Lib/site-packages/google/type/expr_pb2.py | 153 + .../site-packages/google/type/fraction.proto | 34 + .../site-packages/google/type/fraction_pb2.py | 115 + .../site-packages/google/type/latlng.proto | 37 + .../site-packages/google/type/latlng_pb2.py | 115 + env/Lib/site-packages/google/type/money.proto | 43 + .../site-packages/google/type/money_pb2.py | 134 + env/Lib/site-packages/google/type/month.proto | 66 + .../site-packages/google/type/month_pb2.py | 180 + .../google/type/postal_address.proto | 135 + .../google/type/postal_address_pb2.py | 286 + .../google/type/quaternion.proto | 95 + .../google/type/quaternion_pb2.py | 153 + .../site-packages/google/type/timeofday.proto | 44 + .../google/type/timeofday_pb2.py | 153 + ...eapis_common_protos-1.53.0-py3.9-nspkg.pth | 3 + .../INSTALLER | 1 + .../LICENSE | 202 + .../METADATA | 35 + .../RECORD | 179 + .../WHEEL | 6 + .../namespace_packages.txt | 2 + .../top_level.txt | 1 + .../httplib2-0.19.1.dist-info/INSTALLER | 1 + 
.../httplib2-0.19.1.dist-info/LICENSE | 23 + .../httplib2-0.19.1.dist-info/METADATA | 71 + .../httplib2-0.19.1.dist-info/RECORD | 19 + .../httplib2-0.19.1.dist-info/WHEEL | 5 + .../httplib2-0.19.1.dist-info/top_level.txt | 1 + env/Lib/site-packages/httplib2/__init__.py | 1783 ++++ env/Lib/site-packages/httplib2/auth.py | 63 + env/Lib/site-packages/httplib2/cacerts.txt | 2197 +++++ env/Lib/site-packages/httplib2/certs.py | 42 + env/Lib/site-packages/httplib2/error.py | 48 + env/Lib/site-packages/httplib2/iri2uri.py | 124 + env/Lib/site-packages/httplib2/socks.py | 518 + .../idna-2.7.dist-info/INSTALLER | 1 + .../idna-2.7.dist-info/LICENSE.txt | 80 + .../site-packages/idna-2.7.dist-info/METADATA | 239 + .../site-packages/idna-2.7.dist-info/RECORD | 22 + .../site-packages/idna-2.7.dist-info/WHEEL | 6 + .../idna-2.7.dist-info/top_level.txt | 1 + env/Lib/site-packages/idna/__init__.py | 2 + env/Lib/site-packages/idna/codec.py | 118 + env/Lib/site-packages/idna/compat.py | 12 + env/Lib/site-packages/idna/core.py | 399 + env/Lib/site-packages/idna/idnadata.py | 1893 ++++ env/Lib/site-packages/idna/intranges.py | 53 + env/Lib/site-packages/idna/package_data.py | 2 + env/Lib/site-packages/idna/uts46data.py | 8179 ++++++++++++++++ .../jws-0.1.3.dist-info/INSTALLER | 1 + .../jws-0.1.3.dist-info/METADATA | 152 + .../site-packages/jws-0.1.3.dist-info/RECORD | 17 + .../site-packages/jws-0.1.3.dist-info/WHEEL | 5 + .../jws-0.1.3.dist-info/top_level.txt | 1 + env/Lib/site-packages/jws/__init__.py | 56 + env/Lib/site-packages/jws/algos.py | 187 + env/Lib/site-packages/jws/exceptions.py | 11 + env/Lib/site-packages/jws/header.py | 70 + env/Lib/site-packages/jws/tests.py | 195 + env/Lib/site-packages/jws/utils.py | 56 + .../oauth2client-3.0.0.dist-info/INSTALLER | 1 + .../oauth2client-3.0.0.dist-info/METADATA | 30 + .../oauth2client-3.0.0.dist-info/RECORD | 80 + .../oauth2client-3.0.0.dist-info/REQUESTED | 0 .../oauth2client-3.0.0.dist-info/WHEEL | 5 + .../top_level.txt | 1 + 
.../site-packages/oauth2client/__init__.py | 23 + .../site-packages/oauth2client/_helpers.py | 105 + .../oauth2client/_openssl_crypt.py | 136 + .../oauth2client/_pure_python_crypt.py | 184 + .../oauth2client/_pycrypto_crypt.py | 124 + env/Lib/site-packages/oauth2client/client.py | 2133 +++++ .../oauth2client/clientsecrets.py | 174 + .../oauth2client/contrib/__init__.py | 6 + .../oauth2client/contrib/_appengine_ndb.py | 163 + .../oauth2client/contrib/_fcntl_opener.py | 81 + .../oauth2client/contrib/_metadata.py | 123 + .../oauth2client/contrib/_win32_opener.py | 106 + .../oauth2client/contrib/appengine.py | 913 ++ .../oauth2client/contrib/devshell.py | 146 + .../contrib/dictionary_storage.py | 65 + .../contrib/django_util/__init__.py | 477 + .../oauth2client/contrib/django_util/apps.py | 32 + .../contrib/django_util/decorators.py | 145 + .../contrib/django_util/models.py | 75 + .../contrib/django_util/signals.py | 28 + .../oauth2client/contrib/django_util/site.py | 26 + .../contrib/django_util/storage.py | 81 + .../oauth2client/contrib/django_util/views.py | 190 + .../oauth2client/contrib/flask_util.py | 556 ++ .../site-packages/oauth2client/contrib/gce.py | 162 + .../oauth2client/contrib/keyring_storage.py | 98 + .../oauth2client/contrib/locked_file.py | 234 + .../contrib/multiprocess_file_storage.py | 355 + .../oauth2client/contrib/multistore_file.py | 505 + .../oauth2client/contrib/sqlalchemy.py | 173 + .../oauth2client/contrib/xsrfutil.py | 106 + env/Lib/site-packages/oauth2client/crypt.py | 250 + env/Lib/site-packages/oauth2client/file.py | 106 + .../oauth2client/service_account.py | 673 ++ env/Lib/site-packages/oauth2client/tools.py | 256 + .../site-packages/oauth2client/transport.py | 245 + env/Lib/site-packages/oauth2client/util.py | 206 + .../pip-20.2.3.dist-info/INSTALLER | 1 + .../pip-20.2.3.dist-info/LICENSE.txt | 20 + .../pip-20.2.3.dist-info/METADATA | 88 + .../site-packages/pip-20.2.3.dist-info/RECORD | 752 ++ .../pip-20.2.3.dist-info/REQUESTED | 0 
.../site-packages/pip-20.2.3.dist-info/WHEEL | 6 + .../pip-20.2.3.dist-info/entry_points.txt | 5 + .../pip-20.2.3.dist-info/top_level.txt | 1 + env/Lib/site-packages/pip/__init__.py | 18 + env/Lib/site-packages/pip/__main__.py | 26 + .../site-packages/pip/_internal/__init__.py | 17 + .../site-packages/pip/_internal/build_env.py | 241 + env/Lib/site-packages/pip/_internal/cache.py | 346 + .../pip/_internal/cli/__init__.py | 4 + .../pip/_internal/cli/autocompletion.py | 164 + .../pip/_internal/cli/base_command.py | 265 + .../pip/_internal/cli/cmdoptions.py | 975 ++ .../pip/_internal/cli/command_context.py | 36 + .../site-packages/pip/_internal/cli/main.py | 75 + .../pip/_internal/cli/main_parser.py | 99 + .../site-packages/pip/_internal/cli/parser.py | 266 + .../pip/_internal/cli/progress_bars.py | 280 + .../pip/_internal/cli/req_command.py | 402 + .../pip/_internal/cli/spinners.py | 173 + .../pip/_internal/cli/status_codes.py | 8 + .../pip/_internal/commands/__init__.py | 122 + .../pip/_internal/commands/cache.py | 182 + .../pip/_internal/commands/check.py | 51 + .../pip/_internal/commands/completion.py | 98 + .../pip/_internal/commands/configuration.py | 284 + .../pip/_internal/commands/debug.py | 229 + .../pip/_internal/commands/download.py | 143 + .../pip/_internal/commands/freeze.py | 103 + .../pip/_internal/commands/hash.py | 63 + .../pip/_internal/commands/help.py | 44 + .../pip/_internal/commands/install.py | 749 ++ .../pip/_internal/commands/list.py | 320 + .../pip/_internal/commands/search.py | 160 + .../pip/_internal/commands/show.py | 186 + .../pip/_internal/commands/uninstall.py | 95 + .../pip/_internal/commands/wheel.py | 188 + .../pip/_internal/configuration.py | 418 + .../pip/_internal/distributions/__init__.py | 24 + .../pip/_internal/distributions/base.py | 45 + .../pip/_internal/distributions/installed.py | 24 + .../pip/_internal/distributions/sdist.py | 104 + .../pip/_internal/distributions/wheel.py | 36 + 
.../site-packages/pip/_internal/exceptions.py | 381 + .../pip/_internal/index/__init__.py | 2 + .../pip/_internal/index/collector.py | 692 ++ .../pip/_internal/index/package_finder.py | 1014 ++ .../site-packages/pip/_internal/locations.py | 194 + env/Lib/site-packages/pip/_internal/main.py | 16 + .../pip/_internal/models/__init__.py | 2 + .../pip/_internal/models/candidate.py | 38 + .../pip/_internal/models/direct_url.py | 245 + .../pip/_internal/models/format_control.py | 92 + .../pip/_internal/models/index.py | 34 + .../pip/_internal/models/link.py | 245 + .../pip/_internal/models/scheme.py | 31 + .../pip/_internal/models/search_scope.py | 135 + .../pip/_internal/models/selection_prefs.py | 49 + .../pip/_internal/models/target_python.py | 120 + .../pip/_internal/models/wheel.py | 78 + .../pip/_internal/network/__init__.py | 2 + .../pip/_internal/network/auth.py | 310 + .../pip/_internal/network/cache.py | 79 + .../pip/_internal/network/download.py | 182 + .../pip/_internal/network/lazy_wheel.py | 235 + .../pip/_internal/network/session.py | 421 + .../pip/_internal/network/utils.py | 97 + .../pip/_internal/network/xmlrpc.py | 52 + .../pip/_internal/operations/__init__.py | 0 .../pip/_internal/operations/check.py | 158 + .../pip/_internal/operations/freeze.py | 272 + .../_internal/operations/install/__init__.py | 2 + .../operations/install/editable_legacy.py | 52 + .../_internal/operations/install/legacy.py | 130 + .../pip/_internal/operations/install/wheel.py | 861 ++ .../pip/_internal/operations/prepare.py | 562 ++ .../site-packages/pip/_internal/pyproject.py | 196 + .../pip/_internal/req/__init__.py | 103 + .../pip/_internal/req/constructors.py | 486 + .../pip/_internal/req/req_file.py | 592 ++ .../pip/_internal/req/req_install.py | 905 ++ .../pip/_internal/req/req_set.py | 203 + .../pip/_internal/req/req_tracker.py | 150 + .../pip/_internal/req/req_uninstall.py | 648 ++ .../pip/_internal/resolution/__init__.py | 0 .../pip/_internal/resolution/base.py | 20 + 
.../_internal/resolution/legacy/__init__.py | 0 .../_internal/resolution/legacy/resolver.py | 485 + .../resolution/resolvelib/__init__.py | 0 .../_internal/resolution/resolvelib/base.py | 82 + .../resolution/resolvelib/candidates.py | 600 ++ .../resolution/resolvelib/factory.py | 459 + .../resolution/resolvelib/provider.py | 153 + .../resolution/resolvelib/requirements.py | 137 + .../resolution/resolvelib/resolver.py | 259 + .../pip/_internal/self_outdated_check.py | 205 + .../pip/_internal/utils/__init__.py | 0 .../pip/_internal/utils/appdirs.py | 44 + .../pip/_internal/utils/compat.py | 271 + .../pip/_internal/utils/compatibility_tags.py | 166 + .../pip/_internal/utils/datetime.py | 14 + .../pip/_internal/utils/deprecation.py | 104 + .../pip/_internal/utils/direct_url_helpers.py | 130 + .../pip/_internal/utils/distutils_args.py | 48 + .../pip/_internal/utils/encoding.py | 41 + .../pip/_internal/utils/entrypoints.py | 31 + .../pip/_internal/utils/filesystem.py | 224 + .../pip/_internal/utils/filetypes.py | 16 + .../pip/_internal/utils/glibc.py | 98 + .../pip/_internal/utils/hashes.py | 145 + .../_internal/utils/inject_securetransport.py | 36 + .../pip/_internal/utils/logging.py | 399 + .../site-packages/pip/_internal/utils/misc.py | 959 ++ .../pip/_internal/utils/models.py | 44 + .../pip/_internal/utils/packaging.py | 94 + .../pip/_internal/utils/parallel.py | 107 + .../pip/_internal/utils/pkg_resources.py | 44 + .../pip/_internal/utils/setuptools_build.py | 181 + .../pip/_internal/utils/subprocess.py | 280 + .../pip/_internal/utils/temp_dir.py | 274 + .../pip/_internal/utils/typing.py | 38 + .../pip/_internal/utils/unpacking.py | 281 + .../site-packages/pip/_internal/utils/urls.py | 55 + .../pip/_internal/utils/virtualenv.py | 119 + .../pip/_internal/utils/wheel.py | 225 + .../pip/_internal/vcs/__init__.py | 15 + .../site-packages/pip/_internal/vcs/bazaar.py | 119 + .../site-packages/pip/_internal/vcs/git.py | 397 + .../pip/_internal/vcs/mercurial.py | 158 + 
.../pip/_internal/vcs/subversion.py | 336 + .../pip/_internal/vcs/versioncontrol.py | 811 ++ .../pip/_internal/wheel_builder.py | 308 + env/Lib/site-packages/pip/_vendor/__init__.py | 110 + env/Lib/site-packages/pip/_vendor/appdirs.py | 633 ++ .../pip/_vendor/cachecontrol/__init__.py | 11 + .../pip/_vendor/cachecontrol/_cmd.py | 57 + .../pip/_vendor/cachecontrol/adapter.py | 133 + .../pip/_vendor/cachecontrol/cache.py | 39 + .../_vendor/cachecontrol/caches/__init__.py | 2 + .../_vendor/cachecontrol/caches/file_cache.py | 146 + .../cachecontrol/caches/redis_cache.py | 33 + .../pip/_vendor/cachecontrol/compat.py | 29 + .../pip/_vendor/cachecontrol/controller.py | 376 + .../pip/_vendor/cachecontrol/filewrapper.py | 80 + .../pip/_vendor/cachecontrol/heuristics.py | 135 + .../pip/_vendor/cachecontrol/serialize.py | 188 + .../pip/_vendor/cachecontrol/wrapper.py | 29 + .../pip/_vendor/certifi/__init__.py | 3 + .../pip/_vendor/certifi/__main__.py | 12 + .../pip/_vendor/certifi/cacert.pem | 4620 +++++++++ .../site-packages/pip/_vendor/certifi/core.py | 60 + .../pip/_vendor/chardet/__init__.py | 39 + .../pip/_vendor/chardet/big5freq.py | 386 + .../pip/_vendor/chardet/big5prober.py | 47 + .../pip/_vendor/chardet/chardistribution.py | 233 + .../pip/_vendor/chardet/charsetgroupprober.py | 106 + .../pip/_vendor/chardet/charsetprober.py | 145 + .../pip/_vendor/chardet/cli/__init__.py | 1 + .../pip/_vendor/chardet/cli/chardetect.py | 85 + .../pip/_vendor/chardet/codingstatemachine.py | 88 + .../pip/_vendor/chardet/compat.py | 34 + .../pip/_vendor/chardet/cp949prober.py | 49 + .../pip/_vendor/chardet/enums.py | 76 + .../pip/_vendor/chardet/escprober.py | 101 + .../pip/_vendor/chardet/escsm.py | 246 + .../pip/_vendor/chardet/eucjpprober.py | 92 + .../pip/_vendor/chardet/euckrfreq.py | 195 + .../pip/_vendor/chardet/euckrprober.py | 47 + .../pip/_vendor/chardet/euctwfreq.py | 387 + .../pip/_vendor/chardet/euctwprober.py | 46 + .../pip/_vendor/chardet/gb2312freq.py | 283 + 
.../pip/_vendor/chardet/gb2312prober.py | 46 + .../pip/_vendor/chardet/hebrewprober.py | 292 + .../pip/_vendor/chardet/jisfreq.py | 325 + .../pip/_vendor/chardet/jpcntx.py | 233 + .../pip/_vendor/chardet/langbulgarianmodel.py | 228 + .../pip/_vendor/chardet/langcyrillicmodel.py | 333 + .../pip/_vendor/chardet/langgreekmodel.py | 225 + .../pip/_vendor/chardet/langhebrewmodel.py | 200 + .../pip/_vendor/chardet/langhungarianmodel.py | 225 + .../pip/_vendor/chardet/langthaimodel.py | 199 + .../pip/_vendor/chardet/langturkishmodel.py | 193 + .../pip/_vendor/chardet/latin1prober.py | 145 + .../pip/_vendor/chardet/mbcharsetprober.py | 91 + .../pip/_vendor/chardet/mbcsgroupprober.py | 54 + .../pip/_vendor/chardet/mbcssm.py | 572 ++ .../pip/_vendor/chardet/sbcharsetprober.py | 132 + .../pip/_vendor/chardet/sbcsgroupprober.py | 73 + .../pip/_vendor/chardet/sjisprober.py | 92 + .../pip/_vendor/chardet/universaldetector.py | 286 + .../pip/_vendor/chardet/utf8prober.py | 82 + .../pip/_vendor/chardet/version.py | 9 + .../pip/_vendor/colorama/__init__.py | 6 + .../pip/_vendor/colorama/ansi.py | 102 + .../pip/_vendor/colorama/ansitowin32.py | 257 + .../pip/_vendor/colorama/initialise.py | 80 + .../pip/_vendor/colorama/win32.py | 152 + .../pip/_vendor/colorama/winterm.py | 169 + .../site-packages/pip/_vendor/contextlib2.py | 518 + .../pip/_vendor/distlib/__init__.py | 23 + .../pip/_vendor/distlib/_backport/__init__.py | 6 + .../pip/_vendor/distlib/_backport/misc.py | 41 + .../pip/_vendor/distlib/_backport/shutil.py | 764 ++ .../_vendor/distlib/_backport/sysconfig.cfg | 84 + .../_vendor/distlib/_backport/sysconfig.py | 786 ++ .../pip/_vendor/distlib/_backport/tarfile.py | 2607 +++++ .../pip/_vendor/distlib/compat.py | 1120 +++ .../pip/_vendor/distlib/database.py | 1339 +++ .../pip/_vendor/distlib/index.py | 516 + .../pip/_vendor/distlib/locators.py | 1302 +++ .../pip/_vendor/distlib/manifest.py | 393 + .../pip/_vendor/distlib/markers.py | 131 + .../pip/_vendor/distlib/metadata.py | 
1056 +++ .../pip/_vendor/distlib/resources.py | 355 + .../pip/_vendor/distlib/scripts.py | 419 + .../site-packages/pip/_vendor/distlib/t32.exe | Bin 0 -> 96768 bytes .../site-packages/pip/_vendor/distlib/t64.exe | Bin 0 -> 105984 bytes .../site-packages/pip/_vendor/distlib/util.py | 1761 ++++ .../pip/_vendor/distlib/version.py | 736 ++ .../site-packages/pip/_vendor/distlib/w32.exe | Bin 0 -> 90112 bytes .../site-packages/pip/_vendor/distlib/w64.exe | Bin 0 -> 99840 bytes .../pip/_vendor/distlib/wheel.py | 1018 ++ env/Lib/site-packages/pip/_vendor/distro.py | 1230 +++ .../pip/_vendor/html5lib/__init__.py | 35 + .../pip/_vendor/html5lib/_ihatexml.py | 289 + .../pip/_vendor/html5lib/_inputstream.py | 918 ++ .../pip/_vendor/html5lib/_tokenizer.py | 1735 ++++ .../pip/_vendor/html5lib/_trie/__init__.py | 5 + .../pip/_vendor/html5lib/_trie/_base.py | 40 + .../pip/_vendor/html5lib/_trie/py.py | 67 + .../pip/_vendor/html5lib/_utils.py | 159 + .../pip/_vendor/html5lib/constants.py | 2946 ++++++ .../pip/_vendor/html5lib/filters/__init__.py | 0 .../filters/alphabeticalattributes.py | 29 + .../pip/_vendor/html5lib/filters/base.py | 12 + .../html5lib/filters/inject_meta_charset.py | 73 + .../pip/_vendor/html5lib/filters/lint.py | 93 + .../_vendor/html5lib/filters/optionaltags.py | 207 + .../pip/_vendor/html5lib/filters/sanitizer.py | 916 ++ .../_vendor/html5lib/filters/whitespace.py | 38 + .../pip/_vendor/html5lib/html5parser.py | 2795 ++++++ .../pip/_vendor/html5lib/serializer.py | 409 + .../_vendor/html5lib/treeadapters/__init__.py | 30 + .../_vendor/html5lib/treeadapters/genshi.py | 54 + .../pip/_vendor/html5lib/treeadapters/sax.py | 50 + .../_vendor/html5lib/treebuilders/__init__.py | 88 + .../pip/_vendor/html5lib/treebuilders/base.py | 417 + .../pip/_vendor/html5lib/treebuilders/dom.py | 239 + .../_vendor/html5lib/treebuilders/etree.py | 343 + .../html5lib/treebuilders/etree_lxml.py | 392 + .../_vendor/html5lib/treewalkers/__init__.py | 154 + 
.../pip/_vendor/html5lib/treewalkers/base.py | 252 + .../pip/_vendor/html5lib/treewalkers/dom.py | 43 + .../pip/_vendor/html5lib/treewalkers/etree.py | 131 + .../html5lib/treewalkers/etree_lxml.py | 215 + .../_vendor/html5lib/treewalkers/genshi.py | 69 + .../pip/_vendor/idna/__init__.py | 2 + .../site-packages/pip/_vendor/idna/codec.py | 118 + .../site-packages/pip/_vendor/idna/compat.py | 12 + .../site-packages/pip/_vendor/idna/core.py | 400 + .../pip/_vendor/idna/idnadata.py | 2050 ++++ .../pip/_vendor/idna/intranges.py | 53 + .../pip/_vendor/idna/package_data.py | 2 + .../pip/_vendor/idna/uts46data.py | 8357 +++++++++++++++++ .../site-packages/pip/_vendor/ipaddress.py | 2420 +++++ .../pip/_vendor/msgpack/__init__.py | 54 + .../pip/_vendor/msgpack/_version.py | 1 + .../pip/_vendor/msgpack/exceptions.py | 48 + .../site-packages/pip/_vendor/msgpack/ext.py | 191 + .../pip/_vendor/msgpack/fallback.py | 1063 +++ .../pip/_vendor/packaging/__about__.py | 27 + .../pip/_vendor/packaging/__init__.py | 26 + .../pip/_vendor/packaging/_compat.py | 38 + .../pip/_vendor/packaging/_structures.py | 86 + .../pip/_vendor/packaging/_typing.py | 48 + .../pip/_vendor/packaging/markers.py | 328 + .../pip/_vendor/packaging/requirements.py | 145 + .../pip/_vendor/packaging/specifiers.py | 863 ++ .../pip/_vendor/packaging/tags.py | 751 ++ .../pip/_vendor/packaging/utils.py | 65 + .../pip/_vendor/packaging/version.py | 535 ++ .../pip/_vendor/pep517/__init__.py | 4 + .../pip/_vendor/pep517/_in_process.py | 280 + .../site-packages/pip/_vendor/pep517/build.py | 124 + .../site-packages/pip/_vendor/pep517/check.py | 203 + .../pip/_vendor/pep517/colorlog.py | 115 + .../pip/_vendor/pep517/compat.py | 34 + .../pip/_vendor/pep517/dirtools.py | 44 + .../pip/_vendor/pep517/envbuild.py | 167 + .../site-packages/pip/_vendor/pep517/meta.py | 92 + .../pip/_vendor/pep517/wrappers.py | 308 + .../pip/_vendor/pkg_resources/__init__.py | 3296 +++++++ .../pip/_vendor/pkg_resources/py31compat.py | 23 + 
.../pip/_vendor/progress/__init__.py | 177 + .../site-packages/pip/_vendor/progress/bar.py | 91 + .../pip/_vendor/progress/counter.py | 41 + .../pip/_vendor/progress/spinner.py | 43 + .../site-packages/pip/_vendor/pyparsing.py | 7107 ++++++++++++++ .../pip/_vendor/requests/__init__.py | 144 + .../pip/_vendor/requests/__version__.py | 14 + .../pip/_vendor/requests/_internal_utils.py | 42 + .../pip/_vendor/requests/adapters.py | 533 ++ .../site-packages/pip/_vendor/requests/api.py | 161 + .../pip/_vendor/requests/auth.py | 305 + .../pip/_vendor/requests/certs.py | 18 + .../pip/_vendor/requests/compat.py | 76 + .../pip/_vendor/requests/cookies.py | 549 ++ .../pip/_vendor/requests/exceptions.py | 123 + .../pip/_vendor/requests/help.py | 119 + .../pip/_vendor/requests/hooks.py | 34 + .../pip/_vendor/requests/models.py | 954 ++ .../pip/_vendor/requests/packages.py | 16 + .../pip/_vendor/requests/sessions.py | 769 ++ .../pip/_vendor/requests/status_codes.py | 123 + .../pip/_vendor/requests/structures.py | 105 + .../pip/_vendor/requests/utils.py | 982 ++ .../pip/_vendor/resolvelib/__init__.py | 26 + .../pip/_vendor/resolvelib/compat/__init__.py | 0 .../resolvelib/compat/collections_abc.py | 6 + .../pip/_vendor/resolvelib/providers.py | 109 + .../pip/_vendor/resolvelib/reporters.py | 42 + .../pip/_vendor/resolvelib/resolvers.py | 428 + .../pip/_vendor/resolvelib/structs.py | 68 + env/Lib/site-packages/pip/_vendor/retrying.py | 267 + env/Lib/site-packages/pip/_vendor/six.py | 982 ++ .../pip/_vendor/toml/__init__.py | 25 + .../site-packages/pip/_vendor/toml/common.py | 6 + .../site-packages/pip/_vendor/toml/decoder.py | 1052 +++ .../site-packages/pip/_vendor/toml/encoder.py | 304 + .../site-packages/pip/_vendor/toml/ordered.py | 15 + env/Lib/site-packages/pip/_vendor/toml/tz.py | 21 + .../pip/_vendor/urllib3/__init__.py | 86 + .../pip/_vendor/urllib3/_collections.py | 336 + .../pip/_vendor/urllib3/connection.py | 423 + .../pip/_vendor/urllib3/connectionpool.py | 1033 ++ 
.../pip/_vendor/urllib3/contrib/__init__.py | 0 .../urllib3/contrib/_appengine_environ.py | 36 + .../contrib/_securetransport/__init__.py | 0 .../contrib/_securetransport/bindings.py | 493 + .../contrib/_securetransport/low_level.py | 328 + .../pip/_vendor/urllib3/contrib/appengine.py | 314 + .../pip/_vendor/urllib3/contrib/ntlmpool.py | 121 + .../pip/_vendor/urllib3/contrib/pyopenssl.py | 501 + .../urllib3/contrib/securetransport.py | 864 ++ .../pip/_vendor/urllib3/contrib/socks.py | 210 + .../pip/_vendor/urllib3/exceptions.py | 272 + .../pip/_vendor/urllib3/fields.py | 273 + .../pip/_vendor/urllib3/filepost.py | 98 + .../pip/_vendor/urllib3/packages/__init__.py | 5 + .../urllib3/packages/backports/__init__.py | 0 .../urllib3/packages/backports/makefile.py | 52 + .../pip/_vendor/urllib3/packages/six.py | 1021 ++ .../packages/ssl_match_hostname/__init__.py | 19 + .../ssl_match_hostname/_implementation.py | 160 + .../pip/_vendor/urllib3/poolmanager.py | 492 + .../pip/_vendor/urllib3/request.py | 171 + .../pip/_vendor/urllib3/response.py | 821 ++ .../pip/_vendor/urllib3/util/__init__.py | 46 + .../pip/_vendor/urllib3/util/connection.py | 138 + .../pip/_vendor/urllib3/util/queue.py | 21 + .../pip/_vendor/urllib3/util/request.py | 135 + .../pip/_vendor/urllib3/util/response.py | 86 + .../pip/_vendor/urllib3/util/retry.py | 453 + .../pip/_vendor/urllib3/util/ssl_.py | 414 + .../pip/_vendor/urllib3/util/timeout.py | 261 + .../pip/_vendor/urllib3/util/url.py | 430 + .../pip/_vendor/urllib3/util/wait.py | 153 + env/Lib/site-packages/pip/_vendor/vendor.txt | 24 + .../pip/_vendor/webencodings/__init__.py | 342 + .../pip/_vendor/webencodings/labels.py | 231 + .../pip/_vendor/webencodings/mklabels.py | 59 + .../pip/_vendor/webencodings/tests.py | 153 + .../_vendor/webencodings/x_user_defined.py | 325 + .../site-packages/pkg_resources/__init__.py | 3302 +++++++ .../pkg_resources/_vendor/__init__.py | 0 .../pkg_resources/_vendor/appdirs.py | 608 ++ 
.../_vendor/packaging/__about__.py | 27 + .../_vendor/packaging/__init__.py | 26 + .../_vendor/packaging/_compat.py | 31 + .../_vendor/packaging/_structures.py | 68 + .../_vendor/packaging/markers.py | 296 + .../_vendor/packaging/requirements.py | 138 + .../_vendor/packaging/specifiers.py | 749 ++ .../pkg_resources/_vendor/packaging/tags.py | 404 + .../pkg_resources/_vendor/packaging/utils.py | 57 + .../_vendor/packaging/version.py | 420 + .../pkg_resources/_vendor/pyparsing.py | 5742 +++++++++++ .../pkg_resources/_vendor/six.py | 868 ++ .../pkg_resources/extern/__init__.py | 66 + .../site-packages/protobuf-3.17.3-nspkg.pth | 1 + .../protobuf-3.17.3.dist-info/INSTALLER | 1 + .../protobuf-3.17.3.dist-info/METADATA | 24 + .../protobuf-3.17.3.dist-info/RECORD | 95 + .../protobuf-3.17.3.dist-info/WHEEL | 6 + .../namespace_packages.txt | 1 + .../protobuf-3.17.3.dist-info/top_level.txt | 1 + .../pyasn1-0.4.8.dist-info/INSTALLER | 1 + .../pyasn1-0.4.8.dist-info/LICENSE.rst | 24 + .../pyasn1-0.4.8.dist-info/METADATA | 38 + .../pyasn1-0.4.8.dist-info/RECORD | 79 + .../pyasn1-0.4.8.dist-info/WHEEL | 6 + .../pyasn1-0.4.8.dist-info/top_level.txt | 1 + .../pyasn1-0.4.8.dist-info/zip-safe | 1 + env/Lib/site-packages/pyasn1/__init__.py | 7 + .../site-packages/pyasn1/codec/__init__.py | 1 + .../pyasn1/codec/ber/__init__.py | 1 + .../site-packages/pyasn1/codec/ber/decoder.py | 1682 ++++ .../site-packages/pyasn1/codec/ber/encoder.py | 890 ++ env/Lib/site-packages/pyasn1/codec/ber/eoo.py | 28 + .../pyasn1/codec/cer/__init__.py | 1 + .../site-packages/pyasn1/codec/cer/decoder.py | 114 + .../site-packages/pyasn1/codec/cer/encoder.py | 313 + .../pyasn1/codec/der/__init__.py | 1 + .../site-packages/pyasn1/codec/der/decoder.py | 94 + .../site-packages/pyasn1/codec/der/encoder.py | 107 + .../pyasn1/codec/native/__init__.py | 1 + .../pyasn1/codec/native/decoder.py | 213 + .../pyasn1/codec/native/encoder.py | 256 + .../site-packages/pyasn1/compat/__init__.py | 1 + 
env/Lib/site-packages/pyasn1/compat/binary.py | 33 + .../site-packages/pyasn1/compat/calling.py | 20 + .../pyasn1/compat/dateandtime.py | 22 + .../site-packages/pyasn1/compat/integer.py | 110 + env/Lib/site-packages/pyasn1/compat/octets.py | 46 + env/Lib/site-packages/pyasn1/compat/string.py | 26 + env/Lib/site-packages/pyasn1/debug.py | 157 + env/Lib/site-packages/pyasn1/error.py | 75 + env/Lib/site-packages/pyasn1/type/__init__.py | 1 + env/Lib/site-packages/pyasn1/type/base.py | 707 ++ env/Lib/site-packages/pyasn1/type/char.py | 335 + .../site-packages/pyasn1/type/constraint.py | 756 ++ env/Lib/site-packages/pyasn1/type/error.py | 11 + .../site-packages/pyasn1/type/namedtype.py | 561 ++ env/Lib/site-packages/pyasn1/type/namedval.py | 192 + env/Lib/site-packages/pyasn1/type/opentype.py | 104 + env/Lib/site-packages/pyasn1/type/tag.py | 335 + env/Lib/site-packages/pyasn1/type/tagmap.py | 96 + env/Lib/site-packages/pyasn1/type/univ.py | 3321 +++++++ env/Lib/site-packages/pyasn1/type/useful.py | 191 + .../pyasn1_modules-0.2.8.dist-info/INSTALLER | 1 + .../LICENSE.txt | 24 + .../pyasn1_modules-0.2.8.dist-info/METADATA | 42 + .../pyasn1_modules-0.2.8.dist-info/RECORD | 221 + .../pyasn1_modules-0.2.8.dist-info/WHEEL | 6 + .../top_level.txt | 1 + .../pyasn1_modules-0.2.8.dist-info/zip-safe | 1 + .../site-packages/pyasn1_modules/__init__.py | 2 + env/Lib/site-packages/pyasn1_modules/pem.py | 65 + .../site-packages/pyasn1_modules/rfc1155.py | 96 + .../site-packages/pyasn1_modules/rfc1157.py | 126 + .../site-packages/pyasn1_modules/rfc1901.py | 22 + .../site-packages/pyasn1_modules/rfc1902.py | 129 + .../site-packages/pyasn1_modules/rfc1905.py | 135 + .../site-packages/pyasn1_modules/rfc2251.py | 563 ++ .../site-packages/pyasn1_modules/rfc2314.py | 48 + .../site-packages/pyasn1_modules/rfc2315.py | 294 + .../site-packages/pyasn1_modules/rfc2437.py | 69 + .../site-packages/pyasn1_modules/rfc2459.py | 1339 +++ .../site-packages/pyasn1_modules/rfc2511.py | 258 + 
.../site-packages/pyasn1_modules/rfc2560.py | 225 + .../site-packages/pyasn1_modules/rfc2631.py | 37 + .../site-packages/pyasn1_modules/rfc2634.py | 336 + .../site-packages/pyasn1_modules/rfc2985.py | 588 ++ .../site-packages/pyasn1_modules/rfc2986.py | 75 + .../site-packages/pyasn1_modules/rfc3114.py | 77 + .../site-packages/pyasn1_modules/rfc3161.py | 142 + .../site-packages/pyasn1_modules/rfc3274.py | 59 + .../site-packages/pyasn1_modules/rfc3279.py | 260 + .../site-packages/pyasn1_modules/rfc3280.py | 1543 +++ .../site-packages/pyasn1_modules/rfc3281.py | 331 + .../site-packages/pyasn1_modules/rfc3412.py | 53 + .../site-packages/pyasn1_modules/rfc3414.py | 28 + .../site-packages/pyasn1_modules/rfc3447.py | 45 + .../site-packages/pyasn1_modules/rfc3560.py | 74 + .../site-packages/pyasn1_modules/rfc3565.py | 57 + .../site-packages/pyasn1_modules/rfc3709.py | 207 + .../site-packages/pyasn1_modules/rfc3770.py | 75 + .../site-packages/pyasn1_modules/rfc3779.py | 137 + .../site-packages/pyasn1_modules/rfc3852.py | 706 ++ .../site-packages/pyasn1_modules/rfc4043.py | 43 + .../site-packages/pyasn1_modules/rfc4055.py | 258 + .../site-packages/pyasn1_modules/rfc4073.py | 59 + .../site-packages/pyasn1_modules/rfc4108.py | 350 + .../site-packages/pyasn1_modules/rfc4210.py | 803 ++ .../site-packages/pyasn1_modules/rfc4211.py | 396 + .../site-packages/pyasn1_modules/rfc4334.py | 75 + .../site-packages/pyasn1_modules/rfc4985.py | 49 + .../site-packages/pyasn1_modules/rfc5035.py | 199 + .../site-packages/pyasn1_modules/rfc5083.py | 52 + .../site-packages/pyasn1_modules/rfc5084.py | 97 + .../site-packages/pyasn1_modules/rfc5208.py | 56 + .../site-packages/pyasn1_modules/rfc5280.py | 1658 ++++ .../site-packages/pyasn1_modules/rfc5480.py | 190 + .../site-packages/pyasn1_modules/rfc5649.py | 33 + .../site-packages/pyasn1_modules/rfc5652.py | 761 ++ .../site-packages/pyasn1_modules/rfc5751.py | 124 + .../site-packages/pyasn1_modules/rfc5755.py | 398 + 
.../site-packages/pyasn1_modules/rfc5913.py | 44 + .../site-packages/pyasn1_modules/rfc5914.py | 119 + .../site-packages/pyasn1_modules/rfc5915.py | 32 + .../site-packages/pyasn1_modules/rfc5916.py | 35 + .../site-packages/pyasn1_modules/rfc5917.py | 55 + .../site-packages/pyasn1_modules/rfc5924.py | 19 + .../site-packages/pyasn1_modules/rfc5934.py | 786 ++ .../site-packages/pyasn1_modules/rfc5940.py | 59 + .../site-packages/pyasn1_modules/rfc5958.py | 98 + .../site-packages/pyasn1_modules/rfc5990.py | 237 + .../site-packages/pyasn1_modules/rfc6010.py | 88 + .../site-packages/pyasn1_modules/rfc6019.py | 45 + .../site-packages/pyasn1_modules/rfc6031.py | 469 + .../site-packages/pyasn1_modules/rfc6032.py | 68 + .../site-packages/pyasn1_modules/rfc6120.py | 43 + .../site-packages/pyasn1_modules/rfc6170.py | 17 + .../site-packages/pyasn1_modules/rfc6187.py | 22 + .../site-packages/pyasn1_modules/rfc6210.py | 42 + .../site-packages/pyasn1_modules/rfc6211.py | 72 + .../site-packages/pyasn1_modules/rfc6402-1.py | 627 ++ .../site-packages/pyasn1_modules/rfc6402.py | 628 ++ .../site-packages/pyasn1_modules/rfc6482.py | 74 + .../site-packages/pyasn1_modules/rfc6486.py | 68 + .../site-packages/pyasn1_modules/rfc6487.py | 22 + .../site-packages/pyasn1_modules/rfc6664.py | 147 + .../site-packages/pyasn1_modules/rfc6955.py | 108 + .../site-packages/pyasn1_modules/rfc6960.py | 223 + .../site-packages/pyasn1_modules/rfc7030.py | 66 + .../site-packages/pyasn1_modules/rfc7191.py | 261 + .../site-packages/pyasn1_modules/rfc7229.py | 29 + .../site-packages/pyasn1_modules/rfc7292.py | 357 + .../site-packages/pyasn1_modules/rfc7296.py | 32 + .../site-packages/pyasn1_modules/rfc7508.py | 90 + .../site-packages/pyasn1_modules/rfc7585.py | 50 + .../site-packages/pyasn1_modules/rfc7633.py | 38 + .../site-packages/pyasn1_modules/rfc7773.py | 52 + .../site-packages/pyasn1_modules/rfc7894-1.py | 92 + .../site-packages/pyasn1_modules/rfc7894.py | 92 + .../site-packages/pyasn1_modules/rfc7906.py 
| 736 ++ .../site-packages/pyasn1_modules/rfc7914.py | 49 + .../site-packages/pyasn1_modules/rfc8017.py | 153 + .../site-packages/pyasn1_modules/rfc8018.py | 260 + .../site-packages/pyasn1_modules/rfc8103.py | 36 + .../site-packages/pyasn1_modules/rfc8209.py | 20 + .../site-packages/pyasn1_modules/rfc8226.py | 149 + .../site-packages/pyasn1_modules/rfc8358.py | 50 + .../site-packages/pyasn1_modules/rfc8360.py | 44 + .../site-packages/pyasn1_modules/rfc8398.py | 52 + .../site-packages/pyasn1_modules/rfc8410.py | 43 + .../site-packages/pyasn1_modules/rfc8418.py | 36 + .../site-packages/pyasn1_modules/rfc8419.py | 68 + .../site-packages/pyasn1_modules/rfc8479.py | 45 + .../site-packages/pyasn1_modules/rfc8494.py | 80 + .../site-packages/pyasn1_modules/rfc8520.py | 63 + .../site-packages/pyasn1_modules/rfc8619.py | 45 + .../site-packages/pyasn1_modules/rfc8649.py | 40 + .../pycryptodome-3.10.1.dist-info/AUTHORS.rst | 49 + .../pycryptodome-3.10.1.dist-info/INSTALLER | 1 + .../pycryptodome-3.10.1.dist-info/LICENSE.rst | 69 + .../pycryptodome-3.10.1.dist-info/METADATA | 83 + .../pycryptodome-3.10.1.dist-info/RECORD | 471 + .../pycryptodome-3.10.1.dist-info/REQUESTED | 0 .../pycryptodome-3.10.1.dist-info/WHEEL | 5 + .../top_level.txt | 1 + .../pyparsing-2.4.7.dist-info/INSTALLER | 1 + .../pyparsing-2.4.7.dist-info/LICENSE | 18 + .../pyparsing-2.4.7.dist-info/METADATA | 104 + .../pyparsing-2.4.7.dist-info/RECORD | 8 + .../pyparsing-2.4.7.dist-info/WHEEL | 6 + .../pyparsing-2.4.7.dist-info/top_level.txt | 1 + env/Lib/site-packages/pyparsing.py | 7107 ++++++++++++++ .../DESCRIPTION.rst | 179 + .../python_jwt-2.0.1.dist-info/INSTALLER | 1 + .../python_jwt-2.0.1.dist-info/METADATA | 190 + .../python_jwt-2.0.1.dist-info/RECORD | 10 + .../python_jwt-2.0.1.dist-info/REQUESTED | 0 .../python_jwt-2.0.1.dist-info/WHEEL | 6 + .../python_jwt-2.0.1.dist-info/metadata.json | 1 + .../python_jwt-2.0.1.dist-info/top_level.txt | 1 + env/Lib/site-packages/python_jwt/__init__.py | 221 + 
.../requests-2.19.0.dist-info/DESCRIPTION.rst | 1676 ++++ .../requests-2.19.0.dist-info/INSTALLER | 1 + .../requests-2.19.0.dist-info/LICENSE.txt | 13 + .../requests-2.19.0.dist-info/METADATA | 1713 ++++ .../requests-2.19.0.dist-info/RECORD | 45 + .../requests-2.19.0.dist-info/REQUESTED | 0 .../requests-2.19.0.dist-info/WHEEL | 6 + .../requests-2.19.0.dist-info/metadata.json | 1 + .../requests-2.19.0.dist-info/top_level.txt | 1 + env/Lib/site-packages/requests/__init__.py | 136 + env/Lib/site-packages/requests/__version__.py | 14 + .../site-packages/requests/_internal_utils.py | 42 + env/Lib/site-packages/requests/adapters.py | 530 ++ env/Lib/site-packages/requests/api.py | 152 + env/Lib/site-packages/requests/auth.py | 305 + env/Lib/site-packages/requests/certs.py | 18 + env/Lib/site-packages/requests/compat.py | 71 + env/Lib/site-packages/requests/cookies.py | 546 ++ env/Lib/site-packages/requests/exceptions.py | 126 + env/Lib/site-packages/requests/help.py | 120 + env/Lib/site-packages/requests/hooks.py | 34 + env/Lib/site-packages/requests/models.py | 952 ++ env/Lib/site-packages/requests/packages.py | 14 + env/Lib/site-packages/requests/sessions.py | 741 ++ .../site-packages/requests/status_codes.py | 119 + env/Lib/site-packages/requests/structures.py | 103 + env/Lib/site-packages/requests/utils.py | 976 ++ .../DESCRIPTION.rst | 374 + .../INSTALLER | 1 + .../METADATA | 396 + .../requests_toolbelt-0.7.0.dist-info/RECORD | 74 + .../REQUESTED | 0 .../requests_toolbelt-0.7.0.dist-info/WHEEL | 6 + .../metadata.json | 1 + .../top_level.txt | 1 + .../requests_toolbelt/__init__.py | 34 + .../requests_toolbelt/_compat.py | 310 + .../requests_toolbelt/adapters/__init__.py | 15 + .../requests_toolbelt/adapters/appengine.py | 147 + .../requests_toolbelt/adapters/fingerprint.py | 48 + .../adapters/host_header_ssl.py | 43 + .../adapters/socket_options.py | 118 + .../requests_toolbelt/adapters/source.py | 67 + .../requests_toolbelt/adapters/ssl.py | 66 + 
.../requests_toolbelt/auth/__init__.py | 0 .../auth/_digest_auth_compat.py | 29 + .../requests_toolbelt/auth/guess.py | 146 + .../requests_toolbelt/auth/handler.py | 142 + .../auth/http_proxy_digest.py | 103 + .../requests_toolbelt/cookies/__init__.py | 0 .../requests_toolbelt/cookies/forgetful.py | 7 + .../downloadutils/__init__.py | 0 .../requests_toolbelt/downloadutils/stream.py | 177 + .../requests_toolbelt/downloadutils/tee.py | 123 + .../requests_toolbelt/exceptions.py | 25 + .../requests_toolbelt/multipart/__init__.py | 31 + .../requests_toolbelt/multipart/decoder.py | 155 + .../requests_toolbelt/multipart/encoder.py | 570 ++ .../requests_toolbelt/sessions.py | 70 + .../requests_toolbelt/streaming_iterator.py | 116 + .../requests_toolbelt/threaded/__init__.py | 97 + .../requests_toolbelt/threaded/pool.py | 211 + .../requests_toolbelt/threaded/thread.py | 53 + .../requests_toolbelt/utils/__init__.py | 0 .../requests_toolbelt/utils/deprecated.py | 84 + .../requests_toolbelt/utils/dump.py | 195 + .../requests_toolbelt/utils/formdata.py | 108 + .../requests_toolbelt/utils/user_agent.py | 76 + .../rsa-4.7.2.dist-info/INSTALLER | 1 + .../site-packages/rsa-4.7.2.dist-info/LICENSE | 13 + .../rsa-4.7.2.dist-info/METADATA | 87 + .../site-packages/rsa-4.7.2.dist-info/RECORD | 43 + .../site-packages/rsa-4.7.2.dist-info/WHEEL | 5 + .../rsa-4.7.2.dist-info/entry_points.txt | 8 + .../rsa-4.7.2.dist-info/top_level.txt | 1 + env/Lib/site-packages/rsa/__init__.py | 40 + env/Lib/site-packages/rsa/_compat.py | 48 + env/Lib/site-packages/rsa/asn1.py | 51 + env/Lib/site-packages/rsa/cli.py | 292 + env/Lib/site-packages/rsa/common.py | 185 + env/Lib/site-packages/rsa/core.py | 53 + env/Lib/site-packages/rsa/key.py | 831 ++ env/Lib/site-packages/rsa/parallel.py | 97 + env/Lib/site-packages/rsa/pem.py | 132 + env/Lib/site-packages/rsa/pkcs1.py | 470 + env/Lib/site-packages/rsa/pkcs1_v2.py | 100 + env/Lib/site-packages/rsa/prime.py | 198 + env/Lib/site-packages/rsa/randnum.py | 96 + 
env/Lib/site-packages/rsa/transform.py | 72 + env/Lib/site-packages/rsa/util.py | 75 + .../setuptools-49.2.1.dist-info/INSTALLER | 1 + .../setuptools-49.2.1.dist-info/LICENSE | 19 + .../setuptools-49.2.1.dist-info/METADATA | 109 + .../setuptools-49.2.1.dist-info/RECORD | 297 + .../setuptools-49.2.1.dist-info/REQUESTED | 0 .../setuptools-49.2.1.dist-info/WHEEL | 5 + .../dependency_links.txt | 2 + .../entry_points.txt | 68 + .../setuptools-49.2.1.dist-info/top_level.txt | 3 + .../setuptools-49.2.1.dist-info/zip-safe | 1 + env/Lib/site-packages/setuptools/__init__.py | 253 + .../setuptools/_deprecation_warning.py | 7 + .../setuptools/_distutils/__init__.py | 15 + .../setuptools/_distutils/_msvccompiler.py | 537 ++ .../setuptools/_distutils/archive_util.py | 256 + .../setuptools/_distutils/bcppcompiler.py | 393 + .../setuptools/_distutils/ccompiler.py | 1116 +++ .../setuptools/_distutils/cmd.py | 403 + .../setuptools/_distutils/command/__init__.py | 31 + .../setuptools/_distutils/command/bdist.py | 143 + .../_distutils/command/bdist_dumb.py | 123 + .../_distutils/command/bdist_msi.py | 749 ++ .../_distutils/command/bdist_rpm.py | 579 ++ .../_distutils/command/bdist_wininst.py | 377 + .../setuptools/_distutils/command/build.py | 157 + .../_distutils/command/build_clib.py | 209 + .../_distutils/command/build_ext.py | 754 ++ .../setuptools/_distutils/command/build_py.py | 416 + .../_distutils/command/build_scripts.py | 160 + .../setuptools/_distutils/command/check.py | 148 + .../setuptools/_distutils/command/clean.py | 76 + .../setuptools/_distutils/command/config.py | 344 + .../setuptools/_distutils/command/install.py | 677 ++ .../_distutils/command/install_data.py | 79 + .../_distutils/command/install_egg_info.py | 77 + .../_distutils/command/install_headers.py | 47 + .../_distutils/command/install_lib.py | 217 + .../_distutils/command/install_scripts.py | 60 + .../setuptools/_distutils/command/register.py | 304 + .../setuptools/_distutils/command/sdist.py | 494 + 
.../setuptools/_distutils/command/upload.py | 214 + .../setuptools/_distutils/config.py | 130 + .../setuptools/_distutils/core.py | 234 + .../setuptools/_distutils/cygwinccompiler.py | 403 + .../setuptools/_distutils/debug.py | 5 + .../setuptools/_distutils/dep_util.py | 92 + .../setuptools/_distutils/dir_util.py | 210 + .../setuptools/_distutils/dist.py | 1257 +++ .../setuptools/_distutils/errors.py | 97 + .../setuptools/_distutils/extension.py | 240 + .../setuptools/_distutils/fancy_getopt.py | 457 + .../setuptools/_distutils/file_util.py | 238 + .../setuptools/_distutils/filelist.py | 327 + .../setuptools/_distutils/log.py | 77 + .../setuptools/_distutils/msvc9compiler.py | 788 ++ .../setuptools/_distutils/msvccompiler.py | 643 ++ .../setuptools/_distutils/spawn.py | 125 + .../setuptools/_distutils/sysconfig.py | 573 ++ .../setuptools/_distutils/text_file.py | 286 + .../setuptools/_distutils/unixccompiler.py | 328 + .../setuptools/_distutils/util.py | 559 ++ .../setuptools/_distutils/version.py | 347 + .../setuptools/_distutils/versionpredicate.py | 166 + env/Lib/site-packages/setuptools/_imp.py | 82 + .../setuptools/_vendor/__init__.py | 0 .../setuptools/_vendor/ordered_set.py | 488 + .../setuptools/_vendor/packaging/__about__.py | 27 + .../setuptools/_vendor/packaging/__init__.py | 26 + .../setuptools/_vendor/packaging/_compat.py | 31 + .../_vendor/packaging/_structures.py | 68 + .../setuptools/_vendor/packaging/markers.py | 296 + .../_vendor/packaging/requirements.py | 138 + .../_vendor/packaging/specifiers.py | 749 ++ .../setuptools/_vendor/packaging/tags.py | 404 + .../setuptools/_vendor/packaging/utils.py | 57 + .../setuptools/_vendor/packaging/version.py | 420 + .../setuptools/_vendor/pyparsing.py | 5742 +++++++++++ .../site-packages/setuptools/_vendor/six.py | 868 ++ .../site-packages/setuptools/archive_util.py | 175 + .../site-packages/setuptools/build_meta.py | 271 + env/Lib/site-packages/setuptools/cli-32.exe | Bin 0 -> 65536 bytes 
env/Lib/site-packages/setuptools/cli-64.exe | Bin 0 -> 74752 bytes env/Lib/site-packages/setuptools/cli.exe | Bin 0 -> 65536 bytes .../setuptools/command/__init__.py | 17 + .../site-packages/setuptools/command/alias.py | 80 + .../setuptools/command/bdist_egg.py | 510 + .../setuptools/command/bdist_rpm.py | 43 + .../setuptools/command/bdist_wininst.py | 30 + .../setuptools/command/build_clib.py | 101 + .../setuptools/command/build_ext.py | 332 + .../setuptools/command/build_py.py | 276 + .../setuptools/command/develop.py | 220 + .../setuptools/command/dist_info.py | 36 + .../setuptools/command/easy_install.py | 2339 +++++ .../setuptools/command/egg_info.py | 721 ++ .../setuptools/command/install.py | 125 + .../setuptools/command/install_egg_info.py | 62 + .../setuptools/command/install_lib.py | 122 + .../setuptools/command/install_scripts.py | 68 + .../setuptools/command/launcher manifest.xml | 15 + .../setuptools/command/py36compat.py | 136 + .../setuptools/command/register.py | 18 + .../setuptools/command/rotate.py | 66 + .../setuptools/command/saveopts.py | 22 + .../site-packages/setuptools/command/sdist.py | 252 + .../setuptools/command/setopt.py | 149 + .../setuptools/command/upload.py | 17 + .../setuptools/command/upload_docs.py | 206 + env/Lib/site-packages/setuptools/config.py | 701 ++ env/Lib/site-packages/setuptools/dep_util.py | 25 + env/Lib/site-packages/setuptools/depends.py | 176 + env/Lib/site-packages/setuptools/dist.py | 1035 ++ .../setuptools/distutils_patch.py | 61 + env/Lib/site-packages/setuptools/errors.py | 16 + env/Lib/site-packages/setuptools/extension.py | 57 + .../setuptools/extern/__init__.py | 66 + env/Lib/site-packages/setuptools/glob.py | 174 + env/Lib/site-packages/setuptools/gui-32.exe | Bin 0 -> 65536 bytes env/Lib/site-packages/setuptools/gui-64.exe | Bin 0 -> 75264 bytes env/Lib/site-packages/setuptools/gui.exe | Bin 0 -> 65536 bytes env/Lib/site-packages/setuptools/installer.py | 150 + env/Lib/site-packages/setuptools/launch.py | 
36 + .../site-packages/setuptools/lib2to3_ex.py | 71 + env/Lib/site-packages/setuptools/monkey.py | 179 + env/Lib/site-packages/setuptools/msvc.py | 1831 ++++ .../site-packages/setuptools/namespaces.py | 111 + .../site-packages/setuptools/package_index.py | 1140 +++ .../site-packages/setuptools/py27compat.py | 60 + .../site-packages/setuptools/py31compat.py | 32 + .../site-packages/setuptools/py33compat.py | 59 + .../site-packages/setuptools/py34compat.py | 13 + env/Lib/site-packages/setuptools/sandbox.py | 492 + .../setuptools/script (dev).tmpl | 6 + env/Lib/site-packages/setuptools/script.tmpl | 3 + .../site-packages/setuptools/ssl_support.py | 265 + .../site-packages/setuptools/unicode_utils.py | 44 + env/Lib/site-packages/setuptools/version.py | 6 + env/Lib/site-packages/setuptools/wheel.py | 217 + .../setuptools/windows_support.py | 29 + .../six-1.16.0.dist-info/INSTALLER | 1 + .../six-1.16.0.dist-info/LICENSE | 18 + .../six-1.16.0.dist-info/METADATA | 49 + .../site-packages/six-1.16.0.dist-info/RECORD | 8 + .../site-packages/six-1.16.0.dist-info/WHEEL | 6 + .../six-1.16.0.dist-info/top_level.txt | 1 + env/Lib/site-packages/six.py | 998 ++ .../urllib3-1.23.dist-info/DESCRIPTION.rst | 1040 ++ .../urllib3-1.23.dist-info/INSTALLER | 1 + .../urllib3-1.23.dist-info/LICENSE.txt | 19 + .../urllib3-1.23.dist-info/METADATA | 1077 +++ .../urllib3-1.23.dist-info/RECORD | 80 + .../urllib3-1.23.dist-info/WHEEL | 6 + .../urllib3-1.23.dist-info/metadata.json | 1 + .../urllib3-1.23.dist-info/top_level.txt | 1 + env/Lib/site-packages/urllib3/__init__.py | 97 + env/Lib/site-packages/urllib3/_collections.py | 332 + env/Lib/site-packages/urllib3/connection.py | 403 + .../site-packages/urllib3/connectionpool.py | 906 ++ .../site-packages/urllib3/contrib/__init__.py | 0 .../contrib/_securetransport/__init__.py | 0 .../contrib/_securetransport/bindings.py | 593 ++ .../contrib/_securetransport/low_level.py | 346 + .../urllib3/contrib/appengine.py | 305 + 
.../site-packages/urllib3/contrib/ntlmpool.py | 112 + .../urllib3/contrib/pyopenssl.py | 457 + .../urllib3/contrib/securetransport.py | 804 ++ .../site-packages/urllib3/contrib/socks.py | 192 + env/Lib/site-packages/urllib3/exceptions.py | 246 + env/Lib/site-packages/urllib3/fields.py | 178 + env/Lib/site-packages/urllib3/filepost.py | 98 + .../urllib3/packages/__init__.py | 5 + .../urllib3/packages/backports/__init__.py | 0 .../urllib3/packages/backports/makefile.py | 53 + .../urllib3/packages/ordered_dict.py | 259 + env/Lib/site-packages/urllib3/packages/six.py | 868 ++ .../packages/ssl_match_hostname/__init__.py | 19 + .../ssl_match_hostname/_implementation.py | 157 + env/Lib/site-packages/urllib3/poolmanager.py | 449 + env/Lib/site-packages/urllib3/request.py | 150 + env/Lib/site-packages/urllib3/response.py | 676 ++ .../site-packages/urllib3/util/__init__.py | 54 + .../site-packages/urllib3/util/connection.py | 126 + env/Lib/site-packages/urllib3/util/queue.py | 21 + env/Lib/site-packages/urllib3/util/request.py | 118 + .../site-packages/urllib3/util/response.py | 81 + env/Lib/site-packages/urllib3/util/retry.py | 411 + env/Lib/site-packages/urllib3/util/ssl_.py | 396 + env/Lib/site-packages/urllib3/util/timeout.py | 242 + env/Lib/site-packages/urllib3/util/url.py | 230 + env/Lib/site-packages/urllib3/util/wait.py | 153 + env/Scripts/Activate.ps1 | 398 + env/Scripts/activate | 66 + env/Scripts/activate.bat | 33 + env/Scripts/chardetect.exe | Bin 0 -> 106361 bytes env/Scripts/deactivate.bat | 21 + env/Scripts/easy_install-3.9.exe | Bin 0 -> 106370 bytes env/Scripts/easy_install.exe | Bin 0 -> 106370 bytes env/Scripts/pip.exe | Bin 0 -> 106361 bytes env/Scripts/pip3.9.exe | Bin 0 -> 106361 bytes env/Scripts/pip3.exe | Bin 0 -> 106361 bytes env/Scripts/pyrsa-decrypt.exe | Bin 0 -> 106352 bytes env/Scripts/pyrsa-encrypt.exe | Bin 0 -> 106352 bytes env/Scripts/pyrsa-keygen.exe | Bin 0 -> 106350 bytes env/Scripts/pyrsa-priv2pub.exe | Bin 0 -> 106373 bytes 
env/Scripts/pyrsa-sign.exe | Bin 0 -> 106346 bytes env/Scripts/pyrsa-verify.exe | Bin 0 -> 106350 bytes env/Scripts/python.exe | Bin 0 -> 539312 bytes env/Scripts/pythonw.exe | Bin 0 -> 537776 bytes env/pyvenv.cfg | 3 + requirements.txt | 2 +- 1704 files changed, 461262 insertions(+), 1 deletion(-) create mode 100644 env/Lib/site-packages/Crypto/Cipher/AES.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/AES.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/ARC2.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/ARC2.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/ARC4.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/ARC4.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/Blowfish.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/Blowfish.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/CAST.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/CAST.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/ChaCha20.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/ChaCha20.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/ChaCha20_Poly1305.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/ChaCha20_Poly1305.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/DES.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/DES.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/DES3.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/DES3.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/PKCS1_OAEP.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/PKCS1_OAEP.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/PKCS1_v1_5.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/PKCS1_v1_5.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/Salsa20.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/Salsa20.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/_ARC4.pyd create mode 100644 
env/Lib/site-packages/Crypto/Cipher/_EKSBlowfish.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/_EKSBlowfish.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/_Salsa20.pyd create mode 100644 env/Lib/site-packages/Crypto/Cipher/__init__.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/__init__.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/_chacha20.pyd create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_cbc.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_cbc.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_ccm.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_ccm.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_cfb.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_cfb.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_ctr.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_ctr.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_eax.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_eax.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_ecb.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_ecb.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_gcm.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_gcm.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_ocb.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_ocb.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_ofb.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_ofb.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_openpgp.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_openpgp.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_siv.py create mode 100644 env/Lib/site-packages/Crypto/Cipher/_mode_siv.pyi create mode 100644 env/Lib/site-packages/Crypto/Cipher/_raw_aes.pyd create mode 100644 
env/Lib/site-packages/Crypto/Cipher/_raw_aesni.pyd create mode 100644 env/Lib/site-packages/Crypto/Cipher/_raw_arc2.pyd create mode 100644 env/Lib/site-packages/Crypto/Cipher/_raw_blowfish.pyd create mode 100644 env/Lib/site-packages/Crypto/Cipher/_raw_cast.pyd create mode 100644 env/Lib/site-packages/Crypto/Cipher/_raw_cbc.pyd create mode 100644 env/Lib/site-packages/Crypto/Cipher/_raw_cfb.pyd create mode 100644 env/Lib/site-packages/Crypto/Cipher/_raw_ctr.pyd create mode 100644 env/Lib/site-packages/Crypto/Cipher/_raw_des.pyd create mode 100644 env/Lib/site-packages/Crypto/Cipher/_raw_des3.pyd create mode 100644 env/Lib/site-packages/Crypto/Cipher/_raw_ecb.pyd create mode 100644 env/Lib/site-packages/Crypto/Cipher/_raw_eksblowfish.pyd create mode 100644 env/Lib/site-packages/Crypto/Cipher/_raw_ocb.pyd create mode 100644 env/Lib/site-packages/Crypto/Cipher/_raw_ofb.pyd create mode 100644 env/Lib/site-packages/Crypto/Hash/BLAKE2b.py create mode 100644 env/Lib/site-packages/Crypto/Hash/BLAKE2b.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/BLAKE2s.py create mode 100644 env/Lib/site-packages/Crypto/Hash/BLAKE2s.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/CMAC.py create mode 100644 env/Lib/site-packages/Crypto/Hash/CMAC.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/HMAC.py create mode 100644 env/Lib/site-packages/Crypto/Hash/HMAC.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/MD2.py create mode 100644 env/Lib/site-packages/Crypto/Hash/MD2.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/MD4.py create mode 100644 env/Lib/site-packages/Crypto/Hash/MD4.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/MD5.py create mode 100644 env/Lib/site-packages/Crypto/Hash/MD5.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/Poly1305.py create mode 100644 env/Lib/site-packages/Crypto/Hash/Poly1305.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/RIPEMD.py create mode 100644 
env/Lib/site-packages/Crypto/Hash/RIPEMD.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/RIPEMD160.py create mode 100644 env/Lib/site-packages/Crypto/Hash/RIPEMD160.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA.py create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA1.py create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA1.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA224.py create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA224.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA256.py create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA256.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA384.py create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA384.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA3_224.py create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA3_224.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA3_256.py create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA3_256.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA3_384.py create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA3_384.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA3_512.py create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA3_512.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA512.py create mode 100644 env/Lib/site-packages/Crypto/Hash/SHA512.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/SHAKE128.py create mode 100644 env/Lib/site-packages/Crypto/Hash/SHAKE128.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/SHAKE256.py create mode 100644 env/Lib/site-packages/Crypto/Hash/SHAKE256.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/_BLAKE2b.pyd create mode 100644 env/Lib/site-packages/Crypto/Hash/_BLAKE2s.pyd create mode 100644 env/Lib/site-packages/Crypto/Hash/_MD2.pyd create mode 100644 env/Lib/site-packages/Crypto/Hash/_MD4.pyd create mode 100644 
env/Lib/site-packages/Crypto/Hash/_MD5.pyd create mode 100644 env/Lib/site-packages/Crypto/Hash/_RIPEMD160.pyd create mode 100644 env/Lib/site-packages/Crypto/Hash/_SHA1.pyd create mode 100644 env/Lib/site-packages/Crypto/Hash/_SHA224.pyd create mode 100644 env/Lib/site-packages/Crypto/Hash/_SHA256.pyd create mode 100644 env/Lib/site-packages/Crypto/Hash/_SHA384.pyd create mode 100644 env/Lib/site-packages/Crypto/Hash/_SHA512.pyd create mode 100644 env/Lib/site-packages/Crypto/Hash/__init__.py create mode 100644 env/Lib/site-packages/Crypto/Hash/__init__.pyi create mode 100644 env/Lib/site-packages/Crypto/Hash/_ghash_clmul.pyd create mode 100644 env/Lib/site-packages/Crypto/Hash/_ghash_portable.pyd create mode 100644 env/Lib/site-packages/Crypto/Hash/_keccak.pyd create mode 100644 env/Lib/site-packages/Crypto/Hash/_poly1305.pyd create mode 100644 env/Lib/site-packages/Crypto/Hash/keccak.py create mode 100644 env/Lib/site-packages/Crypto/Hash/keccak.pyi create mode 100644 env/Lib/site-packages/Crypto/IO/PEM.py create mode 100644 env/Lib/site-packages/Crypto/IO/PEM.pyi create mode 100644 env/Lib/site-packages/Crypto/IO/PKCS8.py create mode 100644 env/Lib/site-packages/Crypto/IO/PKCS8.pyi create mode 100644 env/Lib/site-packages/Crypto/IO/_PBES.py create mode 100644 env/Lib/site-packages/Crypto/IO/_PBES.pyi create mode 100644 env/Lib/site-packages/Crypto/IO/__init__.py create mode 100644 env/Lib/site-packages/Crypto/Math/Numbers.py create mode 100644 env/Lib/site-packages/Crypto/Math/Numbers.pyi create mode 100644 env/Lib/site-packages/Crypto/Math/Primality.py create mode 100644 env/Lib/site-packages/Crypto/Math/Primality.pyi create mode 100644 env/Lib/site-packages/Crypto/Math/_IntegerBase.py create mode 100644 env/Lib/site-packages/Crypto/Math/_IntegerBase.pyi create mode 100644 env/Lib/site-packages/Crypto/Math/_IntegerCustom.py create mode 100644 env/Lib/site-packages/Crypto/Math/_IntegerCustom.pyi create mode 100644 
env/Lib/site-packages/Crypto/Math/_IntegerGMP.py create mode 100644 env/Lib/site-packages/Crypto/Math/_IntegerGMP.pyi create mode 100644 env/Lib/site-packages/Crypto/Math/_IntegerNative.py create mode 100644 env/Lib/site-packages/Crypto/Math/_IntegerNative.pyi create mode 100644 env/Lib/site-packages/Crypto/Math/__init__.py create mode 100644 env/Lib/site-packages/Crypto/Math/_modexp.pyd create mode 100644 env/Lib/site-packages/Crypto/Protocol/KDF.py create mode 100644 env/Lib/site-packages/Crypto/Protocol/KDF.pyi create mode 100644 env/Lib/site-packages/Crypto/Protocol/SecretSharing.py create mode 100644 env/Lib/site-packages/Crypto/Protocol/SecretSharing.pyi create mode 100644 env/Lib/site-packages/Crypto/Protocol/__init__.py create mode 100644 env/Lib/site-packages/Crypto/Protocol/__init__.pyi create mode 100644 env/Lib/site-packages/Crypto/Protocol/_scrypt.pyd create mode 100644 env/Lib/site-packages/Crypto/PublicKey/DSA.py create mode 100644 env/Lib/site-packages/Crypto/PublicKey/DSA.pyi create mode 100644 env/Lib/site-packages/Crypto/PublicKey/ECC.py create mode 100644 env/Lib/site-packages/Crypto/PublicKey/ECC.pyi create mode 100644 env/Lib/site-packages/Crypto/PublicKey/ElGamal.py create mode 100644 env/Lib/site-packages/Crypto/PublicKey/ElGamal.pyi create mode 100644 env/Lib/site-packages/Crypto/PublicKey/RSA.py create mode 100644 env/Lib/site-packages/Crypto/PublicKey/RSA.pyi create mode 100644 env/Lib/site-packages/Crypto/PublicKey/__init__.py create mode 100644 env/Lib/site-packages/Crypto/PublicKey/__init__.pyi create mode 100644 env/Lib/site-packages/Crypto/PublicKey/_ec_ws.pyd create mode 100644 env/Lib/site-packages/Crypto/PublicKey/_openssh.py create mode 100644 env/Lib/site-packages/Crypto/PublicKey/_openssh.pyi create mode 100644 env/Lib/site-packages/Crypto/Random/__init__.py create mode 100644 env/Lib/site-packages/Crypto/Random/__init__.pyi create mode 100644 env/Lib/site-packages/Crypto/Random/random.py create mode 100644 
env/Lib/site-packages/Crypto/Random/random.pyi create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/__init__.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/common.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_AES.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_ARC2.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_ARC4.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_Blowfish.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CAST.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CBC.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CCM.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CFB.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CTR.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_ChaCha20.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_ChaCha20_Poly1305.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_DES.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_DES3.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_EAX.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_GCM.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_OCB.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_OFB.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_OpenPGP.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_SIV.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_Salsa20.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_15.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/__init__.py create 
mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/common.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_BLAKE2.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_CMAC.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_HMAC.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_MD2.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_MD4.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_MD5.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_Poly1305.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_RIPEMD160.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA1.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA224.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA256.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA384.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA3_224.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA3_256.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA3_384.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA3_512.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA512.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHAKE.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Hash/test_keccak.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/IO/__init__.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/IO/test_PBES.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/IO/test_PKCS8.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Math/__init__.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Math/test_Numbers.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Math/test_Primality.py create mode 100644 
env/Lib/site-packages/Crypto/SelfTest/Math/test_modexp.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Protocol/__init__.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Protocol/test_KDF.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Protocol/test_SecretSharing.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Protocol/test_rfc1751.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/PublicKey/__init__.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_DSA.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_ECC.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_ElGamal.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_RSA.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_import_DSA.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_import_ECC.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_import_RSA.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Random/__init__.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Random/test_random.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Signature/__init__.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Signature/test_dss.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Signature/test_pkcs1_15.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Signature/test_pss.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Util/__init__.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Util/test_Counter.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Util/test_Padding.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Util/test_asn1.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Util/test_number.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/Util/test_rfc1751.py create mode 100644 
env/Lib/site-packages/Crypto/SelfTest/Util/test_strxor.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/__init__.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/__main__.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/loader.py create mode 100644 env/Lib/site-packages/Crypto/SelfTest/st_common.py create mode 100644 env/Lib/site-packages/Crypto/Signature/DSS.py create mode 100644 env/Lib/site-packages/Crypto/Signature/DSS.pyi create mode 100644 env/Lib/site-packages/Crypto/Signature/PKCS1_PSS.py create mode 100644 env/Lib/site-packages/Crypto/Signature/PKCS1_PSS.pyi create mode 100644 env/Lib/site-packages/Crypto/Signature/PKCS1_v1_5.py create mode 100644 env/Lib/site-packages/Crypto/Signature/PKCS1_v1_5.pyi create mode 100644 env/Lib/site-packages/Crypto/Signature/__init__.py create mode 100644 env/Lib/site-packages/Crypto/Signature/pkcs1_15.py create mode 100644 env/Lib/site-packages/Crypto/Signature/pkcs1_15.pyi create mode 100644 env/Lib/site-packages/Crypto/Signature/pss.py create mode 100644 env/Lib/site-packages/Crypto/Signature/pss.pyi create mode 100644 env/Lib/site-packages/Crypto/Util/Counter.py create mode 100644 env/Lib/site-packages/Crypto/Util/Counter.pyi create mode 100644 env/Lib/site-packages/Crypto/Util/Padding.py create mode 100644 env/Lib/site-packages/Crypto/Util/Padding.pyi create mode 100644 env/Lib/site-packages/Crypto/Util/RFC1751.py create mode 100644 env/Lib/site-packages/Crypto/Util/RFC1751.pyi create mode 100644 env/Lib/site-packages/Crypto/Util/__init__.py create mode 100644 env/Lib/site-packages/Crypto/Util/_cpu_features.py create mode 100644 env/Lib/site-packages/Crypto/Util/_cpu_features.pyi create mode 100644 env/Lib/site-packages/Crypto/Util/_cpuid_c.pyd create mode 100644 env/Lib/site-packages/Crypto/Util/_file_system.py create mode 100644 env/Lib/site-packages/Crypto/Util/_file_system.pyi create mode 100644 env/Lib/site-packages/Crypto/Util/_raw_api.py create mode 100644 
env/Lib/site-packages/Crypto/Util/_raw_api.pyi create mode 100644 env/Lib/site-packages/Crypto/Util/_strxor.pyd create mode 100644 env/Lib/site-packages/Crypto/Util/asn1.py create mode 100644 env/Lib/site-packages/Crypto/Util/asn1.pyi create mode 100644 env/Lib/site-packages/Crypto/Util/number.py create mode 100644 env/Lib/site-packages/Crypto/Util/number.pyi create mode 100644 env/Lib/site-packages/Crypto/Util/py3compat.py create mode 100644 env/Lib/site-packages/Crypto/Util/py3compat.pyi create mode 100644 env/Lib/site-packages/Crypto/Util/strxor.py create mode 100644 env/Lib/site-packages/Crypto/Util/strxor.pyi create mode 100644 env/Lib/site-packages/Crypto/__init__.py create mode 100644 env/Lib/site-packages/Crypto/__init__.pyi create mode 100644 env/Lib/site-packages/Crypto/py.typed create mode 100644 env/Lib/site-packages/certifi-2021.5.30.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/certifi-2021.5.30.dist-info/LICENSE create mode 100644 env/Lib/site-packages/certifi-2021.5.30.dist-info/METADATA create mode 100644 env/Lib/site-packages/certifi-2021.5.30.dist-info/RECORD create mode 100644 env/Lib/site-packages/certifi-2021.5.30.dist-info/WHEEL create mode 100644 env/Lib/site-packages/certifi-2021.5.30.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/certifi/__init__.py create mode 100644 env/Lib/site-packages/certifi/__main__.py create mode 100644 env/Lib/site-packages/certifi/cacert.pem create mode 100644 env/Lib/site-packages/certifi/core.py create mode 100644 env/Lib/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst create mode 100644 env/Lib/site-packages/chardet-3.0.4.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/chardet-3.0.4.dist-info/METADATA create mode 100644 env/Lib/site-packages/chardet-3.0.4.dist-info/RECORD create mode 100644 env/Lib/site-packages/chardet-3.0.4.dist-info/WHEEL create mode 100644 env/Lib/site-packages/chardet-3.0.4.dist-info/entry_points.txt create mode 100644 
env/Lib/site-packages/chardet-3.0.4.dist-info/metadata.json create mode 100644 env/Lib/site-packages/chardet-3.0.4.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/chardet/__init__.py create mode 100644 env/Lib/site-packages/chardet/big5freq.py create mode 100644 env/Lib/site-packages/chardet/big5prober.py create mode 100644 env/Lib/site-packages/chardet/chardistribution.py create mode 100644 env/Lib/site-packages/chardet/charsetgroupprober.py create mode 100644 env/Lib/site-packages/chardet/charsetprober.py create mode 100644 env/Lib/site-packages/chardet/cli/__init__.py create mode 100644 env/Lib/site-packages/chardet/cli/chardetect.py create mode 100644 env/Lib/site-packages/chardet/codingstatemachine.py create mode 100644 env/Lib/site-packages/chardet/compat.py create mode 100644 env/Lib/site-packages/chardet/cp949prober.py create mode 100644 env/Lib/site-packages/chardet/enums.py create mode 100644 env/Lib/site-packages/chardet/escprober.py create mode 100644 env/Lib/site-packages/chardet/escsm.py create mode 100644 env/Lib/site-packages/chardet/eucjpprober.py create mode 100644 env/Lib/site-packages/chardet/euckrfreq.py create mode 100644 env/Lib/site-packages/chardet/euckrprober.py create mode 100644 env/Lib/site-packages/chardet/euctwfreq.py create mode 100644 env/Lib/site-packages/chardet/euctwprober.py create mode 100644 env/Lib/site-packages/chardet/gb2312freq.py create mode 100644 env/Lib/site-packages/chardet/gb2312prober.py create mode 100644 env/Lib/site-packages/chardet/hebrewprober.py create mode 100644 env/Lib/site-packages/chardet/jisfreq.py create mode 100644 env/Lib/site-packages/chardet/jpcntx.py create mode 100644 env/Lib/site-packages/chardet/langbulgarianmodel.py create mode 100644 env/Lib/site-packages/chardet/langcyrillicmodel.py create mode 100644 env/Lib/site-packages/chardet/langgreekmodel.py create mode 100644 env/Lib/site-packages/chardet/langhebrewmodel.py create mode 100644 
env/Lib/site-packages/chardet/langhungarianmodel.py create mode 100644 env/Lib/site-packages/chardet/langthaimodel.py create mode 100644 env/Lib/site-packages/chardet/langturkishmodel.py create mode 100644 env/Lib/site-packages/chardet/latin1prober.py create mode 100644 env/Lib/site-packages/chardet/mbcharsetprober.py create mode 100644 env/Lib/site-packages/chardet/mbcsgroupprober.py create mode 100644 env/Lib/site-packages/chardet/mbcssm.py create mode 100644 env/Lib/site-packages/chardet/sbcharsetprober.py create mode 100644 env/Lib/site-packages/chardet/sbcsgroupprober.py create mode 100644 env/Lib/site-packages/chardet/sjisprober.py create mode 100644 env/Lib/site-packages/chardet/universaldetector.py create mode 100644 env/Lib/site-packages/chardet/utf8prober.py create mode 100644 env/Lib/site-packages/chardet/version.py create mode 100644 env/Lib/site-packages/easy_install.py create mode 100644 env/Lib/site-packages/gcloud-0.17.0.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/gcloud-0.17.0.dist-info/METADATA create mode 100644 env/Lib/site-packages/gcloud-0.17.0.dist-info/RECORD create mode 100644 env/Lib/site-packages/gcloud-0.17.0.dist-info/REQUESTED create mode 100644 env/Lib/site-packages/gcloud-0.17.0.dist-info/WHEEL create mode 100644 env/Lib/site-packages/gcloud-0.17.0.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/gcloud/__init__.py create mode 100644 env/Lib/site-packages/gcloud/_helpers.py create mode 100644 env/Lib/site-packages/gcloud/_testing.py create mode 100644 env/Lib/site-packages/gcloud/bigquery/__init__.py create mode 100644 env/Lib/site-packages/gcloud/bigquery/_helpers.py create mode 100644 env/Lib/site-packages/gcloud/bigquery/client.py create mode 100644 env/Lib/site-packages/gcloud/bigquery/connection.py create mode 100644 env/Lib/site-packages/gcloud/bigquery/dataset.py create mode 100644 env/Lib/site-packages/gcloud/bigquery/job.py create mode 100644 env/Lib/site-packages/gcloud/bigquery/query.py 
create mode 100644 env/Lib/site-packages/gcloud/bigquery/table.py create mode 100644 env/Lib/site-packages/gcloud/bigquery/test__helpers.py create mode 100644 env/Lib/site-packages/gcloud/bigquery/test_client.py create mode 100644 env/Lib/site-packages/gcloud/bigquery/test_connection.py create mode 100644 env/Lib/site-packages/gcloud/bigquery/test_dataset.py create mode 100644 env/Lib/site-packages/gcloud/bigquery/test_job.py create mode 100644 env/Lib/site-packages/gcloud/bigquery/test_query.py create mode 100644 env/Lib/site-packages/gcloud/bigquery/test_table.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/__init__.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/__init__.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_cluster_data.proto create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_cluster_service.proto create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_cluster_service_messages.proto create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_data.proto create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_service.proto create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_service_messages.proto create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_table_data.proto create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_table_service.proto create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_table_service_messages.proto create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/_operations.proto create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_cluster_data_pb2.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_cluster_service_messages_pb2.py create mode 100644 
env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_cluster_service_pb2.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_data_pb2.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_service_messages_pb2.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_service_pb2.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_table_data_pb2.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_table_service_messages_pb2.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_table_service_pb2.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated/operations_grpc_pb2.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated_v2/__init__.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated_v2/_bigtable.proto create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated_v2/_bigtable_instance_admin.proto create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated_v2/_bigtable_table_admin.proto create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated_v2/_common.proto create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated_v2/_data.proto create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated_v2/_instance.proto create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated_v2/_operations.proto create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated_v2/_table.proto create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated_v2/bigtable_instance_admin_pb2.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated_v2/bigtable_pb2.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated_v2/bigtable_table_admin_pb2.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated_v2/common_pb2.py create mode 100644 
env/Lib/site-packages/gcloud/bigtable/_generated_v2/data_pb2.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated_v2/instance_pb2.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated_v2/operations_grpc_pb2.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_generated_v2/table_pb2.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/_testing.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/client.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/cluster.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/column_family.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/happybase/__init__.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/happybase/batch.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/happybase/connection.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/happybase/pool.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/happybase/table.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/happybase/test_batch.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/happybase/test_connection.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/happybase/test_pool.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/happybase/test_table.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/instance.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/read-rows-acceptance-test.json create mode 100644 env/Lib/site-packages/gcloud/bigtable/row.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/row_data.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/row_filters.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/table.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/test_client.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/test_cluster.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/test_column_family.py create mode 100644 
env/Lib/site-packages/gcloud/bigtable/test_instance.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/test_row.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/test_row_data.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/test_row_filters.py create mode 100644 env/Lib/site-packages/gcloud/bigtable/test_table.py create mode 100644 env/Lib/site-packages/gcloud/client.py create mode 100644 env/Lib/site-packages/gcloud/connection.py create mode 100644 env/Lib/site-packages/gcloud/credentials.py create mode 100644 env/Lib/site-packages/gcloud/datastore/__init__.py create mode 100644 env/Lib/site-packages/gcloud/datastore/_generated/__init__.py create mode 100644 env/Lib/site-packages/gcloud/datastore/_generated/_datastore.proto create mode 100644 env/Lib/site-packages/gcloud/datastore/_generated/_entity.proto create mode 100644 env/Lib/site-packages/gcloud/datastore/_generated/_query.proto create mode 100644 env/Lib/site-packages/gcloud/datastore/_generated/datastore_grpc_pb2.py create mode 100644 env/Lib/site-packages/gcloud/datastore/_generated/datastore_pb2.py create mode 100644 env/Lib/site-packages/gcloud/datastore/_generated/entity_pb2.py create mode 100644 env/Lib/site-packages/gcloud/datastore/_generated/query_pb2.py create mode 100644 env/Lib/site-packages/gcloud/datastore/batch.py create mode 100644 env/Lib/site-packages/gcloud/datastore/client.py create mode 100644 env/Lib/site-packages/gcloud/datastore/connection.py create mode 100644 env/Lib/site-packages/gcloud/datastore/entity.py create mode 100644 env/Lib/site-packages/gcloud/datastore/helpers.py create mode 100644 env/Lib/site-packages/gcloud/datastore/key.py create mode 100644 env/Lib/site-packages/gcloud/datastore/query.py create mode 100644 env/Lib/site-packages/gcloud/datastore/test_batch.py create mode 100644 env/Lib/site-packages/gcloud/datastore/test_client.py create mode 100644 env/Lib/site-packages/gcloud/datastore/test_connection.py create mode 100644 
env/Lib/site-packages/gcloud/datastore/test_entity.py create mode 100644 env/Lib/site-packages/gcloud/datastore/test_helpers.py create mode 100644 env/Lib/site-packages/gcloud/datastore/test_key.py create mode 100644 env/Lib/site-packages/gcloud/datastore/test_query.py create mode 100644 env/Lib/site-packages/gcloud/datastore/test_transaction.py create mode 100644 env/Lib/site-packages/gcloud/datastore/transaction.py create mode 100644 env/Lib/site-packages/gcloud/dns/__init__.py create mode 100644 env/Lib/site-packages/gcloud/dns/changes.py create mode 100644 env/Lib/site-packages/gcloud/dns/client.py create mode 100644 env/Lib/site-packages/gcloud/dns/connection.py create mode 100644 env/Lib/site-packages/gcloud/dns/resource_record_set.py create mode 100644 env/Lib/site-packages/gcloud/dns/test_changes.py create mode 100644 env/Lib/site-packages/gcloud/dns/test_client.py create mode 100644 env/Lib/site-packages/gcloud/dns/test_connection.py create mode 100644 env/Lib/site-packages/gcloud/dns/test_resource_record_set.py create mode 100644 env/Lib/site-packages/gcloud/dns/test_zone.py create mode 100644 env/Lib/site-packages/gcloud/dns/zone.py create mode 100644 env/Lib/site-packages/gcloud/environment_vars.py create mode 100644 env/Lib/site-packages/gcloud/exceptions.py create mode 100644 env/Lib/site-packages/gcloud/iterator.py create mode 100644 env/Lib/site-packages/gcloud/logging/__init__.py create mode 100644 env/Lib/site-packages/gcloud/logging/_gax.py create mode 100644 env/Lib/site-packages/gcloud/logging/client.py create mode 100644 env/Lib/site-packages/gcloud/logging/connection.py create mode 100644 env/Lib/site-packages/gcloud/logging/entries.py create mode 100644 env/Lib/site-packages/gcloud/logging/logger.py create mode 100644 env/Lib/site-packages/gcloud/logging/metric.py create mode 100644 env/Lib/site-packages/gcloud/logging/sink.py create mode 100644 env/Lib/site-packages/gcloud/logging/test__gax.py create mode 100644 
env/Lib/site-packages/gcloud/logging/test_client.py create mode 100644 env/Lib/site-packages/gcloud/logging/test_connection.py create mode 100644 env/Lib/site-packages/gcloud/logging/test_entries.py create mode 100644 env/Lib/site-packages/gcloud/logging/test_logger.py create mode 100644 env/Lib/site-packages/gcloud/logging/test_metric.py create mode 100644 env/Lib/site-packages/gcloud/logging/test_sink.py create mode 100644 env/Lib/site-packages/gcloud/monitoring/__init__.py create mode 100644 env/Lib/site-packages/gcloud/monitoring/_dataframe.py create mode 100644 env/Lib/site-packages/gcloud/monitoring/client.py create mode 100644 env/Lib/site-packages/gcloud/monitoring/connection.py create mode 100644 env/Lib/site-packages/gcloud/monitoring/label.py create mode 100644 env/Lib/site-packages/gcloud/monitoring/metric.py create mode 100644 env/Lib/site-packages/gcloud/monitoring/query.py create mode 100644 env/Lib/site-packages/gcloud/monitoring/resource.py create mode 100644 env/Lib/site-packages/gcloud/monitoring/test__dataframe.py create mode 100644 env/Lib/site-packages/gcloud/monitoring/test_client.py create mode 100644 env/Lib/site-packages/gcloud/monitoring/test_connection.py create mode 100644 env/Lib/site-packages/gcloud/monitoring/test_label.py create mode 100644 env/Lib/site-packages/gcloud/monitoring/test_metric.py create mode 100644 env/Lib/site-packages/gcloud/monitoring/test_query.py create mode 100644 env/Lib/site-packages/gcloud/monitoring/test_resource.py create mode 100644 env/Lib/site-packages/gcloud/monitoring/test_timeseries.py create mode 100644 env/Lib/site-packages/gcloud/monitoring/timeseries.py create mode 100644 env/Lib/site-packages/gcloud/pubsub/__init__.py create mode 100644 env/Lib/site-packages/gcloud/pubsub/_gax.py create mode 100644 env/Lib/site-packages/gcloud/pubsub/_helpers.py create mode 100644 env/Lib/site-packages/gcloud/pubsub/client.py create mode 100644 env/Lib/site-packages/gcloud/pubsub/connection.py create mode 100644 
env/Lib/site-packages/gcloud/pubsub/iam.py create mode 100644 env/Lib/site-packages/gcloud/pubsub/message.py create mode 100644 env/Lib/site-packages/gcloud/pubsub/subscription.py create mode 100644 env/Lib/site-packages/gcloud/pubsub/test__gax.py create mode 100644 env/Lib/site-packages/gcloud/pubsub/test__helpers.py create mode 100644 env/Lib/site-packages/gcloud/pubsub/test_client.py create mode 100644 env/Lib/site-packages/gcloud/pubsub/test_connection.py create mode 100644 env/Lib/site-packages/gcloud/pubsub/test_iam.py create mode 100644 env/Lib/site-packages/gcloud/pubsub/test_message.py create mode 100644 env/Lib/site-packages/gcloud/pubsub/test_subscription.py create mode 100644 env/Lib/site-packages/gcloud/pubsub/test_topic.py create mode 100644 env/Lib/site-packages/gcloud/pubsub/topic.py create mode 100644 env/Lib/site-packages/gcloud/resource_manager/__init__.py create mode 100644 env/Lib/site-packages/gcloud/resource_manager/client.py create mode 100644 env/Lib/site-packages/gcloud/resource_manager/connection.py create mode 100644 env/Lib/site-packages/gcloud/resource_manager/project.py create mode 100644 env/Lib/site-packages/gcloud/resource_manager/test_client.py create mode 100644 env/Lib/site-packages/gcloud/resource_manager/test_connection.py create mode 100644 env/Lib/site-packages/gcloud/resource_manager/test_project.py create mode 100644 env/Lib/site-packages/gcloud/storage/__init__.py create mode 100644 env/Lib/site-packages/gcloud/storage/_helpers.py create mode 100644 env/Lib/site-packages/gcloud/storage/acl.py create mode 100644 env/Lib/site-packages/gcloud/storage/batch.py create mode 100644 env/Lib/site-packages/gcloud/storage/blob.py create mode 100644 env/Lib/site-packages/gcloud/storage/bucket.py create mode 100644 env/Lib/site-packages/gcloud/storage/client.py create mode 100644 env/Lib/site-packages/gcloud/storage/connection.py create mode 100644 env/Lib/site-packages/gcloud/storage/test__helpers.py create mode 100644 
env/Lib/site-packages/gcloud/storage/test_acl.py create mode 100644 env/Lib/site-packages/gcloud/storage/test_batch.py create mode 100644 env/Lib/site-packages/gcloud/storage/test_blob.py create mode 100644 env/Lib/site-packages/gcloud/storage/test_bucket.py create mode 100644 env/Lib/site-packages/gcloud/storage/test_client.py create mode 100644 env/Lib/site-packages/gcloud/storage/test_connection.py create mode 100644 env/Lib/site-packages/gcloud/streaming/__init__.py create mode 100644 env/Lib/site-packages/gcloud/streaming/buffered_stream.py create mode 100644 env/Lib/site-packages/gcloud/streaming/exceptions.py create mode 100644 env/Lib/site-packages/gcloud/streaming/http_wrapper.py create mode 100644 env/Lib/site-packages/gcloud/streaming/stream_slice.py create mode 100644 env/Lib/site-packages/gcloud/streaming/test_buffered_stream.py create mode 100644 env/Lib/site-packages/gcloud/streaming/test_exceptions.py create mode 100644 env/Lib/site-packages/gcloud/streaming/test_http_wrapper.py create mode 100644 env/Lib/site-packages/gcloud/streaming/test_stream_slice.py create mode 100644 env/Lib/site-packages/gcloud/streaming/test_transfer.py create mode 100644 env/Lib/site-packages/gcloud/streaming/test_util.py create mode 100644 env/Lib/site-packages/gcloud/streaming/transfer.py create mode 100644 env/Lib/site-packages/gcloud/streaming/util.py create mode 100644 env/Lib/site-packages/gcloud/test__helpers.py create mode 100644 env/Lib/site-packages/gcloud/test_client.py create mode 100644 env/Lib/site-packages/gcloud/test_connection.py create mode 100644 env/Lib/site-packages/gcloud/test_credentials.py create mode 100644 env/Lib/site-packages/gcloud/test_exceptions.py create mode 100644 env/Lib/site-packages/gcloud/test_iterator.py create mode 100644 env/Lib/site-packages/gcloud/translate/__init__.py create mode 100644 env/Lib/site-packages/gcloud/translate/client.py create mode 100644 env/Lib/site-packages/gcloud/translate/connection.py create mode 100644 
env/Lib/site-packages/gcloud/translate/test_client.py create mode 100644 env/Lib/site-packages/gcloud/translate/test_connection.py create mode 100644 env/Lib/site-packages/google/api/__init__.py create mode 100644 env/Lib/site-packages/google/api/annotations.proto create mode 100644 env/Lib/site-packages/google/api/annotations_pb2.py create mode 100644 env/Lib/site-packages/google/api/auth.proto create mode 100644 env/Lib/site-packages/google/api/auth_pb2.py create mode 100644 env/Lib/site-packages/google/api/backend.proto create mode 100644 env/Lib/site-packages/google/api/backend_pb2.py create mode 100644 env/Lib/site-packages/google/api/billing.proto create mode 100644 env/Lib/site-packages/google/api/billing_pb2.py create mode 100644 env/Lib/site-packages/google/api/client.proto create mode 100644 env/Lib/site-packages/google/api/client_pb2.py create mode 100644 env/Lib/site-packages/google/api/config_change.proto create mode 100644 env/Lib/site-packages/google/api/config_change_pb2.py create mode 100644 env/Lib/site-packages/google/api/consumer.proto create mode 100644 env/Lib/site-packages/google/api/consumer_pb2.py create mode 100644 env/Lib/site-packages/google/api/context.proto create mode 100644 env/Lib/site-packages/google/api/context_pb2.py create mode 100644 env/Lib/site-packages/google/api/control.proto create mode 100644 env/Lib/site-packages/google/api/control_pb2.py create mode 100644 env/Lib/site-packages/google/api/distribution.proto create mode 100644 env/Lib/site-packages/google/api/distribution_pb2.py create mode 100644 env/Lib/site-packages/google/api/documentation.proto create mode 100644 env/Lib/site-packages/google/api/documentation_pb2.py create mode 100644 env/Lib/site-packages/google/api/endpoint.proto create mode 100644 env/Lib/site-packages/google/api/endpoint_pb2.py create mode 100644 env/Lib/site-packages/google/api/field_behavior.proto create mode 100644 env/Lib/site-packages/google/api/field_behavior_pb2.py create mode 100644 
env/Lib/site-packages/google/api/http.proto create mode 100644 env/Lib/site-packages/google/api/http_pb2.py create mode 100644 env/Lib/site-packages/google/api/httpbody.proto create mode 100644 env/Lib/site-packages/google/api/httpbody_pb2.py create mode 100644 env/Lib/site-packages/google/api/label.proto create mode 100644 env/Lib/site-packages/google/api/label_pb2.py create mode 100644 env/Lib/site-packages/google/api/launch_stage.proto create mode 100644 env/Lib/site-packages/google/api/launch_stage_pb2.py create mode 100644 env/Lib/site-packages/google/api/log.proto create mode 100644 env/Lib/site-packages/google/api/log_pb2.py create mode 100644 env/Lib/site-packages/google/api/logging.proto create mode 100644 env/Lib/site-packages/google/api/logging_pb2.py create mode 100644 env/Lib/site-packages/google/api/metric.proto create mode 100644 env/Lib/site-packages/google/api/metric_pb2.py create mode 100644 env/Lib/site-packages/google/api/monitored_resource.proto create mode 100644 env/Lib/site-packages/google/api/monitored_resource_pb2.py create mode 100644 env/Lib/site-packages/google/api/monitoring.proto create mode 100644 env/Lib/site-packages/google/api/monitoring_pb2.py create mode 100644 env/Lib/site-packages/google/api/quota.proto create mode 100644 env/Lib/site-packages/google/api/quota_pb2.py create mode 100644 env/Lib/site-packages/google/api/resource.proto create mode 100644 env/Lib/site-packages/google/api/resource_pb2.py create mode 100644 env/Lib/site-packages/google/api/service.proto create mode 100644 env/Lib/site-packages/google/api/service_pb2.py create mode 100644 env/Lib/site-packages/google/api/source_info.proto create mode 100644 env/Lib/site-packages/google/api/source_info_pb2.py create mode 100644 env/Lib/site-packages/google/api/system_parameter.proto create mode 100644 env/Lib/site-packages/google/api/system_parameter_pb2.py create mode 100644 env/Lib/site-packages/google/api/usage.proto create mode 100644 
env/Lib/site-packages/google/api/usage_pb2.py create mode 100644 env/Lib/site-packages/google/gapic/metadata/__init__.py create mode 100644 env/Lib/site-packages/google/gapic/metadata/gapic_metadata.proto create mode 100644 env/Lib/site-packages/google/gapic/metadata/gapic_metadata_pb2.py create mode 100644 env/Lib/site-packages/google/logging/type/__init__.py create mode 100644 env/Lib/site-packages/google/logging/type/http_request.proto create mode 100644 env/Lib/site-packages/google/logging/type/http_request_pb2.py create mode 100644 env/Lib/site-packages/google/logging/type/log_severity.proto create mode 100644 env/Lib/site-packages/google/logging/type/log_severity_pb2.py create mode 100644 env/Lib/site-packages/google/longrunning/__init__.py create mode 100644 env/Lib/site-packages/google/longrunning/operations.proto create mode 100644 env/Lib/site-packages/google/longrunning/operations_grpc.py create mode 100644 env/Lib/site-packages/google/longrunning/operations_grpc_pb2.py create mode 100644 env/Lib/site-packages/google/longrunning/operations_pb2.py create mode 100644 env/Lib/site-packages/google/longrunning/operations_pb2_grpc.py create mode 100644 env/Lib/site-packages/google/longrunning/operations_proto.py create mode 100644 env/Lib/site-packages/google/longrunning/operations_proto_pb2.py create mode 100644 env/Lib/site-packages/google/protobuf/__init__.py create mode 100644 env/Lib/site-packages/google/protobuf/any_pb2.py create mode 100644 env/Lib/site-packages/google/protobuf/api_pb2.py create mode 100644 env/Lib/site-packages/google/protobuf/compiler/__init__.py create mode 100644 env/Lib/site-packages/google/protobuf/compiler/plugin_pb2.py create mode 100644 env/Lib/site-packages/google/protobuf/descriptor.py create mode 100644 env/Lib/site-packages/google/protobuf/descriptor_database.py create mode 100644 env/Lib/site-packages/google/protobuf/descriptor_pb2.py create mode 100644 env/Lib/site-packages/google/protobuf/descriptor_pool.py create mode 
100644 env/Lib/site-packages/google/protobuf/duration_pb2.py create mode 100644 env/Lib/site-packages/google/protobuf/empty_pb2.py create mode 100644 env/Lib/site-packages/google/protobuf/field_mask_pb2.py create mode 100644 env/Lib/site-packages/google/protobuf/internal/__init__.py create mode 100644 env/Lib/site-packages/google/protobuf/internal/api_implementation.py create mode 100644 env/Lib/site-packages/google/protobuf/internal/containers.py create mode 100644 env/Lib/site-packages/google/protobuf/internal/decoder.py create mode 100644 env/Lib/site-packages/google/protobuf/internal/encoder.py create mode 100644 env/Lib/site-packages/google/protobuf/internal/enum_type_wrapper.py create mode 100644 env/Lib/site-packages/google/protobuf/internal/extension_dict.py create mode 100644 env/Lib/site-packages/google/protobuf/internal/message_listener.py create mode 100644 env/Lib/site-packages/google/protobuf/internal/python_message.py create mode 100644 env/Lib/site-packages/google/protobuf/internal/type_checkers.py create mode 100644 env/Lib/site-packages/google/protobuf/internal/well_known_types.py create mode 100644 env/Lib/site-packages/google/protobuf/internal/wire_format.py create mode 100644 env/Lib/site-packages/google/protobuf/json_format.py create mode 100644 env/Lib/site-packages/google/protobuf/message.py create mode 100644 env/Lib/site-packages/google/protobuf/message_factory.py create mode 100644 env/Lib/site-packages/google/protobuf/proto_builder.py create mode 100644 env/Lib/site-packages/google/protobuf/pyext/__init__.py create mode 100644 env/Lib/site-packages/google/protobuf/pyext/cpp_message.py create mode 100644 env/Lib/site-packages/google/protobuf/reflection.py create mode 100644 env/Lib/site-packages/google/protobuf/service.py create mode 100644 env/Lib/site-packages/google/protobuf/service_reflection.py create mode 100644 env/Lib/site-packages/google/protobuf/source_context_pb2.py create mode 100644 
env/Lib/site-packages/google/protobuf/struct_pb2.py create mode 100644 env/Lib/site-packages/google/protobuf/symbol_database.py create mode 100644 env/Lib/site-packages/google/protobuf/text_encoding.py create mode 100644 env/Lib/site-packages/google/protobuf/text_format.py create mode 100644 env/Lib/site-packages/google/protobuf/timestamp_pb2.py create mode 100644 env/Lib/site-packages/google/protobuf/type_pb2.py create mode 100644 env/Lib/site-packages/google/protobuf/util/__init__.py create mode 100644 env/Lib/site-packages/google/protobuf/util/json_format_pb2.py create mode 100644 env/Lib/site-packages/google/protobuf/util/json_format_proto3_pb2.py create mode 100644 env/Lib/site-packages/google/protobuf/wrappers_pb2.py create mode 100644 env/Lib/site-packages/google/rpc/__init__.py create mode 100644 env/Lib/site-packages/google/rpc/code.proto create mode 100644 env/Lib/site-packages/google/rpc/code_pb2.py create mode 100644 env/Lib/site-packages/google/rpc/context/__init__.py create mode 100644 env/Lib/site-packages/google/rpc/context/attribute_context.proto create mode 100644 env/Lib/site-packages/google/rpc/context/attribute_context_pb2.py create mode 100644 env/Lib/site-packages/google/rpc/error_details.proto create mode 100644 env/Lib/site-packages/google/rpc/error_details_pb2.py create mode 100644 env/Lib/site-packages/google/rpc/status.proto create mode 100644 env/Lib/site-packages/google/rpc/status_pb2.py create mode 100644 env/Lib/site-packages/google/type/__init__.py create mode 100644 env/Lib/site-packages/google/type/calendar_period.proto create mode 100644 env/Lib/site-packages/google/type/calendar_period_pb2.py create mode 100644 env/Lib/site-packages/google/type/color.proto create mode 100644 env/Lib/site-packages/google/type/color_pb2.py create mode 100644 env/Lib/site-packages/google/type/date.proto create mode 100644 env/Lib/site-packages/google/type/date_pb2.py create mode 100644 env/Lib/site-packages/google/type/datetime.proto create mode 
100644 env/Lib/site-packages/google/type/datetime_pb2.py create mode 100644 env/Lib/site-packages/google/type/dayofweek.proto create mode 100644 env/Lib/site-packages/google/type/dayofweek_pb2.py create mode 100644 env/Lib/site-packages/google/type/expr.proto create mode 100644 env/Lib/site-packages/google/type/expr_pb2.py create mode 100644 env/Lib/site-packages/google/type/fraction.proto create mode 100644 env/Lib/site-packages/google/type/fraction_pb2.py create mode 100644 env/Lib/site-packages/google/type/latlng.proto create mode 100644 env/Lib/site-packages/google/type/latlng_pb2.py create mode 100644 env/Lib/site-packages/google/type/money.proto create mode 100644 env/Lib/site-packages/google/type/money_pb2.py create mode 100644 env/Lib/site-packages/google/type/month.proto create mode 100644 env/Lib/site-packages/google/type/month_pb2.py create mode 100644 env/Lib/site-packages/google/type/postal_address.proto create mode 100644 env/Lib/site-packages/google/type/postal_address_pb2.py create mode 100644 env/Lib/site-packages/google/type/quaternion.proto create mode 100644 env/Lib/site-packages/google/type/quaternion_pb2.py create mode 100644 env/Lib/site-packages/google/type/timeofday.proto create mode 100644 env/Lib/site-packages/google/type/timeofday_pb2.py create mode 100644 env/Lib/site-packages/googleapis_common_protos-1.53.0-py3.9-nspkg.pth create mode 100644 env/Lib/site-packages/googleapis_common_protos-1.53.0.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/googleapis_common_protos-1.53.0.dist-info/LICENSE create mode 100644 env/Lib/site-packages/googleapis_common_protos-1.53.0.dist-info/METADATA create mode 100644 env/Lib/site-packages/googleapis_common_protos-1.53.0.dist-info/RECORD create mode 100644 env/Lib/site-packages/googleapis_common_protos-1.53.0.dist-info/WHEEL create mode 100644 env/Lib/site-packages/googleapis_common_protos-1.53.0.dist-info/namespace_packages.txt create mode 100644 
env/Lib/site-packages/googleapis_common_protos-1.53.0.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/httplib2-0.19.1.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/httplib2-0.19.1.dist-info/LICENSE create mode 100644 env/Lib/site-packages/httplib2-0.19.1.dist-info/METADATA create mode 100644 env/Lib/site-packages/httplib2-0.19.1.dist-info/RECORD create mode 100644 env/Lib/site-packages/httplib2-0.19.1.dist-info/WHEEL create mode 100644 env/Lib/site-packages/httplib2-0.19.1.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/httplib2/__init__.py create mode 100644 env/Lib/site-packages/httplib2/auth.py create mode 100644 env/Lib/site-packages/httplib2/cacerts.txt create mode 100644 env/Lib/site-packages/httplib2/certs.py create mode 100644 env/Lib/site-packages/httplib2/error.py create mode 100644 env/Lib/site-packages/httplib2/iri2uri.py create mode 100644 env/Lib/site-packages/httplib2/socks.py create mode 100644 env/Lib/site-packages/idna-2.7.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/idna-2.7.dist-info/LICENSE.txt create mode 100644 env/Lib/site-packages/idna-2.7.dist-info/METADATA create mode 100644 env/Lib/site-packages/idna-2.7.dist-info/RECORD create mode 100644 env/Lib/site-packages/idna-2.7.dist-info/WHEEL create mode 100644 env/Lib/site-packages/idna-2.7.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/idna/__init__.py create mode 100644 env/Lib/site-packages/idna/codec.py create mode 100644 env/Lib/site-packages/idna/compat.py create mode 100644 env/Lib/site-packages/idna/core.py create mode 100644 env/Lib/site-packages/idna/idnadata.py create mode 100644 env/Lib/site-packages/idna/intranges.py create mode 100644 env/Lib/site-packages/idna/package_data.py create mode 100644 env/Lib/site-packages/idna/uts46data.py create mode 100644 env/Lib/site-packages/jws-0.1.3.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/jws-0.1.3.dist-info/METADATA create mode 100644 
env/Lib/site-packages/jws-0.1.3.dist-info/RECORD create mode 100644 env/Lib/site-packages/jws-0.1.3.dist-info/WHEEL create mode 100644 env/Lib/site-packages/jws-0.1.3.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/jws/__init__.py create mode 100644 env/Lib/site-packages/jws/algos.py create mode 100644 env/Lib/site-packages/jws/exceptions.py create mode 100644 env/Lib/site-packages/jws/header.py create mode 100644 env/Lib/site-packages/jws/tests.py create mode 100644 env/Lib/site-packages/jws/utils.py create mode 100644 env/Lib/site-packages/oauth2client-3.0.0.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/oauth2client-3.0.0.dist-info/METADATA create mode 100644 env/Lib/site-packages/oauth2client-3.0.0.dist-info/RECORD create mode 100644 env/Lib/site-packages/oauth2client-3.0.0.dist-info/REQUESTED create mode 100644 env/Lib/site-packages/oauth2client-3.0.0.dist-info/WHEEL create mode 100644 env/Lib/site-packages/oauth2client-3.0.0.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/oauth2client/__init__.py create mode 100644 env/Lib/site-packages/oauth2client/_helpers.py create mode 100644 env/Lib/site-packages/oauth2client/_openssl_crypt.py create mode 100644 env/Lib/site-packages/oauth2client/_pure_python_crypt.py create mode 100644 env/Lib/site-packages/oauth2client/_pycrypto_crypt.py create mode 100644 env/Lib/site-packages/oauth2client/client.py create mode 100644 env/Lib/site-packages/oauth2client/clientsecrets.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/__init__.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/_appengine_ndb.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/_fcntl_opener.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/_metadata.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/_win32_opener.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/appengine.py create mode 100644 
env/Lib/site-packages/oauth2client/contrib/devshell.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/dictionary_storage.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/django_util/__init__.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/django_util/apps.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/django_util/decorators.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/django_util/models.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/django_util/signals.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/django_util/site.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/django_util/storage.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/django_util/views.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/flask_util.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/gce.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/keyring_storage.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/locked_file.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/multiprocess_file_storage.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/multistore_file.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/sqlalchemy.py create mode 100644 env/Lib/site-packages/oauth2client/contrib/xsrfutil.py create mode 100644 env/Lib/site-packages/oauth2client/crypt.py create mode 100644 env/Lib/site-packages/oauth2client/file.py create mode 100644 env/Lib/site-packages/oauth2client/service_account.py create mode 100644 env/Lib/site-packages/oauth2client/tools.py create mode 100644 env/Lib/site-packages/oauth2client/transport.py create mode 100644 env/Lib/site-packages/oauth2client/util.py create mode 100644 env/Lib/site-packages/pip-20.2.3.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/pip-20.2.3.dist-info/LICENSE.txt create mode 100644 
env/Lib/site-packages/pip-20.2.3.dist-info/METADATA create mode 100644 env/Lib/site-packages/pip-20.2.3.dist-info/RECORD create mode 100644 env/Lib/site-packages/pip-20.2.3.dist-info/REQUESTED create mode 100644 env/Lib/site-packages/pip-20.2.3.dist-info/WHEEL create mode 100644 env/Lib/site-packages/pip-20.2.3.dist-info/entry_points.txt create mode 100644 env/Lib/site-packages/pip-20.2.3.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/pip/__init__.py create mode 100644 env/Lib/site-packages/pip/__main__.py create mode 100644 env/Lib/site-packages/pip/_internal/__init__.py create mode 100644 env/Lib/site-packages/pip/_internal/build_env.py create mode 100644 env/Lib/site-packages/pip/_internal/cache.py create mode 100644 env/Lib/site-packages/pip/_internal/cli/__init__.py create mode 100644 env/Lib/site-packages/pip/_internal/cli/autocompletion.py create mode 100644 env/Lib/site-packages/pip/_internal/cli/base_command.py create mode 100644 env/Lib/site-packages/pip/_internal/cli/cmdoptions.py create mode 100644 env/Lib/site-packages/pip/_internal/cli/command_context.py create mode 100644 env/Lib/site-packages/pip/_internal/cli/main.py create mode 100644 env/Lib/site-packages/pip/_internal/cli/main_parser.py create mode 100644 env/Lib/site-packages/pip/_internal/cli/parser.py create mode 100644 env/Lib/site-packages/pip/_internal/cli/progress_bars.py create mode 100644 env/Lib/site-packages/pip/_internal/cli/req_command.py create mode 100644 env/Lib/site-packages/pip/_internal/cli/spinners.py create mode 100644 env/Lib/site-packages/pip/_internal/cli/status_codes.py create mode 100644 env/Lib/site-packages/pip/_internal/commands/__init__.py create mode 100644 env/Lib/site-packages/pip/_internal/commands/cache.py create mode 100644 env/Lib/site-packages/pip/_internal/commands/check.py create mode 100644 env/Lib/site-packages/pip/_internal/commands/completion.py create mode 100644 env/Lib/site-packages/pip/_internal/commands/configuration.py create 
mode 100644 env/Lib/site-packages/pip/_internal/commands/debug.py create mode 100644 env/Lib/site-packages/pip/_internal/commands/download.py create mode 100644 env/Lib/site-packages/pip/_internal/commands/freeze.py create mode 100644 env/Lib/site-packages/pip/_internal/commands/hash.py create mode 100644 env/Lib/site-packages/pip/_internal/commands/help.py create mode 100644 env/Lib/site-packages/pip/_internal/commands/install.py create mode 100644 env/Lib/site-packages/pip/_internal/commands/list.py create mode 100644 env/Lib/site-packages/pip/_internal/commands/search.py create mode 100644 env/Lib/site-packages/pip/_internal/commands/show.py create mode 100644 env/Lib/site-packages/pip/_internal/commands/uninstall.py create mode 100644 env/Lib/site-packages/pip/_internal/commands/wheel.py create mode 100644 env/Lib/site-packages/pip/_internal/configuration.py create mode 100644 env/Lib/site-packages/pip/_internal/distributions/__init__.py create mode 100644 env/Lib/site-packages/pip/_internal/distributions/base.py create mode 100644 env/Lib/site-packages/pip/_internal/distributions/installed.py create mode 100644 env/Lib/site-packages/pip/_internal/distributions/sdist.py create mode 100644 env/Lib/site-packages/pip/_internal/distributions/wheel.py create mode 100644 env/Lib/site-packages/pip/_internal/exceptions.py create mode 100644 env/Lib/site-packages/pip/_internal/index/__init__.py create mode 100644 env/Lib/site-packages/pip/_internal/index/collector.py create mode 100644 env/Lib/site-packages/pip/_internal/index/package_finder.py create mode 100644 env/Lib/site-packages/pip/_internal/locations.py create mode 100644 env/Lib/site-packages/pip/_internal/main.py create mode 100644 env/Lib/site-packages/pip/_internal/models/__init__.py create mode 100644 env/Lib/site-packages/pip/_internal/models/candidate.py create mode 100644 env/Lib/site-packages/pip/_internal/models/direct_url.py create mode 100644 
env/Lib/site-packages/pip/_internal/models/format_control.py create mode 100644 env/Lib/site-packages/pip/_internal/models/index.py create mode 100644 env/Lib/site-packages/pip/_internal/models/link.py create mode 100644 env/Lib/site-packages/pip/_internal/models/scheme.py create mode 100644 env/Lib/site-packages/pip/_internal/models/search_scope.py create mode 100644 env/Lib/site-packages/pip/_internal/models/selection_prefs.py create mode 100644 env/Lib/site-packages/pip/_internal/models/target_python.py create mode 100644 env/Lib/site-packages/pip/_internal/models/wheel.py create mode 100644 env/Lib/site-packages/pip/_internal/network/__init__.py create mode 100644 env/Lib/site-packages/pip/_internal/network/auth.py create mode 100644 env/Lib/site-packages/pip/_internal/network/cache.py create mode 100644 env/Lib/site-packages/pip/_internal/network/download.py create mode 100644 env/Lib/site-packages/pip/_internal/network/lazy_wheel.py create mode 100644 env/Lib/site-packages/pip/_internal/network/session.py create mode 100644 env/Lib/site-packages/pip/_internal/network/utils.py create mode 100644 env/Lib/site-packages/pip/_internal/network/xmlrpc.py create mode 100644 env/Lib/site-packages/pip/_internal/operations/__init__.py create mode 100644 env/Lib/site-packages/pip/_internal/operations/check.py create mode 100644 env/Lib/site-packages/pip/_internal/operations/freeze.py create mode 100644 env/Lib/site-packages/pip/_internal/operations/install/__init__.py create mode 100644 env/Lib/site-packages/pip/_internal/operations/install/editable_legacy.py create mode 100644 env/Lib/site-packages/pip/_internal/operations/install/legacy.py create mode 100644 env/Lib/site-packages/pip/_internal/operations/install/wheel.py create mode 100644 env/Lib/site-packages/pip/_internal/operations/prepare.py create mode 100644 env/Lib/site-packages/pip/_internal/pyproject.py create mode 100644 env/Lib/site-packages/pip/_internal/req/__init__.py create mode 100644 
env/Lib/site-packages/pip/_internal/req/constructors.py create mode 100644 env/Lib/site-packages/pip/_internal/req/req_file.py create mode 100644 env/Lib/site-packages/pip/_internal/req/req_install.py create mode 100644 env/Lib/site-packages/pip/_internal/req/req_set.py create mode 100644 env/Lib/site-packages/pip/_internal/req/req_tracker.py create mode 100644 env/Lib/site-packages/pip/_internal/req/req_uninstall.py create mode 100644 env/Lib/site-packages/pip/_internal/resolution/__init__.py create mode 100644 env/Lib/site-packages/pip/_internal/resolution/base.py create mode 100644 env/Lib/site-packages/pip/_internal/resolution/legacy/__init__.py create mode 100644 env/Lib/site-packages/pip/_internal/resolution/legacy/resolver.py create mode 100644 env/Lib/site-packages/pip/_internal/resolution/resolvelib/__init__.py create mode 100644 env/Lib/site-packages/pip/_internal/resolution/resolvelib/base.py create mode 100644 env/Lib/site-packages/pip/_internal/resolution/resolvelib/candidates.py create mode 100644 env/Lib/site-packages/pip/_internal/resolution/resolvelib/factory.py create mode 100644 env/Lib/site-packages/pip/_internal/resolution/resolvelib/provider.py create mode 100644 env/Lib/site-packages/pip/_internal/resolution/resolvelib/requirements.py create mode 100644 env/Lib/site-packages/pip/_internal/resolution/resolvelib/resolver.py create mode 100644 env/Lib/site-packages/pip/_internal/self_outdated_check.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/__init__.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/appdirs.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/compat.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/compatibility_tags.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/datetime.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/deprecation.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/direct_url_helpers.py create mode 100644 
env/Lib/site-packages/pip/_internal/utils/distutils_args.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/encoding.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/entrypoints.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/filesystem.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/filetypes.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/glibc.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/hashes.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/inject_securetransport.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/logging.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/misc.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/models.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/packaging.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/parallel.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/pkg_resources.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/setuptools_build.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/subprocess.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/temp_dir.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/typing.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/unpacking.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/urls.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/virtualenv.py create mode 100644 env/Lib/site-packages/pip/_internal/utils/wheel.py create mode 100644 env/Lib/site-packages/pip/_internal/vcs/__init__.py create mode 100644 env/Lib/site-packages/pip/_internal/vcs/bazaar.py create mode 100644 env/Lib/site-packages/pip/_internal/vcs/git.py create mode 100644 env/Lib/site-packages/pip/_internal/vcs/mercurial.py create mode 100644 env/Lib/site-packages/pip/_internal/vcs/subversion.py create mode 100644 
env/Lib/site-packages/pip/_internal/vcs/versioncontrol.py create mode 100644 env/Lib/site-packages/pip/_internal/wheel_builder.py create mode 100644 env/Lib/site-packages/pip/_vendor/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/appdirs.py create mode 100644 env/Lib/site-packages/pip/_vendor/cachecontrol/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/cachecontrol/_cmd.py create mode 100644 env/Lib/site-packages/pip/_vendor/cachecontrol/adapter.py create mode 100644 env/Lib/site-packages/pip/_vendor/cachecontrol/cache.py create mode 100644 env/Lib/site-packages/pip/_vendor/cachecontrol/caches/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py create mode 100644 env/Lib/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py create mode 100644 env/Lib/site-packages/pip/_vendor/cachecontrol/compat.py create mode 100644 env/Lib/site-packages/pip/_vendor/cachecontrol/controller.py create mode 100644 env/Lib/site-packages/pip/_vendor/cachecontrol/filewrapper.py create mode 100644 env/Lib/site-packages/pip/_vendor/cachecontrol/heuristics.py create mode 100644 env/Lib/site-packages/pip/_vendor/cachecontrol/serialize.py create mode 100644 env/Lib/site-packages/pip/_vendor/cachecontrol/wrapper.py create mode 100644 env/Lib/site-packages/pip/_vendor/certifi/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/certifi/__main__.py create mode 100644 env/Lib/site-packages/pip/_vendor/certifi/cacert.pem create mode 100644 env/Lib/site-packages/pip/_vendor/certifi/core.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/big5freq.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/big5prober.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/chardistribution.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/charsetgroupprober.py create mode 100644 
env/Lib/site-packages/pip/_vendor/chardet/charsetprober.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/cli/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/cli/chardetect.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/codingstatemachine.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/compat.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/cp949prober.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/enums.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/escprober.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/escsm.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/eucjpprober.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/euckrfreq.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/euckrprober.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/euctwfreq.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/euctwprober.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/gb2312freq.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/gb2312prober.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/hebrewprober.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/jisfreq.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/jpcntx.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/langbulgarianmodel.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/langcyrillicmodel.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/langgreekmodel.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/langhebrewmodel.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/langhungarianmodel.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/langthaimodel.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/langturkishmodel.py create mode 100644 
env/Lib/site-packages/pip/_vendor/chardet/latin1prober.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/mbcharsetprober.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/mbcsgroupprober.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/mbcssm.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/sbcharsetprober.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/sbcsgroupprober.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/sjisprober.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/universaldetector.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/utf8prober.py create mode 100644 env/Lib/site-packages/pip/_vendor/chardet/version.py create mode 100644 env/Lib/site-packages/pip/_vendor/colorama/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/colorama/ansi.py create mode 100644 env/Lib/site-packages/pip/_vendor/colorama/ansitowin32.py create mode 100644 env/Lib/site-packages/pip/_vendor/colorama/initialise.py create mode 100644 env/Lib/site-packages/pip/_vendor/colorama/win32.py create mode 100644 env/Lib/site-packages/pip/_vendor/colorama/winterm.py create mode 100644 env/Lib/site-packages/pip/_vendor/contextlib2.py create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/_backport/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/_backport/misc.py create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/_backport/shutil.py create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/_backport/tarfile.py create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/compat.py create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/database.py create mode 100644 
env/Lib/site-packages/pip/_vendor/distlib/index.py create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/locators.py create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/manifest.py create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/markers.py create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/metadata.py create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/resources.py create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/scripts.py create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/t32.exe create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/t64.exe create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/util.py create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/version.py create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/w32.exe create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/w64.exe create mode 100644 env/Lib/site-packages/pip/_vendor/distlib/wheel.py create mode 100644 env/Lib/site-packages/pip/_vendor/distro.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/_ihatexml.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/_inputstream.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/_tokenizer.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/_trie/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/_trie/_base.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/_trie/py.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/_utils.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/constants.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/filters/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/filters/base.py create mode 100644 
env/Lib/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/filters/lint.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/filters/optionaltags.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/filters/sanitizer.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/filters/whitespace.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/html5parser.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/serializer.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/treeadapters/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/treeadapters/sax.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/treebuilders/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/treebuilders/base.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/treebuilders/dom.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/treewalkers/base.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/treewalkers/dom.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py create mode 100644 env/Lib/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py create mode 100644 env/Lib/site-packages/pip/_vendor/idna/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/idna/codec.py create mode 100644 env/Lib/site-packages/pip/_vendor/idna/compat.py create mode 100644 
env/Lib/site-packages/pip/_vendor/idna/core.py create mode 100644 env/Lib/site-packages/pip/_vendor/idna/idnadata.py create mode 100644 env/Lib/site-packages/pip/_vendor/idna/intranges.py create mode 100644 env/Lib/site-packages/pip/_vendor/idna/package_data.py create mode 100644 env/Lib/site-packages/pip/_vendor/idna/uts46data.py create mode 100644 env/Lib/site-packages/pip/_vendor/ipaddress.py create mode 100644 env/Lib/site-packages/pip/_vendor/msgpack/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/msgpack/_version.py create mode 100644 env/Lib/site-packages/pip/_vendor/msgpack/exceptions.py create mode 100644 env/Lib/site-packages/pip/_vendor/msgpack/ext.py create mode 100644 env/Lib/site-packages/pip/_vendor/msgpack/fallback.py create mode 100644 env/Lib/site-packages/pip/_vendor/packaging/__about__.py create mode 100644 env/Lib/site-packages/pip/_vendor/packaging/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/packaging/_compat.py create mode 100644 env/Lib/site-packages/pip/_vendor/packaging/_structures.py create mode 100644 env/Lib/site-packages/pip/_vendor/packaging/_typing.py create mode 100644 env/Lib/site-packages/pip/_vendor/packaging/markers.py create mode 100644 env/Lib/site-packages/pip/_vendor/packaging/requirements.py create mode 100644 env/Lib/site-packages/pip/_vendor/packaging/specifiers.py create mode 100644 env/Lib/site-packages/pip/_vendor/packaging/tags.py create mode 100644 env/Lib/site-packages/pip/_vendor/packaging/utils.py create mode 100644 env/Lib/site-packages/pip/_vendor/packaging/version.py create mode 100644 env/Lib/site-packages/pip/_vendor/pep517/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/pep517/_in_process.py create mode 100644 env/Lib/site-packages/pip/_vendor/pep517/build.py create mode 100644 env/Lib/site-packages/pip/_vendor/pep517/check.py create mode 100644 env/Lib/site-packages/pip/_vendor/pep517/colorlog.py create mode 100644 
env/Lib/site-packages/pip/_vendor/pep517/compat.py create mode 100644 env/Lib/site-packages/pip/_vendor/pep517/dirtools.py create mode 100644 env/Lib/site-packages/pip/_vendor/pep517/envbuild.py create mode 100644 env/Lib/site-packages/pip/_vendor/pep517/meta.py create mode 100644 env/Lib/site-packages/pip/_vendor/pep517/wrappers.py create mode 100644 env/Lib/site-packages/pip/_vendor/pkg_resources/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/pkg_resources/py31compat.py create mode 100644 env/Lib/site-packages/pip/_vendor/progress/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/progress/bar.py create mode 100644 env/Lib/site-packages/pip/_vendor/progress/counter.py create mode 100644 env/Lib/site-packages/pip/_vendor/progress/spinner.py create mode 100644 env/Lib/site-packages/pip/_vendor/pyparsing.py create mode 100644 env/Lib/site-packages/pip/_vendor/requests/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/requests/__version__.py create mode 100644 env/Lib/site-packages/pip/_vendor/requests/_internal_utils.py create mode 100644 env/Lib/site-packages/pip/_vendor/requests/adapters.py create mode 100644 env/Lib/site-packages/pip/_vendor/requests/api.py create mode 100644 env/Lib/site-packages/pip/_vendor/requests/auth.py create mode 100644 env/Lib/site-packages/pip/_vendor/requests/certs.py create mode 100644 env/Lib/site-packages/pip/_vendor/requests/compat.py create mode 100644 env/Lib/site-packages/pip/_vendor/requests/cookies.py create mode 100644 env/Lib/site-packages/pip/_vendor/requests/exceptions.py create mode 100644 env/Lib/site-packages/pip/_vendor/requests/help.py create mode 100644 env/Lib/site-packages/pip/_vendor/requests/hooks.py create mode 100644 env/Lib/site-packages/pip/_vendor/requests/models.py create mode 100644 env/Lib/site-packages/pip/_vendor/requests/packages.py create mode 100644 env/Lib/site-packages/pip/_vendor/requests/sessions.py create mode 100644 
env/Lib/site-packages/pip/_vendor/requests/status_codes.py create mode 100644 env/Lib/site-packages/pip/_vendor/requests/structures.py create mode 100644 env/Lib/site-packages/pip/_vendor/requests/utils.py create mode 100644 env/Lib/site-packages/pip/_vendor/resolvelib/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/resolvelib/compat/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py create mode 100644 env/Lib/site-packages/pip/_vendor/resolvelib/providers.py create mode 100644 env/Lib/site-packages/pip/_vendor/resolvelib/reporters.py create mode 100644 env/Lib/site-packages/pip/_vendor/resolvelib/resolvers.py create mode 100644 env/Lib/site-packages/pip/_vendor/resolvelib/structs.py create mode 100644 env/Lib/site-packages/pip/_vendor/retrying.py create mode 100644 env/Lib/site-packages/pip/_vendor/six.py create mode 100644 env/Lib/site-packages/pip/_vendor/toml/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/toml/common.py create mode 100644 env/Lib/site-packages/pip/_vendor/toml/decoder.py create mode 100644 env/Lib/site-packages/pip/_vendor/toml/encoder.py create mode 100644 env/Lib/site-packages/pip/_vendor/toml/ordered.py create mode 100644 env/Lib/site-packages/pip/_vendor/toml/tz.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/_collections.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/connection.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/connectionpool.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/contrib/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py create mode 100644 
env/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/contrib/appengine.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/contrib/securetransport.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/contrib/socks.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/exceptions.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/fields.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/filepost.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/packages/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/packages/six.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/poolmanager.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/request.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/response.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/util/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/util/connection.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/util/queue.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/util/request.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/util/response.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/util/retry.py create mode 100644 
env/Lib/site-packages/pip/_vendor/urllib3/util/ssl_.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/util/timeout.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/util/url.py create mode 100644 env/Lib/site-packages/pip/_vendor/urllib3/util/wait.py create mode 100644 env/Lib/site-packages/pip/_vendor/vendor.txt create mode 100644 env/Lib/site-packages/pip/_vendor/webencodings/__init__.py create mode 100644 env/Lib/site-packages/pip/_vendor/webencodings/labels.py create mode 100644 env/Lib/site-packages/pip/_vendor/webencodings/mklabels.py create mode 100644 env/Lib/site-packages/pip/_vendor/webencodings/tests.py create mode 100644 env/Lib/site-packages/pip/_vendor/webencodings/x_user_defined.py create mode 100644 env/Lib/site-packages/pkg_resources/__init__.py create mode 100644 env/Lib/site-packages/pkg_resources/_vendor/__init__.py create mode 100644 env/Lib/site-packages/pkg_resources/_vendor/appdirs.py create mode 100644 env/Lib/site-packages/pkg_resources/_vendor/packaging/__about__.py create mode 100644 env/Lib/site-packages/pkg_resources/_vendor/packaging/__init__.py create mode 100644 env/Lib/site-packages/pkg_resources/_vendor/packaging/_compat.py create mode 100644 env/Lib/site-packages/pkg_resources/_vendor/packaging/_structures.py create mode 100644 env/Lib/site-packages/pkg_resources/_vendor/packaging/markers.py create mode 100644 env/Lib/site-packages/pkg_resources/_vendor/packaging/requirements.py create mode 100644 env/Lib/site-packages/pkg_resources/_vendor/packaging/specifiers.py create mode 100644 env/Lib/site-packages/pkg_resources/_vendor/packaging/tags.py create mode 100644 env/Lib/site-packages/pkg_resources/_vendor/packaging/utils.py create mode 100644 env/Lib/site-packages/pkg_resources/_vendor/packaging/version.py create mode 100644 env/Lib/site-packages/pkg_resources/_vendor/pyparsing.py create mode 100644 env/Lib/site-packages/pkg_resources/_vendor/six.py create mode 100644 
env/Lib/site-packages/pkg_resources/extern/__init__.py create mode 100644 env/Lib/site-packages/protobuf-3.17.3-nspkg.pth create mode 100644 env/Lib/site-packages/protobuf-3.17.3.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/protobuf-3.17.3.dist-info/METADATA create mode 100644 env/Lib/site-packages/protobuf-3.17.3.dist-info/RECORD create mode 100644 env/Lib/site-packages/protobuf-3.17.3.dist-info/WHEEL create mode 100644 env/Lib/site-packages/protobuf-3.17.3.dist-info/namespace_packages.txt create mode 100644 env/Lib/site-packages/protobuf-3.17.3.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/pyasn1-0.4.8.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/pyasn1-0.4.8.dist-info/LICENSE.rst create mode 100644 env/Lib/site-packages/pyasn1-0.4.8.dist-info/METADATA create mode 100644 env/Lib/site-packages/pyasn1-0.4.8.dist-info/RECORD create mode 100644 env/Lib/site-packages/pyasn1-0.4.8.dist-info/WHEEL create mode 100644 env/Lib/site-packages/pyasn1-0.4.8.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/pyasn1-0.4.8.dist-info/zip-safe create mode 100644 env/Lib/site-packages/pyasn1/__init__.py create mode 100644 env/Lib/site-packages/pyasn1/codec/__init__.py create mode 100644 env/Lib/site-packages/pyasn1/codec/ber/__init__.py create mode 100644 env/Lib/site-packages/pyasn1/codec/ber/decoder.py create mode 100644 env/Lib/site-packages/pyasn1/codec/ber/encoder.py create mode 100644 env/Lib/site-packages/pyasn1/codec/ber/eoo.py create mode 100644 env/Lib/site-packages/pyasn1/codec/cer/__init__.py create mode 100644 env/Lib/site-packages/pyasn1/codec/cer/decoder.py create mode 100644 env/Lib/site-packages/pyasn1/codec/cer/encoder.py create mode 100644 env/Lib/site-packages/pyasn1/codec/der/__init__.py create mode 100644 env/Lib/site-packages/pyasn1/codec/der/decoder.py create mode 100644 env/Lib/site-packages/pyasn1/codec/der/encoder.py create mode 100644 env/Lib/site-packages/pyasn1/codec/native/__init__.py 
create mode 100644 env/Lib/site-packages/pyasn1/codec/native/decoder.py create mode 100644 env/Lib/site-packages/pyasn1/codec/native/encoder.py create mode 100644 env/Lib/site-packages/pyasn1/compat/__init__.py create mode 100644 env/Lib/site-packages/pyasn1/compat/binary.py create mode 100644 env/Lib/site-packages/pyasn1/compat/calling.py create mode 100644 env/Lib/site-packages/pyasn1/compat/dateandtime.py create mode 100644 env/Lib/site-packages/pyasn1/compat/integer.py create mode 100644 env/Lib/site-packages/pyasn1/compat/octets.py create mode 100644 env/Lib/site-packages/pyasn1/compat/string.py create mode 100644 env/Lib/site-packages/pyasn1/debug.py create mode 100644 env/Lib/site-packages/pyasn1/error.py create mode 100644 env/Lib/site-packages/pyasn1/type/__init__.py create mode 100644 env/Lib/site-packages/pyasn1/type/base.py create mode 100644 env/Lib/site-packages/pyasn1/type/char.py create mode 100644 env/Lib/site-packages/pyasn1/type/constraint.py create mode 100644 env/Lib/site-packages/pyasn1/type/error.py create mode 100644 env/Lib/site-packages/pyasn1/type/namedtype.py create mode 100644 env/Lib/site-packages/pyasn1/type/namedval.py create mode 100644 env/Lib/site-packages/pyasn1/type/opentype.py create mode 100644 env/Lib/site-packages/pyasn1/type/tag.py create mode 100644 env/Lib/site-packages/pyasn1/type/tagmap.py create mode 100644 env/Lib/site-packages/pyasn1/type/univ.py create mode 100644 env/Lib/site-packages/pyasn1/type/useful.py create mode 100644 env/Lib/site-packages/pyasn1_modules-0.2.8.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/pyasn1_modules-0.2.8.dist-info/LICENSE.txt create mode 100644 env/Lib/site-packages/pyasn1_modules-0.2.8.dist-info/METADATA create mode 100644 env/Lib/site-packages/pyasn1_modules-0.2.8.dist-info/RECORD create mode 100644 env/Lib/site-packages/pyasn1_modules-0.2.8.dist-info/WHEEL create mode 100644 env/Lib/site-packages/pyasn1_modules-0.2.8.dist-info/top_level.txt create mode 100644 
env/Lib/site-packages/pyasn1_modules-0.2.8.dist-info/zip-safe create mode 100644 env/Lib/site-packages/pyasn1_modules/__init__.py create mode 100644 env/Lib/site-packages/pyasn1_modules/pem.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc1155.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc1157.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc1901.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc1902.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc1905.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc2251.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc2314.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc2315.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc2437.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc2459.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc2511.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc2560.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc2631.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc2634.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc2985.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc2986.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc3114.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc3161.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc3274.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc3279.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc3280.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc3281.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc3412.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc3414.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc3447.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc3560.py create mode 100644 
env/Lib/site-packages/pyasn1_modules/rfc3565.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc3709.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc3770.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc3779.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc3852.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc4043.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc4055.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc4073.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc4108.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc4210.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc4211.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc4334.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc4985.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5035.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5083.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5084.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5208.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5280.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5480.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5649.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5652.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5751.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5755.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5913.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5914.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5915.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5916.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5917.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5924.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5934.py create 
mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5940.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5958.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc5990.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc6010.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc6019.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc6031.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc6032.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc6120.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc6170.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc6187.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc6210.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc6211.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc6402-1.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc6402.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc6482.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc6486.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc6487.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc6664.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc6955.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc6960.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc7030.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc7191.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc7229.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc7292.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc7296.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc7508.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc7585.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc7633.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc7773.py create mode 100644 
env/Lib/site-packages/pyasn1_modules/rfc7894-1.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc7894.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc7906.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc7914.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc8017.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc8018.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc8103.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc8209.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc8226.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc8358.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc8360.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc8398.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc8410.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc8418.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc8419.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc8479.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc8494.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc8520.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc8619.py create mode 100644 env/Lib/site-packages/pyasn1_modules/rfc8649.py create mode 100644 env/Lib/site-packages/pycryptodome-3.10.1.dist-info/AUTHORS.rst create mode 100644 env/Lib/site-packages/pycryptodome-3.10.1.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/pycryptodome-3.10.1.dist-info/LICENSE.rst create mode 100644 env/Lib/site-packages/pycryptodome-3.10.1.dist-info/METADATA create mode 100644 env/Lib/site-packages/pycryptodome-3.10.1.dist-info/RECORD create mode 100644 env/Lib/site-packages/pycryptodome-3.10.1.dist-info/REQUESTED create mode 100644 env/Lib/site-packages/pycryptodome-3.10.1.dist-info/WHEEL create mode 100644 env/Lib/site-packages/pycryptodome-3.10.1.dist-info/top_level.txt create mode 100644 
env/Lib/site-packages/pyparsing-2.4.7.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/pyparsing-2.4.7.dist-info/LICENSE create mode 100644 env/Lib/site-packages/pyparsing-2.4.7.dist-info/METADATA create mode 100644 env/Lib/site-packages/pyparsing-2.4.7.dist-info/RECORD create mode 100644 env/Lib/site-packages/pyparsing-2.4.7.dist-info/WHEEL create mode 100644 env/Lib/site-packages/pyparsing-2.4.7.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/pyparsing.py create mode 100644 env/Lib/site-packages/python_jwt-2.0.1.dist-info/DESCRIPTION.rst create mode 100644 env/Lib/site-packages/python_jwt-2.0.1.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/python_jwt-2.0.1.dist-info/METADATA create mode 100644 env/Lib/site-packages/python_jwt-2.0.1.dist-info/RECORD create mode 100644 env/Lib/site-packages/python_jwt-2.0.1.dist-info/REQUESTED create mode 100644 env/Lib/site-packages/python_jwt-2.0.1.dist-info/WHEEL create mode 100644 env/Lib/site-packages/python_jwt-2.0.1.dist-info/metadata.json create mode 100644 env/Lib/site-packages/python_jwt-2.0.1.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/python_jwt/__init__.py create mode 100644 env/Lib/site-packages/requests-2.19.0.dist-info/DESCRIPTION.rst create mode 100644 env/Lib/site-packages/requests-2.19.0.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/requests-2.19.0.dist-info/LICENSE.txt create mode 100644 env/Lib/site-packages/requests-2.19.0.dist-info/METADATA create mode 100644 env/Lib/site-packages/requests-2.19.0.dist-info/RECORD create mode 100644 env/Lib/site-packages/requests-2.19.0.dist-info/REQUESTED create mode 100644 env/Lib/site-packages/requests-2.19.0.dist-info/WHEEL create mode 100644 env/Lib/site-packages/requests-2.19.0.dist-info/metadata.json create mode 100644 env/Lib/site-packages/requests-2.19.0.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/requests/__init__.py create mode 100644 
env/Lib/site-packages/requests/__version__.py create mode 100644 env/Lib/site-packages/requests/_internal_utils.py create mode 100644 env/Lib/site-packages/requests/adapters.py create mode 100644 env/Lib/site-packages/requests/api.py create mode 100644 env/Lib/site-packages/requests/auth.py create mode 100644 env/Lib/site-packages/requests/certs.py create mode 100644 env/Lib/site-packages/requests/compat.py create mode 100644 env/Lib/site-packages/requests/cookies.py create mode 100644 env/Lib/site-packages/requests/exceptions.py create mode 100644 env/Lib/site-packages/requests/help.py create mode 100644 env/Lib/site-packages/requests/hooks.py create mode 100644 env/Lib/site-packages/requests/models.py create mode 100644 env/Lib/site-packages/requests/packages.py create mode 100644 env/Lib/site-packages/requests/sessions.py create mode 100644 env/Lib/site-packages/requests/status_codes.py create mode 100644 env/Lib/site-packages/requests/structures.py create mode 100644 env/Lib/site-packages/requests/utils.py create mode 100644 env/Lib/site-packages/requests_toolbelt-0.7.0.dist-info/DESCRIPTION.rst create mode 100644 env/Lib/site-packages/requests_toolbelt-0.7.0.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/requests_toolbelt-0.7.0.dist-info/METADATA create mode 100644 env/Lib/site-packages/requests_toolbelt-0.7.0.dist-info/RECORD create mode 100644 env/Lib/site-packages/requests_toolbelt-0.7.0.dist-info/REQUESTED create mode 100644 env/Lib/site-packages/requests_toolbelt-0.7.0.dist-info/WHEEL create mode 100644 env/Lib/site-packages/requests_toolbelt-0.7.0.dist-info/metadata.json create mode 100644 env/Lib/site-packages/requests_toolbelt-0.7.0.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/requests_toolbelt/__init__.py create mode 100644 env/Lib/site-packages/requests_toolbelt/_compat.py create mode 100644 env/Lib/site-packages/requests_toolbelt/adapters/__init__.py create mode 100644 
env/Lib/site-packages/requests_toolbelt/adapters/appengine.py create mode 100644 env/Lib/site-packages/requests_toolbelt/adapters/fingerprint.py create mode 100644 env/Lib/site-packages/requests_toolbelt/adapters/host_header_ssl.py create mode 100644 env/Lib/site-packages/requests_toolbelt/adapters/socket_options.py create mode 100644 env/Lib/site-packages/requests_toolbelt/adapters/source.py create mode 100644 env/Lib/site-packages/requests_toolbelt/adapters/ssl.py create mode 100644 env/Lib/site-packages/requests_toolbelt/auth/__init__.py create mode 100644 env/Lib/site-packages/requests_toolbelt/auth/_digest_auth_compat.py create mode 100644 env/Lib/site-packages/requests_toolbelt/auth/guess.py create mode 100644 env/Lib/site-packages/requests_toolbelt/auth/handler.py create mode 100644 env/Lib/site-packages/requests_toolbelt/auth/http_proxy_digest.py create mode 100644 env/Lib/site-packages/requests_toolbelt/cookies/__init__.py create mode 100644 env/Lib/site-packages/requests_toolbelt/cookies/forgetful.py create mode 100644 env/Lib/site-packages/requests_toolbelt/downloadutils/__init__.py create mode 100644 env/Lib/site-packages/requests_toolbelt/downloadutils/stream.py create mode 100644 env/Lib/site-packages/requests_toolbelt/downloadutils/tee.py create mode 100644 env/Lib/site-packages/requests_toolbelt/exceptions.py create mode 100644 env/Lib/site-packages/requests_toolbelt/multipart/__init__.py create mode 100644 env/Lib/site-packages/requests_toolbelt/multipart/decoder.py create mode 100644 env/Lib/site-packages/requests_toolbelt/multipart/encoder.py create mode 100644 env/Lib/site-packages/requests_toolbelt/sessions.py create mode 100644 env/Lib/site-packages/requests_toolbelt/streaming_iterator.py create mode 100644 env/Lib/site-packages/requests_toolbelt/threaded/__init__.py create mode 100644 env/Lib/site-packages/requests_toolbelt/threaded/pool.py create mode 100644 env/Lib/site-packages/requests_toolbelt/threaded/thread.py create mode 100644 
env/Lib/site-packages/requests_toolbelt/utils/__init__.py create mode 100644 env/Lib/site-packages/requests_toolbelt/utils/deprecated.py create mode 100644 env/Lib/site-packages/requests_toolbelt/utils/dump.py create mode 100644 env/Lib/site-packages/requests_toolbelt/utils/formdata.py create mode 100644 env/Lib/site-packages/requests_toolbelt/utils/user_agent.py create mode 100644 env/Lib/site-packages/rsa-4.7.2.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/rsa-4.7.2.dist-info/LICENSE create mode 100644 env/Lib/site-packages/rsa-4.7.2.dist-info/METADATA create mode 100644 env/Lib/site-packages/rsa-4.7.2.dist-info/RECORD create mode 100644 env/Lib/site-packages/rsa-4.7.2.dist-info/WHEEL create mode 100644 env/Lib/site-packages/rsa-4.7.2.dist-info/entry_points.txt create mode 100644 env/Lib/site-packages/rsa-4.7.2.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/rsa/__init__.py create mode 100644 env/Lib/site-packages/rsa/_compat.py create mode 100644 env/Lib/site-packages/rsa/asn1.py create mode 100644 env/Lib/site-packages/rsa/cli.py create mode 100644 env/Lib/site-packages/rsa/common.py create mode 100644 env/Lib/site-packages/rsa/core.py create mode 100644 env/Lib/site-packages/rsa/key.py create mode 100644 env/Lib/site-packages/rsa/parallel.py create mode 100644 env/Lib/site-packages/rsa/pem.py create mode 100644 env/Lib/site-packages/rsa/pkcs1.py create mode 100644 env/Lib/site-packages/rsa/pkcs1_v2.py create mode 100644 env/Lib/site-packages/rsa/prime.py create mode 100644 env/Lib/site-packages/rsa/randnum.py create mode 100644 env/Lib/site-packages/rsa/transform.py create mode 100644 env/Lib/site-packages/rsa/util.py create mode 100644 env/Lib/site-packages/setuptools-49.2.1.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/setuptools-49.2.1.dist-info/LICENSE create mode 100644 env/Lib/site-packages/setuptools-49.2.1.dist-info/METADATA create mode 100644 env/Lib/site-packages/setuptools-49.2.1.dist-info/RECORD create 
mode 100644 env/Lib/site-packages/setuptools-49.2.1.dist-info/REQUESTED create mode 100644 env/Lib/site-packages/setuptools-49.2.1.dist-info/WHEEL create mode 100644 env/Lib/site-packages/setuptools-49.2.1.dist-info/dependency_links.txt create mode 100644 env/Lib/site-packages/setuptools-49.2.1.dist-info/entry_points.txt create mode 100644 env/Lib/site-packages/setuptools-49.2.1.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/setuptools-49.2.1.dist-info/zip-safe create mode 100644 env/Lib/site-packages/setuptools/__init__.py create mode 100644 env/Lib/site-packages/setuptools/_deprecation_warning.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/__init__.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/_msvccompiler.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/archive_util.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/bcppcompiler.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/ccompiler.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/cmd.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/__init__.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/bdist.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/bdist_dumb.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/bdist_msi.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/bdist_rpm.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/bdist_wininst.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/build.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/build_clib.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/build_ext.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/build_py.py create mode 100644 
env/Lib/site-packages/setuptools/_distutils/command/build_scripts.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/check.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/clean.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/config.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/install.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/install_data.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/install_egg_info.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/install_headers.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/install_lib.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/install_scripts.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/register.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/sdist.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/command/upload.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/config.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/core.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/cygwinccompiler.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/debug.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/dep_util.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/dir_util.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/dist.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/errors.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/extension.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/fancy_getopt.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/file_util.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/filelist.py create mode 100644 
env/Lib/site-packages/setuptools/_distutils/log.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/msvc9compiler.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/msvccompiler.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/spawn.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/sysconfig.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/text_file.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/unixccompiler.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/util.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/version.py create mode 100644 env/Lib/site-packages/setuptools/_distutils/versionpredicate.py create mode 100644 env/Lib/site-packages/setuptools/_imp.py create mode 100644 env/Lib/site-packages/setuptools/_vendor/__init__.py create mode 100644 env/Lib/site-packages/setuptools/_vendor/ordered_set.py create mode 100644 env/Lib/site-packages/setuptools/_vendor/packaging/__about__.py create mode 100644 env/Lib/site-packages/setuptools/_vendor/packaging/__init__.py create mode 100644 env/Lib/site-packages/setuptools/_vendor/packaging/_compat.py create mode 100644 env/Lib/site-packages/setuptools/_vendor/packaging/_structures.py create mode 100644 env/Lib/site-packages/setuptools/_vendor/packaging/markers.py create mode 100644 env/Lib/site-packages/setuptools/_vendor/packaging/requirements.py create mode 100644 env/Lib/site-packages/setuptools/_vendor/packaging/specifiers.py create mode 100644 env/Lib/site-packages/setuptools/_vendor/packaging/tags.py create mode 100644 env/Lib/site-packages/setuptools/_vendor/packaging/utils.py create mode 100644 env/Lib/site-packages/setuptools/_vendor/packaging/version.py create mode 100644 env/Lib/site-packages/setuptools/_vendor/pyparsing.py create mode 100644 env/Lib/site-packages/setuptools/_vendor/six.py create mode 100644 env/Lib/site-packages/setuptools/archive_util.py create mode 100644 
env/Lib/site-packages/setuptools/build_meta.py create mode 100644 env/Lib/site-packages/setuptools/cli-32.exe create mode 100644 env/Lib/site-packages/setuptools/cli-64.exe create mode 100644 env/Lib/site-packages/setuptools/cli.exe create mode 100644 env/Lib/site-packages/setuptools/command/__init__.py create mode 100644 env/Lib/site-packages/setuptools/command/alias.py create mode 100644 env/Lib/site-packages/setuptools/command/bdist_egg.py create mode 100644 env/Lib/site-packages/setuptools/command/bdist_rpm.py create mode 100644 env/Lib/site-packages/setuptools/command/bdist_wininst.py create mode 100644 env/Lib/site-packages/setuptools/command/build_clib.py create mode 100644 env/Lib/site-packages/setuptools/command/build_ext.py create mode 100644 env/Lib/site-packages/setuptools/command/build_py.py create mode 100644 env/Lib/site-packages/setuptools/command/develop.py create mode 100644 env/Lib/site-packages/setuptools/command/dist_info.py create mode 100644 env/Lib/site-packages/setuptools/command/easy_install.py create mode 100644 env/Lib/site-packages/setuptools/command/egg_info.py create mode 100644 env/Lib/site-packages/setuptools/command/install.py create mode 100644 env/Lib/site-packages/setuptools/command/install_egg_info.py create mode 100644 env/Lib/site-packages/setuptools/command/install_lib.py create mode 100644 env/Lib/site-packages/setuptools/command/install_scripts.py create mode 100644 env/Lib/site-packages/setuptools/command/launcher manifest.xml create mode 100644 env/Lib/site-packages/setuptools/command/py36compat.py create mode 100644 env/Lib/site-packages/setuptools/command/register.py create mode 100644 env/Lib/site-packages/setuptools/command/rotate.py create mode 100644 env/Lib/site-packages/setuptools/command/saveopts.py create mode 100644 env/Lib/site-packages/setuptools/command/sdist.py create mode 100644 env/Lib/site-packages/setuptools/command/setopt.py create mode 100644 env/Lib/site-packages/setuptools/command/upload.py create 
mode 100644 env/Lib/site-packages/setuptools/command/upload_docs.py create mode 100644 env/Lib/site-packages/setuptools/config.py create mode 100644 env/Lib/site-packages/setuptools/dep_util.py create mode 100644 env/Lib/site-packages/setuptools/depends.py create mode 100644 env/Lib/site-packages/setuptools/dist.py create mode 100644 env/Lib/site-packages/setuptools/distutils_patch.py create mode 100644 env/Lib/site-packages/setuptools/errors.py create mode 100644 env/Lib/site-packages/setuptools/extension.py create mode 100644 env/Lib/site-packages/setuptools/extern/__init__.py create mode 100644 env/Lib/site-packages/setuptools/glob.py create mode 100644 env/Lib/site-packages/setuptools/gui-32.exe create mode 100644 env/Lib/site-packages/setuptools/gui-64.exe create mode 100644 env/Lib/site-packages/setuptools/gui.exe create mode 100644 env/Lib/site-packages/setuptools/installer.py create mode 100644 env/Lib/site-packages/setuptools/launch.py create mode 100644 env/Lib/site-packages/setuptools/lib2to3_ex.py create mode 100644 env/Lib/site-packages/setuptools/monkey.py create mode 100644 env/Lib/site-packages/setuptools/msvc.py create mode 100644 env/Lib/site-packages/setuptools/namespaces.py create mode 100644 env/Lib/site-packages/setuptools/package_index.py create mode 100644 env/Lib/site-packages/setuptools/py27compat.py create mode 100644 env/Lib/site-packages/setuptools/py31compat.py create mode 100644 env/Lib/site-packages/setuptools/py33compat.py create mode 100644 env/Lib/site-packages/setuptools/py34compat.py create mode 100644 env/Lib/site-packages/setuptools/sandbox.py create mode 100644 env/Lib/site-packages/setuptools/script (dev).tmpl create mode 100644 env/Lib/site-packages/setuptools/script.tmpl create mode 100644 env/Lib/site-packages/setuptools/ssl_support.py create mode 100644 env/Lib/site-packages/setuptools/unicode_utils.py create mode 100644 env/Lib/site-packages/setuptools/version.py create mode 100644 
env/Lib/site-packages/setuptools/wheel.py create mode 100644 env/Lib/site-packages/setuptools/windows_support.py create mode 100644 env/Lib/site-packages/six-1.16.0.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/six-1.16.0.dist-info/LICENSE create mode 100644 env/Lib/site-packages/six-1.16.0.dist-info/METADATA create mode 100644 env/Lib/site-packages/six-1.16.0.dist-info/RECORD create mode 100644 env/Lib/site-packages/six-1.16.0.dist-info/WHEEL create mode 100644 env/Lib/site-packages/six-1.16.0.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/six.py create mode 100644 env/Lib/site-packages/urllib3-1.23.dist-info/DESCRIPTION.rst create mode 100644 env/Lib/site-packages/urllib3-1.23.dist-info/INSTALLER create mode 100644 env/Lib/site-packages/urllib3-1.23.dist-info/LICENSE.txt create mode 100644 env/Lib/site-packages/urllib3-1.23.dist-info/METADATA create mode 100644 env/Lib/site-packages/urllib3-1.23.dist-info/RECORD create mode 100644 env/Lib/site-packages/urllib3-1.23.dist-info/WHEEL create mode 100644 env/Lib/site-packages/urllib3-1.23.dist-info/metadata.json create mode 100644 env/Lib/site-packages/urllib3-1.23.dist-info/top_level.txt create mode 100644 env/Lib/site-packages/urllib3/__init__.py create mode 100644 env/Lib/site-packages/urllib3/_collections.py create mode 100644 env/Lib/site-packages/urllib3/connection.py create mode 100644 env/Lib/site-packages/urllib3/connectionpool.py create mode 100644 env/Lib/site-packages/urllib3/contrib/__init__.py create mode 100644 env/Lib/site-packages/urllib3/contrib/_securetransport/__init__.py create mode 100644 env/Lib/site-packages/urllib3/contrib/_securetransport/bindings.py create mode 100644 env/Lib/site-packages/urllib3/contrib/_securetransport/low_level.py create mode 100644 env/Lib/site-packages/urllib3/contrib/appengine.py create mode 100644 env/Lib/site-packages/urllib3/contrib/ntlmpool.py create mode 100644 env/Lib/site-packages/urllib3/contrib/pyopenssl.py create mode 100644 
env/Lib/site-packages/urllib3/contrib/securetransport.py create mode 100644 env/Lib/site-packages/urllib3/contrib/socks.py create mode 100644 env/Lib/site-packages/urllib3/exceptions.py create mode 100644 env/Lib/site-packages/urllib3/fields.py create mode 100644 env/Lib/site-packages/urllib3/filepost.py create mode 100644 env/Lib/site-packages/urllib3/packages/__init__.py create mode 100644 env/Lib/site-packages/urllib3/packages/backports/__init__.py create mode 100644 env/Lib/site-packages/urllib3/packages/backports/makefile.py create mode 100644 env/Lib/site-packages/urllib3/packages/ordered_dict.py create mode 100644 env/Lib/site-packages/urllib3/packages/six.py create mode 100644 env/Lib/site-packages/urllib3/packages/ssl_match_hostname/__init__.py create mode 100644 env/Lib/site-packages/urllib3/packages/ssl_match_hostname/_implementation.py create mode 100644 env/Lib/site-packages/urllib3/poolmanager.py create mode 100644 env/Lib/site-packages/urllib3/request.py create mode 100644 env/Lib/site-packages/urllib3/response.py create mode 100644 env/Lib/site-packages/urllib3/util/__init__.py create mode 100644 env/Lib/site-packages/urllib3/util/connection.py create mode 100644 env/Lib/site-packages/urllib3/util/queue.py create mode 100644 env/Lib/site-packages/urllib3/util/request.py create mode 100644 env/Lib/site-packages/urllib3/util/response.py create mode 100644 env/Lib/site-packages/urllib3/util/retry.py create mode 100644 env/Lib/site-packages/urllib3/util/ssl_.py create mode 100644 env/Lib/site-packages/urllib3/util/timeout.py create mode 100644 env/Lib/site-packages/urllib3/util/url.py create mode 100644 env/Lib/site-packages/urllib3/util/wait.py create mode 100644 env/Scripts/Activate.ps1 create mode 100644 env/Scripts/activate create mode 100644 env/Scripts/activate.bat create mode 100644 env/Scripts/chardetect.exe create mode 100644 env/Scripts/deactivate.bat create mode 100644 env/Scripts/easy_install-3.9.exe create mode 100644 
env/Scripts/easy_install.exe create mode 100644 env/Scripts/pip.exe create mode 100644 env/Scripts/pip3.9.exe create mode 100644 env/Scripts/pip3.exe create mode 100644 env/Scripts/pyrsa-decrypt.exe create mode 100644 env/Scripts/pyrsa-encrypt.exe create mode 100644 env/Scripts/pyrsa-keygen.exe create mode 100644 env/Scripts/pyrsa-priv2pub.exe create mode 100644 env/Scripts/pyrsa-sign.exe create mode 100644 env/Scripts/pyrsa-verify.exe create mode 100644 env/Scripts/python.exe create mode 100644 env/Scripts/pythonw.exe create mode 100644 env/pyvenv.cfg diff --git a/env/Lib/site-packages/Crypto/Cipher/AES.py b/env/Lib/site-packages/Crypto/Cipher/AES.py new file mode 100644 index 0000000..1237a8c --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/AES.py @@ -0,0 +1,250 @@ +# -*- coding: utf-8 -*- +# +# Cipher/AES.py : AES +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== +""" +Module's constants for the modes of operation supported with AES: + +:var MODE_ECB: :ref:`Electronic Code Book (ECB) ` +:var MODE_CBC: :ref:`Cipher-Block Chaining (CBC) ` +:var MODE_CFB: :ref:`Cipher FeedBack (CFB) ` +:var MODE_OFB: :ref:`Output FeedBack (OFB) ` +:var MODE_CTR: :ref:`CounTer Mode (CTR) ` +:var MODE_OPENPGP: :ref:`OpenPGP Mode ` +:var MODE_CCM: :ref:`Counter with CBC-MAC (CCM) Mode ` +:var MODE_EAX: :ref:`EAX Mode ` +:var MODE_GCM: :ref:`Galois Counter Mode (GCM) ` +:var MODE_SIV: :ref:`Syntethic Initialization Vector (SIV) ` +:var MODE_OCB: :ref:`Offset Code Book (OCB) ` +""" + +import sys + +from Crypto.Cipher import _create_cipher +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + c_size_t, c_uint8_ptr) + +from Crypto.Util import _cpu_features +from Crypto.Random import get_random_bytes + + +_cproto = """ + int AES_start_operation(const uint8_t key[], + size_t key_len, + void **pResult); + int AES_encrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int AES_decrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int AES_stop_operation(void *state); + """ + + +# Load portable AES +_raw_aes_lib = load_pycryptodome_raw_lib("Crypto.Cipher._raw_aes", + _cproto) + +# Try to load AES with AES NI instructions +try: + _raw_aesni_lib = None + if _cpu_features.have_aes_ni(): + _raw_aesni_lib = load_pycryptodome_raw_lib("Crypto.Cipher._raw_aesni", + _cproto.replace("AES", + "AESNI")) +# _raw_aesni may not have been compiled in +except OSError: + pass + + +def _create_base_cipher(dict_parameters): + """This method instantiates and returns a handle to a low-level + base cipher. 
It will absorb named parameters in the process.""" + + use_aesni = dict_parameters.pop("use_aesni", True) + + try: + key = dict_parameters.pop("key") + except KeyError: + raise TypeError("Missing 'key' parameter") + + if len(key) not in key_size: + raise ValueError("Incorrect AES key length (%d bytes)" % len(key)) + + if use_aesni and _raw_aesni_lib: + start_operation = _raw_aesni_lib.AESNI_start_operation + stop_operation = _raw_aesni_lib.AESNI_stop_operation + else: + start_operation = _raw_aes_lib.AES_start_operation + stop_operation = _raw_aes_lib.AES_stop_operation + + cipher = VoidPointer() + result = start_operation(c_uint8_ptr(key), + c_size_t(len(key)), + cipher.address_of()) + if result: + raise ValueError("Error %X while instantiating the AES cipher" + % result) + return SmartPointer(cipher.get(), stop_operation) + + +def _derive_Poly1305_key_pair(key, nonce): + """Derive a tuple (r, s, nonce) for a Poly1305 MAC. + + If nonce is ``None``, a new 16-byte nonce is generated. + """ + + if len(key) != 32: + raise ValueError("Poly1305 with AES requires a 32-byte key") + + if nonce is None: + nonce = get_random_bytes(16) + elif len(nonce) != 16: + raise ValueError("Poly1305 with AES requires a 16-byte nonce") + + s = new(key[:16], MODE_ECB).encrypt(nonce) + return key[16:], s, nonce + + +def new(key, mode, *args, **kwargs): + """Create a new AES cipher. + + :param key: + The secret key to use in the symmetric cipher. + + It must be 16, 24 or 32 bytes long (respectively for *AES-128*, + *AES-192* or *AES-256*). + + For ``MODE_SIV`` only, it doubles to 32, 48, or 64 bytes. + :type key: bytes/bytearray/memoryview + + :param mode: + The chaining mode to use for encryption or decryption. + If in doubt, use ``MODE_EAX``. + :type mode: One of the supported ``MODE_*`` constants + + :Keyword Arguments: + * **iv** (*bytes*, *bytearray*, *memoryview*) -- + (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, + and ``MODE_OPENPGP`` modes). 
+ + The initialization vector to use for encryption or decryption. + + For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 16 bytes long. + + For ``MODE_OPENPGP`` mode only, + it must be 16 bytes long for encryption + and 18 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + + If not provided, a random byte string is generated (you must then + read its value with the :attr:`iv` attribute). + + * **nonce** (*bytes*, *bytearray*, *memoryview*) -- + (Only applicable for ``MODE_CCM``, ``MODE_EAX``, ``MODE_GCM``, + ``MODE_SIV``, ``MODE_OCB``, and ``MODE_CTR``). + + A value that must never be reused for any other encryption done + with this key (except possibly for ``MODE_SIV``, see below). + + For ``MODE_EAX``, ``MODE_GCM`` and ``MODE_SIV`` there are no + restrictions on its length (recommended: **16** bytes). + + For ``MODE_CCM``, its length must be in the range **[7..13]**. + Bear in mind that with CCM there is a trade-off between nonce + length and maximum message size. Recommendation: **11** bytes. + + For ``MODE_OCB``, its length must be in the range **[1..15]** + (recommended: **15**). + + For ``MODE_CTR``, its length must be in the range **[0..15]** + (recommended: **8**). + + For ``MODE_SIV``, the nonce is optional, if it is not specified, + then no nonce is being used, which renders the encryption + deterministic. + + If not provided, for modes other than ``MODE_SIV```, a random + byte string of the recommended length is used (you must then + read its value with the :attr:`nonce` attribute). + + * **segment_size** (*integer*) -- + (Only ``MODE_CFB``).The number of **bits** the plaintext and ciphertext + are segmented in. It must be a multiple of 8. + If not specified, it will be assumed to be 8. + + * **mac_len** : (*integer*) -- + (Only ``MODE_EAX``, ``MODE_GCM``, ``MODE_OCB``, ``MODE_CCM``) + Length of the authentication tag, in bytes. + + It must be even and in the range **[4..16]**. 
+ The recommended value (and the default, if not specified) is **16**. + + * **msg_len** : (*integer*) -- + (Only ``MODE_CCM``). Length of the message to (de)cipher. + If not specified, ``encrypt`` must be called with the entire message. + Similarly, ``decrypt`` can only be called once. + + * **assoc_len** : (*integer*) -- + (Only ``MODE_CCM``). Length of the associated data. + If not specified, all associated data is buffered internally, + which may represent a problem for very large messages. + + * **initial_value** : (*integer* or *bytes/bytearray/memoryview*) -- + (Only ``MODE_CTR``). + The initial value for the counter. If not present, the cipher will + start counting from 0. The value is incremented by one for each block. + The counter number is encoded in big endian mode. + + * **counter** : (*object*) -- + Instance of ``Crypto.Util.Counter``, which allows full customization + of the counter block. This parameter is incompatible to both ``nonce`` + and ``initial_value``. + + * **use_aesni** : (*boolean*) -- + Use Intel AES-NI hardware extensions (default: use if available). + + :Return: an AES object, of the applicable mode. 
+ """ + + kwargs["add_aes_modes"] = True + return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs) + + +MODE_ECB = 1 +MODE_CBC = 2 +MODE_CFB = 3 +MODE_OFB = 5 +MODE_CTR = 6 +MODE_OPENPGP = 7 +MODE_CCM = 8 +MODE_EAX = 9 +MODE_SIV = 10 +MODE_GCM = 11 +MODE_OCB = 12 + +# Size of a data block (in bytes) +block_size = 16 +# Size of a key (in bytes) +key_size = (16, 24, 32) diff --git a/env/Lib/site-packages/Crypto/Cipher/AES.pyi b/env/Lib/site-packages/Crypto/Cipher/AES.pyi new file mode 100644 index 0000000..8f655cf --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/AES.pyi @@ -0,0 +1,47 @@ +from typing import Union, Tuple, Optional, Dict + +from Crypto.Cipher._mode_ecb import EcbMode +from Crypto.Cipher._mode_cbc import CbcMode +from Crypto.Cipher._mode_cfb import CfbMode +from Crypto.Cipher._mode_ofb import OfbMode +from Crypto.Cipher._mode_ctr import CtrMode +from Crypto.Cipher._mode_openpgp import OpenPgpMode +from Crypto.Cipher._mode_ccm import CcmMode +from Crypto.Cipher._mode_eax import EaxMode +from Crypto.Cipher._mode_gcm import GcmMode +from Crypto.Cipher._mode_siv import SivMode +from Crypto.Cipher._mode_ocb import OcbMode + +AESMode = int + +MODE_ECB: AESMode +MODE_CBC: AESMode +MODE_CFB: AESMode +MODE_OFB: AESMode +MODE_CTR: AESMode +MODE_OPENPGP: AESMode +MODE_CCM: AESMode +MODE_EAX: AESMode +MODE_GCM: AESMode +MODE_SIV: AESMode +MODE_OCB: AESMode + +Buffer = Union[bytes, bytearray, memoryview] + +def new(key: Buffer, + mode: AESMode, + iv : Buffer = ..., + IV : Buffer = ..., + nonce : Buffer = ..., + segment_size : int = ..., + mac_len : int = ..., + assoc_len : int = ..., + initial_value : Union[int, Buffer] = ..., + counter : Dict = ..., + use_aesni : bool = ...) -> \ + Union[EcbMode, CbcMode, CfbMode, OfbMode, CtrMode, + OpenPgpMode, CcmMode, EaxMode, GcmMode, + SivMode, OcbMode]: ... 
+ +block_size: int +key_size: Tuple[int, int, int] diff --git a/env/Lib/site-packages/Crypto/Cipher/ARC2.py b/env/Lib/site-packages/Crypto/Cipher/ARC2.py new file mode 100644 index 0000000..0ba7e33 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/ARC2.py @@ -0,0 +1,175 @@ +# -*- coding: utf-8 -*- +# +# Cipher/ARC2.py : ARC2.py +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== +""" +Module's constants for the modes of operation supported with ARC2: + +:var MODE_ECB: :ref:`Electronic Code Book (ECB) ` +:var MODE_CBC: :ref:`Cipher-Block Chaining (CBC) ` +:var MODE_CFB: :ref:`Cipher FeedBack (CFB) ` +:var MODE_OFB: :ref:`Output FeedBack (OFB) ` +:var MODE_CTR: :ref:`CounTer Mode (CTR) ` +:var MODE_OPENPGP: :ref:`OpenPGP Mode ` +:var MODE_EAX: :ref:`EAX Mode ` +""" + +import sys + +from Crypto.Cipher import _create_cipher +from Crypto.Util.py3compat import byte_string +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + c_size_t, c_uint8_ptr) + +_raw_arc2_lib = load_pycryptodome_raw_lib( + "Crypto.Cipher._raw_arc2", + """ + int ARC2_start_operation(const uint8_t key[], + size_t key_len, + size_t effective_key_len, + void **pResult); + int ARC2_encrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int ARC2_decrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int ARC2_stop_operation(void *state); + """ + ) + + +def _create_base_cipher(dict_parameters): + """This method instantiates and returns a handle to a low-level + base cipher. 
It will absorb named parameters in the process.""" + + try: + key = dict_parameters.pop("key") + except KeyError: + raise TypeError("Missing 'key' parameter") + + effective_keylen = dict_parameters.pop("effective_keylen", 1024) + + if len(key) not in key_size: + raise ValueError("Incorrect ARC2 key length (%d bytes)" % len(key)) + + if not (40 <= effective_keylen <= 1024): + raise ValueError("'effective_key_len' must be at least 40 and no larger than 1024 " + "(not %d)" % effective_keylen) + + start_operation = _raw_arc2_lib.ARC2_start_operation + stop_operation = _raw_arc2_lib.ARC2_stop_operation + + cipher = VoidPointer() + result = start_operation(c_uint8_ptr(key), + c_size_t(len(key)), + c_size_t(effective_keylen), + cipher.address_of()) + if result: + raise ValueError("Error %X while instantiating the ARC2 cipher" + % result) + + return SmartPointer(cipher.get(), stop_operation) + + +def new(key, mode, *args, **kwargs): + """Create a new RC2 cipher. + + :param key: + The secret key to use in the symmetric cipher. + Its length can vary from 5 to 128 bytes; the actual search space + (and the cipher strength) can be reduced with the ``effective_keylen`` parameter. + :type key: bytes, bytearray, memoryview + + :param mode: + The chaining mode to use for encryption or decryption. + :type mode: One of the supported ``MODE_*`` constants + + :Keyword Arguments: + * **iv** (*bytes*, *bytearray*, *memoryview*) -- + (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, + and ``MODE_OPENPGP`` modes). + + The initialization vector to use for encryption or decryption. + + For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 8 bytes long. + + For ``MODE_OPENPGP`` mode only, + it must be 8 bytes long for encryption + and 10 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + + If not provided, a random byte string is generated (you must then + read its value with the :attr:`iv` attribute). 
+ + * **nonce** (*bytes*, *bytearray*, *memoryview*) -- + (Only applicable for ``MODE_EAX`` and ``MODE_CTR``). + + A value that must never be reused for any other encryption done + with this key. + + For ``MODE_EAX`` there are no + restrictions on its length (recommended: **16** bytes). + + For ``MODE_CTR``, its length must be in the range **[0..7]**. + + If not provided for ``MODE_EAX``, a random byte string is generated (you + can read it back via the ``nonce`` attribute). + + * **effective_keylen** (*integer*) -- + Optional. Maximum strength in bits of the actual key used by the ARC2 algorithm. + If the supplied ``key`` parameter is longer (in bits) of the value specified + here, it will be weakened to match it. + If not specified, no limitation is applied. + + * **segment_size** (*integer*) -- + (Only ``MODE_CFB``).The number of **bits** the plaintext and ciphertext + are segmented in. It must be a multiple of 8. + If not specified, it will be assumed to be 8. + + * **mac_len** : (*integer*) -- + (Only ``MODE_EAX``) + Length of the authentication tag, in bytes. + It must be no longer than 8 (default). + + * **initial_value** : (*integer*) -- + (Only ``MODE_CTR``). The initial value for the counter within + the counter block. By default it is **0**. + + :Return: an ARC2 object, of the applicable mode. 
+ """ + + return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs) + +MODE_ECB = 1 +MODE_CBC = 2 +MODE_CFB = 3 +MODE_OFB = 5 +MODE_CTR = 6 +MODE_OPENPGP = 7 +MODE_EAX = 9 + +# Size of a data block (in bytes) +block_size = 8 +# Size of a key (in bytes) +key_size = range(5, 128 + 1) diff --git a/env/Lib/site-packages/Crypto/Cipher/ARC2.pyi b/env/Lib/site-packages/Crypto/Cipher/ARC2.pyi new file mode 100644 index 0000000..055c424 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/ARC2.pyi @@ -0,0 +1,35 @@ +from typing import Union, Dict, Iterable + +from Crypto.Cipher._mode_ecb import EcbMode +from Crypto.Cipher._mode_cbc import CbcMode +from Crypto.Cipher._mode_cfb import CfbMode +from Crypto.Cipher._mode_ofb import OfbMode +from Crypto.Cipher._mode_ctr import CtrMode +from Crypto.Cipher._mode_openpgp import OpenPgpMode +from Crypto.Cipher._mode_eax import EaxMode + +ARC2Mode = int + +MODE_ECB: ARC2Mode +MODE_CBC: ARC2Mode +MODE_CFB: ARC2Mode +MODE_OFB: ARC2Mode +MODE_CTR: ARC2Mode +MODE_OPENPGP: ARC2Mode +MODE_EAX: ARC2Mode + +Buffer = Union[bytes, bytearray, memoryview] + +def new(key: Buffer, + mode: ARC2Mode, + iv : Buffer = ..., + IV : Buffer = ..., + nonce : Buffer = ..., + segment_size : int = ..., + mac_len : int = ..., + initial_value : Union[int, Buffer] = ..., + counter : Dict = ...) -> \ + Union[EcbMode, CbcMode, CfbMode, OfbMode, CtrMode, OpenPgpMode]: ... + +block_size: int +key_size: Iterable[int] diff --git a/env/Lib/site-packages/Crypto/Cipher/ARC4.py b/env/Lib/site-packages/Crypto/Cipher/ARC4.py new file mode 100644 index 0000000..7150ea6 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/ARC4.py @@ -0,0 +1,137 @@ +# -*- coding: utf-8 -*- +# +# Cipher/ARC4.py : ARC4 +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import b + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, + create_string_buffer, get_raw_buffer, + SmartPointer, c_size_t, c_uint8_ptr) + + +_raw_arc4_lib = load_pycryptodome_raw_lib("Crypto.Cipher._ARC4", """ + int ARC4_stream_encrypt(void *rc4State, const uint8_t in[], + uint8_t out[], size_t len); + int ARC4_stream_init(uint8_t *key, size_t keylen, + void **pRc4State); + int ARC4_stream_destroy(void *rc4State); + """) + + +class ARC4Cipher: + """ARC4 cipher object. Do not create it directly. Use + :func:`Crypto.Cipher.ARC4.new` instead. 
+ """ + + def __init__(self, key, *args, **kwargs): + """Initialize an ARC4 cipher object + + See also `new()` at the module level.""" + + if len(args) > 0: + ndrop = args[0] + args = args[1:] + else: + ndrop = kwargs.pop('drop', 0) + + if len(key) not in key_size: + raise ValueError("Incorrect ARC4 key length (%d bytes)" % + len(key)) + + self._state = VoidPointer() + result = _raw_arc4_lib.ARC4_stream_init(c_uint8_ptr(key), + c_size_t(len(key)), + self._state.address_of()) + if result != 0: + raise ValueError("Error %d while creating the ARC4 cipher" + % result) + self._state = SmartPointer(self._state.get(), + _raw_arc4_lib.ARC4_stream_destroy) + + if ndrop > 0: + # This is OK even if the cipher is used for decryption, + # since encrypt and decrypt are actually the same thing + # with ARC4. + self.encrypt(b'\x00' * ndrop) + + self.block_size = 1 + self.key_size = len(key) + + def encrypt(self, plaintext): + """Encrypt a piece of data. + + :param plaintext: The data to encrypt, of any size. + :type plaintext: bytes, bytearray, memoryview + :returns: the encrypted byte string, of equal length as the + plaintext. + """ + + ciphertext = create_string_buffer(len(plaintext)) + result = _raw_arc4_lib.ARC4_stream_encrypt(self._state.get(), + c_uint8_ptr(plaintext), + ciphertext, + c_size_t(len(plaintext))) + if result: + raise ValueError("Error %d while encrypting with RC4" % result) + return get_raw_buffer(ciphertext) + + def decrypt(self, ciphertext): + """Decrypt a piece of data. + + :param ciphertext: The data to decrypt, of any size. + :type ciphertext: bytes, bytearray, memoryview + :returns: the decrypted byte string, of equal length as the + ciphertext. + """ + + try: + return self.encrypt(ciphertext) + except ValueError as e: + raise ValueError(str(e).replace("enc", "dec")) + + +def new(key, *args, **kwargs): + """Create a new ARC4 cipher. + + :param key: + The secret key to use in the symmetric cipher. + Its length must be in the range ``[5..256]``. 
+ The recommended length is 16 bytes. + :type key: bytes, bytearray, memoryview + + :Keyword Arguments: + * *drop* (``integer``) -- + The amount of bytes to discard from the initial part of the keystream. + In fact, such part has been found to be distinguishable from random + data (while it shouldn't) and also correlated to key. + + The recommended value is 3072_ bytes. The default value is 0. + + :Return: an `ARC4Cipher` object + + .. _3072: http://eprint.iacr.org/2002/067.pdf + """ + return ARC4Cipher(key, *args, **kwargs) + +# Size of a data block (in bytes) +block_size = 1 +# Size of a key (in bytes) +key_size = range(5, 256+1) diff --git a/env/Lib/site-packages/Crypto/Cipher/ARC4.pyi b/env/Lib/site-packages/Crypto/Cipher/ARC4.pyi new file mode 100644 index 0000000..2e75d6f --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/ARC4.pyi @@ -0,0 +1,16 @@ +from typing import Any, Union, Iterable + +Buffer = Union[bytes, bytearray, memoryview] + +class ARC4Cipher: + block_size: int + key_size: int + + def __init__(self, key: Buffer, *args: Any, **kwargs: Any) -> None: ... + def encrypt(self, plaintext: Buffer) -> bytes: ... + def decrypt(self, ciphertext: Buffer) -> bytes: ... + +def new(key: Buffer, drop : int = ...) -> ARC4Cipher: ... + +block_size: int +key_size: Iterable[int] diff --git a/env/Lib/site-packages/Crypto/Cipher/Blowfish.py b/env/Lib/site-packages/Crypto/Cipher/Blowfish.py new file mode 100644 index 0000000..6005ffe --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/Blowfish.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +# +# Cipher/Blowfish.py : Blowfish +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. 
+# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +""" +Module's constants for the modes of operation supported with Blowfish: + +:var MODE_ECB: :ref:`Electronic Code Book (ECB) ` +:var MODE_CBC: :ref:`Cipher-Block Chaining (CBC) ` +:var MODE_CFB: :ref:`Cipher FeedBack (CFB) ` +:var MODE_OFB: :ref:`Output FeedBack (OFB) ` +:var MODE_CTR: :ref:`CounTer Mode (CTR) ` +:var MODE_OPENPGP: :ref:`OpenPGP Mode ` +:var MODE_EAX: :ref:`EAX Mode ` +""" + +import sys + +from Crypto.Cipher import _create_cipher +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, c_size_t, + c_uint8_ptr) + +_raw_blowfish_lib = load_pycryptodome_raw_lib( + "Crypto.Cipher._raw_blowfish", + """ + int Blowfish_start_operation(const uint8_t key[], + size_t key_len, + void **pResult); + int Blowfish_encrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int Blowfish_decrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int Blowfish_stop_operation(void *state); + """ + ) + + +def _create_base_cipher(dict_parameters): + """This method instantiates and returns a smart pointer to + a low-level base cipher. 
It will absorb named parameters in + the process.""" + + try: + key = dict_parameters.pop("key") + except KeyError: + raise TypeError("Missing 'key' parameter") + + if len(key) not in key_size: + raise ValueError("Incorrect Blowfish key length (%d bytes)" % len(key)) + + start_operation = _raw_blowfish_lib.Blowfish_start_operation + stop_operation = _raw_blowfish_lib.Blowfish_stop_operation + + void_p = VoidPointer() + result = start_operation(c_uint8_ptr(key), + c_size_t(len(key)), + void_p.address_of()) + if result: + raise ValueError("Error %X while instantiating the Blowfish cipher" + % result) + return SmartPointer(void_p.get(), stop_operation) + + +def new(key, mode, *args, **kwargs): + """Create a new Blowfish cipher + + :param key: + The secret key to use in the symmetric cipher. + Its length can vary from 5 to 56 bytes. + :type key: bytes, bytearray, memoryview + + :param mode: + The chaining mode to use for encryption or decryption. + :type mode: One of the supported ``MODE_*`` constants + + :Keyword Arguments: + * **iv** (*bytes*, *bytearray*, *memoryview*) -- + (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, + and ``MODE_OPENPGP`` modes). + + The initialization vector to use for encryption or decryption. + + For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 8 bytes long. + + For ``MODE_OPENPGP`` mode only, + it must be 8 bytes long for encryption + and 10 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + + If not provided, a random byte string is generated (you must then + read its value with the :attr:`iv` attribute). + + * **nonce** (*bytes*, *bytearray*, *memoryview*) -- + (Only applicable for ``MODE_EAX`` and ``MODE_CTR``). + + A value that must never be reused for any other encryption done + with this key. + + For ``MODE_EAX`` there are no + restrictions on its length (recommended: **16** bytes). 
+ + For ``MODE_CTR``, its length must be in the range **[0..7]**. + + If not provided for ``MODE_EAX``, a random byte string is generated (you + can read it back via the ``nonce`` attribute). + + * **segment_size** (*integer*) -- + (Only ``MODE_CFB``).The number of **bits** the plaintext and ciphertext + are segmented in. It must be a multiple of 8. + If not specified, it will be assumed to be 8. + + * **mac_len** : (*integer*) -- + (Only ``MODE_EAX``) + Length of the authentication tag, in bytes. + It must be no longer than 8 (default). + + * **initial_value** : (*integer*) -- + (Only ``MODE_CTR``). The initial value for the counter within + the counter block. By default it is **0**. + + :Return: a Blowfish object, of the applicable mode. + """ + + return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs) + +MODE_ECB = 1 +MODE_CBC = 2 +MODE_CFB = 3 +MODE_OFB = 5 +MODE_CTR = 6 +MODE_OPENPGP = 7 +MODE_EAX = 9 + +# Size of a data block (in bytes) +block_size = 8 +# Size of a key (in bytes) +key_size = range(4, 56 + 1) diff --git a/env/Lib/site-packages/Crypto/Cipher/Blowfish.pyi b/env/Lib/site-packages/Crypto/Cipher/Blowfish.pyi new file mode 100644 index 0000000..eff9da9 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/Blowfish.pyi @@ -0,0 +1,35 @@ +from typing import Union, Dict, Iterable + +from Crypto.Cipher._mode_ecb import EcbMode +from Crypto.Cipher._mode_cbc import CbcMode +from Crypto.Cipher._mode_cfb import CfbMode +from Crypto.Cipher._mode_ofb import OfbMode +from Crypto.Cipher._mode_ctr import CtrMode +from Crypto.Cipher._mode_openpgp import OpenPgpMode +from Crypto.Cipher._mode_eax import EaxMode + +BlowfishMode = int + +MODE_ECB: BlowfishMode +MODE_CBC: BlowfishMode +MODE_CFB: BlowfishMode +MODE_OFB: BlowfishMode +MODE_CTR: BlowfishMode +MODE_OPENPGP: BlowfishMode +MODE_EAX: BlowfishMode + +Buffer = Union[bytes, bytearray, memoryview] + +def new(key: Buffer, + mode: BlowfishMode, + iv : Buffer = ..., + IV : Buffer = ..., + nonce : 
Buffer = ..., + segment_size : int = ..., + mac_len : int = ..., + initial_value : Union[int, Buffer] = ..., + counter : Dict = ...) -> \ + Union[EcbMode, CbcMode, CfbMode, OfbMode, CtrMode, OpenPgpMode]: ... + +block_size: int +key_size: Iterable[int] diff --git a/env/Lib/site-packages/Crypto/Cipher/CAST.py b/env/Lib/site-packages/Crypto/Cipher/CAST.py new file mode 100644 index 0000000..c7e82c1 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/CAST.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +# +# Cipher/CAST.py : CAST +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== +""" +Module's constants for the modes of operation supported with CAST: + +:var MODE_ECB: :ref:`Electronic Code Book (ECB) ` +:var MODE_CBC: :ref:`Cipher-Block Chaining (CBC) ` +:var MODE_CFB: :ref:`Cipher FeedBack (CFB) ` +:var MODE_OFB: :ref:`Output FeedBack (OFB) ` +:var MODE_CTR: :ref:`CounTer Mode (CTR) ` +:var MODE_OPENPGP: :ref:`OpenPGP Mode ` +:var MODE_EAX: :ref:`EAX Mode ` +""" + +import sys + +from Crypto.Cipher import _create_cipher +from Crypto.Util.py3compat import byte_string +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + c_size_t, c_uint8_ptr) + +_raw_cast_lib = load_pycryptodome_raw_lib( + "Crypto.Cipher._raw_cast", + """ + int CAST_start_operation(const uint8_t key[], + size_t key_len, + void **pResult); + int CAST_encrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int CAST_decrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int CAST_stop_operation(void *state); + """) + + +def _create_base_cipher(dict_parameters): + """This method instantiates and returns a handle to a low-level + base cipher. It will absorb named parameters in the process.""" + + try: + key = dict_parameters.pop("key") + except KeyError: + raise TypeError("Missing 'key' parameter") + + if len(key) not in key_size: + raise ValueError("Incorrect CAST key length (%d bytes)" % len(key)) + + start_operation = _raw_cast_lib.CAST_start_operation + stop_operation = _raw_cast_lib.CAST_stop_operation + + cipher = VoidPointer() + result = start_operation(c_uint8_ptr(key), + c_size_t(len(key)), + cipher.address_of()) + if result: + raise ValueError("Error %X while instantiating the CAST cipher" + % result) + + return SmartPointer(cipher.get(), stop_operation) + + +def new(key, mode, *args, **kwargs): + """Create a new CAST cipher + + :param key: + The secret key to use in the symmetric cipher. 
+ Its length can vary from 5 to 16 bytes. + :type key: bytes, bytearray, memoryview + + :param mode: + The chaining mode to use for encryption or decryption. + :type mode: One of the supported ``MODE_*`` constants + + :Keyword Arguments: + * **iv** (*bytes*, *bytearray*, *memoryview*) -- + (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, + and ``MODE_OPENPGP`` modes). + + The initialization vector to use for encryption or decryption. + + For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 8 bytes long. + + For ``MODE_OPENPGP`` mode only, + it must be 8 bytes long for encryption + and 10 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + + If not provided, a random byte string is generated (you must then + read its value with the :attr:`iv` attribute). + + * **nonce** (*bytes*, *bytearray*, *memoryview*) -- + (Only applicable for ``MODE_EAX`` and ``MODE_CTR``). + + A value that must never be reused for any other encryption done + with this key. + + For ``MODE_EAX`` there are no + restrictions on its length (recommended: **16** bytes). + + For ``MODE_CTR``, its length must be in the range **[0..7]**. + + If not provided for ``MODE_EAX``, a random byte string is generated (you + can read it back via the ``nonce`` attribute). + + * **segment_size** (*integer*) -- + (Only ``MODE_CFB``).The number of **bits** the plaintext and ciphertext + are segmented in. It must be a multiple of 8. + If not specified, it will be assumed to be 8. + + * **mac_len** : (*integer*) -- + (Only ``MODE_EAX``) + Length of the authentication tag, in bytes. + It must be no longer than 8 (default). + + * **initial_value** : (*integer*) -- + (Only ``MODE_CTR``). The initial value for the counter within + the counter block. By default it is **0**. + + :Return: a CAST object, of the applicable mode. 
+ """ + + return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs) + +MODE_ECB = 1 +MODE_CBC = 2 +MODE_CFB = 3 +MODE_OFB = 5 +MODE_CTR = 6 +MODE_OPENPGP = 7 +MODE_EAX = 9 + +# Size of a data block (in bytes) +block_size = 8 +# Size of a key (in bytes) +key_size = range(5, 16 + 1) diff --git a/env/Lib/site-packages/Crypto/Cipher/CAST.pyi b/env/Lib/site-packages/Crypto/Cipher/CAST.pyi new file mode 100644 index 0000000..a0cb6af --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/CAST.pyi @@ -0,0 +1,35 @@ +from typing import Union, Dict, Iterable + +from Crypto.Cipher._mode_ecb import EcbMode +from Crypto.Cipher._mode_cbc import CbcMode +from Crypto.Cipher._mode_cfb import CfbMode +from Crypto.Cipher._mode_ofb import OfbMode +from Crypto.Cipher._mode_ctr import CtrMode +from Crypto.Cipher._mode_openpgp import OpenPgpMode +from Crypto.Cipher._mode_eax import EaxMode + +CASTMode = int + +MODE_ECB: CASTMode +MODE_CBC: CASTMode +MODE_CFB: CASTMode +MODE_OFB: CASTMode +MODE_CTR: CASTMode +MODE_OPENPGP: CASTMode +MODE_EAX: CASTMode + +Buffer = Union[bytes, bytearray, memoryview] + +def new(key: Buffer, + mode: CASTMode, + iv : Buffer = ..., + IV : Buffer = ..., + nonce : Buffer = ..., + segment_size : int = ..., + mac_len : int = ..., + initial_value : Union[int, Buffer] = ..., + counter : Dict = ...) -> \ + Union[EcbMode, CbcMode, CfbMode, OfbMode, CtrMode, OpenPgpMode]: ... + +block_size: int +key_size : Iterable[int] diff --git a/env/Lib/site-packages/Crypto/Cipher/ChaCha20.py b/env/Lib/site-packages/Crypto/Cipher/ChaCha20.py new file mode 100644 index 0000000..9bd2252 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/ChaCha20.py @@ -0,0 +1,287 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. 
Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +from Crypto.Random import get_random_bytes + +from Crypto.Util.py3compat import _copy_bytes +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + create_string_buffer, + get_raw_buffer, VoidPointer, + SmartPointer, c_size_t, + c_uint8_ptr, c_ulong, + is_writeable_buffer) + +_raw_chacha20_lib = load_pycryptodome_raw_lib("Crypto.Cipher._chacha20", + """ + int chacha20_init(void **pState, + const uint8_t *key, + size_t keySize, + const uint8_t *nonce, + size_t nonceSize); + + int chacha20_destroy(void *state); + + int chacha20_encrypt(void *state, + const uint8_t in[], + uint8_t out[], + size_t len); + + int chacha20_seek(void *state, + unsigned long block_high, + unsigned long block_low, + unsigned offset); + int hchacha20( const uint8_t key[32], + const uint8_t nonce16[16], + uint8_t subkey[32]); + """) + + +def _HChaCha20(key, nonce): + + assert(len(key) == 32) + assert(len(nonce) == 16) + + subkey = bytearray(32) + result = _raw_chacha20_lib.hchacha20( + c_uint8_ptr(key), + c_uint8_ptr(nonce), + c_uint8_ptr(subkey)) + if result: + raise ValueError("Error %d when deriving subkey with HChaCha20" % result) + + return subkey + + +class ChaCha20Cipher(object): + """ChaCha20 (or XChaCha20) cipher object. + Do not create it directly. Use :py:func:`new` instead. 
+ + :var nonce: The nonce with length 8, 12 or 24 bytes + :vartype nonce: bytes + """ + + block_size = 1 + + def __init__(self, key, nonce): + """Initialize a ChaCha20/XChaCha20 cipher object + + See also `new()` at the module level.""" + + self.nonce = _copy_bytes(None, None, nonce) + + # XChaCha20 requires a key derivation with HChaCha20 + # See 2.3 in https://tools.ietf.org/html/draft-arciszewski-xchacha-03 + if len(nonce) == 24: + key = _HChaCha20(key, nonce[:16]) + nonce = b'\x00' * 4 + nonce[16:] + self._name = "XChaCha20" + else: + self._name = "ChaCha20" + nonce = self.nonce + + self._next = ( self.encrypt, self.decrypt ) + + self._state = VoidPointer() + result = _raw_chacha20_lib.chacha20_init( + self._state.address_of(), + c_uint8_ptr(key), + c_size_t(len(key)), + nonce, + c_size_t(len(nonce))) + if result: + raise ValueError("Error %d instantiating a %s cipher" % (result, + self._name)) + self._state = SmartPointer(self._state.get(), + _raw_chacha20_lib.chacha20_destroy) + + def encrypt(self, plaintext, output=None): + """Encrypt a piece of data. + + Args: + plaintext(bytes/bytearray/memoryview): The data to encrypt, of any size. + Keyword Args: + output(bytes/bytearray/memoryview): The location where the ciphertext + is written to. If ``None``, the ciphertext is returned. + Returns: + If ``output`` is ``None``, the ciphertext is returned as ``bytes``. + Otherwise, ``None``. 
+ """ + + if self.encrypt not in self._next: + raise TypeError("Cipher object can only be used for decryption") + self._next = ( self.encrypt, ) + return self._encrypt(plaintext, output) + + def _encrypt(self, plaintext, output): + """Encrypt without FSM checks""" + + if output is None: + ciphertext = create_string_buffer(len(plaintext)) + else: + ciphertext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(plaintext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = _raw_chacha20_lib.chacha20_encrypt( + self._state.get(), + c_uint8_ptr(plaintext), + c_uint8_ptr(ciphertext), + c_size_t(len(plaintext))) + if result: + raise ValueError("Error %d while encrypting with %s" % (result, self._name)) + + if output is None: + return get_raw_buffer(ciphertext) + else: + return None + + def decrypt(self, ciphertext, output=None): + """Decrypt a piece of data. + + Args: + ciphertext(bytes/bytearray/memoryview): The data to decrypt, of any size. + Keyword Args: + output(bytes/bytearray/memoryview): The location where the plaintext + is written to. If ``None``, the plaintext is returned. + Returns: + If ``output`` is ``None``, the plaintext is returned as ``bytes``. + Otherwise, ``None``. + """ + + if self.decrypt not in self._next: + raise TypeError("Cipher object can only be used for encryption") + self._next = ( self.decrypt, ) + + try: + return self._encrypt(ciphertext, output) + except ValueError as e: + raise ValueError(str(e).replace("enc", "dec")) + + def seek(self, position): + """Seek to a certain position in the key stream. + + Args: + position (integer): + The absolute position within the key stream, in bytes. 
+ """ + + position, offset = divmod(position, 64) + block_low = position & 0xFFFFFFFF + block_high = position >> 32 + + result = _raw_chacha20_lib.chacha20_seek( + self._state.get(), + c_ulong(block_high), + c_ulong(block_low), + offset + ) + if result: + raise ValueError("Error %d while seeking with %s" % (result, self._name)) + + +def _derive_Poly1305_key_pair(key, nonce): + """Derive a tuple (r, s, nonce) for a Poly1305 MAC. + + If nonce is ``None``, a new 12-byte nonce is generated. + """ + + if len(key) != 32: + raise ValueError("Poly1305 with ChaCha20 requires a 32-byte key") + + if nonce is None: + padded_nonce = nonce = get_random_bytes(12) + elif len(nonce) == 8: + # See RFC7538, 2.6: [...] ChaCha20 as specified here requires a 96-bit + # nonce. So if the provided nonce is only 64-bit, then the first 32 + # bits of the nonce will be set to a constant number. + # This will usually be zero, but for protocols with multiple senders it may be + # different for each sender, but should be the same for all + # invocations of the function with the same key by a particular + # sender. + padded_nonce = b'\x00\x00\x00\x00' + nonce + elif len(nonce) == 12: + padded_nonce = nonce + else: + raise ValueError("Poly1305 with ChaCha20 requires an 8- or 12-byte nonce") + + rs = new(key=key, nonce=padded_nonce).encrypt(b'\x00' * 32) + return rs[:16], rs[16:], nonce + + +def new(**kwargs): + """Create a new ChaCha20 or XChaCha20 cipher + + Keyword Args: + key (bytes/bytearray/memoryview): The secret key to use. + It must be 32 bytes long. + nonce (bytes/bytearray/memoryview): A mandatory value that + must never be reused for any other encryption + done with this key. + + For ChaCha20, it must be 8 or 12 bytes long. + + For XChaCha20, it must be 24 bytes long. + + If not provided, 8 bytes will be randomly generated + (you can find them back in the ``nonce`` attribute). 
+ + :Return: a :class:`Crypto.Cipher.ChaCha20.ChaCha20Cipher` object + """ + + try: + key = kwargs.pop("key") + except KeyError as e: + raise TypeError("Missing parameter %s" % e) + + nonce = kwargs.pop("nonce", None) + if nonce is None: + nonce = get_random_bytes(8) + + if len(key) != 32: + raise ValueError("ChaCha20/XChaCha20 key must be 32 bytes long") + + if len(nonce) not in (8, 12, 24): + raise ValueError("Nonce must be 8/12 bytes(ChaCha20) or 24 bytes (XChaCha20)") + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + return ChaCha20Cipher(key, nonce) + +# Size of a data block (in bytes) +block_size = 1 + +# Size of a key (in bytes) +key_size = 32 diff --git a/env/Lib/site-packages/Crypto/Cipher/ChaCha20.pyi b/env/Lib/site-packages/Crypto/Cipher/ChaCha20.pyi new file mode 100644 index 0000000..3d00a1d --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/ChaCha20.pyi @@ -0,0 +1,25 @@ +from typing import Union, overload + +Buffer = Union[bytes, bytearray, memoryview] + +def _HChaCha20(key: Buffer, nonce: Buffer) -> bytearray: ... + +class ChaCha20Cipher: + block_size: int + nonce: bytes + + def __init__(self, key: Buffer, nonce: Buffer) -> None: ... + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + def seek(self, position: int) -> None: ... + +def new(key: Buffer, nonce: Buffer = ...) -> ChaCha20Cipher: ... 
+ +block_size: int +key_size: int diff --git a/env/Lib/site-packages/Crypto/Cipher/ChaCha20_Poly1305.py b/env/Lib/site-packages/Crypto/Cipher/ChaCha20_Poly1305.py new file mode 100644 index 0000000..21ddca3 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/ChaCha20_Poly1305.py @@ -0,0 +1,336 @@ +# =================================================================== +# +# Copyright (c) 2018, Helder Eijs +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +from binascii import unhexlify + +from Crypto.Cipher import ChaCha20 +from Crypto.Cipher.ChaCha20 import _HChaCha20 +from Crypto.Hash import Poly1305, BLAKE2s + +from Crypto.Random import get_random_bytes + +from Crypto.Util.number import long_to_bytes +from Crypto.Util.py3compat import _copy_bytes, bord +from Crypto.Util._raw_api import is_buffer + + +def _enum(**enums): + return type('Enum', (), enums) + + +_CipherStatus = _enum(PROCESSING_AUTH_DATA=1, + PROCESSING_CIPHERTEXT=2, + PROCESSING_DONE=3) + + +class ChaCha20Poly1305Cipher(object): + """ChaCha20-Poly1305 and XChaCha20-Poly1305 cipher object. + Do not create it directly. Use :py:func:`new` instead. + + :var nonce: The nonce with length 8, 12 or 24 bytes + :vartype nonce: byte string + """ + + def __init__(self, key, nonce): + """Initialize a ChaCha20-Poly1305 AEAD cipher object + + See also `new()` at the module level.""" + + self.nonce = _copy_bytes(None, None, nonce) + + self._next = (self.update, self.encrypt, self.decrypt, self.digest, + self.verify) + + self._authenticator = Poly1305.new(key=key, nonce=nonce, cipher=ChaCha20) + + self._cipher = ChaCha20.new(key=key, nonce=nonce) + self._cipher.seek(64) # Block counter starts at 1 + + self._len_aad = 0 + self._len_ct = 0 + self._mac_tag = None + self._status = _CipherStatus.PROCESSING_AUTH_DATA + + def update(self, data): + """Protect the associated data. + + Associated data (also known as *additional authenticated data* - AAD) + is the piece of the message that must stay in the clear, while + still allowing the receiver to verify its integrity. + An example is packet headers. + + The associated data (possibly split into multiple segments) is + fed into :meth:`update` before any call to :meth:`decrypt` or :meth:`encrypt`. + If there is no associated data, :meth:`update` is not called. + + :param bytes/bytearray/memoryview assoc_data: + A piece of associated data. 
There are no restrictions on its size. + """ + + if self.update not in self._next: + raise TypeError("update() method cannot be called") + + self._len_aad += len(data) + self._authenticator.update(data) + + def _pad_aad(self): + + assert(self._status == _CipherStatus.PROCESSING_AUTH_DATA) + if self._len_aad & 0x0F: + self._authenticator.update(b'\x00' * (16 - (self._len_aad & 0x0F))) + self._status = _CipherStatus.PROCESSING_CIPHERTEXT + + def encrypt(self, plaintext, output=None): + """Encrypt a piece of data. + + Args: + plaintext(bytes/bytearray/memoryview): The data to encrypt, of any size. + Keyword Args: + output(bytes/bytearray/memoryview): The location where the ciphertext + is written to. If ``None``, the ciphertext is returned. + Returns: + If ``output`` is ``None``, the ciphertext is returned as ``bytes``. + Otherwise, ``None``. + """ + + if self.encrypt not in self._next: + raise TypeError("encrypt() method cannot be called") + + if self._status == _CipherStatus.PROCESSING_AUTH_DATA: + self._pad_aad() + + self._next = (self.encrypt, self.digest) + + result = self._cipher.encrypt(plaintext, output=output) + self._len_ct += len(plaintext) + if output is None: + self._authenticator.update(result) + else: + self._authenticator.update(output) + return result + + def decrypt(self, ciphertext, output=None): + """Decrypt a piece of data. + + Args: + ciphertext(bytes/bytearray/memoryview): The data to decrypt, of any size. + Keyword Args: + output(bytes/bytearray/memoryview): The location where the plaintext + is written to. If ``None``, the plaintext is returned. + Returns: + If ``output`` is ``None``, the plaintext is returned as ``bytes``. + Otherwise, ``None``. 
+ """ + + if self.decrypt not in self._next: + raise TypeError("decrypt() method cannot be called") + + if self._status == _CipherStatus.PROCESSING_AUTH_DATA: + self._pad_aad() + + self._next = (self.decrypt, self.verify) + + self._len_ct += len(ciphertext) + self._authenticator.update(ciphertext) + return self._cipher.decrypt(ciphertext, output=output) + + def _compute_mac(self): + """Finalize the cipher (if not done already) and return the MAC.""" + + if self._mac_tag: + assert(self._status == _CipherStatus.PROCESSING_DONE) + return self._mac_tag + + assert(self._status != _CipherStatus.PROCESSING_DONE) + + if self._status == _CipherStatus.PROCESSING_AUTH_DATA: + self._pad_aad() + + if self._len_ct & 0x0F: + self._authenticator.update(b'\x00' * (16 - (self._len_ct & 0x0F))) + + self._status = _CipherStatus.PROCESSING_DONE + + self._authenticator.update(long_to_bytes(self._len_aad, 8)[::-1]) + self._authenticator.update(long_to_bytes(self._len_ct, 8)[::-1]) + self._mac_tag = self._authenticator.digest() + return self._mac_tag + + def digest(self): + """Compute the *binary* authentication tag (MAC). + + :Return: the MAC tag, as 16 ``bytes``. + """ + + if self.digest not in self._next: + raise TypeError("digest() method cannot be called") + self._next = (self.digest,) + + return self._compute_mac() + + def hexdigest(self): + """Compute the *printable* authentication tag (MAC). + + This method is like :meth:`digest`. + + :Return: the MAC tag, as a hexadecimal string. + """ + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def verify(self, received_mac_tag): + """Validate the *binary* authentication tag (MAC). + + The receiver invokes this method at the very end, to + check if the associated data (if any) and the decrypted + messages are valid. + + :param bytes/bytearray/memoryview received_mac_tag: + This is the 16-byte *binary* MAC, as received from the sender. + :Raises ValueError: + if the MAC does not match. 
The message has been tampered with + or the key is incorrect. + """ + + if self.verify not in self._next: + raise TypeError("verify() cannot be called" + " when encrypting a message") + self._next = (self.verify,) + + secret = get_random_bytes(16) + + self._compute_mac() + + mac1 = BLAKE2s.new(digest_bits=160, key=secret, + data=self._mac_tag) + mac2 = BLAKE2s.new(digest_bits=160, key=secret, + data=received_mac_tag) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + def hexverify(self, hex_mac_tag): + """Validate the *printable* authentication tag (MAC). + + This method is like :meth:`verify`. + + :param string hex_mac_tag: + This is the *printable* MAC. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + self.verify(unhexlify(hex_mac_tag)) + + def encrypt_and_digest(self, plaintext): + """Perform :meth:`encrypt` and :meth:`digest` in one step. + + :param plaintext: The data to encrypt, of any size. + :type plaintext: bytes/bytearray/memoryview + :return: a tuple with two ``bytes`` objects: + + - the ciphertext, of equal length as the plaintext + - the 16-byte MAC tag + """ + + return self.encrypt(plaintext), self.digest() + + def decrypt_and_verify(self, ciphertext, received_mac_tag): + """Perform :meth:`decrypt` and :meth:`verify` in one step. + + :param ciphertext: The piece of data to decrypt. + :type ciphertext: bytes/bytearray/memoryview + :param bytes received_mac_tag: + This is the 16-byte *binary* MAC, as received from the sender. + :return: the decrypted data (as ``bytes``) + :raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + plaintext = self.decrypt(ciphertext) + self.verify(received_mac_tag) + return plaintext + + +def new(**kwargs): + """Create a new ChaCha20-Poly1305 or XChaCha20-Poly1305 AEAD cipher. + + :keyword key: The secret key to use. It must be 32 bytes long. 
+    :type key: byte string
+
+    :keyword nonce:
+        A value that must never be reused for any other encryption
+        done with this key.
+
+        For ChaCha20-Poly1305, it must be 8 or 12 bytes long.
+
+        For XChaCha20-Poly1305, it must be 24 bytes long.
+
+        If not provided, 12 ``bytes`` will be generated randomly
+        (you can find them back in the ``nonce`` attribute).
+    :type nonce: bytes, bytearray, memoryview
+
+    :Return: a :class:`Crypto.Cipher.ChaCha20.ChaCha20Poly1305Cipher` object
+    """
+
+    try:
+        key = kwargs.pop("key")
+    except KeyError as e:
+        raise TypeError("Missing parameter %s" % e)
+
+
+
+    if len(key) != 32:
+        raise ValueError("Key must be 32 bytes long")
+
+    nonce = kwargs.pop("nonce", None)
+    if nonce is None:
+        nonce = get_random_bytes(12)
+
+    if len(nonce) in (8, 12):
+        pass
+    elif len(nonce) == 24:
+        key = _HChaCha20(key, nonce[:16])
+        nonce = b'\x00\x00\x00\x00' + nonce[16:]
+    else:
+        raise ValueError("Nonce must be 8, 12 or 24 bytes long")
+
+    if not is_buffer(nonce):
+        raise TypeError("nonce must be bytes, bytearray or memoryview")
+
+    if kwargs:
+        raise TypeError("Unknown parameters: " + str(kwargs))
+
+    return ChaCha20Poly1305Cipher(key, nonce)
+
+
+# Size of a key (in bytes)
+key_size = 32
diff --git a/env/Lib/site-packages/Crypto/Cipher/ChaCha20_Poly1305.pyi b/env/Lib/site-packages/Crypto/Cipher/ChaCha20_Poly1305.pyi
new file mode 100644
index 0000000..ef0450f
--- /dev/null
+++ b/env/Lib/site-packages/Crypto/Cipher/ChaCha20_Poly1305.pyi
@@ -0,0 +1,28 @@
+from typing import Union, Tuple, overload
+
+Buffer = Union[bytes, bytearray, memoryview]
+
+class ChaCha20Poly1305Cipher:
+    nonce: bytes
+
+    def __init__(self, key: Buffer, nonce: Buffer) -> None: ...
+    def update(self, data: Buffer) -> None: ...
+    @overload
+    def encrypt(self, plaintext: Buffer) -> bytes: ...
+    @overload
+    def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ...
+ @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, received_mac_tag: Buffer) -> None: ... + def hexverify(self, received_mac_tag: str) -> None: ... + def encrypt_and_digest(self, plaintext: Buffer) -> Tuple[bytes, bytes]: ... + def decrypt_and_verify(self, ciphertext: Buffer, received_mac_tag: Buffer) -> bytes: ... + +def new(key: Buffer, nonce: Buffer = ...) -> ChaCha20Poly1305Cipher: ... + +block_size: int +key_size: int diff --git a/env/Lib/site-packages/Crypto/Cipher/DES.py b/env/Lib/site-packages/Crypto/Cipher/DES.py new file mode 100644 index 0000000..5cc286a --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/DES.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- +# +# Cipher/DES.py : DES +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== +""" +Module's constants for the modes of operation supported with Single DES: + +:var MODE_ECB: :ref:`Electronic Code Book (ECB) ` +:var MODE_CBC: :ref:`Cipher-Block Chaining (CBC) ` +:var MODE_CFB: :ref:`Cipher FeedBack (CFB) ` +:var MODE_OFB: :ref:`Output FeedBack (OFB) ` +:var MODE_CTR: :ref:`CounTer Mode (CTR) ` +:var MODE_OPENPGP: :ref:`OpenPGP Mode ` +:var MODE_EAX: :ref:`EAX Mode ` +""" + +import sys + +from Crypto.Cipher import _create_cipher +from Crypto.Util.py3compat import byte_string +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + c_size_t, c_uint8_ptr) + +_raw_des_lib = load_pycryptodome_raw_lib( + "Crypto.Cipher._raw_des", + """ + int DES_start_operation(const uint8_t key[], + size_t key_len, + void **pResult); + int DES_encrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int DES_decrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int DES_stop_operation(void *state); + """) + + +def _create_base_cipher(dict_parameters): + """This method instantiates and returns a handle to a low-level + base cipher. It will absorb named parameters in the process.""" + + try: + key = dict_parameters.pop("key") + except KeyError: + raise TypeError("Missing 'key' parameter") + + if len(key) != key_size: + raise ValueError("Incorrect DES key length (%d bytes)" % len(key)) + + start_operation = _raw_des_lib.DES_start_operation + stop_operation = _raw_des_lib.DES_stop_operation + + cipher = VoidPointer() + result = start_operation(c_uint8_ptr(key), + c_size_t(len(key)), + cipher.address_of()) + if result: + raise ValueError("Error %X while instantiating the DES cipher" + % result) + return SmartPointer(cipher.get(), stop_operation) + + +def new(key, mode, *args, **kwargs): + """Create a new DES cipher. + + :param key: + The secret key to use in the symmetric cipher. + It must be 8 byte long. 
The parity bits will be ignored. + :type key: bytes/bytearray/memoryview + + :param mode: + The chaining mode to use for encryption or decryption. + :type mode: One of the supported ``MODE_*`` constants + + :Keyword Arguments: + * **iv** (*byte string*) -- + (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, + and ``MODE_OPENPGP`` modes). + + The initialization vector to use for encryption or decryption. + + For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 8 bytes long. + + For ``MODE_OPENPGP`` mode only, + it must be 8 bytes long for encryption + and 10 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + + If not provided, a random byte string is generated (you must then + read its value with the :attr:`iv` attribute). + + * **nonce** (*byte string*) -- + (Only applicable for ``MODE_EAX`` and ``MODE_CTR``). + + A value that must never be reused for any other encryption done + with this key. + + For ``MODE_EAX`` there are no + restrictions on its length (recommended: **16** bytes). + + For ``MODE_CTR``, its length must be in the range **[0..7]**. + + If not provided for ``MODE_EAX``, a random byte string is generated (you + can read it back via the ``nonce`` attribute). + + * **segment_size** (*integer*) -- + (Only ``MODE_CFB``).The number of **bits** the plaintext and ciphertext + are segmented in. It must be a multiple of 8. + If not specified, it will be assumed to be 8. + + * **mac_len** : (*integer*) -- + (Only ``MODE_EAX``) + Length of the authentication tag, in bytes. + It must be no longer than 8 (default). + + * **initial_value** : (*integer*) -- + (Only ``MODE_CTR``). The initial value for the counter within + the counter block. By default it is **0**. + + :Return: a DES object, of the applicable mode. 
+ """ + + return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs) + +MODE_ECB = 1 +MODE_CBC = 2 +MODE_CFB = 3 +MODE_OFB = 5 +MODE_CTR = 6 +MODE_OPENPGP = 7 +MODE_EAX = 9 + +# Size of a data block (in bytes) +block_size = 8 +# Size of a key (in bytes) +key_size = 8 diff --git a/env/Lib/site-packages/Crypto/Cipher/DES.pyi b/env/Lib/site-packages/Crypto/Cipher/DES.pyi new file mode 100644 index 0000000..1047f13 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/DES.pyi @@ -0,0 +1,35 @@ +from typing import Union, Dict, Iterable + +from Crypto.Cipher._mode_ecb import EcbMode +from Crypto.Cipher._mode_cbc import CbcMode +from Crypto.Cipher._mode_cfb import CfbMode +from Crypto.Cipher._mode_ofb import OfbMode +from Crypto.Cipher._mode_ctr import CtrMode +from Crypto.Cipher._mode_openpgp import OpenPgpMode +from Crypto.Cipher._mode_eax import EaxMode + +DESMode = int + +MODE_ECB: DESMode +MODE_CBC: DESMode +MODE_CFB: DESMode +MODE_OFB: DESMode +MODE_CTR: DESMode +MODE_OPENPGP: DESMode +MODE_EAX: DESMode + +Buffer = Union[bytes, bytearray, memoryview] + +def new(key: Buffer, + mode: DESMode, + iv : Buffer = ..., + IV : Buffer = ..., + nonce : Buffer = ..., + segment_size : int = ..., + mac_len : int = ..., + initial_value : Union[int, Buffer] = ..., + counter : Dict = ...) -> \ + Union[EcbMode, CbcMode, CfbMode, OfbMode, CtrMode, OpenPgpMode]: ... + +block_size: int +key_size: int diff --git a/env/Lib/site-packages/Crypto/Cipher/DES3.py b/env/Lib/site-packages/Crypto/Cipher/DES3.py new file mode 100644 index 0000000..c0d9367 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/DES3.py @@ -0,0 +1,187 @@ +# -*- coding: utf-8 -*- +# +# Cipher/DES3.py : DES3 +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +""" +Module's constants for the modes of operation supported with Triple DES: + +:var MODE_ECB: :ref:`Electronic Code Book (ECB) ` +:var MODE_CBC: :ref:`Cipher-Block Chaining (CBC) ` +:var MODE_CFB: :ref:`Cipher FeedBack (CFB) ` +:var MODE_OFB: :ref:`Output FeedBack (OFB) ` +:var MODE_CTR: :ref:`CounTer Mode (CTR) ` +:var MODE_OPENPGP: :ref:`OpenPGP Mode ` +:var MODE_EAX: :ref:`EAX Mode ` +""" + +import sys + +from Crypto.Cipher import _create_cipher +from Crypto.Util.py3compat import byte_string, bchr, bord, bstr +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + c_size_t) + +_raw_des3_lib = load_pycryptodome_raw_lib( + "Crypto.Cipher._raw_des3", + """ + int DES3_start_operation(const uint8_t key[], + size_t key_len, + void **pResult); + int DES3_encrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int DES3_decrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int DES3_stop_operation(void *state); + """) + + +def adjust_key_parity(key_in): + """Set the parity bits in a TDES key. 
+ + :param key_in: the TDES key whose bits need to be adjusted + :type key_in: byte string + + :returns: a copy of ``key_in``, with the parity bits correctly set + :rtype: byte string + + :raises ValueError: if the TDES key is not 16 or 24 bytes long + :raises ValueError: if the TDES key degenerates into Single DES + """ + + def parity_byte(key_byte): + parity = 1 + for i in range(1, 8): + parity ^= (key_byte >> i) & 1 + return (key_byte & 0xFE) | parity + + if len(key_in) not in key_size: + raise ValueError("Not a valid TDES key") + + key_out = b"".join([ bchr(parity_byte(bord(x))) for x in key_in ]) + + if key_out[:8] == key_out[8:16] or key_out[-16:-8] == key_out[-8:]: + raise ValueError("Triple DES key degenerates to single DES") + + return key_out + + +def _create_base_cipher(dict_parameters): + """This method instantiates and returns a handle to a low-level base cipher. + It will absorb named parameters in the process.""" + + try: + key_in = dict_parameters.pop("key") + except KeyError: + raise TypeError("Missing 'key' parameter") + + key = adjust_key_parity(bstr(key_in)) + + start_operation = _raw_des3_lib.DES3_start_operation + stop_operation = _raw_des3_lib.DES3_stop_operation + + cipher = VoidPointer() + result = start_operation(key, + c_size_t(len(key)), + cipher.address_of()) + if result: + raise ValueError("Error %X while instantiating the TDES cipher" + % result) + return SmartPointer(cipher.get(), stop_operation) + + +def new(key, mode, *args, **kwargs): + """Create a new Triple DES cipher. + + :param key: + The secret key to use in the symmetric cipher. + It must be 16 or 24 byte long. The parity bits will be ignored. + :type key: bytes/bytearray/memoryview + + :param mode: + The chaining mode to use for encryption or decryption. 
+ :type mode: One of the supported ``MODE_*`` constants + + :Keyword Arguments: + * **iv** (*bytes*, *bytearray*, *memoryview*) -- + (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, + and ``MODE_OPENPGP`` modes). + + The initialization vector to use for encryption or decryption. + + For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 8 bytes long. + + For ``MODE_OPENPGP`` mode only, + it must be 8 bytes long for encryption + and 10 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + + If not provided, a random byte string is generated (you must then + read its value with the :attr:`iv` attribute). + + * **nonce** (*bytes*, *bytearray*, *memoryview*) -- + (Only applicable for ``MODE_EAX`` and ``MODE_CTR``). + + A value that must never be reused for any other encryption done + with this key. + + For ``MODE_EAX`` there are no + restrictions on its length (recommended: **16** bytes). + + For ``MODE_CTR``, its length must be in the range **[0..7]**. + + If not provided for ``MODE_EAX``, a random byte string is generated (you + can read it back via the ``nonce`` attribute). + + * **segment_size** (*integer*) -- + (Only ``MODE_CFB``).The number of **bits** the plaintext and ciphertext + are segmented in. It must be a multiple of 8. + If not specified, it will be assumed to be 8. + + * **mac_len** : (*integer*) -- + (Only ``MODE_EAX``) + Length of the authentication tag, in bytes. + It must be no longer than 8 (default). + + * **initial_value** : (*integer*) -- + (Only ``MODE_CTR``). The initial value for the counter within + the counter block. By default it is **0**. + + :Return: a Triple DES object, of the applicable mode. 
+ """ + + return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs) + +MODE_ECB = 1 +MODE_CBC = 2 +MODE_CFB = 3 +MODE_OFB = 5 +MODE_CTR = 6 +MODE_OPENPGP = 7 +MODE_EAX = 9 + +# Size of a data block (in bytes) +block_size = 8 +# Size of a key (in bytes) +key_size = (16, 24) diff --git a/env/Lib/site-packages/Crypto/Cipher/DES3.pyi b/env/Lib/site-packages/Crypto/Cipher/DES3.pyi new file mode 100644 index 0000000..a89db9c --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/DES3.pyi @@ -0,0 +1,37 @@ +from typing import Union, Dict, Tuple + +from Crypto.Cipher._mode_ecb import EcbMode +from Crypto.Cipher._mode_cbc import CbcMode +from Crypto.Cipher._mode_cfb import CfbMode +from Crypto.Cipher._mode_ofb import OfbMode +from Crypto.Cipher._mode_ctr import CtrMode +from Crypto.Cipher._mode_openpgp import OpenPgpMode +from Crypto.Cipher._mode_eax import EaxMode + +def adjust_key_parity(key_in: bytes) -> bytes: ... + +DES3Mode = int + +MODE_ECB: DES3Mode +MODE_CBC: DES3Mode +MODE_CFB: DES3Mode +MODE_OFB: DES3Mode +MODE_CTR: DES3Mode +MODE_OPENPGP: DES3Mode +MODE_EAX: DES3Mode + +Buffer = Union[bytes, bytearray, memoryview] + +def new(key: Buffer, + mode: DES3Mode, + iv : Buffer = ..., + IV : Buffer = ..., + nonce : Buffer = ..., + segment_size : int = ..., + mac_len : int = ..., + initial_value : Union[int, Buffer] = ..., + counter : Dict = ...) -> \ + Union[EcbMode, CbcMode, CfbMode, OfbMode, CtrMode, OpenPgpMode]: ... + +block_size: int +key_size: Tuple[int, int] diff --git a/env/Lib/site-packages/Crypto/Cipher/PKCS1_OAEP.py b/env/Lib/site-packages/Crypto/Cipher/PKCS1_OAEP.py new file mode 100644 index 0000000..4fdf76d --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/PKCS1_OAEP.py @@ -0,0 +1,239 @@ +# -*- coding: utf-8 -*- +# +# Cipher/PKCS1_OAEP.py : PKCS#1 OAEP +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Signature.pss import MGF1 +import Crypto.Hash.SHA1 + +from Crypto.Util.py3compat import bord, _copy_bytes +import Crypto.Util.number +from Crypto.Util.number import ceil_div, bytes_to_long, long_to_bytes +from Crypto.Util.strxor import strxor +from Crypto import Random + +class PKCS1OAEP_Cipher: + """Cipher object for PKCS#1 v1.5 OAEP. + Do not create directly: use :func:`new` instead.""" + + def __init__(self, key, hashAlgo, mgfunc, label, randfunc): + """Initialize this PKCS#1 OAEP cipher object. + + :Parameters: + key : an RSA key object + If a private half is given, both encryption and decryption are possible. + If a public half is given, only encryption is possible. + hashAlgo : hash object + The hash function to use. This can be a module under `Crypto.Hash` + or an existing hash object created from any of such modules. If not specified, + `Crypto.Hash.SHA1` is used. + mgfunc : callable + A mask generation function that accepts two parameters: a string to + use as seed, and the lenth of the mask to generate, in bytes. + If not specified, the standard MGF1 consistent with ``hashAlgo`` is used (a safe choice). 
+ label : bytes/bytearray/memoryview + A label to apply to this particular encryption. If not specified, + an empty string is used. Specifying a label does not improve + security. + randfunc : callable + A function that returns random bytes. + + :attention: Modify the mask generation function only if you know what you are doing. + Sender and receiver must use the same one. + """ + self._key = key + + if hashAlgo: + self._hashObj = hashAlgo + else: + self._hashObj = Crypto.Hash.SHA1 + + if mgfunc: + self._mgf = mgfunc + else: + self._mgf = lambda x,y: MGF1(x,y,self._hashObj) + + self._label = _copy_bytes(None, None, label) + self._randfunc = randfunc + + def can_encrypt(self): + """Legacy function to check if you can call :meth:`encrypt`. + + .. deprecated:: 3.0""" + return self._key.can_encrypt() + + def can_decrypt(self): + """Legacy function to check if you can call :meth:`decrypt`. + + .. deprecated:: 3.0""" + return self._key.can_decrypt() + + def encrypt(self, message): + """Encrypt a message with PKCS#1 OAEP. + + :param message: + The message to encrypt, also known as plaintext. It can be of + variable length, but not longer than the RSA modulus (in bytes) + minus 2, minus twice the hash output size. + For instance, if you use RSA 2048 and SHA-256, the longest message + you can encrypt is 190 byte long. + :type message: bytes/bytearray/memoryview + + :returns: The ciphertext, as large as the RSA modulus. + :rtype: bytes + + :raises ValueError: + if the message is too long. 
+ """ + + # See 7.1.1 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits, 8) # Convert from bits to bytes + hLen = self._hashObj.digest_size + mLen = len(message) + + # Step 1b + ps_len = k - mLen - 2 * hLen - 2 + if ps_len < 0: + raise ValueError("Plaintext is too long.") + # Step 2a + lHash = self._hashObj.new(self._label).digest() + # Step 2b + ps = b'\x00' * ps_len + # Step 2c + db = lHash + ps + b'\x01' + _copy_bytes(None, None, message) + # Step 2d + ros = self._randfunc(hLen) + # Step 2e + dbMask = self._mgf(ros, k-hLen-1) + # Step 2f + maskedDB = strxor(db, dbMask) + # Step 2g + seedMask = self._mgf(maskedDB, hLen) + # Step 2h + maskedSeed = strxor(ros, seedMask) + # Step 2i + em = b'\x00' + maskedSeed + maskedDB + # Step 3a (OS2IP) + em_int = bytes_to_long(em) + # Step 3b (RSAEP) + m_int = self._key._encrypt(em_int) + # Step 3c (I2OSP) + c = long_to_bytes(m_int, k) + return c + + def decrypt(self, ciphertext): + """Decrypt a message with PKCS#1 OAEP. + + :param ciphertext: The encrypted message. + :type ciphertext: bytes/bytearray/memoryview + + :returns: The original message (plaintext). + :rtype: bytes + + :raises ValueError: + if the ciphertext has the wrong length, or if decryption + fails the integrity check (in which case, the decryption + key is probably wrong). + :raises TypeError: + if the RSA key has no private half (i.e. you are trying + to decrypt using a public key). + """ + + # See 7.1.2 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits,8) # Convert from bits to bytes + hLen = self._hashObj.digest_size + + # Step 1b and 1c + if len(ciphertext) != k or k Any: ... + +class HashLikeModule(Protocol): + digest_size : int + @staticmethod + def new(data: Optional[bytes] = ...) -> Any: ... 
+ +HashLike = Union[HashLikeClass, HashLikeModule] + +Buffer = Union[bytes, bytearray, memoryview] + +class PKCS1OAEP_Cipher: + def __init__(self, + key: RsaKey, + hashAlgo: HashLike, + mgfunc: Callable[[bytes, int], bytes], + label: Buffer, + randfunc: Callable[[int], bytes]) -> None: ... + def can_encrypt(self) -> bool: ... + def can_decrypt(self) -> bool: ... + def encrypt(self, message: Buffer) -> bytes: ... + def decrypt(self, ciphertext: Buffer) -> bytes: ... + +def new(key: RsaKey, + hashAlgo: Optional[HashLike] = ..., + mgfunc: Optional[Callable[[bytes, int], bytes]] = ..., + label: Optional[Buffer] = ..., + randfunc: Optional[Callable[[int], bytes]] = ...) -> PKCS1OAEP_Cipher: ... diff --git a/env/Lib/site-packages/Crypto/Cipher/PKCS1_v1_5.py b/env/Lib/site-packages/Crypto/Cipher/PKCS1_v1_5.py new file mode 100644 index 0000000..1b9912f --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/PKCS1_v1_5.py @@ -0,0 +1,199 @@ +# -*- coding: utf-8 -*- +# +# Cipher/PKCS1-v1_5.py : PKCS#1 v1.5 +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +__all__ = [ 'new', 'PKCS115_Cipher' ] + +from Crypto.Util.number import ceil_div, bytes_to_long, long_to_bytes +from Crypto.Util.py3compat import bord, _copy_bytes +import Crypto.Util.number +from Crypto import Random + +class PKCS115_Cipher: + """This cipher can perform PKCS#1 v1.5 RSA encryption or decryption. + Do not instantiate directly. Use :func:`Crypto.Cipher.PKCS1_v1_5.new` instead.""" + + def __init__(self, key, randfunc): + """Initialize this PKCS#1 v1.5 cipher object. + + :Parameters: + key : an RSA key object + If a private half is given, both encryption and decryption are possible. + If a public half is given, only encryption is possible. + randfunc : callable + Function that returns random bytes. + """ + + self._key = key + self._randfunc = randfunc + + def can_encrypt(self): + """Return True if this cipher object can be used for encryption.""" + return self._key.can_encrypt() + + def can_decrypt(self): + """Return True if this cipher object can be used for decryption.""" + return self._key.can_decrypt() + + def encrypt(self, message): + """Produce the PKCS#1 v1.5 encryption of a message. + + This function is named ``RSAES-PKCS1-V1_5-ENCRYPT``, and it is specified in + `section 7.2.1 of RFC8017 + `_. + + :param message: + The message to encrypt, also known as plaintext. It can be of + variable length, but not longer than the RSA modulus (in bytes) minus 11. + :type message: bytes/bytearray/memoryview + + :Returns: A byte string, the ciphertext in which the message is encrypted. + It is as long as the RSA modulus (in bytes). + + :Raises ValueError: + If the RSA key length is not sufficiently long to deal with the given + message. 
+ """ + + # See 7.2.1 in RFC8017 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits,8) # Convert from bits to bytes + mLen = len(message) + + # Step 1 + if mLen > k - 11: + raise ValueError("Plaintext is too long.") + # Step 2a + ps = [] + while len(ps) != k - mLen - 3: + new_byte = self._randfunc(1) + if bord(new_byte[0]) == 0x00: + continue + ps.append(new_byte) + ps = b"".join(ps) + assert(len(ps) == k - mLen - 3) + # Step 2b + em = b'\x00\x02' + ps + b'\x00' + _copy_bytes(None, None, message) + # Step 3a (OS2IP) + em_int = bytes_to_long(em) + # Step 3b (RSAEP) + m_int = self._key._encrypt(em_int) + # Step 3c (I2OSP) + c = long_to_bytes(m_int, k) + return c + + def decrypt(self, ciphertext, sentinel): + r"""Decrypt a PKCS#1 v1.5 ciphertext. + + This function is named ``RSAES-PKCS1-V1_5-DECRYPT``, and is specified in + `section 7.2.2 of RFC8017 + `_. + + :param ciphertext: + The ciphertext that contains the message to recover. + :type ciphertext: bytes/bytearray/memoryview + + :param sentinel: + The object to return whenever an error is detected. + :type sentinel: any type + + :Returns: A byte string. It is either the original message or the ``sentinel`` (in case of an error). + + :Raises ValueError: + If the ciphertext length is incorrect + :Raises TypeError: + If the RSA key has no private half (i.e. it cannot be used for + decyption). + + .. warning:: + You should **never** let the party who submitted the ciphertext know that + this function returned the ``sentinel`` value. + Armed with such knowledge (for a fair amount of carefully crafted but invalid ciphertexts), + an attacker is able to recontruct the plaintext of any other encryption that were carried out + with the same RSA public key (see `Bleichenbacher's`__ attack). + + In general, it should not be possible for the other party to distinguish + whether processing at the server side failed because the value returned + was a ``sentinel`` as opposed to a random, invalid message. 
+ + In fact, the second option is not that unlikely: encryption done according to PKCS#1 v1.5 + embeds no good integrity check. There is roughly one chance + in 2\ :sup:`16` for a random ciphertext to be returned as a valid message + (although random looking). + + It is therefore advisabled to: + + 1. Select as ``sentinel`` a value that resembles a plausable random, invalid message. + 2. Not report back an error as soon as you detect a ``sentinel`` value. + Put differently, you should not explicitly check if the returned value is the ``sentinel`` or not. + 3. Cover all possible errors with a single, generic error indicator. + 4. Embed into the definition of ``message`` (at the protocol level) a digest (e.g. ``SHA-1``). + It is recommended for it to be the rightmost part ``message``. + 5. Where possible, monitor the number of errors due to ciphertexts originating from the same party, + and slow down the rate of the requests from such party (or even blacklist it altogether). + + **If you are designing a new protocol, consider using the more robust PKCS#1 OAEP.** + + .. __: http://www.bell-labs.com/user/bleichen/papers/pkcs.ps + + """ + + # See 7.2.1 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits,8) # Convert from bits to bytes + + # Step 1 + if len(ciphertext) != k: + raise ValueError("Ciphertext with incorrect length.") + # Step 2a (O2SIP) + ct_int = bytes_to_long(ciphertext) + # Step 2b (RSADP) + m_int = self._key._decrypt(ct_int) + # Complete step 2c (I2OSP) + em = long_to_bytes(m_int, k) + # Step 3 + sep = em.find(b'\x00', 2) + if not em.startswith(b'\x00\x02') or sep < 10: + return sentinel + # Step 4 + return em[sep + 1:] + + +def new(key, randfunc=None): + """Create a cipher for performing PKCS#1 v1.5 encryption or decryption. + + :param key: + The key to use to encrypt or decrypt the message. This is a `Crypto.PublicKey.RSA` object. + Decryption is only possible if *key* is a private RSA key. 
+ :type key: RSA key object + + :param randfunc: + Function that return random bytes. + The default is :func:`Crypto.Random.get_random_bytes`. + :type randfunc: callable + + :returns: A cipher object `PKCS115_Cipher`. + """ + + if randfunc is None: + randfunc = Random.get_random_bytes + return PKCS115_Cipher(key, randfunc) + diff --git a/env/Lib/site-packages/Crypto/Cipher/PKCS1_v1_5.pyi b/env/Lib/site-packages/Crypto/Cipher/PKCS1_v1_5.pyi new file mode 100644 index 0000000..d640736 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/PKCS1_v1_5.pyi @@ -0,0 +1,17 @@ +from typing import Callable, Union, Any, Optional + +from Crypto.PublicKey.RSA import RsaKey + +Buffer = Union[bytes, bytearray, memoryview] + +class PKCS115_Cipher: + def __init__(self, + key: RsaKey, + randfunc: Callable[[int], bytes]) -> None: ... + def can_encrypt(self) -> bool: ... + def can_decrypt(self) -> bool: ... + def encrypt(self, message: Buffer) -> bytes: ... + def decrypt(self, ciphertext: Buffer, sentinel: Buffer) -> bytes: ... + +def new(key: RsaKey, + randfunc: Optional[Callable[[int], bytes]] = ...) -> PKCS115_Cipher: ... diff --git a/env/Lib/site-packages/Crypto/Cipher/Salsa20.py b/env/Lib/site-packages/Crypto/Cipher/Salsa20.py new file mode 100644 index 0000000..62d0b29 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/Salsa20.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +# +# Cipher/Salsa20.py : Salsa20 stream cipher (http://cr.yp.to/snuffle.html) +# +# Contributed by Fabrizio Tarizzo . +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import _copy_bytes +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + create_string_buffer, + get_raw_buffer, VoidPointer, + SmartPointer, c_size_t, + c_uint8_ptr, is_writeable_buffer) + +from Crypto.Random import get_random_bytes + +_raw_salsa20_lib = load_pycryptodome_raw_lib("Crypto.Cipher._Salsa20", + """ + int Salsa20_stream_init(uint8_t *key, size_t keylen, + uint8_t *nonce, size_t nonce_len, + void **pSalsaState); + int Salsa20_stream_destroy(void *salsaState); + int Salsa20_stream_encrypt(void *salsaState, + const uint8_t in[], + uint8_t out[], size_t len); + """) + + +class Salsa20Cipher: + """Salsa20 cipher object. Do not create it directly. Use :py:func:`new` + instead. 
+ + :var nonce: The nonce with length 8 + :vartype nonce: byte string + """ + + def __init__(self, key, nonce): + """Initialize a Salsa20 cipher object + + See also `new()` at the module level.""" + + if len(key) not in key_size: + raise ValueError("Incorrect key length for Salsa20 (%d bytes)" % len(key)) + + if len(nonce) != 8: + raise ValueError("Incorrect nonce length for Salsa20 (%d bytes)" % + len(nonce)) + + self.nonce = _copy_bytes(None, None, nonce) + + self._state = VoidPointer() + result = _raw_salsa20_lib.Salsa20_stream_init( + c_uint8_ptr(key), + c_size_t(len(key)), + c_uint8_ptr(nonce), + c_size_t(len(nonce)), + self._state.address_of()) + if result: + raise ValueError("Error %d instantiating a Salsa20 cipher") + self._state = SmartPointer(self._state.get(), + _raw_salsa20_lib.Salsa20_stream_destroy) + + self.block_size = 1 + self.key_size = len(key) + + def encrypt(self, plaintext, output=None): + """Encrypt a piece of data. + + Args: + plaintext(bytes/bytearray/memoryview): The data to encrypt, of any size. + Keyword Args: + output(bytes/bytearray/memoryview): The location where the ciphertext + is written to. If ``None``, the ciphertext is returned. + Returns: + If ``output`` is ``None``, the ciphertext is returned as ``bytes``. + Otherwise, ``None``. 
+ """ + + if output is None: + ciphertext = create_string_buffer(len(plaintext)) + else: + ciphertext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(plaintext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = _raw_salsa20_lib.Salsa20_stream_encrypt( + self._state.get(), + c_uint8_ptr(plaintext), + c_uint8_ptr(ciphertext), + c_size_t(len(plaintext))) + if result: + raise ValueError("Error %d while encrypting with Salsa20" % result) + + if output is None: + return get_raw_buffer(ciphertext) + else: + return None + + def decrypt(self, ciphertext, output=None): + """Decrypt a piece of data. + + Args: + ciphertext(bytes/bytearray/memoryview): The data to decrypt, of any size. + Keyword Args: + output(bytes/bytearray/memoryview): The location where the plaintext + is written to. If ``None``, the plaintext is returned. + Returns: + If ``output`` is ``None``, the plaintext is returned as ``bytes``. + Otherwise, ``None``. + """ + + try: + return self.encrypt(ciphertext, output=output) + except ValueError as e: + raise ValueError(str(e).replace("enc", "dec")) + + +def new(key, nonce=None): + """Create a new Salsa20 cipher + + :keyword key: The secret key to use. It must be 16 or 32 bytes long. + :type key: bytes/bytearray/memoryview + + :keyword nonce: + A value that must never be reused for any other encryption + done with this key. It must be 8 bytes long. + + If not provided, a random byte string will be generated (you can read + it back via the ``nonce`` attribute of the returned object). 
+ :type nonce: bytes/bytearray/memoryview + + :Return: a :class:`Crypto.Cipher.Salsa20.Salsa20Cipher` object + """ + + if nonce is None: + nonce = get_random_bytes(8) + + return Salsa20Cipher(key, nonce) + +# Size of a data block (in bytes) +block_size = 1 + +# Size of a key (in bytes) +key_size = (16, 32) + diff --git a/env/Lib/site-packages/Crypto/Cipher/Salsa20.pyi b/env/Lib/site-packages/Crypto/Cipher/Salsa20.pyi new file mode 100644 index 0000000..9178f0d --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/Salsa20.pyi @@ -0,0 +1,27 @@ +from typing import Union, Tuple, Optional, overload + + +Buffer = Union[bytes, bytearray, memoryview] + +class Salsa20Cipher: + nonce: bytes + block_size: int + key_size: int + + def __init__(self, + key: Buffer, + nonce: Buffer) -> None: ... + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + +def new(key: Buffer, nonce: Optional[Buffer] = ...) -> Salsa20Cipher: ... + +block_size: int +key_size: Tuple[int, int] + diff --git a/env/Lib/site-packages/Crypto/Cipher/_ARC4.pyd b/env/Lib/site-packages/Crypto/Cipher/_ARC4.pyd new file mode 100644 index 0000000000000000000000000000000000000000..e5b3486b38d81a7ff18c3fb150e1e2e9a1ff8274 GIT binary patch literal 11264 zcmeHNe{@q-p1*06{w_(e!HPe4XgU;?PH4JXtQ``X>g#OK3I$md)6l*`#x}_$FWR!q z0-M%pc#|33!STnqv%+|GJsxKc&d$M7oii;#+A1hk_YCN$9CgL4-}_#m z1)Z~h?EJB#_niB_KkoN_@ArQ1_x?;4uY8bYGseuwn#Nc^ASE~d{^?JiiLq(32Bxv! 
z=I@)`Z}RP%-Mls!6C%;@nrNU?XbpryVOdx$3DI~+2!;gj?MsEuaGO+FTwLHVR1fUi z@bfR8Eg8w=Q*ZwL$QjV?cWk3)0gnvZMnC7}wvo3uzjX8y4(+2SIh@YR+uyxxR0G}q zZixYpjp+0xoL&-aT}y3zRgan)7;AeVhjl$WyCowt!LAkxv!-3aY~Wm_bDJ*#B$}~} z=)R6;F_uF(QUOHz{&IlRVrnvO9oH1;zK4=Hu?dKCxNk+%4l0a4ls6Y5m;cb zX~?C>lad=&SL$L+B*!?BsSj5pPfBja1}h`FAnzj>yiMxhq~s=j(O9$f!wtYm1N1V(dhGx;QjsYd!uWM@(`BeP$%inB;cG$$^dWYayjs^r#IpI@|yGGSxP|`DK z?V+iw#x0C(KG5fNxZTg25_MOWgK6!33EyE$F$gk29fVNNpnSXfT0W>Vh+cIzSYpb4 z8=_fE6)yx(ZHV07!T4Ntl`bxz{3^ZNO65liNohXLMh&l^JXtd0y>=~d6jmoQ{lI>H9pFxj3+UuI-bu1y=N8?$pK~cHe;S?3w zQ6nnr9U^=ocgc7Fi#rHw-Gx_CCIK%1Qd|I(Qg!;XM8;bSHZd7sl)7nVV(uD%` zE_9Q7Mo84LlLsz;JMn=@{XV3X z^!PU2h+D~s2ltA~ZZWw+@Fm@4$tH}Cn78~+K0xk9WkNZFA?h8ATNl_rxstIBxvs&( z$IwJ-k(T(>bbo`WSc=el5pFIR_Bxz2I^(69{Shn{%7G*}2!x*BfD5lFe}K!>CqLJ; z^F@zLE-FU!=|u_66u$vVt5D4~M%9kP%BRX(qH-LQByLfE4k!4OVdV{x0_0o}VEp}ue*)7{-@R0y00&*8n*DDP zrlRtSX8#@#%`S6tWFX(nXkH0-iCXzu;w62ULy4m`J1>RLR6E8SozLA#p}21@Vm=!K zq&@>mRJ;hfq~!=0Vv6v@AZDLs9np&A4FXAv7n=%6x`B&e4O^}Phgu!BlmVlm9JX9- zVEvd4OuToHCf7`=Iczpj%jfw@@#B|+pY)l94AUek#Xra}QSmUkq2dstpr3{Mm2|rrE@zi2Lr{5&HK|nQ|ICMx5UBF!pHqkXC z?nIZ7p#2&UuJcWaR__qpA0QJDDWZ64KgjPAiIHEGhKUsvR5meD_e-?R;yIdKK||+P z;=Sl*j>x61!3oj&T=5ntj32Ag*Zl3K@l!M(NbkLR-CT7C)}!&u z)TN-YZ2nAVo2v%LbJd@K*lywtQXMS*b%--8{txT=GP*WEu4gb#m8Ppvoyb*#K*o)I{q^IQ~;HTwB0 zgFgHe5PkkzcUW$x6@T;vnrl{Xy3ewZ7W(_J45~kXnBjp^HKG&w@RtO1{K#gg#nPb9 zA+#WDtTUr3Qx3-0Bu<;||DO5|1_2{k(ffAX0V|JThO3vNF1k)upCme$({d%e>}t1$ zEdg>sVnqAcy20{0eU;b)KqNtg=4$|ZERW%9;vRKJO_NQ+Je!+53U!h^eBD zzEsi6VzSiHSo^wMrhK=^S349fP!{E75BX{bqgV`gRgAyq8WXWWQKXb6);ZV@2rEEb z<^X2#FfB{#{@#309foUNWAIC1&)e}_@rU+3w3cyK>M|SaUI2$}J}8@e#^OclNmRgo z`d9#2=(#zw2&NA4QZ96U zA;erysZ>BqA?YMUpnWErtL`14LM)a|VzQT-AwxLR)Bh;7%J+)iKC&}Sd#<9_z+T>h z9lptq=ml)1kk#x~=)^&Qb{XiS%_P+@Vci0UcpV}&`U^OMT61IF0*dG>g6H#6Vz&Bw zzKuWO*iRyxc9Y9iA)wEli3BlqiK)7c0OQkf+9C&F39O;6B+_|e?R(L~`b?$y zsgDDn@u+*3Ubp#zx@W1mFR8l=0ZeV5Up8#6LD#5tn^zfXaJ}tSUh!<22+2~1XVdXp zrBK|fj42bYQBSJg)KEBL-R9YJA{deylV(Rl;mgotR!+n7dC6u+QM^e^6#o==C#`&9 
zp=PZc%rP;!qOK(;i^;R=A_Qhywx2$WGnH_^L)>Y?=1cw+t-lVMYf`KuR^C5bEa+Uzz6xUf`-B&)SZhe!xjw}T+;4M z7M}%b9r&o!ns`WOd<4b|a4t(;c}Oq2Jk||~D`GLIH|-aP-WSBe{UUlL(BVP2I(<_* zey3vj4WttzdBc{iKq$zQmVW}~IhhvZn#71OZ0Q3D*Q4M8VEvHV@4e=LxAs81jB9cn zy7^GNI8nR?rS9aFs{m+ZE9fvR<0PHfZS&K5$G1KpuE7PCCJ15cAojVWl`lg3T6quu zKES_y{QENhKE=QP%)d|b?=$?nn}2`JzuWQMZc3vKeP7h<%fW#qe5X9u6i1Ah>2Q?s zTTLUb49XE42I&gaGk^|@y`$Ei=YR}k!R;7anmE&-PfX(WFoKYdmwHm3Eb1*bn+|=m zxhIXFquY|xqof;^QO^JknspZI!)$9bX@j2HUD1!*8;p zT_#ks?kII;|G|R=b;lXk*dpr=v$d~Yqli8A-?#LctzPA=M%?b=xC5Xaa`aRSIISNw z&YLy0_xgOroI(F-zs21S25_4iTw}mYu&z-#?o&=I?j4m^QjcsN<;?l}D4FBYcA?gR z2J4QZ!bK@_A#fO5=;<>Tdi#nBJw*7jM*xS=r@Tc~r8h3>E6s%-vG$AT@5p=`c}~!S zrJ|2FOXin(`*a8Kb355iV|sBqZ;y89;?li)CzY2EpiAy$wCiG|H!pV6{;}%tqo;AS zpTri_2f%rd7pBh_=)xbJ$_V3%h-fh>a3L0KnpeJ5@%jl(^C=AxWf{)7i(AwRh?6EK z7DzO~Yq&-?)nGmiTDJ@-L*OH z1BMLPXTYZnIAFkxF@H4ZPYhU!Q}*O^lR-N#;NND@cN;Kdz&-;mGTPVx#?PU4KiKxkZKTZe~y%Q zfnDd?n62AwGvGOX2Kh7DlTrV_g?Wa3Wd;-s=)6IlhMf)EJww=zg?myW*z#n^2AbjvE8&erxdz(LdHxUzl;t`7Ac z5^+xDtt4K}6ka2jb5G@QIdKZFiOV%j<#D+cQ+Ul>t_3`F)v%?R%SFI@&;*|Go=dSs zF{aawb?er(twuMPyWHqURBm-NdbmDu3U4LX*EE^etjn#K!qeqiz+==-m;UaXdH1fA zj0fWi{`8&kZ55~a3jQs7{4Ul<{cH{b*hoem8 z>R615nOlEf&%8s^yL$`R{Jq)XoDY<>fJcX5gU4+m{+~*4Ei19-F}s}4>}|QsUY*12 zRV9Z`;${F7*^3yE>q;1pEzhkk-4oPe_6V^e7!D{Vt=&Sj-&qqNEvG1)Ef-2k(j`dJRi zaDU{mf~uT|ZVzm!wlM4T!U*|-?59L9(@!qfoCi7BURBsK+mgjBat>?3h4n?`qu=7w zcM5GP$z>&N7FN=1W+m0xtmF`6cs=~8gFE$)k%c)%y-75+ok_Hy6TTgCWDkuU$W$|nWi!8LNqu!?q`Qp~;*ZHuDbVR}lYoS~r|_gu zYqUEePvHeaL76pmHv;&%CVl>pdZbrr4;FBTC{nN}d=xAQ8sOP2ONEuo@H> z_61i*1JUmKKu1T6g|jJOgMp6VI%%md7?VY)W4*A}E3J;NStCU=R{nLvlG_@5uBytm zjt<(d{Qi1>EF!fA+k>rs-Ffg^r-)(V@#t+W%hM)iDhyb{h;cX zx}?^)EcvGt%s_i2$5(7@9rNeSpJ(*zf^z&~FJoAyhbxgR(|ydQA&KrSE|U>;1kTpnNS3d_X$>od;oRv zM#1k5hk(=Fl+O82qQ3;`4nR0TI=>T6kj~oIL!O|D{3GDsq;EW+d%(o+*O&?$r+S3$ z9{dV@5g-2uU?R%sZv4>HN}RfRVOM8|aGw;7;ka5iXMUw~j)0p^DXmg z<_Iwv=h{FAjxKd`x~15hh2JSIxH%AuNu8@Zx&q7qoJE(t!t&uKx|%Tur(Tv zh1=zMt>Mm_0+&^C43#I~=sfBwQd|350C13mx% literal 0 HcmV?d00001 diff --git 
a/env/Lib/site-packages/Crypto/Cipher/_EKSBlowfish.py b/env/Lib/site-packages/Crypto/Cipher/_EKSBlowfish.py new file mode 100644 index 0000000..a844fae --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_EKSBlowfish.py @@ -0,0 +1,131 @@ +# =================================================================== +# +# Copyright (c) 2019, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import sys + +from Crypto.Cipher import _create_cipher +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, c_size_t, + c_uint8_ptr, c_uint) + +_raw_blowfish_lib = load_pycryptodome_raw_lib( + "Crypto.Cipher._raw_eksblowfish", + """ + int EKSBlowfish_start_operation(const uint8_t key[], + size_t key_len, + const uint8_t salt[16], + size_t salt_len, + unsigned cost, + unsigned invert, + void **pResult); + int EKSBlowfish_encrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int EKSBlowfish_decrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int EKSBlowfish_stop_operation(void *state); + """ + ) + + +def _create_base_cipher(dict_parameters): + """This method instantiates and returns a smart pointer to + a low-level base cipher. It will absorb named parameters in + the process.""" + + try: + key = dict_parameters.pop("key") + salt = dict_parameters.pop("salt") + cost = dict_parameters.pop("cost") + except KeyError as e: + raise TypeError("Missing EKSBlowfish parameter: " + str(e)) + invert = dict_parameters.pop("invert", True) + + if len(key) not in key_size: + raise ValueError("Incorrect EKSBlowfish key length (%d bytes)" % len(key)) + + start_operation = _raw_blowfish_lib.EKSBlowfish_start_operation + stop_operation = _raw_blowfish_lib.EKSBlowfish_stop_operation + + void_p = VoidPointer() + result = start_operation(c_uint8_ptr(key), + c_size_t(len(key)), + c_uint8_ptr(salt), + c_size_t(len(salt)), + c_uint(cost), + c_uint(int(invert)), + void_p.address_of()) + if result: + raise ValueError("Error %X while instantiating the EKSBlowfish cipher" + % result) + return SmartPointer(void_p.get(), stop_operation) + + +def new(key, mode, salt, cost, invert): + """Create a new EKSBlowfish cipher + + Args: + + key (bytes, bytearray, memoryview): + The secret key to use in the symmetric cipher. 
+ Its length can vary from 0 to 72 bytes. + + mode (one of the supported ``MODE_*`` constants): + The chaining mode to use for encryption or decryption. + + salt (bytes, bytearray, memoryview): + The salt that bcrypt uses to thwart rainbow table attacks + + cost (integer): + The complexity factor in bcrypt + + invert (bool): + If ``False``, in the inner loop use ``ExpandKey`` first over the salt + and then over the key, as defined in + the `original bcrypt specification `_. + If ``True``, reverse the order, as in the first implementation of + `bcrypt` in OpenBSD. + + :Return: an EKSBlowfish object + """ + + kwargs = { 'salt':salt, 'cost':cost, 'invert':invert } + return _create_cipher(sys.modules[__name__], key, mode, **kwargs) + + +MODE_ECB = 1 + +# Size of a data block (in bytes) +block_size = 8 +# Size of a key (in bytes) +key_size = range(0, 72 + 1) diff --git a/env/Lib/site-packages/Crypto/Cipher/_EKSBlowfish.pyi b/env/Lib/site-packages/Crypto/Cipher/_EKSBlowfish.pyi new file mode 100644 index 0000000..95db379 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_EKSBlowfish.pyi @@ -0,0 +1,15 @@ +from typing import Union, Iterable + +from Crypto.Cipher._mode_ecb import EcbMode + +MODE_ECB: int + +Buffer = Union[bytes, bytearray, memoryview] + +def new(key: Buffer, + mode: int, + salt: Buffer, + cost: int) -> EcbMode: ... 
+ +block_size: int +key_size: Iterable[int] diff --git a/env/Lib/site-packages/Crypto/Cipher/_Salsa20.pyd b/env/Lib/site-packages/Crypto/Cipher/_Salsa20.pyd new file mode 100644 index 0000000000000000000000000000000000000000..e492d932d163a1983d9c839add6f87ac67aa7a17 GIT binary patch literal 13824 zcmeHO4|G)3nZJ{nNhTy=2AIJSQ64%-REQyyYLKMNn@seD4mv_WR>dSF6Nu)|bmk2P zR#)gGT_3m8;+AgJb-RMyw%cQSsOwQs_jE#l1QiYFwxPl?r`q;mSR36Q#MV0d`|f)~ z66|(+&Th}y*1nVbzVCkbyWjo3@80`;_q};pdHYUgWsKQSvRTIV0n!x^_y6H$VQl)m z=cco#3y;p|7YA;sd|8{^HLQcH7JSGOu{h)VHfm(<=Rg>G3bb#}K#%SuY7 z`ApF_9(?A@dH+0eDp!h5eeP5~>My*0;v@r{{mO}x7SVp<)RlsM>SUpSCr?fj@D0)a z=#Pg_T2X)W$0H_Oc*>~%MATQcw``&@F0?38$5`tZ^Vyas=Qrir#@XdkQQq{qYz`=w z8PtYZfG&Y=gV_e2$5_4~O+dB{1ac;>Jk|tOqf-uivTjfURg4Xxk7FtHq0UW=og)}w zER1o^UopnWcbO%O%{@>2Br=NjDxs*^Rt`A1enFQqHQJ+s*5k#ER3_U^JAjExf>vem z)@HStv5FFqNal1D(?`I>6~OW-GuW6y4gvwrU~Cb}q$>b3%3=niurY#08nbK?6VL$3 ziznhOz=cnmfRMZjlu1`W@I^bi!AL%0F%;sfq{_qs~8Ij>hZLRnVw z-nERqpPZ@m_q*CY=+9=~Pv*Tpp!6Sco&Gu^1^AWJnbN-F>eAFZ7PXXavs1sesMGe* z@={UkhuuEva%xHH~`#VJ>bRX6ZEula~ zcE?Nl@d9AI+2nHF>wqLaqNJ*r>QnfTJ`YV8)Enl9pQ8@-{1t^;!@T6X^B9w}$FlZg zSk}TvV|o6J!teBzD_r$eDf~X40$)h-T?6|egy}EizLWe+ez^_KC=cxTklPiN}T#vUl}XHLJ1A=1HY>YU}?I)&SdVZ8!3Pa6#R%4v4S+*!}p5it0O1_~BJ z?^B?{Yy8J>nf~~1v)PHJuTO3&!Rj-bQdvv#ItZ;mH~(?!_8wfW^qs}3(hq-}&0^rM zVhjwdPS5dVJrYQLS=FA(T9bkEBbH4XQN0R1|H|l@EKIMnU zneM@S!}x;zv=onCg*9)*1nJ*JP2nLdI?etL2uhmpR0g}xzLo0S{u%+z9zv`p&H!)) zq(S>-pwOs;b}ulR%0c_(Cbkc|fhqT7Xmib>o`W_EjXXi;C68VMx)!!cIif}3CHLou z=y)1t=s1LAVOJITF|c|qIWVB`6IsvdOVFMFE5(0*whntB-Qa7253?SbT43x_CDrPy zf(l{ZCWWIl%!kM=<(NJq#;8*2KSJ9|%6ev^TXry&&AaahG z`)UY(zGBatiyt9XmklIUB^C8zl$GABXSPtP&Z}s3uB?Zl#xu$i!p|Uo0#C1$jQZD1?g9y;-45wafbTg{$^4ED@9R07)EskRWbAH2cTjp zEh?9is(Auqvt)kO!!giABz_rwJ%MFQAH& za`^P08%Q3;Djg6pU;j2jF&sTVs&Lx^O8yF09AkE6*^uh?XT}xh4@!1HVDjAxBj)e5 zjD0}+f#lv}^exbzK|C6}SYM4g!sg!_WLrgMtU&(?Dtj$rKS^p7xqEH1ME(IR*O72!s0lU^6}7 z)?-Gc5dMk+9eZa7#3D2pdk7;)+iGm+%J_-o#?*(Fd+*jyVG=NtrG0NCebDk<>~Q@e z^o8l9^;xPHh`L$^FZBE7l)v>HrdTFiY4B{T zc^(wDiQ 
zBU0akz8KNQUpji#e&9?$gpCKa(<{g2eZ!QzZwHyGSon9;{x#23oRNj}tO+5TGmrd6Z1jb>q%ma7f6f(4gM{KaCbZ z+yaW#R}9ZP(u!68p-AJ8`G(;uW&1&L*?KJK^JgMSNnct(;Wu4Fad&JcPFv&vgup5? zB{kqsmc1E2ZR}LqpT<0h9gn_mARMAE#pTI;KO&8ON8bZjz%=$m*l_tX{v*ykA>Lo- ze3UD1$pkG%c;_O58XX7kk*y$Crr{vkZ? z&>DQj$%v9F`Kk{NTQI&n>#WJ-TbNo})0Cgb)cG|r0(0zpKl})1D(POIvfqN_Oa4`y zPxL3;A#RT$Bgm;COGvXvfsPe`b|;X1i193xR(`W={JtwmSBxIwbZW8X#&$mUR$*dV zT~R;!UWG1$_Usi#at~=G9|3hf_Y1c(^$-ctN`3*t^9ZL*yL8lOyCks%9CH&1h_?(Y z{XdnIqG1JANpyISE+4pIAbA_NKLzg8kYmuk8;F$z*#8Nbe0D%mt5QSKp#4EqF~w;3 zBCvhn8V+4C5?VHr^a@G7(dyA;NvdQcS_9gp>j7wHOX)DI;v}6q=xU^RCsH2}e`cvY z0w$ymBA>sq3xar(&o1Z{_b-b3gW~?8xPMdJza{S95%=$k`$2L4zPRtjeXnHzV<0c` zgJDe(=VQe@AY+3=e}+{NhO5`D>^tJ@eas4dwRPv}aPHGKjVoGASG1ndu+}sZ);0}? zwb)Qt>&ZBu*}e^3sb3Qw$wG`(2b_JcVWS-u4UAj(FgA~y%eD~D0P}L$5#mF@=5o1M z9FHX~yZAX!d|Y<(aUi8!_J#zuh|6SR1jAqXxEu zB^cM{ldXqDmmz`o3x4954%U!g$11d0K`ro)2y{R&pM&UmVD%4S=%s>l*iba&&(PR5 zFt`l4kYyp(5wI2sb04yNl2d0@Jpm`Lz)p`^?Yk!A&Ah^qwGC?w1$E*)Hp zO#R4$147)0%nu0P%2q?mVMv_={+yu)&}9kiNx{7zJNtGcLc%7u^#FErvBJOP%fRQF z901WPTQiy*0ybB+_G_}(L)HOJ4uj;g4r+1)NU8O(CN}_CBonp{Sh-A%cLJ-Ftp_!^ z30Rf&kS4bRsfM#Pc@wZ=D^`XkcK{7R-7vP|I!%tj0yHRLOjVO6tC$j0!wpm3`)7m$L(6pE#dsa0C?I>l=asg zA zMz$ZMT{|UMAZpP0EbH0q6f8K$eWE@0+QJ(KCdnebgb|V=EBQIR!%+B#EAd>~`vDC` zk>Fh*^!}swXGBQ{$CZEiB@(_`u#W=k*fo@7%5j3Qy3m~Zn9`hsP-keQKJ}qPl|)oG z&!6V&g8&U^yhCUlc&-<6kR&JpAD)gE5n8^2cQ44T_H;0B>=4pq`WBa##|yNejo)O` zf)0L@Lkkx3n~Jrdi`RnUmZ{Y%i{`m-juwjcFr_THFT&?)=ssZN+;ilKn9Va(u?~6c$ zzxsi2i$4!6h;TN4&;|^V$KelBG(wCk_6I2hK`oa*=mG}S-2R{&7}WFngI-_=a&!H` zxxf(QB!5r>h8XAb2YtX0<4XO(QecQ~i~K=~f?-5Jf3O@_1QF05q#)RU*yj&Y_*;kA zx8DKX{K0B^CM9`y>YpJ(MnO<)Qv5IL-#SANRR?pLcpKJ9R zylH8W*T`$dqFNW?aCJXlfxv%^Z*}uHLV6s*d@I8E1|Pp0q5JMdAwDex36&v@Qax0~ z8H2At$UZ^jD!QRR6i39sk=YK1o$LWJhn}2l$Oi~%j$BkMG-4-$p%YO~ zL|&T@3{@eFN8^eRNg*1k1M${O9fcxcTrRp$>X=FK7WqZw1j7*Ye;obI)G%UJMO-cM zkE73J)JHdm5h;WcgfXfaeJQ$MCI-==Q<=~ugRVtF7i1`d8*S5s9hAWoBjiJ`i% ztiV4mG6s^LnK6)eAe=IkNEnNRMBU67U^@p<*NUM>fmP>pC6f@>$+MzI=DhyMSwu5D 
zlZT#5ruso-4ntoh)!|bt&b|b)$N721+$emPh=@E72}9NxBDMpWN!9{3pvCYVPzO+# zz>0yni80^;=Cuw2a|4@eJq*kXOhQfpIu|HX9JNb8OJ(appgy3BtcQS=0z=XvbP)_g zlBfh)4k{&#D&Upo^r2dfYE^EUus~6*P6b6i3Hq?X*2#_XUGjSQ&c?gei^t!Ijb&nV zW}AF*oZ}{6JWrcK$<-+jti+~Q_>1-Ym|lXZF!r*U69TL2I8LfRkyCG-3-crLS!N6K zW9lrK2OKL~BDN1k!q#I-%TV}=WA!ZqE4bUG@LCsMp8z<{hAdX6TI75NFf}?Jwho2; z80N?wr#toE88s)~*Jnp3YoEg>wJ*3DuL1*W=q-T4-`TM;K+m)5Pdna&;7MdjKRcey z?i7s!6AcFQZOBjt4X>=W;dc#5<#=Gj z5Tb|w$o48be(!Pa>gWBS!+(Fd{vUWCc7vfruL-|l!lz95iV4q{kS#O#-6kwI;R+Kr znQ*fScbf1?6CN;O&h)p<`bQ?5Tb*k$V9=~rT|i%J)>}=u*@QbyxWa_5nDFN&e9DBm zQA#gi+mNkT4rUe__0bDpj{pA(Mgqn-<0j0>#VKjZ743;NceP6Xip3kYs8Q)2v%cgy zvrheTzg<&M3gB6Bn;&JqA+d^#`y*VA3-mre#1g|UmkH0)bI2b_pPcwV6go`(ye5=P zSbn3S{~s#nf6I4MZ9|xTZ{LfbZNzWzzRoU^xpyIB*8%2ezVdSRbI|_oJgvw0{T==+ zYp37PeRSqz48QxA5>I(~OWQ`^sMG1RtacS%UDwLQQa+V-JJG79(CP(WU@A@UDN|?> z!55iI6MXBY&>93^6KF8i)TKf2#fXMAoBOVS)-J7Kqwd?fb!+Pen87lzPS_DwTYUAh zkT*1icDs-_Jek&D@QqBN8GNIlHPJXd#`g!g?=Q-tMxDOHh>)&^Z!3VZb)b1CXwVd& z7=T7^<4ivBr2)m%10>t%OVbFW#2?Fj7ZE_+fhWGQ4T%Ib=f3mEeGkE!pdZNMy9Xc5 z80sKAJV6sW5&idFP>s#-IG9H*WS-Un=BdnQo{AZxXXhHfYrlXAxn>3vYm4N38TccU z>K43y%&*9c%)rmc(<&Wo+N%5*^~Gyq_8R!!NBJ$*C0#?nr_E$+HZ4G0vw&ZQl53~> zyh-gAn}yke_*HREKAThNWOL%PN8PbmO(dW6UIX4`vlzP?@(2oDR^>%3&V1&CEl$|t ztSE|E3N5T~1KJ%n=7`&3)~392Q}1$1iH((17P69x{74~0nkJ+JX0G%E}c{9LZg;z&_BY$9I)(nSf2;Giqz_xMP+vas;HS`C~ zg}t4-#zA`=w9-j5R}CHvOuyv#Cc{trdF$V8tJIF#=9rp{M{B$Bg9*MmJ4v)~clSNX z*p11q7PY;*tFB9pZ(-~$YFXPJSCh>hYr5`j?`lORu&?Rbgfme`w6(6MB^o348{0e7 zXdGXl*jGo@|F&7myD6&HCgbsFml}z8w?q+ozhQ5P#yi`)n$>8I#J*&$PnebN_>IwK z(lru~CeV$ovfY}D#GiE_!6a(KCalg@5ZQkcb@i5n z8trUo?~KX`7!W0lEwF^z6U`e?6U^cE4e{pqmfGfyjs)wr(k5tc?r7f{T^(*us0zff zK4=??Zb)w27>(z&{9N6to9n{TaQo*cOjQ>9hhF z?J8W+$*!Nk8>4JFYfMBpHMWUKY9y7s4EwgLt$iar+t}3{k9XhO*c{)u886L@o`#XR zrfApZ_IP(!Crto8<`d(X9Nk^f9s~xJT*lbttTEaXZAq%p#wiW6F&^T@YOUK(@&t`L9xa4BF1(qRO zuxlmL#B&BD`d_X;9}l=(cw|7K^Kd0#FUoy5u@Za~=ef5*e+*EY2dp6hfqd<6MV$Y*g@cr0hW01<2i5_;0q|DzzM#ILSq%rW^5q};iZ7hCf*A8 
zu!$4=o{8@T95Hc%|6$_efDt@LxpDR(xDAD565M6t1W%!mo}+;Gcn!P*&~`CnXFw-d zib6aDZ#D5&!0*oi?Rq?)0n#^KRlo`2{ei$20UpKqG6eeMvm2f13UKD>0G;3rl*fS6 zd1wjBlf)00JG&8l0c8kug2z!#0w>slbJQ8&1Upd}&P{Z-qF>Hkz;^?)^o&X8bqV|g=`2q;K{|&MPLR&bPXZ^{gHnvX zf4TnDJzx>fw_Yr#sg}^0TpWo%LB|)NOhH>y6V@)^g;(|Rp3V+wb2Og7F|B6dlCtuJ z63*e>t?gYKYZk6)xN&jSLMfr*JlEWTlwGrMOEj@?`DaU}RW~OR(asGWTO base_cipher_state +# - AES_encrypt(base_cipher_state, in, out, length) +# - AES_decrypt(base_cipher_state, in, out, length) +# - AES_stop_operation(base_cipher_state) +# +# Where base_cipher_state is AES_State, a struct with BlockBase (set of +# pointers to encrypt/decrypt/stop) followed by cipher-specific data. +# +# The API of #2 is (replace "CBC" with the name of the actual mode): +# - CBC_start_operation(base_cipher_state) --> mode_state +# - CBC_encrypt(mode_state, in, out, length) +# - CBC_decrypt(mode_state, in, out, length) +# - CBC_stop_operation(mode_state) +# +# where mode_state is a a pointer to base_cipher_state plus mode-specific data. 
+ +import os + +from Crypto.Cipher._mode_ecb import _create_ecb_cipher +from Crypto.Cipher._mode_cbc import _create_cbc_cipher +from Crypto.Cipher._mode_cfb import _create_cfb_cipher +from Crypto.Cipher._mode_ofb import _create_ofb_cipher +from Crypto.Cipher._mode_ctr import _create_ctr_cipher +from Crypto.Cipher._mode_openpgp import _create_openpgp_cipher +from Crypto.Cipher._mode_ccm import _create_ccm_cipher +from Crypto.Cipher._mode_eax import _create_eax_cipher +from Crypto.Cipher._mode_siv import _create_siv_cipher +from Crypto.Cipher._mode_gcm import _create_gcm_cipher +from Crypto.Cipher._mode_ocb import _create_ocb_cipher + +_modes = { 1:_create_ecb_cipher, + 2:_create_cbc_cipher, + 3:_create_cfb_cipher, + 5:_create_ofb_cipher, + 6:_create_ctr_cipher, + 7:_create_openpgp_cipher, + 9:_create_eax_cipher + } + +_extra_modes = { 8:_create_ccm_cipher, + 10:_create_siv_cipher, + 11:_create_gcm_cipher, + 12:_create_ocb_cipher + } + +def _create_cipher(factory, key, mode, *args, **kwargs): + + kwargs["key"] = key + + modes = dict(_modes) + if kwargs.pop("add_aes_modes", False): + modes.update(_extra_modes) + if not mode in modes: + raise ValueError("Mode not supported") + + if args: + if mode in (8, 9, 10, 11, 12): + if len(args) > 1: + raise TypeError("Too many arguments for this mode") + kwargs["nonce"] = args[0] + elif mode in (2, 3, 5, 7): + if len(args) > 1: + raise TypeError("Too many arguments for this mode") + kwargs["IV"] = args[0] + elif mode == 6: + if len(args) > 0: + raise TypeError("Too many arguments for this mode") + elif mode == 1: + raise TypeError("IV is not meaningful for the ECB mode") + + return modes[mode](factory, **kwargs) diff --git a/env/Lib/site-packages/Crypto/Cipher/__init__.pyi b/env/Lib/site-packages/Crypto/Cipher/__init__.pyi new file mode 100644 index 0000000..e69de29 diff --git a/env/Lib/site-packages/Crypto/Cipher/_chacha20.pyd b/env/Lib/site-packages/Crypto/Cipher/_chacha20.pyd new file mode 100644 index 
0000000000000000000000000000000000000000..2184e4bd4d2fe337e4b1af0d7ebfd184a4b7e035 GIT binary patch literal 13312 zcmeHNeRLDom4C7%Ti7xZAQ3@G7!nao0Y_MFV*&&Xmh5pNf&(UD^JN=b1~;~>N~3^j zvVkKzWq46bTGBSzY!_&|?e?TSw40NfBt4Zd!8QpExZN!T*m#qE$T*udv`ZkRtNq=X zkw3`pwtsB@*rs#z=6<~U?z`{4_wKthqc!*MVMfN72_>6ltOt-TpL+iTHv?l+=e|6Z zy*TCgydFdF_`JF;tuY}I?bsY`Y!{ju!{H8D*c1|?@vzVu7W`}13+)}vq0-{wLbtBE z>c~r<&->=7bGcG9Y)Q=1)$aHK_Hc&)uT(&z+s3!n0=!Rd`0Nf9~fe z&Kf~K_w!RaTyRdK-%{yytxa2~k84L%8(^&Y$$ZxN!n}rD-6*?BD9W2Uhq=JHQRCLl z0JN+44h&nv^BBulS>urH0E3)~D~~lmRBMz2AEhpXxH>1&#l%AsvCcqP~2MkdqP^>F}GF9BASMw=Vu zM#cuMV3E$LDAyR`!}2NB#F#`50t3!q>=P&xE+1p5(ugLgj<&fFSvZMNmktjEM2e$Jpv|>f?>Qp8pCCAFgV37pl@HCFL=xMP=rJB_*AA3j~T+0c7l0!QH9&!puXOxPbZ_wR#)XpD!Y%J>}XG@|9(Gzn&Q^ zz!K}q##d!r7HHz9q(lXi-4gH5%taMCbp`p+mubz_@SjNB805v@nah}%J()G1EJb)F z14r__DTzPm_DEcIS4jLZw*+4Z;={d%poC@Kz z4^2Gvz1e=nybTqZ2QW<4GeTlzJ{cXqEz^OKWeREh26!Sd|9vmiV;Du~z&q&M{bpu9 zuuG?|Ka+UZkogRxdGE*p&4_9;V$T~AKO!j`g`nbdDYcj%$#U;~>IC^}_$dDnQ`9{e zw=Hx2b_ruo6nIm^@1u+4>TKe&VQWC*W-CT7!Oewzez!**yd&%U3W6#>rGSHl(Dehf zfY49&cQ)a(LWE_GkRs>&Sw&ukVQe+hk{ zV|i*$ChHWy>dwmMMAjJhjrCYMp+|WkTK<*RGCn1_(SRB7j5u<#S?9SMayE?Qd(TiU z@1?2ShS~6m57$53o0~UvJ=5d~HT*JvOX3%>N#f?r-@pk$-p79`(E=G;1epK&;V-~+ zwD+yoHo$4`K-T$L1Xqc_o^^g2NY*KA6DjOI^LC=S`KJUFvmY^-Bz?dou=>n5 zf>E&EIc>Uo^ci=SHrz7jMEIi^A4#gFHhXgG{v zWRQepF|G>oqi_Aas>&Y7xF0hgo%>{%xR#Z%H!!rz;%;>k6FM*ocHfPiLp zDLo`eUf|wA; zklshMwgs7&5RXP?W!8g6*!;4_HkGAD3Nl{+anPW4NNo`OS7MzZ_&=lR%jtRoa$Tu7 zHJYA@YD7V%70AeY6WV9)11GZ%H=RlCDW*zF8=W<>+of;ND-_JIM~5><5CjofW+r}T zh)+{Phs2Ldyf5o~XBLgO&b^Tb-L?A8ZKLolW0R=cm)fdheL__t+u`iJh|M&h;U9yS zIi_hhXMU)P!m;`|zpK;3-vFZRf7?suwG{COUZK5a^Y`vDucAQzD8gXoafs<2aAYDH zF$Mk#0UbHN8)^|6v^|6#gdHnQXv+BM_~yhF!`4q{&S4TTlO^3};%->^K6ZHKI<&>m z$?8i)7pSyc3NL$GY#ruCazLU#`y1O6<{v=-Yw;)mkpw}$32@Z>Eba!?<1pb6l;8}; zOS8Rg{h`ko?0kd|elD_$2|blCPo z(IF-8aY@D#K=Y-fbw@C1Jtip*cg^yXvWwqo4K7bd3;Ak`F&$iuE*W{-J1F6R zqLt!EYUrKpmjih_kHMc=ZzTo!#4ny4e9wjLJv=*r! 
zL0j}_;;$cHcnCPlyoQYjvy*E^#hw#%c;62;ky+>6GlB8H%|J{_K646{I2sTPv(6P% z13fEpK`=R_)~Z6|fe?E=nV|{_g`|@ZfzBDDs=B+MDiJJeC8e9XAwyKA*z;BDRXr=Z zcafdFbmnrc1Rhi%ZUUjdBf)r(xl|d9RMTKk!_I!5CSV` zD2ZN+wEXSpu(nfae`@of?s&9)KxW*K2l=RXKulH|0!96{1LDq$t>I9OVsZzHjzN!!UxDW>N}byp zuay$TUvk4?^G8=@Z7Wmx1}2xRY{<`J^1PK10<+BruUth=C2Vy|hYUD;$-k2A>*<)o z&&?5>2x6k&;8)Bcpd$s~JqV-+F`oI8((jg!K6VS)ir)R4vKCXWZ`;f7PKgx;iqdGi z5Ka2b*_*Y)-LDj11!{Zw*A83aX;P#V{~C-}5KftL!;n^YeXJ7_b7C>5H=K~t?+8-S z2??VTD0vWW>b<=;ejhjg0MdzmOP_f^5F;rt{~a*#Qm-IaB>IIu^DdB>VpKc{tOrsj z{5PNSFFzG`shZqFD~965iQ>(u^(i+z1VA%eLdmd+x z?6E65e>I-uv-7*u`;+Q@mwG>@-oK&V|6aX+TfKiry&qBU-&5}gaX)D2MISg9CqK4C zkU@R_CfMdh+KfGc+%L#4BKecL2W(v>m^OqXTi0A5^bpiF6UfVZFm6eC-n|15vr9Xt z7+uig1<8zn)5k*FB8`;u2z@?NxLNRGB>`JA;s~k$Dn^(D%$`W2RN2y#alDtP$8Nmt5n+o>Xd2+z(%Fo1@NF!JqP=7I^qcCNgJVr z^Rq$nzL9zQLN*&rKIo3r@R+NHf3~ECzl&#}9`wdWPHIm|gJTn?BkCTcJxrdio;U7k zkzYZc2sg*gNzpKJr^IUzZp6#dKOp92b2xMGQ$mG$K#CL9j4Rk!-# z4Gb`|920;zRH%Gb8Zq}2!Im+Dhei>-F$#))Sa1<_!ChmZYTpJmZzT)ADU4fK!-tV( zNc=<1PFvT0(nHIEBCEO_zCau<`lLk0GTHFQ*K`la9bA+WmwoYRs>Nh&5)EK&Jzp;! 
z@=@bO$9f1ZI_&4KNy~@*w#t+@6+m2c;}PGFdAv(9_GeSJmx8l`Wi~0>w`;1DJu2eB z{3UdVNM9Fa#H0ypMvCwCrr-ivkYg@5_wSkuw6g#%c$k9ngp2k*VkAP_dU!#s6E}$W ziZovpHoGB+vEy_Q3Zc@rFwbVMvD+5jX}8%$!M1R<0NMi@i?X^zi7ftat#o^SxSgy4Qj@>+mPbXP^s{XgnS8 zzVG+;LI{E>yU0)Yxn`%|cv$3@Memh#pVjy_f8BeGJKX8C1x|ETC9MWOHj>3}OsNtD zBJUTyZz|fbR2#jA(<$rf4Sx#lL- zwGAn2*{NA+R`!A|1;43jXX9n!^ zAPUkf!tLa$BUYF#CKp@%-q&dAAdO*_+XLQTk;A1#n_Dp0x+{<>AgI{kHpE`f^bzs7Y3v0!jy-@n3N7fMh_q&$EV~LxVX1@1X9k zHH+Q`V?{vOY!{V!A)sLCDQjJ#vegn$sw+gL!zwEGEw;_YqN_7r#jHI8Rm@m>RZP}k zwVI4l(r2$!>J|q5M&D?a;$IkC4IvA4$oM z;1l}-5~)Wlu*k)@TjO)YM8IPhv8o_G6`YK8P)=J{f46 zmYzQ-Xj%|75z|U~vrYy5BU}?PVPos)cn+k!rmI2BRo~R92i#Hw3#t3egp+OxI5kLI zY~6Uk#VPj~z4dww>0q)Nq4_rm&9?3{>R7T4Th~coVzM?OCZ&JPIxR4yM#;z9HJpR- zSxGUEnxg%K6CS&8VrYk>elC9GP!!ceQB)5_Q9Tqz^-vVmLs3)@MNvHz#l=!0Ymv)v zx5{&Hx5q2FQ4u}7(xiyxywakG6}-|q;vz0+a6wx}EJOnXy#k(7I(>bjdUOuog#oH3 z&#*`sRTV+$gJKH6YnBJ84xcqQ;?sOXNcTqS<_T&`ruU1cv5QW58iyTrm6Q%?yyT7>4jtg zuw3#Rj5fK*_7Y%XXf$Z-4|>tfzyrfS8dl$6O|+2k@%PKwp=;}|sh__ci`KUR_6rg} zzk3bNLcHr3-gps;C$NP~6*zm;%HHt`P54_ls5KrI%Xx_#xOya&P+l5L7O#MjK^}mq|>GqdVf0f= z1Kt^^-$32%VXAAmg|SZo=6G(8hkY5mFOKoHY2P0)-qKe3hUxw!wPX11T0$}&PgBcg z;GpT0FRfgMSBI4%iFhXS?k8TwBwmdw=bOw^<)lfxT2-!gGEbG;IEh!M$~Az8q3X8O zsd5qU_87p^zw6T4qBW+`?(N&RH*dl)m=B{<#}SpA+%=-APnyKLU)5JTkyoe5ZJfl@ z`ouJ|e@WYILHt}W8@dEn1ZpbdEalV6rsI}M-l3d=35 za9w_c+Tt}G`zhouqI?(YlCFN>h12m0R;OnGzY!%@PxRag^#+rHnO5Vg&g^_PyWGZR zM`sQ>A~PCDKiRzjvdd>M_7?OJRBc(8S8K54GaJTY!&q!(MG*s%yeXSdZ!s}T)D$r` zbnbXk2@<~SZM?Nbo%a3UGz?O0|vrR9GkT1x7x(Lpio6qd>3}$bh#_WUE zp;Pm1I9Eu| zL3{)yKdeJt6N-$|jq)Xw{V1d3qld_R#**Th|#)uq`hAKPoLL2`II!nA@N5>=aNL4)CB)4{i z17SJZ$=JJ8b8l-@jyJY#2ybl-HzQ>>ZwPN`3^%uhngiRKLJ<20~YonnUnz41J zd*Y#JXKg6j(h+TM3^!pEaSS8M=6zIqO*5N6fm^pF8ft6?ll_Lc>pNp|sJ*VWJtW33 zfDmD9zQNxbYup4%6%Mv;iZ(_&D;wL|VyweRE5Eg|t#x~7eXunqOHjwUVXZ&3DZY7g zD4MhKuE4sx13_(f}ovZcrmG!YmsHwH3wW(fn9=tUl{FvNaFLy>l^{wHSj@%Ba zZ-a8i?qKbq_E-qmy_M@W++A0*Ca|>Jqv^bkHNmEiCi){+OEeUsYz8`7Ww@f9-8PQb zhuA7s9}8`%Z&4?yo=kFKPj`e{S~tV9_2I^7v}0?1V|4SQlzOUKv_a;oLg7bSqaERP 
zngCWlNunbh+J7D z+LkU^xHTC#sp^RynB$C_8e}R952aZ1^47U6%o6Y^hA$L~LIe}A#-74rxE^CnZ zf4TmAJYcuukp+eFailq{3*{3pnzQP+rSClgIv&rT#5b@`!q^9@V$Wj zC>wzjeAa<{1NifR#ZLU?3Ai2bRg?zc1m8xXzN|ADTYy4%31Fj+Hv@iE#|eH{#}5LY z(s6?SrsJc4wfOtf-cM<82XN9!aIcONJcmMd4go&m((pDw(=5i?z$aLOLNWyJ(eY-$ z@6CoEz&{8`|MBSoP7v=8RD3buapbp0z@L~$QhpglPIVQ0f?q_j-=@K5feXM1?n5aD zPWjgz$i@7?DJQEGU=QJdAKZ+;wgRVo?M9Scz$fN@lqViR*#|zscI0+10w?$^Nn1E7p`cG#X{|y+ByX& z2**|~h)2V>$C|c;+8bj_+FP5V9kGrUc}Y`8`|XXf_R>d}E)bCJwYK0HL`9Z`p6(@7gcy-?o3pe%Ardfr +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +""" +Ciphertext Block Chaining (CBC) mode. 
+""" + +__all__ = ['CbcMode'] + +from Crypto.Util.py3compat import _copy_bytes +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, + create_string_buffer, get_raw_buffer, + SmartPointer, c_size_t, c_uint8_ptr, + is_writeable_buffer) + +from Crypto.Random import get_random_bytes + +raw_cbc_lib = load_pycryptodome_raw_lib("Crypto.Cipher._raw_cbc", """ + int CBC_start_operation(void *cipher, + const uint8_t iv[], + size_t iv_len, + void **pResult); + int CBC_encrypt(void *cbcState, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int CBC_decrypt(void *cbcState, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int CBC_stop_operation(void *state); + """ + ) + + +class CbcMode(object): + """*Cipher-Block Chaining (CBC)*. + + Each of the ciphertext blocks depends on the current + and all previous plaintext blocks. + + An Initialization Vector (*IV*) is required. + + See `NIST SP800-38A`_ , Section 6.2 . + + .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf + + :undocumented: __init__ + """ + + def __init__(self, block_cipher, iv): + """Create a new block cipher, configured in CBC mode. + + :Parameters: + block_cipher : C pointer + A smart pointer to the low-level block cipher instance. + + iv : bytes/bytearray/memoryview + The initialization vector to use for encryption or decryption. + It is as long as the cipher block. + + **The IV must be unpredictable**. Ideally it is picked randomly. + + Reusing the *IV* for encryptions performed with the same key + compromises confidentiality. 
+ """ + + self._state = VoidPointer() + result = raw_cbc_lib.CBC_start_operation(block_cipher.get(), + c_uint8_ptr(iv), + c_size_t(len(iv)), + self._state.address_of()) + if result: + raise ValueError("Error %d while instantiating the CBC mode" + % result) + + # Ensure that object disposal of this Python object will (eventually) + # free the memory allocated by the raw library for the cipher mode + self._state = SmartPointer(self._state.get(), + raw_cbc_lib.CBC_stop_operation) + + # Memory allocated for the underlying block cipher is now owed + # by the cipher mode + block_cipher.release() + + self.block_size = len(iv) + """The block size of the underlying cipher, in bytes.""" + + self.iv = _copy_bytes(None, None, iv) + """The Initialization Vector originally used to create the object. + The value does not change.""" + + self.IV = self.iv + """Alias for `iv`""" + + self._next = [ self.encrypt, self.decrypt ] + + def encrypt(self, plaintext, output=None): + """Encrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have encrypted a message + you cannot encrypt (or decrypt) another message using the same + object. + + The data to encrypt can be broken up in two or + more pieces and `encrypt` can be called multiple times. + + That is, the statement: + + >>> c.encrypt(a) + c.encrypt(b) + + is equivalent to: + + >>> c.encrypt(a+b) + + That also means that you cannot reuse an object for encrypting + or decrypting other data with the same key. + + This function does not add any padding to the plaintext. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + Its lenght must be multiple of the cipher block size. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + If ``output`` is ``None``, the ciphertext is returned as ``bytes``. + Otherwise, ``None``. 
+ """ + + if self.encrypt not in self._next: + raise TypeError("encrypt() cannot be called after decrypt()") + self._next = [ self.encrypt ] + + if output is None: + ciphertext = create_string_buffer(len(plaintext)) + else: + ciphertext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(plaintext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_cbc_lib.CBC_encrypt(self._state.get(), + c_uint8_ptr(plaintext), + c_uint8_ptr(ciphertext), + c_size_t(len(plaintext))) + if result: + if result == 3: + raise ValueError("Data must be padded to %d byte boundary in CBC mode" % self.block_size) + raise ValueError("Error %d while encrypting in CBC mode" % result) + + if output is None: + return get_raw_buffer(ciphertext) + else: + return None + + def decrypt(self, ciphertext, output=None): + """Decrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have decrypted a message + you cannot decrypt (or encrypt) another message with the same + object. + + The data to decrypt can be broken up in two or + more pieces and `decrypt` can be called multiple times. + + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is equivalent to: + + >>> c.decrypt(a+b) + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + Its length must be multiple of the cipher block size. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: + If ``output`` is ``None``, the plaintext is returned as ``bytes``. + Otherwise, ``None``. 
+ """ + + if self.decrypt not in self._next: + raise TypeError("decrypt() cannot be called after encrypt()") + self._next = [ self.decrypt ] + + if output is None: + plaintext = create_string_buffer(len(ciphertext)) + else: + plaintext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(ciphertext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_cbc_lib.CBC_decrypt(self._state.get(), + c_uint8_ptr(ciphertext), + c_uint8_ptr(plaintext), + c_size_t(len(ciphertext))) + if result: + if result == 3: + raise ValueError("Data must be padded to %d byte boundary in CBC mode" % self.block_size) + raise ValueError("Error %d while decrypting in CBC mode" % result) + + if output is None: + return get_raw_buffer(plaintext) + else: + return None + + +def _create_cbc_cipher(factory, **kwargs): + """Instantiate a cipher object that performs CBC encryption/decryption. + + :Parameters: + factory : module + The underlying block cipher, a module from ``Crypto.Cipher``. + + :Keywords: + iv : bytes/bytearray/memoryview + The IV to use for CBC. + + IV : bytes/bytearray/memoryview + Alias for ``iv``. + + Any other keyword will be passed to the underlying block cipher. + See the relevant documentation for details (at least ``key`` will need + to be present). 
+ """ + + cipher_state = factory._create_base_cipher(kwargs) + iv = kwargs.pop("IV", None) + IV = kwargs.pop("iv", None) + + if (None, None) == (iv, IV): + iv = get_random_bytes(factory.block_size) + if iv is not None: + if IV is not None: + raise TypeError("You must either use 'iv' or 'IV', not both") + else: + iv = IV + + if len(iv) != factory.block_size: + raise ValueError("Incorrect IV length (it must be %d bytes long)" % + factory.block_size) + + if kwargs: + raise TypeError("Unknown parameters for CBC: %s" % str(kwargs)) + + return CbcMode(cipher_state, iv) diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_cbc.pyi b/env/Lib/site-packages/Crypto/Cipher/_mode_cbc.pyi new file mode 100644 index 0000000..8b9fb16 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_cbc.pyi @@ -0,0 +1,25 @@ +from typing import Union, overload + +from Crypto.Util._raw_api import SmartPointer + +Buffer = Union[bytes, bytearray, memoryview] + +__all__ = ['CbcMode'] + +class CbcMode(object): + block_size: int + iv: Buffer + IV: Buffer + + def __init__(self, + block_cipher: SmartPointer, + iv: Buffer) -> None: ... + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_ccm.py b/env/Lib/site-packages/Crypto/Cipher/_mode_ccm.py new file mode 100644 index 0000000..64077de --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_ccm.py @@ -0,0 +1,650 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. 
Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +""" +Counter with CBC-MAC (CCM) mode. +""" + +__all__ = ['CcmMode'] + +import struct +from binascii import unhexlify + +from Crypto.Util.py3compat import (byte_string, bord, + _copy_bytes) +from Crypto.Util._raw_api import is_writeable_buffer + +from Crypto.Util.strxor import strxor +from Crypto.Util.number import long_to_bytes + +from Crypto.Hash import BLAKE2s +from Crypto.Random import get_random_bytes + + +def enum(**enums): + return type('Enum', (), enums) + +MacStatus = enum(NOT_STARTED=0, PROCESSING_AUTH_DATA=1, PROCESSING_PLAINTEXT=2) + + +class CcmMode(object): + """Counter with CBC-MAC (CCM). + + This is an Authenticated Encryption with Associated Data (`AEAD`_) mode. + It provides both confidentiality and authenticity. 
+ + The header of the message may be left in the clear, if needed, and it will + still be subject to authentication. The decryption step tells the receiver + if the message comes from a source that really knowns the secret key. + Additionally, decryption detects if any part of the message - including the + header - has been modified or corrupted. + + This mode requires a nonce. The nonce shall never repeat for two + different messages encrypted with the same key, but it does not need + to be random. + Note that there is a trade-off between the size of the nonce and the + maximum size of a single message you can encrypt. + + It is important to use a large nonce if the key is reused across several + messages and the nonce is chosen randomly. + + It is acceptable to us a short nonce if the key is only used a few times or + if the nonce is taken from a counter. + + The following table shows the trade-off when the nonce is chosen at + random. The column on the left shows how many messages it takes + for the keystream to repeat **on average**. In practice, you will want to + stop using the key way before that. + + +--------------------+---------------+-------------------+ + | Avg. # of messages | nonce | Max. message | + | before keystream | size | size | + | repeats | (bytes) | (bytes) | + +====================+===============+===================+ + | 2^52 | 13 | 64K | + +--------------------+---------------+-------------------+ + | 2^48 | 12 | 16M | + +--------------------+---------------+-------------------+ + | 2^44 | 11 | 4G | + +--------------------+---------------+-------------------+ + | 2^40 | 10 | 1T | + +--------------------+---------------+-------------------+ + | 2^36 | 9 | 64P | + +--------------------+---------------+-------------------+ + | 2^32 | 8 | 16E | + +--------------------+---------------+-------------------+ + + This mode is only available for ciphers that operate on 128 bits blocks + (e.g. AES but not TDES). 
+ + See `NIST SP800-38C`_ or RFC3610_. + + .. _`NIST SP800-38C`: http://csrc.nist.gov/publications/nistpubs/800-38C/SP800-38C.pdf + .. _RFC3610: https://tools.ietf.org/html/rfc3610 + .. _AEAD: http://blog.cryptographyengineering.com/2012/05/how-to-choose-authenticated-encryption.html + + :undocumented: __init__ + """ + + def __init__(self, factory, key, nonce, mac_len, msg_len, assoc_len, + cipher_params): + + self.block_size = factory.block_size + """The block size of the underlying cipher, in bytes.""" + + self.nonce = _copy_bytes(None, None, nonce) + """The nonce used for this cipher instance""" + + self._factory = factory + self._key = _copy_bytes(None, None, key) + self._mac_len = mac_len + self._msg_len = msg_len + self._assoc_len = assoc_len + self._cipher_params = cipher_params + + self._mac_tag = None # Cache for MAC tag + + if self.block_size != 16: + raise ValueError("CCM mode is only available for ciphers" + " that operate on 128 bits blocks") + + # MAC tag length (Tlen) + if mac_len not in (4, 6, 8, 10, 12, 14, 16): + raise ValueError("Parameter 'mac_len' must be even" + " and in the range 4..16 (not %d)" % mac_len) + + # Nonce value + if not (nonce and 7 <= len(nonce) <= 13): + raise ValueError("Length of parameter 'nonce' must be" + " in the range 7..13 bytes") + + # Create MAC object (the tag will be the last block + # bytes worth of ciphertext) + self._mac = self._factory.new(key, + factory.MODE_CBC, + iv=b'\x00' * 16, + **cipher_params) + self._mac_status = MacStatus.NOT_STARTED + self._t = None + + # Allowed transitions after initialization + self._next = [self.update, self.encrypt, self.decrypt, + self.digest, self.verify] + + # Cumulative lengths + self._cumul_assoc_len = 0 + self._cumul_msg_len = 0 + + # Cache for unaligned associated data/plaintext. + # This is a list with byte strings, but when the MAC starts, + # it will become a binary string no longer than the block size. 
+ self._cache = [] + + # Start CTR cipher, by formatting the counter (A.3) + q = 15 - len(nonce) # length of Q, the encoded message length + self._cipher = self._factory.new(key, + self._factory.MODE_CTR, + nonce=struct.pack("B", q - 1) + self.nonce, + **cipher_params) + + # S_0, step 6 in 6.1 for j=0 + self._s_0 = self._cipher.encrypt(b'\x00' * 16) + + # Try to start the MAC + if None not in (assoc_len, msg_len): + self._start_mac() + + def _start_mac(self): + + assert(self._mac_status == MacStatus.NOT_STARTED) + assert(None not in (self._assoc_len, self._msg_len)) + assert(isinstance(self._cache, list)) + + # Formatting control information and nonce (A.2.1) + q = 15 - len(self.nonce) # length of Q, the encoded message length + flags = (64 * (self._assoc_len > 0) + 8 * ((self._mac_len - 2) // 2) + + (q - 1)) + b_0 = struct.pack("B", flags) + self.nonce + long_to_bytes(self._msg_len, q) + + # Formatting associated data (A.2.2) + # Encoded 'a' is concatenated with the associated data 'A' + assoc_len_encoded = b'' + if self._assoc_len > 0: + if self._assoc_len < (2 ** 16 - 2 ** 8): + enc_size = 2 + elif self._assoc_len < (2 ** 32): + assoc_len_encoded = b'\xFF\xFE' + enc_size = 4 + else: + assoc_len_encoded = b'\xFF\xFF' + enc_size = 8 + assoc_len_encoded += long_to_bytes(self._assoc_len, enc_size) + + # b_0 and assoc_len_encoded must be processed first + self._cache.insert(0, b_0) + self._cache.insert(1, assoc_len_encoded) + + # Process all the data cached so far + first_data_to_mac = b"".join(self._cache) + self._cache = b"" + self._mac_status = MacStatus.PROCESSING_AUTH_DATA + self._update(first_data_to_mac) + + def _pad_cache_and_update(self): + + assert(self._mac_status != MacStatus.NOT_STARTED) + assert(len(self._cache) < self.block_size) + + # Associated data is concatenated with the least number + # of zero bytes (possibly none) to reach alignment to + # the 16 byte boundary (A.2.3) + len_cache = len(self._cache) + if len_cache > 0: + self._update(b'\x00' * 
(self.block_size - len_cache)) + + def update(self, assoc_data): + """Protect associated data + + If there is any associated data, the caller has to invoke + this function one or more times, before using + ``decrypt`` or ``encrypt``. + + By *associated data* it is meant any data (e.g. packet headers) that + will not be encrypted and will be transmitted in the clear. + However, the receiver is still able to detect any modification to it. + In CCM, the *associated data* is also called + *additional authenticated data* (AAD). + + If there is no associated data, this method must not be called. + + The caller may split associated data in segments of any size, and + invoke this method multiple times, each time with the next segment. + + :Parameters: + assoc_data : bytes/bytearray/memoryview + A piece of associated data. There are no restrictions on its size. + """ + + if self.update not in self._next: + raise TypeError("update() can only be called" + " immediately after initialization") + + self._next = [self.update, self.encrypt, self.decrypt, + self.digest, self.verify] + + self._cumul_assoc_len += len(assoc_data) + if self._assoc_len is not None and \ + self._cumul_assoc_len > self._assoc_len: + raise ValueError("Associated data is too long") + + self._update(assoc_data) + return self + + def _update(self, assoc_data_pt=b""): + """Update the MAC with associated data or plaintext + (without FSM checks)""" + + # If MAC has not started yet, we just park the data into a list. + # If the data is mutable, we create a copy and store that instead. 
+ if self._mac_status == MacStatus.NOT_STARTED: + if is_writeable_buffer(assoc_data_pt): + assoc_data_pt = _copy_bytes(None, None, assoc_data_pt) + self._cache.append(assoc_data_pt) + return + + assert(len(self._cache) < self.block_size) + + if len(self._cache) > 0: + filler = min(self.block_size - len(self._cache), + len(assoc_data_pt)) + self._cache += _copy_bytes(None, filler, assoc_data_pt) + assoc_data_pt = _copy_bytes(filler, None, assoc_data_pt) + + if len(self._cache) < self.block_size: + return + + # The cache is exactly one block + self._t = self._mac.encrypt(self._cache) + self._cache = b"" + + update_len = len(assoc_data_pt) // self.block_size * self.block_size + self._cache = _copy_bytes(update_len, None, assoc_data_pt) + if update_len > 0: + self._t = self._mac.encrypt(assoc_data_pt[:update_len])[-16:] + + def encrypt(self, plaintext, output=None): + """Encrypt data with the key set at initialization. + + A cipher object is stateful: once you have encrypted a message + you cannot encrypt (or decrypt) another message using the same + object. + + This method can be called only **once** if ``msg_len`` was + not passed at initialization. + + If ``msg_len`` was given, the data to encrypt can be broken + up in two or more pieces and `encrypt` can be called + multiple times. + + That is, the statement: + + >>> c.encrypt(a) + c.encrypt(b) + + is equivalent to: + + >>> c.encrypt(a+b) + + This function does not add any padding to the plaintext. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + If ``output`` is ``None``, the ciphertext as ``bytes``. + Otherwise, ``None``. 
+ """ + + if self.encrypt not in self._next: + raise TypeError("encrypt() can only be called after" + " initialization or an update()") + self._next = [self.encrypt, self.digest] + + # No more associated data allowed from now + if self._assoc_len is None: + assert(isinstance(self._cache, list)) + self._assoc_len = sum([len(x) for x in self._cache]) + if self._msg_len is not None: + self._start_mac() + else: + if self._cumul_assoc_len < self._assoc_len: + raise ValueError("Associated data is too short") + + # Only once piece of plaintext accepted if message length was + # not declared in advance + if self._msg_len is None: + self._msg_len = len(plaintext) + self._start_mac() + self._next = [self.digest] + + self._cumul_msg_len += len(plaintext) + if self._cumul_msg_len > self._msg_len: + raise ValueError("Message is too long") + + if self._mac_status == MacStatus.PROCESSING_AUTH_DATA: + # Associated data is concatenated with the least number + # of zero bytes (possibly none) to reach alignment to + # the 16 byte boundary (A.2.3) + self._pad_cache_and_update() + self._mac_status = MacStatus.PROCESSING_PLAINTEXT + + self._update(plaintext) + return self._cipher.encrypt(plaintext, output=output) + + def decrypt(self, ciphertext, output=None): + """Decrypt data with the key set at initialization. + + A cipher object is stateful: once you have decrypted a message + you cannot decrypt (or encrypt) another message with the same + object. + + This method can be called only **once** if ``msg_len`` was + not passed at initialization. + + If ``msg_len`` was given, the data to decrypt can be + broken up in two or more pieces and `decrypt` can be + called multiple times. + + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is equivalent to: + + >>> c.decrypt(a+b) + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + It can be of any length. 
+ :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: + If ``output`` is ``None``, the plaintext as ``bytes``. + Otherwise, ``None``. + """ + + if self.decrypt not in self._next: + raise TypeError("decrypt() can only be called" + " after initialization or an update()") + self._next = [self.decrypt, self.verify] + + # No more associated data allowed from now + if self._assoc_len is None: + assert(isinstance(self._cache, list)) + self._assoc_len = sum([len(x) for x in self._cache]) + if self._msg_len is not None: + self._start_mac() + else: + if self._cumul_assoc_len < self._assoc_len: + raise ValueError("Associated data is too short") + + # Only once piece of ciphertext accepted if message length was + # not declared in advance + if self._msg_len is None: + self._msg_len = len(ciphertext) + self._start_mac() + self._next = [self.verify] + + self._cumul_msg_len += len(ciphertext) + if self._cumul_msg_len > self._msg_len: + raise ValueError("Message is too long") + + if self._mac_status == MacStatus.PROCESSING_AUTH_DATA: + # Associated data is concatenated with the least number + # of zero bytes (possibly none) to reach alignment to + # the 16 byte boundary (A.2.3) + self._pad_cache_and_update() + self._mac_status = MacStatus.PROCESSING_PLAINTEXT + + # Encrypt is equivalent to decrypt with the CTR mode + plaintext = self._cipher.encrypt(ciphertext, output=output) + if output is None: + self._update(plaintext) + else: + self._update(output) + return plaintext + + def digest(self): + """Compute the *binary* MAC tag. + + The caller invokes this function at the very end. + + This method returns the MAC that shall be sent to the receiver, + together with the ciphertext. + + :Return: the MAC, as a byte string. 
+ """ + + if self.digest not in self._next: + raise TypeError("digest() cannot be called when decrypting" + " or validating a message") + self._next = [self.digest] + return self._digest() + + def _digest(self): + if self._mac_tag: + return self._mac_tag + + if self._assoc_len is None: + assert(isinstance(self._cache, list)) + self._assoc_len = sum([len(x) for x in self._cache]) + if self._msg_len is not None: + self._start_mac() + else: + if self._cumul_assoc_len < self._assoc_len: + raise ValueError("Associated data is too short") + + if self._msg_len is None: + self._msg_len = 0 + self._start_mac() + + if self._cumul_msg_len != self._msg_len: + raise ValueError("Message is too short") + + # Both associated data and payload are concatenated with the least + # number of zero bytes (possibly none) that align it to the + # 16 byte boundary (A.2.2 and A.2.3) + self._pad_cache_and_update() + + # Step 8 in 6.1 (T xor MSB_Tlen(S_0)) + self._mac_tag = strxor(self._t, self._s_0)[:self._mac_len] + + return self._mac_tag + + def hexdigest(self): + """Compute the *printable* MAC tag. + + This method is like `digest`. + + :Return: the MAC, as a hexadecimal string. + """ + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def verify(self, received_mac_tag): + """Validate the *binary* MAC tag. + + The caller invokes this function at the very end. + + This method checks if the decrypted message is indeed valid + (that is, if the key is correct) and it has not been + tampered with while in transit. + + :Parameters: + received_mac_tag : bytes/bytearray/memoryview + This is the *binary* MAC, as received from the sender. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. 
+ """ + + if self.verify not in self._next: + raise TypeError("verify() cannot be called" + " when encrypting a message") + self._next = [self.verify] + + self._digest() + secret = get_random_bytes(16) + + mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=self._mac_tag) + mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=received_mac_tag) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + def hexverify(self, hex_mac_tag): + """Validate the *printable* MAC tag. + + This method is like `verify`. + + :Parameters: + hex_mac_tag : string + This is the *printable* MAC, as received from the sender. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + self.verify(unhexlify(hex_mac_tag)) + + def encrypt_and_digest(self, plaintext, output=None): + """Perform encrypt() and digest() in one step. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + a tuple with two items: + + - the ciphertext, as ``bytes`` + - the MAC tag, as ``bytes`` + + The first item becomes ``None`` when the ``output`` parameter + specified a location for the result. + """ + + return self.encrypt(plaintext, output=output), self.digest() + + def decrypt_and_verify(self, ciphertext, received_mac_tag, output=None): + """Perform decrypt() and verify() in one step. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + received_mac_tag : bytes/bytearray/memoryview + This is the *binary* MAC, as received from the sender. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: the plaintext as ``bytes`` or ``None`` when the ``output`` + parameter specified a location for the result. 
+ :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + plaintext = self.decrypt(ciphertext, output=output) + self.verify(received_mac_tag) + return plaintext + + +def _create_ccm_cipher(factory, **kwargs): + """Create a new block cipher, configured in CCM mode. + + :Parameters: + factory : module + A symmetric cipher module from `Crypto.Cipher` (like + `Crypto.Cipher.AES`). + + :Keywords: + key : bytes/bytearray/memoryview + The secret key to use in the symmetric cipher. + + nonce : bytes/bytearray/memoryview + A value that must never be reused for any other encryption. + + Its length must be in the range ``[7..13]``. + 11 or 12 bytes are reasonable values in general. Bear in + mind that with CCM there is a trade-off between nonce length and + maximum message size. + + If not specified, a 11 byte long random string is used. + + mac_len : integer + Length of the MAC, in bytes. It must be even and in + the range ``[4..16]``. The default is 16. + + msg_len : integer + Length of the message to (de)cipher. + If not specified, ``encrypt`` or ``decrypt`` may only be called once. + + assoc_len : integer + Length of the associated data. + If not specified, all data is internally buffered. 
+ """ + + try: + key = key = kwargs.pop("key") + except KeyError as e: + raise TypeError("Missing parameter: " + str(e)) + + nonce = kwargs.pop("nonce", None) # N + if nonce is None: + nonce = get_random_bytes(11) + mac_len = kwargs.pop("mac_len", factory.block_size) + msg_len = kwargs.pop("msg_len", None) # p + assoc_len = kwargs.pop("assoc_len", None) # a + cipher_params = dict(kwargs) + + return CcmMode(factory, key, nonce, mac_len, msg_len, + assoc_len, cipher_params) diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_ccm.pyi b/env/Lib/site-packages/Crypto/Cipher/_mode_ccm.pyi new file mode 100644 index 0000000..4b9f620 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_ccm.pyi @@ -0,0 +1,47 @@ +from types import ModuleType +from typing import Union, overload, Dict, Tuple, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +__all__ = ['CcmMode'] + +class CcmMode(object): + block_size: int + nonce: bytes + + def __init__(self, + factory: ModuleType, + key: Buffer, + nonce: Buffer, + mac_len: int, + msg_len: int, + assoc_len: int, + cipher_params: Dict) -> None: ... + + def update(self, assoc_data: Buffer) -> CcmMode: ... + + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, received_mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + + @overload + def encrypt_and_digest(self, + plaintext: Buffer) -> Tuple[bytes, bytes]: ... + @overload + def encrypt_and_digest(self, + plaintext: Buffer, + output: Buffer) -> Tuple[None, bytes]: ... 
+ def decrypt_and_verify(self, + ciphertext: Buffer, + received_mac_tag: Buffer, + output: Optional[Union[bytearray, memoryview]] = ...) -> bytes: ... diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_cfb.py b/env/Lib/site-packages/Crypto/Cipher/_mode_cfb.py new file mode 100644 index 0000000..b3ee1c7 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_cfb.py @@ -0,0 +1,293 @@ +# -*- coding: utf-8 -*- +# +# Cipher/mode_cfb.py : CFB mode +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +""" +Counter Feedback (CFB) mode. 
+""" + +__all__ = ['CfbMode'] + +from Crypto.Util.py3compat import _copy_bytes +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, + create_string_buffer, get_raw_buffer, + SmartPointer, c_size_t, c_uint8_ptr, + is_writeable_buffer) + +from Crypto.Random import get_random_bytes + +raw_cfb_lib = load_pycryptodome_raw_lib("Crypto.Cipher._raw_cfb",""" + int CFB_start_operation(void *cipher, + const uint8_t iv[], + size_t iv_len, + size_t segment_len, /* In bytes */ + void **pResult); + int CFB_encrypt(void *cfbState, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int CFB_decrypt(void *cfbState, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int CFB_stop_operation(void *state);""" + ) + + +class CfbMode(object): + """*Cipher FeedBack (CFB)*. + + This mode is similar to CFB, but it transforms + the underlying block cipher into a stream cipher. + + Plaintext and ciphertext are processed in *segments* + of **s** bits. The mode is therefore sometimes + labelled **s**-bit CFB. + + An Initialization Vector (*IV*) is required. + + See `NIST SP800-38A`_ , Section 6.3. + + .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf + + :undocumented: __init__ + """ + + def __init__(self, block_cipher, iv, segment_size): + """Create a new block cipher, configured in CFB mode. + + :Parameters: + block_cipher : C pointer + A smart pointer to the low-level block cipher instance. + + iv : bytes/bytearray/memoryview + The initialization vector to use for encryption or decryption. + It is as long as the cipher block. + + **The IV must be unpredictable**. Ideally it is picked randomly. + + Reusing the *IV* for encryptions performed with the same key + compromises confidentiality. + + segment_size : integer + The number of bytes the plaintext and ciphertext are segmented in. 
+ """ + + self._state = VoidPointer() + result = raw_cfb_lib.CFB_start_operation(block_cipher.get(), + c_uint8_ptr(iv), + c_size_t(len(iv)), + c_size_t(segment_size), + self._state.address_of()) + if result: + raise ValueError("Error %d while instantiating the CFB mode" % result) + + # Ensure that object disposal of this Python object will (eventually) + # free the memory allocated by the raw library for the cipher mode + self._state = SmartPointer(self._state.get(), + raw_cfb_lib.CFB_stop_operation) + + # Memory allocated for the underlying block cipher is now owed + # by the cipher mode + block_cipher.release() + + self.block_size = len(iv) + """The block size of the underlying cipher, in bytes.""" + + self.iv = _copy_bytes(None, None, iv) + """The Initialization Vector originally used to create the object. + The value does not change.""" + + self.IV = self.iv + """Alias for `iv`""" + + self._next = [ self.encrypt, self.decrypt ] + + def encrypt(self, plaintext, output=None): + """Encrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have encrypted a message + you cannot encrypt (or decrypt) another message using the same + object. + + The data to encrypt can be broken up in two or + more pieces and `encrypt` can be called multiple times. + + That is, the statement: + + >>> c.encrypt(a) + c.encrypt(b) + + is equivalent to: + + >>> c.encrypt(a+b) + + This function does not add any padding to the plaintext. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + If ``output`` is ``None``, the ciphertext is returned as ``bytes``. + Otherwise, ``None``. 
+ """ + + if self.encrypt not in self._next: + raise TypeError("encrypt() cannot be called after decrypt()") + self._next = [ self.encrypt ] + + if output is None: + ciphertext = create_string_buffer(len(plaintext)) + else: + ciphertext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(plaintext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_cfb_lib.CFB_encrypt(self._state.get(), + c_uint8_ptr(plaintext), + c_uint8_ptr(ciphertext), + c_size_t(len(plaintext))) + if result: + raise ValueError("Error %d while encrypting in CFB mode" % result) + + if output is None: + return get_raw_buffer(ciphertext) + else: + return None + + def decrypt(self, ciphertext, output=None): + """Decrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have decrypted a message + you cannot decrypt (or encrypt) another message with the same + object. + + The data to decrypt can be broken up in two or + more pieces and `decrypt` can be called multiple times. + + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is equivalent to: + + >>> c.decrypt(a+b) + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: + If ``output`` is ``None``, the plaintext is returned as ``bytes``. + Otherwise, ``None``. 
+ """ + + if self.decrypt not in self._next: + raise TypeError("decrypt() cannot be called after encrypt()") + self._next = [ self.decrypt ] + + if output is None: + plaintext = create_string_buffer(len(ciphertext)) + else: + plaintext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(ciphertext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_cfb_lib.CFB_decrypt(self._state.get(), + c_uint8_ptr(ciphertext), + c_uint8_ptr(plaintext), + c_size_t(len(ciphertext))) + if result: + raise ValueError("Error %d while decrypting in CFB mode" % result) + + if output is None: + return get_raw_buffer(plaintext) + else: + return None + + +def _create_cfb_cipher(factory, **kwargs): + """Instantiate a cipher object that performs CFB encryption/decryption. + + :Parameters: + factory : module + The underlying block cipher, a module from ``Crypto.Cipher``. + + :Keywords: + iv : bytes/bytearray/memoryview + The IV to use for CFB. + + IV : bytes/bytearray/memoryview + Alias for ``iv``. + + segment_size : integer + The number of bit the plaintext and ciphertext are segmented in. + If not present, the default is 8. + + Any other keyword will be passed to the underlying block cipher. + See the relevant documentation for details (at least ``key`` will need + to be present). 
+ """ + + cipher_state = factory._create_base_cipher(kwargs) + + iv = kwargs.pop("IV", None) + IV = kwargs.pop("iv", None) + + if (None, None) == (iv, IV): + iv = get_random_bytes(factory.block_size) + if iv is not None: + if IV is not None: + raise TypeError("You must either use 'iv' or 'IV', not both") + else: + iv = IV + + if len(iv) != factory.block_size: + raise ValueError("Incorrect IV length (it must be %d bytes long)" % + factory.block_size) + + segment_size_bytes, rem = divmod(kwargs.pop("segment_size", 8), 8) + if segment_size_bytes == 0 or rem != 0: + raise ValueError("'segment_size' must be positive and multiple of 8 bits") + + if kwargs: + raise TypeError("Unknown parameters for CFB: %s" % str(kwargs)) + return CfbMode(cipher_state, iv, segment_size_bytes) diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_cfb.pyi b/env/Lib/site-packages/Crypto/Cipher/_mode_cfb.pyi new file mode 100644 index 0000000..e13a909 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_cfb.pyi @@ -0,0 +1,26 @@ +from typing import Union, overload + +from Crypto.Util._raw_api import SmartPointer + +Buffer = Union[bytes, bytearray, memoryview] + +__all__ = ['CfbMode'] + + +class CfbMode(object): + block_size: int + iv: Buffer + IV: Buffer + + def __init__(self, + block_cipher: SmartPointer, + iv: Buffer, + segment_size: int) -> None: ... + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... 
diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_ctr.py b/env/Lib/site-packages/Crypto/Cipher/_mode_ctr.py new file mode 100644 index 0000000..15c7e83 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_ctr.py @@ -0,0 +1,393 @@ +# -*- coding: utf-8 -*- +# +# Cipher/mode_ctr.py : CTR mode +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +""" +Counter (CTR) mode. 
+""" + +__all__ = ['CtrMode'] + +import struct + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, + create_string_buffer, get_raw_buffer, + SmartPointer, c_size_t, c_uint8_ptr, + is_writeable_buffer) + +from Crypto.Random import get_random_bytes +from Crypto.Util.py3compat import _copy_bytes, is_native_int +from Crypto.Util.number import long_to_bytes + +raw_ctr_lib = load_pycryptodome_raw_lib("Crypto.Cipher._raw_ctr", """ + int CTR_start_operation(void *cipher, + uint8_t initialCounterBlock[], + size_t initialCounterBlock_len, + size_t prefix_len, + unsigned counter_len, + unsigned littleEndian, + void **pResult); + int CTR_encrypt(void *ctrState, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int CTR_decrypt(void *ctrState, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int CTR_stop_operation(void *ctrState);""" + ) + + +class CtrMode(object): + """*CounTeR (CTR)* mode. + + This mode is very similar to ECB, in that + encryption of one block is done independently of all other blocks. + + Unlike ECB, the block *position* contributes to the encryption + and no information leaks about symbol frequency. + + Each message block is associated to a *counter* which + must be unique across all messages that get encrypted + with the same key (not just within the same message). + The counter is as big as the block size. + + Counters can be generated in several ways. The most + straightword one is to choose an *initial counter block* + (which can be made public, similarly to the *IV* for the + other modes) and increment its lowest **m** bits by one + (modulo *2^m*) for each block. In most cases, **m** is + chosen to be half the block size. + + See `NIST SP800-38A`_, Section 6.5 (for the mode) and + Appendix B (for how to manage the *initial counter block*). + + .. 
_`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf + + :undocumented: __init__ + """ + + def __init__(self, block_cipher, initial_counter_block, + prefix_len, counter_len, little_endian): + """Create a new block cipher, configured in CTR mode. + + :Parameters: + block_cipher : C pointer + A smart pointer to the low-level block cipher instance. + + initial_counter_block : bytes/bytearray/memoryview + The initial plaintext to use to generate the key stream. + + It is as large as the cipher block, and it embeds + the initial value of the counter. + + This value must not be reused. + It shall contain a nonce or a random component. + Reusing the *initial counter block* for encryptions + performed with the same key compromises confidentiality. + + prefix_len : integer + The amount of bytes at the beginning of the counter block + that never change. + + counter_len : integer + The length in bytes of the counter embedded in the counter + block. + + little_endian : boolean + True if the counter in the counter block is an integer encoded + in little endian mode. If False, it is big endian. 
+ """ + + if len(initial_counter_block) == prefix_len + counter_len: + self.nonce = _copy_bytes(None, prefix_len, initial_counter_block) + """Nonce; not available if there is a fixed suffix""" + + self._state = VoidPointer() + result = raw_ctr_lib.CTR_start_operation(block_cipher.get(), + c_uint8_ptr(initial_counter_block), + c_size_t(len(initial_counter_block)), + c_size_t(prefix_len), + counter_len, + little_endian, + self._state.address_of()) + if result: + raise ValueError("Error %X while instantiating the CTR mode" + % result) + + # Ensure that object disposal of this Python object will (eventually) + # free the memory allocated by the raw library for the cipher mode + self._state = SmartPointer(self._state.get(), + raw_ctr_lib.CTR_stop_operation) + + # Memory allocated for the underlying block cipher is now owed + # by the cipher mode + block_cipher.release() + + self.block_size = len(initial_counter_block) + """The block size of the underlying cipher, in bytes.""" + + self._next = [self.encrypt, self.decrypt] + + def encrypt(self, plaintext, output=None): + """Encrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have encrypted a message + you cannot encrypt (or decrypt) another message using the same + object. + + The data to encrypt can be broken up in two or + more pieces and `encrypt` can be called multiple times. + + That is, the statement: + + >>> c.encrypt(a) + c.encrypt(b) + + is equivalent to: + + >>> c.encrypt(a+b) + + This function does not add any padding to the plaintext. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + If ``output`` is ``None``, the ciphertext is returned as ``bytes``. + Otherwise, ``None``. 
+ """ + + if self.encrypt not in self._next: + raise TypeError("encrypt() cannot be called after decrypt()") + self._next = [self.encrypt] + + if output is None: + ciphertext = create_string_buffer(len(plaintext)) + else: + ciphertext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(plaintext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_ctr_lib.CTR_encrypt(self._state.get(), + c_uint8_ptr(plaintext), + c_uint8_ptr(ciphertext), + c_size_t(len(plaintext))) + if result: + if result == 0x60002: + raise OverflowError("The counter has wrapped around in" + " CTR mode") + raise ValueError("Error %X while encrypting in CTR mode" % result) + + if output is None: + return get_raw_buffer(ciphertext) + else: + return None + + def decrypt(self, ciphertext, output=None): + """Decrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have decrypted a message + you cannot decrypt (or encrypt) another message with the same + object. + + The data to decrypt can be broken up in two or + more pieces and `decrypt` can be called multiple times. + + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is equivalent to: + + >>> c.decrypt(a+b) + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: + If ``output`` is ``None``, the plaintext is returned as ``bytes``. + Otherwise, ``None``. 
+ """ + + if self.decrypt not in self._next: + raise TypeError("decrypt() cannot be called after encrypt()") + self._next = [self.decrypt] + + if output is None: + plaintext = create_string_buffer(len(ciphertext)) + else: + plaintext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(ciphertext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_ctr_lib.CTR_decrypt(self._state.get(), + c_uint8_ptr(ciphertext), + c_uint8_ptr(plaintext), + c_size_t(len(ciphertext))) + if result: + if result == 0x60002: + raise OverflowError("The counter has wrapped around in" + " CTR mode") + raise ValueError("Error %X while decrypting in CTR mode" % result) + + if output is None: + return get_raw_buffer(plaintext) + else: + return None + + +def _create_ctr_cipher(factory, **kwargs): + """Instantiate a cipher object that performs CTR encryption/decryption. + + :Parameters: + factory : module + The underlying block cipher, a module from ``Crypto.Cipher``. + + :Keywords: + nonce : bytes/bytearray/memoryview + The fixed part at the beginning of the counter block - the rest is + the counter number that gets increased when processing the next block. + The nonce must be such that no two messages are encrypted under the + same key and the same nonce. + + The nonce must be shorter than the block size (it can have + zero length; the counter is then as long as the block). + + If this parameter is not present, a random nonce will be created with + length equal to half the block size. No random nonce shorter than + 64 bits will be created though - you must really think through all + security consequences of using such a short block size. + + initial_value : posive integer or bytes/bytearray/memoryview + The initial value for the counter. If not present, the cipher will + start counting from 0. 
The value is incremented by one for each block. + The counter number is encoded in big endian mode. + + counter : object + Instance of ``Crypto.Util.Counter``, which allows full customization + of the counter block. This parameter is incompatible to both ``nonce`` + and ``initial_value``. + + Any other keyword will be passed to the underlying block cipher. + See the relevant documentation for details (at least ``key`` will need + to be present). + """ + + cipher_state = factory._create_base_cipher(kwargs) + + counter = kwargs.pop("counter", None) + nonce = kwargs.pop("nonce", None) + initial_value = kwargs.pop("initial_value", None) + if kwargs: + raise TypeError("Invalid parameters for CTR mode: %s" % str(kwargs)) + + if counter is not None and (nonce, initial_value) != (None, None): + raise TypeError("'counter' and 'nonce'/'initial_value'" + " are mutually exclusive") + + if counter is None: + # Crypto.Util.Counter is not used + if nonce is None: + if factory.block_size < 16: + raise TypeError("Impossible to create a safe nonce for short" + " block sizes") + nonce = get_random_bytes(factory.block_size // 2) + else: + if len(nonce) >= factory.block_size: + raise ValueError("Nonce is too long") + + # What is not nonce is counter + counter_len = factory.block_size - len(nonce) + + if initial_value is None: + initial_value = 0 + + if is_native_int(initial_value): + if (1 << (counter_len * 8)) - 1 < initial_value: + raise ValueError("Initial counter value is too large") + initial_counter_block = nonce + long_to_bytes(initial_value, counter_len) + else: + if len(initial_value) != counter_len: + raise ValueError("Incorrect length for counter byte string (%d bytes, expected %d)" % + (len(initial_value), counter_len)) + initial_counter_block = nonce + initial_value + + return CtrMode(cipher_state, + initial_counter_block, + len(nonce), # prefix + counter_len, + False) # little_endian + + # Crypto.Util.Counter is used + + # 'counter' used to be a callable object, but now 
it is + # just a dictionary for backward compatibility. + _counter = dict(counter) + try: + counter_len = _counter.pop("counter_len") + prefix = _counter.pop("prefix") + suffix = _counter.pop("suffix") + initial_value = _counter.pop("initial_value") + little_endian = _counter.pop("little_endian") + except KeyError: + raise TypeError("Incorrect counter object" + " (use Crypto.Util.Counter.new)") + + # Compute initial counter block + words = [] + while initial_value > 0: + words.append(struct.pack('B', initial_value & 255)) + initial_value >>= 8 + words += [b'\x00'] * max(0, counter_len - len(words)) + if not little_endian: + words.reverse() + initial_counter_block = prefix + b"".join(words) + suffix + + if len(initial_counter_block) != factory.block_size: + raise ValueError("Size of the counter block (%d bytes) must match" + " block size (%d)" % (len(initial_counter_block), + factory.block_size)) + + return CtrMode(cipher_state, initial_counter_block, + len(prefix), counter_len, little_endian) diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_ctr.pyi b/env/Lib/site-packages/Crypto/Cipher/_mode_ctr.pyi new file mode 100644 index 0000000..ce70855 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_ctr.pyi @@ -0,0 +1,27 @@ +from typing import Union, overload + +from Crypto.Util._raw_api import SmartPointer + +Buffer = Union[bytes, bytearray, memoryview] + +__all__ = ['CtrMode'] + +class CtrMode(object): + block_size: int + nonce: bytes + + def __init__(self, + block_cipher: SmartPointer, + initial_counter_block: Buffer, + prefix_len: int, + counter_len: int, + little_endian: bool) -> None: ... + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... 
+ diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_eax.py b/env/Lib/site-packages/Crypto/Cipher/_mode_eax.py new file mode 100644 index 0000000..d5fb135 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_eax.py @@ -0,0 +1,408 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +""" +EAX mode. 
+""" + +__all__ = ['EaxMode'] + +import struct +from binascii import unhexlify + +from Crypto.Util.py3compat import byte_string, bord, _copy_bytes + +from Crypto.Util._raw_api import is_buffer + +from Crypto.Util.strxor import strxor +from Crypto.Util.number import long_to_bytes, bytes_to_long + +from Crypto.Hash import CMAC, BLAKE2s +from Crypto.Random import get_random_bytes + + +class EaxMode(object): + """*EAX* mode. + + This is an Authenticated Encryption with Associated Data + (`AEAD`_) mode. It provides both confidentiality and authenticity. + + The header of the message may be left in the clear, if needed, + and it will still be subject to authentication. + + The decryption step tells the receiver if the message comes + from a source that really knowns the secret key. + Additionally, decryption detects if any part of the message - + including the header - has been modified or corrupted. + + This mode requires a *nonce*. + + This mode is only available for ciphers that operate on 64 or + 128 bits blocks. + + There are no official standards defining EAX. + The implementation is based on `a proposal`__ that + was presented to NIST. + + .. _AEAD: http://blog.cryptographyengineering.com/2012/05/how-to-choose-authenticated-encryption.html + .. 
__: http://csrc.nist.gov/groups/ST/toolkit/BCM/documents/proposedmodes/eax/eax-spec.pdf + + :undocumented: __init__ + """ + + def __init__(self, factory, key, nonce, mac_len, cipher_params): + """EAX cipher mode""" + + self.block_size = factory.block_size + """The block size of the underlying cipher, in bytes.""" + + self.nonce = _copy_bytes(None, None, nonce) + """The nonce originally used to create the object.""" + + self._mac_len = mac_len + self._mac_tag = None # Cache for MAC tag + + # Allowed transitions after initialization + self._next = [self.update, self.encrypt, self.decrypt, + self.digest, self.verify] + + # MAC tag length + if not (4 <= self._mac_len <= self.block_size): + raise ValueError("Parameter 'mac_len' must not be larger than %d" + % self.block_size) + + # Nonce cannot be empty and must be a byte string + if len(self.nonce) == 0: + raise ValueError("Nonce cannot be empty in EAX mode") + if not is_buffer(nonce): + raise TypeError("nonce must be bytes, bytearray or memoryview") + + self._omac = [ + CMAC.new(key, + b'\x00' * (self.block_size - 1) + struct.pack('B', i), + ciphermod=factory, + cipher_params=cipher_params) + for i in range(0, 3) + ] + + # Compute MAC of nonce + self._omac[0].update(self.nonce) + self._signer = self._omac[1] + + # MAC of the nonce is also the initial counter for CTR encryption + counter_int = bytes_to_long(self._omac[0].digest()) + self._cipher = factory.new(key, + factory.MODE_CTR, + initial_value=counter_int, + nonce=b"", + **cipher_params) + + def update(self, assoc_data): + """Protect associated data + + If there is any associated data, the caller has to invoke + this function one or more times, before using + ``decrypt`` or ``encrypt``. + + By *associated data* it is meant any data (e.g. packet headers) that + will not be encrypted and will be transmitted in the clear. + However, the receiver is still able to detect any modification to it. + + If there is no associated data, this method must not be called. 
+ + The caller may split associated data in segments of any size, and + invoke this method multiple times, each time with the next segment. + + :Parameters: + assoc_data : bytes/bytearray/memoryview + A piece of associated data. There are no restrictions on its size. + """ + + if self.update not in self._next: + raise TypeError("update() can only be called" + " immediately after initialization") + + self._next = [self.update, self.encrypt, self.decrypt, + self.digest, self.verify] + + self._signer.update(assoc_data) + return self + + def encrypt(self, plaintext, output=None): + """Encrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have encrypted a message + you cannot encrypt (or decrypt) another message using the same + object. + + The data to encrypt can be broken up in two or + more pieces and `encrypt` can be called multiple times. + + That is, the statement: + + >>> c.encrypt(a) + c.encrypt(b) + + is equivalent to: + + >>> c.encrypt(a+b) + + This function does not add any padding to the plaintext. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + If ``output`` is ``None``, the ciphertext as ``bytes``. + Otherwise, ``None``. + """ + + if self.encrypt not in self._next: + raise TypeError("encrypt() can only be called after" + " initialization or an update()") + self._next = [self.encrypt, self.digest] + ct = self._cipher.encrypt(plaintext, output=output) + if output is None: + self._omac[2].update(ct) + else: + self._omac[2].update(output) + return ct + + def decrypt(self, ciphertext, output=None): + """Decrypt data with the key and the parameters set at initialization. 
+ + A cipher object is stateful: once you have decrypted a message + you cannot decrypt (or encrypt) another message with the same + object. + + The data to decrypt can be broken up in two or + more pieces and `decrypt` can be called multiple times. + + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is equivalent to: + + >>> c.decrypt(a+b) + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: + If ``output`` is ``None``, the plaintext as ``bytes``. + Otherwise, ``None``. + """ + + if self.decrypt not in self._next: + raise TypeError("decrypt() can only be called" + " after initialization or an update()") + self._next = [self.decrypt, self.verify] + self._omac[2].update(ciphertext) + return self._cipher.decrypt(ciphertext, output=output) + + def digest(self): + """Compute the *binary* MAC tag. + + The caller invokes this function at the very end. + + This method returns the MAC that shall be sent to the receiver, + together with the ciphertext. + + :Return: the MAC, as a byte string. + """ + + if self.digest not in self._next: + raise TypeError("digest() cannot be called when decrypting" + " or validating a message") + self._next = [self.digest] + + if not self._mac_tag: + tag = b'\x00' * self.block_size + for i in range(3): + tag = strxor(tag, self._omac[i].digest()) + self._mac_tag = tag[:self._mac_len] + + return self._mac_tag + + def hexdigest(self): + """Compute the *printable* MAC tag. + + This method is like `digest`. + + :Return: the MAC, as a hexadecimal string. + """ + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def verify(self, received_mac_tag): + """Validate the *binary* MAC tag. + + The caller invokes this function at the very end. 
+ + This method checks if the decrypted message is indeed valid + (that is, if the key is correct) and it has not been + tampered with while in transit. + + :Parameters: + received_mac_tag : bytes/bytearray/memoryview + This is the *binary* MAC, as received from the sender. + :Raises MacMismatchError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + if self.verify not in self._next: + raise TypeError("verify() cannot be called" + " when encrypting a message") + self._next = [self.verify] + + if not self._mac_tag: + tag = b'\x00' * self.block_size + for i in range(3): + tag = strxor(tag, self._omac[i].digest()) + self._mac_tag = tag[:self._mac_len] + + secret = get_random_bytes(16) + + mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=self._mac_tag) + mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=received_mac_tag) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + def hexverify(self, hex_mac_tag): + """Validate the *printable* MAC tag. + + This method is like `verify`. + + :Parameters: + hex_mac_tag : string + This is the *printable* MAC, as received from the sender. + :Raises MacMismatchError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + self.verify(unhexlify(hex_mac_tag)) + + def encrypt_and_digest(self, plaintext, output=None): + """Perform encrypt() and digest() in one step. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + a tuple with two items: + + - the ciphertext, as ``bytes`` + - the MAC tag, as ``bytes`` + + The first item becomes ``None`` when the ``output`` parameter + specified a location for the result. 
+ """ + + return self.encrypt(plaintext, output=output), self.digest() + + def decrypt_and_verify(self, ciphertext, received_mac_tag, output=None): + """Perform decrypt() and verify() in one step. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + received_mac_tag : bytes/bytearray/memoryview + This is the *binary* MAC, as received from the sender. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: the plaintext as ``bytes`` or ``None`` when the ``output`` + parameter specified a location for the result. + :Raises MacMismatchError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + pt = self.decrypt(ciphertext, output=output) + self.verify(received_mac_tag) + return pt + + +def _create_eax_cipher(factory, **kwargs): + """Create a new block cipher, configured in EAX mode. + + :Parameters: + factory : module + A symmetric cipher module from `Crypto.Cipher` (like + `Crypto.Cipher.AES`). + + :Keywords: + key : bytes/bytearray/memoryview + The secret key to use in the symmetric cipher. + + nonce : bytes/bytearray/memoryview + A value that must never be reused for any other encryption. + There are no restrictions on its length, but it is recommended to use + at least 16 bytes. + + The nonce shall never repeat for two different messages encrypted with + the same key, but it does not need to be random. + + If not specified, a 16 byte long random string is used. + + mac_len : integer + Length of the MAC, in bytes. It must be no larger than the cipher + block bytes (which is the default). 
+ """ + + try: + key = kwargs.pop("key") + nonce = kwargs.pop("nonce", None) + if nonce is None: + nonce = get_random_bytes(16) + mac_len = kwargs.pop("mac_len", factory.block_size) + except KeyError as e: + raise TypeError("Missing parameter: " + str(e)) + + return EaxMode(factory, key, nonce, mac_len, kwargs) diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_eax.pyi b/env/Lib/site-packages/Crypto/Cipher/_mode_eax.pyi new file mode 100644 index 0000000..cbfa467 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_eax.pyi @@ -0,0 +1,45 @@ +from types import ModuleType +from typing import Any, Union, Tuple, Dict, overload, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +__all__ = ['EaxMode'] + +class EaxMode(object): + block_size: int + nonce: bytes + + def __init__(self, + factory: ModuleType, + key: Buffer, + nonce: Buffer, + mac_len: int, + cipher_params: Dict) -> None: ... + + def update(self, assoc_data: Buffer) -> EaxMode: ... + + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, received_mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + + @overload + def encrypt_and_digest(self, + plaintext: Buffer) -> Tuple[bytes, bytes]: ... + @overload + def encrypt_and_digest(self, + plaintext: Buffer, + output: Buffer) -> Tuple[None, bytes]: ... + def decrypt_and_verify(self, + ciphertext: Buffer, + received_mac_tag: Buffer, + output: Optional[Union[bytearray, memoryview]] = ...) -> bytes: ... 
diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_ecb.py b/env/Lib/site-packages/Crypto/Cipher/_mode_ecb.py new file mode 100644 index 0000000..3783357 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_ecb.py @@ -0,0 +1,220 @@ +# -*- coding: utf-8 -*- +# +# Cipher/mode_ecb.py : ECB mode +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +""" +Electronic Code Book (ECB) mode. +""" + +__all__ = [ 'EcbMode' ] + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, create_string_buffer, + get_raw_buffer, SmartPointer, + c_size_t, c_uint8_ptr, + is_writeable_buffer) + +raw_ecb_lib = load_pycryptodome_raw_lib("Crypto.Cipher._raw_ecb", """ + int ECB_start_operation(void *cipher, + void **pResult); + int ECB_encrypt(void *ecbState, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int ECB_decrypt(void *ecbState, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int ECB_stop_operation(void *state); + """ + ) + + +class EcbMode(object): + """*Electronic Code Book (ECB)*. 
+ + This is the simplest encryption mode. Each of the plaintext blocks + is directly encrypted into a ciphertext block, independently of + any other block. + + This mode is dangerous because it exposes frequency of symbols + in your plaintext. Other modes (e.g. *CBC*) should be used instead. + + See `NIST SP800-38A`_ , Section 6.1. + + .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf + + :undocumented: __init__ + """ + + def __init__(self, block_cipher): + """Create a new block cipher, configured in ECB mode. + + :Parameters: + block_cipher : C pointer + A smart pointer to the low-level block cipher instance. + """ + self.block_size = block_cipher.block_size + + self._state = VoidPointer() + result = raw_ecb_lib.ECB_start_operation(block_cipher.get(), + self._state.address_of()) + if result: + raise ValueError("Error %d while instantiating the ECB mode" + % result) + + # Ensure that object disposal of this Python object will (eventually) + # free the memory allocated by the raw library for the cipher + # mode + self._state = SmartPointer(self._state.get(), + raw_ecb_lib.ECB_stop_operation) + + # Memory allocated for the underlying block cipher is now owned + # by the cipher mode + block_cipher.release() + + def encrypt(self, plaintext, output=None): + """Encrypt data with the key set at initialization. + + The data to encrypt can be broken up in two or + more pieces and `encrypt` can be called multiple times. + + That is, the statement: + + >>> c.encrypt(a) + c.encrypt(b) + + is equivalent to: + + >>> c.encrypt(a+b) + + This function does not add any padding to the plaintext. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + The length must be multiple of the cipher block length. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. 
+ :Return: + If ``output`` is ``None``, the ciphertext is returned as ``bytes``. + Otherwise, ``None``. + """ + + if output is None: + ciphertext = create_string_buffer(len(plaintext)) + else: + ciphertext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(plaintext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_ecb_lib.ECB_encrypt(self._state.get(), + c_uint8_ptr(plaintext), + c_uint8_ptr(ciphertext), + c_size_t(len(plaintext))) + if result: + if result == 3: + raise ValueError("Data must be aligned to block boundary in ECB mode") + raise ValueError("Error %d while encrypting in ECB mode" % result) + + if output is None: + return get_raw_buffer(ciphertext) + else: + return None + + def decrypt(self, ciphertext, output=None): + """Decrypt data with the key set at initialization. + + The data to decrypt can be broken up in two or + more pieces and `decrypt` can be called multiple times. + + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is equivalent to: + + >>> c.decrypt(a+b) + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + The length must be multiple of the cipher block length. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: + If ``output`` is ``None``, the plaintext is returned as ``bytes``. + Otherwise, ``None``. 
+ """ + + if output is None: + plaintext = create_string_buffer(len(ciphertext)) + else: + plaintext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(ciphertext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_ecb_lib.ECB_decrypt(self._state.get(), + c_uint8_ptr(ciphertext), + c_uint8_ptr(plaintext), + c_size_t(len(ciphertext))) + if result: + if result == 3: + raise ValueError("Data must be aligned to block boundary in ECB mode") + raise ValueError("Error %d while decrypting in ECB mode" % result) + + if output is None: + return get_raw_buffer(plaintext) + else: + return None + + +def _create_ecb_cipher(factory, **kwargs): + """Instantiate a cipher object that performs ECB encryption/decryption. + + :Parameters: + factory : module + The underlying block cipher, a module from ``Crypto.Cipher``. + + All keywords are passed to the underlying block cipher. + See the relevant documentation for details (at least ``key`` will need + to be present""" + + cipher_state = factory._create_base_cipher(kwargs) + cipher_state.block_size = factory.block_size + if kwargs: + raise TypeError("Unknown parameters for ECB: %s" % str(kwargs)) + return EcbMode(cipher_state) diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_ecb.pyi b/env/Lib/site-packages/Crypto/Cipher/_mode_ecb.pyi new file mode 100644 index 0000000..1772b23 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_ecb.pyi @@ -0,0 +1,19 @@ +from typing import Union, overload + +from Crypto.Util._raw_api import SmartPointer + +Buffer = Union[bytes, bytearray, memoryview] + +__all__ = [ 'EcbMode' ] + +class EcbMode(object): + def __init__(self, block_cipher: SmartPointer) -> None: ... + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... 
+ @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_gcm.py b/env/Lib/site-packages/Crypto/Cipher/_mode_gcm.py new file mode 100644 index 0000000..da8e337 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_gcm.py @@ -0,0 +1,620 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +""" +Galois/Counter Mode (GCM). +""" + +__all__ = ['GcmMode'] + +from binascii import unhexlify + +from Crypto.Util.py3compat import bord, _copy_bytes + +from Crypto.Util._raw_api import is_buffer + +from Crypto.Util.number import long_to_bytes, bytes_to_long +from Crypto.Hash import BLAKE2s +from Crypto.Random import get_random_bytes + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, + create_string_buffer, get_raw_buffer, + SmartPointer, c_size_t, c_uint8_ptr) + +from Crypto.Util import _cpu_features + + +# C API by module implementing GHASH +_ghash_api_template = """ + int ghash_%imp%(uint8_t y_out[16], + const uint8_t block_data[], + size_t len, + const uint8_t y_in[16], + const void *exp_key); + int ghash_expand_%imp%(const uint8_t h[16], + void **ghash_tables); + int ghash_destroy_%imp%(void *ghash_tables); +""" + +def _build_impl(lib, postfix): + from collections import namedtuple + + funcs = ( "ghash", "ghash_expand", "ghash_destroy" ) + GHASH_Imp = namedtuple('_GHash_Imp', funcs) + try: + imp_funcs = [ getattr(lib, x + "_" + postfix) for x in funcs ] + except AttributeError: # Make sphinx stop complaining with its mocklib + imp_funcs = [ None ] * 3 + params = dict(zip(funcs, imp_funcs)) + return GHASH_Imp(**params) + + +def _get_ghash_portable(): + api = _ghash_api_template.replace("%imp%", "portable") + lib = load_pycryptodome_raw_lib("Crypto.Hash._ghash_portable", api) + result = _build_impl(lib, "portable") + return result +_ghash_portable = _get_ghash_portable() + + +def _get_ghash_clmul(): + """Return None if CLMUL implementation is not available""" + + if not _cpu_features.have_clmul(): + return None + try: + api = _ghash_api_template.replace("%imp%", "clmul") + lib = load_pycryptodome_raw_lib("Crypto.Hash._ghash_clmul", api) + result = _build_impl(lib, "clmul") + except OSError: + result = None + return result +_ghash_clmul = _get_ghash_clmul() + + 
+class _GHASH(object):
+ """GHASH function defined in NIST SP 800-38D, Algorithm 2.
+
+ If X_1, X_2, .. X_m are the blocks of input data, the function
+ computes:
+
+ X_1*H^{m} + X_2*H^{m-1} + ... + X_m*H
+
+ in the Galois field GF(2^128) using the reducing polynomial
+ (x^128 + x^7 + x^2 + x + 1).
+ """
+
+ def __init__(self, subkey, ghash_c):
+ assert len(subkey) == 16
+
+ self.ghash_c = ghash_c
+
+ self._exp_key = VoidPointer()
+ result = ghash_c.ghash_expand(c_uint8_ptr(subkey),
+ self._exp_key.address_of())
+ if result:
+ raise ValueError("Error %d while expanding the GHASH key" % result)
+
+ self._exp_key = SmartPointer(self._exp_key.get(),
+ ghash_c.ghash_destroy)
+
+ # create_string_buffer always returns a string of zeroes
+ self._last_y = create_string_buffer(16)
+
+ def update(self, block_data):
+ assert len(block_data) % 16 == 0
+
+ result = self.ghash_c.ghash(self._last_y,
+ c_uint8_ptr(block_data),
+ c_size_t(len(block_data)),
+ self._last_y,
+ self._exp_key.get())
+ if result:
+ raise ValueError("Error %d while updating GHASH" % result)
+
+ return self
+
+ def digest(self):
+ return get_raw_buffer(self._last_y)
+
+
+def enum(**enums):
+ return type('Enum', (), enums)
+
+
+MacStatus = enum(PROCESSING_AUTH_DATA=1, PROCESSING_CIPHERTEXT=2)
+
+
+class GcmMode(object):
+ """Galois Counter Mode (GCM).
+
+ This is an Authenticated Encryption with Associated Data (`AEAD`_) mode.
+ It provides both confidentiality and authenticity.
+
+ The header of the message may be left in the clear, if needed, and it will
+ still be subject to authentication. The decryption step tells the receiver
+ if the message comes from a source that really knows the secret key.
+ Additionally, decryption detects if any part of the message - including the
+ header - has been modified or corrupted.
+
+ This mode requires a *nonce*.
+
+ This mode is only available for ciphers that operate on 128 bits blocks
+ (e.g. AES but not TDES).
+
+ See `NIST SP800-38D`_.
+
+ ..
_`NIST SP800-38D`: http://csrc.nist.gov/publications/nistpubs/800-38D/SP-800-38D.pdf + .. _AEAD: http://blog.cryptographyengineering.com/2012/05/how-to-choose-authenticated-encryption.html + + :undocumented: __init__ + """ + + def __init__(self, factory, key, nonce, mac_len, cipher_params, ghash_c): + + self.block_size = factory.block_size + if self.block_size != 16: + raise ValueError("GCM mode is only available for ciphers" + " that operate on 128 bits blocks") + + if len(nonce) == 0: + raise ValueError("Nonce cannot be empty") + + if not is_buffer(nonce): + raise TypeError("Nonce must be bytes, bytearray or memoryview") + + # See NIST SP 800 38D, 5.2.1.1 + if len(nonce) > 2**64 - 1: + raise ValueError("Nonce exceeds maximum length") + + + self.nonce = _copy_bytes(None, None, nonce) + """Nonce""" + + self._factory = factory + self._key = _copy_bytes(None, None, key) + self._tag = None # Cache for MAC tag + + self._mac_len = mac_len + if not (4 <= mac_len <= 16): + raise ValueError("Parameter 'mac_len' must be in the range 4..16") + + # Allowed transitions after initialization + self._next = [self.update, self.encrypt, self.decrypt, + self.digest, self.verify] + + self._no_more_assoc_data = False + + # Length of associated data + self._auth_len = 0 + + # Length of the ciphertext or plaintext + self._msg_len = 0 + + # Step 1 in SP800-38D, Algorithm 4 (encryption) - Compute H + # See also Algorithm 5 (decryption) + hash_subkey = factory.new(key, + self._factory.MODE_ECB, + **cipher_params + ).encrypt(b'\x00' * 16) + + # Step 2 - Compute J0 + if len(self.nonce) == 12: + j0 = self.nonce + b"\x00\x00\x00\x01" + else: + fill = (16 - (len(nonce) % 16)) % 16 + 8 + ghash_in = (self.nonce + + b'\x00' * fill + + long_to_bytes(8 * len(nonce), 8)) + j0 = _GHASH(hash_subkey, ghash_c).update(ghash_in).digest() + + # Step 3 - Prepare GCTR cipher for encryption/decryption + nonce_ctr = j0[:12] + iv_ctr = (bytes_to_long(j0) + 1) & 0xFFFFFFFF + self._cipher = factory.new(key, + 
self._factory.MODE_CTR,
+ initial_value=iv_ctr,
+ nonce=nonce_ctr,
+ **cipher_params)
+
+ # Step 5 - Bootstrap GHASH
+ self._signer = _GHASH(hash_subkey, ghash_c)
+
+ # Step 6 - Prepare GCTR cipher for GMAC
+ self._tag_cipher = factory.new(key,
+ self._factory.MODE_CTR,
+ initial_value=j0,
+ nonce=b"",
+ **cipher_params)
+
+ # Cache for data to authenticate
+ self._cache = b""
+
+ self._status = MacStatus.PROCESSING_AUTH_DATA
+
+ def update(self, assoc_data):
+ """Protect associated data
+
+ If there is any associated data, the caller has to invoke
+ this function one or more times, before using
+ ``decrypt`` or ``encrypt``.
+
+ By *associated data* it is meant any data (e.g. packet headers) that
+ will not be encrypted and will be transmitted in the clear.
+ However, the receiver is still able to detect any modification to it.
+ In GCM, the *associated data* is also called
+ *additional authenticated data* (AAD).
+
+ If there is no associated data, this method must not be called.
+
+ The caller may split associated data in segments of any size, and
+ invoke this method multiple times, each time with the next segment.
+
+ :Parameters:
+ assoc_data : bytes/bytearray/memoryview
+ A piece of associated data. There are no restrictions on its size.
+ """ + + if self.update not in self._next: + raise TypeError("update() can only be called" + " immediately after initialization") + + self._next = [self.update, self.encrypt, self.decrypt, + self.digest, self.verify] + + self._update(assoc_data) + self._auth_len += len(assoc_data) + + # See NIST SP 800 38D, 5.2.1.1 + if self._auth_len > 2**64 - 1: + raise ValueError("Additional Authenticated Data exceeds maximum length") + + return self + + def _update(self, data): + assert(len(self._cache) < 16) + + if len(self._cache) > 0: + filler = min(16 - len(self._cache), len(data)) + self._cache += _copy_bytes(None, filler, data) + data = data[filler:] + + if len(self._cache) < 16: + return + + # The cache is exactly one block + self._signer.update(self._cache) + self._cache = b"" + + update_len = len(data) // 16 * 16 + self._cache = _copy_bytes(update_len, None, data) + if update_len > 0: + self._signer.update(data[:update_len]) + + def _pad_cache_and_update(self): + assert(len(self._cache) < 16) + + # The authenticated data A is concatenated to the minimum + # number of zero bytes (possibly none) such that the + # - ciphertext C is aligned to the 16 byte boundary. + # See step 5 in section 7.1 + # - ciphertext C is aligned to the 16 byte boundary. + # See step 6 in section 7.2 + len_cache = len(self._cache) + if len_cache > 0: + self._update(b'\x00' * (16 - len_cache)) + + def encrypt(self, plaintext, output=None): + """Encrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have encrypted a message + you cannot encrypt (or decrypt) another message using the same + object. + + The data to encrypt can be broken up in two or + more pieces and `encrypt` can be called multiple times. + + That is, the statement: + + >>> c.encrypt(a) + c.encrypt(b) + + is equivalent to: + + >>> c.encrypt(a+b) + + This function does not add any padding to the plaintext. 
+ + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + If ``output`` is ``None``, the ciphertext as ``bytes``. + Otherwise, ``None``. + """ + + if self.encrypt not in self._next: + raise TypeError("encrypt() can only be called after" + " initialization or an update()") + self._next = [self.encrypt, self.digest] + + ciphertext = self._cipher.encrypt(plaintext, output=output) + + if self._status == MacStatus.PROCESSING_AUTH_DATA: + self._pad_cache_and_update() + self._status = MacStatus.PROCESSING_CIPHERTEXT + + self._update(ciphertext if output is None else output) + self._msg_len += len(plaintext) + + # See NIST SP 800 38D, 5.2.1.1 + if self._msg_len > 2**39 - 256: + raise ValueError("Plaintext exceeds maximum length") + + return ciphertext + + def decrypt(self, ciphertext, output=None): + """Decrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have decrypted a message + you cannot decrypt (or encrypt) another message with the same + object. + + The data to decrypt can be broken up in two or + more pieces and `decrypt` can be called multiple times. + + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is equivalent to: + + >>> c.decrypt(a+b) + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: + If ``output`` is ``None``, the plaintext as ``bytes``. + Otherwise, ``None``. 
+ """ + + if self.decrypt not in self._next: + raise TypeError("decrypt() can only be called" + " after initialization or an update()") + self._next = [self.decrypt, self.verify] + + if self._status == MacStatus.PROCESSING_AUTH_DATA: + self._pad_cache_and_update() + self._status = MacStatus.PROCESSING_CIPHERTEXT + + self._update(ciphertext) + self._msg_len += len(ciphertext) + + return self._cipher.decrypt(ciphertext, output=output) + + def digest(self): + """Compute the *binary* MAC tag in an AEAD mode. + + The caller invokes this function at the very end. + + This method returns the MAC that shall be sent to the receiver, + together with the ciphertext. + + :Return: the MAC, as a byte string. + """ + + if self.digest not in self._next: + raise TypeError("digest() cannot be called when decrypting" + " or validating a message") + self._next = [self.digest] + + return self._compute_mac() + + def _compute_mac(self): + """Compute MAC without any FSM checks.""" + + if self._tag: + return self._tag + + # Step 5 in NIST SP 800-38D, Algorithm 4 - Compute S + self._pad_cache_and_update() + self._update(long_to_bytes(8 * self._auth_len, 8)) + self._update(long_to_bytes(8 * self._msg_len, 8)) + s_tag = self._signer.digest() + + # Step 6 - Compute T + self._tag = self._tag_cipher.encrypt(s_tag)[:self._mac_len] + + return self._tag + + def hexdigest(self): + """Compute the *printable* MAC tag. + + This method is like `digest`. + + :Return: the MAC, as a hexadecimal string. + """ + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def verify(self, received_mac_tag): + """Validate the *binary* MAC tag. + + The caller invokes this function at the very end. + + This method checks if the decrypted message is indeed valid + (that is, if the key is correct) and it has not been + tampered with while in transit. + + :Parameters: + received_mac_tag : bytes/bytearray/memoryview + This is the *binary* MAC, as received from the sender. 
+ :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + if self.verify not in self._next: + raise TypeError("verify() cannot be called" + " when encrypting a message") + self._next = [self.verify] + + secret = get_random_bytes(16) + + mac1 = BLAKE2s.new(digest_bits=160, key=secret, + data=self._compute_mac()) + mac2 = BLAKE2s.new(digest_bits=160, key=secret, + data=received_mac_tag) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + def hexverify(self, hex_mac_tag): + """Validate the *printable* MAC tag. + + This method is like `verify`. + + :Parameters: + hex_mac_tag : string + This is the *printable* MAC, as received from the sender. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + self.verify(unhexlify(hex_mac_tag)) + + def encrypt_and_digest(self, plaintext, output=None): + """Perform encrypt() and digest() in one step. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + a tuple with two items: + + - the ciphertext, as ``bytes`` + - the MAC tag, as ``bytes`` + + The first item becomes ``None`` when the ``output`` parameter + specified a location for the result. + """ + + return self.encrypt(plaintext, output=output), self.digest() + + def decrypt_and_verify(self, ciphertext, received_mac_tag, output=None): + """Perform decrypt() and verify() in one step. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + received_mac_tag : byte string + This is the *binary* MAC, as received from the sender. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. 
+ :Return: the plaintext as ``bytes`` or ``None`` when the ``output`` + parameter specified a location for the result. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + plaintext = self.decrypt(ciphertext, output=output) + self.verify(received_mac_tag) + return plaintext + + +def _create_gcm_cipher(factory, **kwargs): + """Create a new block cipher, configured in Galois Counter Mode (GCM). + + :Parameters: + factory : module + A block cipher module, taken from `Crypto.Cipher`. + The cipher must have block length of 16 bytes. + GCM has been only defined for `Crypto.Cipher.AES`. + + :Keywords: + key : bytes/bytearray/memoryview + The secret key to use in the symmetric cipher. + It must be 16 (e.g. *AES-128*), 24 (e.g. *AES-192*) + or 32 (e.g. *AES-256*) bytes long. + + nonce : bytes/bytearray/memoryview + A value that must never be reused for any other encryption. + + There are no restrictions on its length, + but it is recommended to use at least 16 bytes. + + The nonce shall never repeat for two + different messages encrypted with the same key, + but it does not need to be random. + + If not provided, a 16 byte nonce will be randomly created. + + mac_len : integer + Length of the MAC, in bytes. + It must be no larger than 16 bytes (which is the default). 
+ """ + + try: + key = kwargs.pop("key") + except KeyError as e: + raise TypeError("Missing parameter:" + str(e)) + + nonce = kwargs.pop("nonce", None) + if nonce is None: + nonce = get_random_bytes(16) + mac_len = kwargs.pop("mac_len", 16) + + # Not documented - only used for testing + use_clmul = kwargs.pop("use_clmul", True) + if use_clmul and _ghash_clmul: + ghash_c = _ghash_clmul + else: + ghash_c = _ghash_portable + + return GcmMode(factory, key, nonce, mac_len, kwargs, ghash_c) diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_gcm.pyi b/env/Lib/site-packages/Crypto/Cipher/_mode_gcm.pyi new file mode 100644 index 0000000..8912955 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_gcm.pyi @@ -0,0 +1,45 @@ +from types import ModuleType +from typing import Union, Tuple, Dict, overload, Optional + +__all__ = ['GcmMode'] + +Buffer = Union[bytes, bytearray, memoryview] + +class GcmMode(object): + block_size: int + nonce: Buffer + + def __init__(self, + factory: ModuleType, + key: Buffer, + nonce: Buffer, + mac_len: int, + cipher_params: Dict) -> None: ... + + def update(self, assoc_data: Buffer) -> GcmMode: ... + + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, received_mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + + @overload + def encrypt_and_digest(self, + plaintext: Buffer) -> Tuple[bytes, bytes]: ... + @overload + def encrypt_and_digest(self, + plaintext: Buffer, + output: Buffer) -> Tuple[None, bytes]: ... 
+ def decrypt_and_verify(self, + ciphertext: Buffer, + received_mac_tag: Buffer, + output: Optional[Union[bytearray, memoryview]] = ...) -> bytes: ... diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_ocb.py b/env/Lib/site-packages/Crypto/Cipher/_mode_ocb.py new file mode 100644 index 0000000..27758b1 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_ocb.py @@ -0,0 +1,525 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +""" +Offset Codebook (OCB) mode. 
+
+OCB is Authenticated Encryption with Associated Data (AEAD) cipher mode
+designed by Prof. Phillip Rogaway and specified in `RFC7253`_.
+
+The algorithm provides both authenticity and privacy, it is very efficient,
+it uses only one key and it can be used in online mode (so that encryption
+or decryption can start before the end of the message is available).
+
+This module implements the third and last variant of OCB (OCB3) and it only
+works in combination with a 128-bit block symmetric cipher, like AES.
+
+OCB is patented in US but `free licenses`_ exist for software implementations
+meant for non-military purposes.
+
+Example:
+ >>> from Crypto.Cipher import AES
+ >>> from Crypto.Random import get_random_bytes
+ >>>
+ >>> key = get_random_bytes(32)
+ >>> cipher = AES.new(key, AES.MODE_OCB)
+ >>> plaintext = b"Attack at dawn"
+ >>> ciphertext, mac = cipher.encrypt_and_digest(plaintext)
+ >>> # Deliver cipher.nonce, ciphertext and mac
+ ...
+ >>> cipher = AES.new(key, AES.MODE_OCB, nonce=nonce)
+ >>> try:
+ >>> plaintext = cipher.decrypt_and_verify(ciphertext, mac)
+ >>> except ValueError:
+ >>> print("Invalid message")
+ >>> else:
+ >>> print(plaintext)
+
+:undocumented: __package__
+
+.. _RFC7253: http://www.rfc-editor.org/info/rfc7253
+..
_free licenses: http://web.cs.ucdavis.edu/~rogaway/ocb/license.htm +""" + +import struct +from binascii import unhexlify + +from Crypto.Util.py3compat import bord, _copy_bytes +from Crypto.Util.number import long_to_bytes, bytes_to_long +from Crypto.Util.strxor import strxor + +from Crypto.Hash import BLAKE2s +from Crypto.Random import get_random_bytes + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, + create_string_buffer, get_raw_buffer, + SmartPointer, c_size_t, c_uint8_ptr, + is_buffer) + +_raw_ocb_lib = load_pycryptodome_raw_lib("Crypto.Cipher._raw_ocb", """ + int OCB_start_operation(void *cipher, + const uint8_t *offset_0, + size_t offset_0_len, + void **pState); + int OCB_encrypt(void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int OCB_decrypt(void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int OCB_update(void *state, + const uint8_t *in, + size_t data_len); + int OCB_digest(void *state, + uint8_t *tag, + size_t tag_len); + int OCB_stop_operation(void *state); + """) + + +class OcbMode(object): + """Offset Codebook (OCB) mode. 
+ + :undocumented: __init__ + """ + + def __init__(self, factory, nonce, mac_len, cipher_params): + + if factory.block_size != 16: + raise ValueError("OCB mode is only available for ciphers" + " that operate on 128 bits blocks") + + self.block_size = 16 + """The block size of the underlying cipher, in bytes.""" + + self.nonce = _copy_bytes(None, None, nonce) + """Nonce used for this session.""" + if len(nonce) not in range(1, 16): + raise ValueError("Nonce must be at most 15 bytes long") + if not is_buffer(nonce): + raise TypeError("Nonce must be bytes, bytearray or memoryview") + + self._mac_len = mac_len + if not 8 <= mac_len <= 16: + raise ValueError("MAC tag must be between 8 and 16 bytes long") + + # Cache for MAC tag + self._mac_tag = None + + # Cache for unaligned associated data + self._cache_A = b"" + + # Cache for unaligned ciphertext/plaintext + self._cache_P = b"" + + # Allowed transitions after initialization + self._next = [self.update, self.encrypt, self.decrypt, + self.digest, self.verify] + + # Compute Offset_0 + params_without_key = dict(cipher_params) + key = params_without_key.pop("key") + nonce = (struct.pack('B', self._mac_len << 4 & 0xFF) + + b'\x00' * (14 - len(nonce)) + + b'\x01' + self.nonce) + + bottom_bits = bord(nonce[15]) & 0x3F # 6 bits, 0..63 + top_bits = bord(nonce[15]) & 0xC0 # 2 bits + + ktop_cipher = factory.new(key, + factory.MODE_ECB, + **params_without_key) + ktop = ktop_cipher.encrypt(struct.pack('15sB', + nonce[:15], + top_bits)) + + stretch = ktop + strxor(ktop[:8], ktop[1:9]) # 192 bits + offset_0 = long_to_bytes(bytes_to_long(stretch) >> + (64 - bottom_bits), 24)[8:] + + # Create low-level cipher instance + raw_cipher = factory._create_base_cipher(cipher_params) + if cipher_params: + raise TypeError("Unknown keywords: " + str(cipher_params)) + + self._state = VoidPointer() + result = _raw_ocb_lib.OCB_start_operation(raw_cipher.get(), + offset_0, + c_size_t(len(offset_0)), + self._state.address_of()) + if result: + raise 
ValueError("Error %d while instantiating the OCB mode"
+ % result)
+
+ # Ensure that object disposal of this Python object will (eventually)
+ # free the memory allocated by the raw library for the cipher mode
+ self._state = SmartPointer(self._state.get(),
+ _raw_ocb_lib.OCB_stop_operation)
+
+ # Memory allocated for the underlying block cipher is now owned
+ # by the cipher mode
+ raw_cipher.release()
+
+ def _update(self, assoc_data, assoc_data_len):
+ result = _raw_ocb_lib.OCB_update(self._state.get(),
+ c_uint8_ptr(assoc_data),
+ c_size_t(assoc_data_len))
+ if result:
+ raise ValueError("Error %d while computing MAC in OCB mode" % result)
+
+ def update(self, assoc_data):
+ """Process the associated data.
+
+ If there is any associated data, the caller has to invoke
+ this method one or more times, before using
+ ``decrypt`` or ``encrypt``.
+
+ By *associated data* it is meant any data (e.g. packet headers) that
+ will not be encrypted and will be transmitted in the clear.
+ However, the receiver shall still be able to detect modifications.
+
+ If there is no associated data, this method must not be called.
+
+ The caller may split associated data in segments of any size, and
+ invoke this method multiple times, each time with the next segment.
+
+ :Parameters:
+ assoc_data : bytes/bytearray/memoryview
+ A piece of associated data.
+ """ + + if self.update not in self._next: + raise TypeError("update() can only be called" + " immediately after initialization") + + self._next = [self.encrypt, self.decrypt, self.digest, + self.verify, self.update] + + if len(self._cache_A) > 0: + filler = min(16 - len(self._cache_A), len(assoc_data)) + self._cache_A += _copy_bytes(None, filler, assoc_data) + assoc_data = assoc_data[filler:] + + if len(self._cache_A) < 16: + return self + + # Clear the cache, and proceeding with any other aligned data + self._cache_A, seg = b"", self._cache_A + self.update(seg) + + update_len = len(assoc_data) // 16 * 16 + self._cache_A = _copy_bytes(update_len, None, assoc_data) + self._update(assoc_data, update_len) + return self + + def _transcrypt_aligned(self, in_data, in_data_len, + trans_func, trans_desc): + + out_data = create_string_buffer(in_data_len) + result = trans_func(self._state.get(), + in_data, + out_data, + c_size_t(in_data_len)) + if result: + raise ValueError("Error %d while %sing in OCB mode" + % (result, trans_desc)) + return get_raw_buffer(out_data) + + def _transcrypt(self, in_data, trans_func, trans_desc): + # Last piece to encrypt/decrypt + if in_data is None: + out_data = self._transcrypt_aligned(self._cache_P, + len(self._cache_P), + trans_func, + trans_desc) + self._cache_P = b"" + return out_data + + # Try to fill up the cache, if it already contains something + prefix = b"" + if len(self._cache_P) > 0: + filler = min(16 - len(self._cache_P), len(in_data)) + self._cache_P += _copy_bytes(None, filler, in_data) + in_data = in_data[filler:] + + if len(self._cache_P) < 16: + # We could not manage to fill the cache, so there is certainly + # no output yet. 
+ return b"" + + # Clear the cache, and proceeding with any other aligned data + prefix = self._transcrypt_aligned(self._cache_P, + len(self._cache_P), + trans_func, + trans_desc) + self._cache_P = b"" + + # Process data in multiples of the block size + trans_len = len(in_data) // 16 * 16 + result = self._transcrypt_aligned(c_uint8_ptr(in_data), + trans_len, + trans_func, + trans_desc) + if prefix: + result = prefix + result + + # Left-over + self._cache_P = _copy_bytes(trans_len, None, in_data) + + return result + + def encrypt(self, plaintext=None): + """Encrypt the next piece of plaintext. + + After the entire plaintext has been passed (but before `digest`), + you **must** call this method one last time with no arguments to collect + the final piece of ciphertext. + + If possible, use the method `encrypt_and_digest` instead. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The next piece of data to encrypt or ``None`` to signify + that encryption has finished and that any remaining ciphertext + has to be produced. + :Return: + the ciphertext, as a byte string. + Its length may not match the length of the *plaintext*. + """ + + if self.encrypt not in self._next: + raise TypeError("encrypt() can only be called after" + " initialization or an update()") + + if plaintext is None: + self._next = [self.digest] + else: + self._next = [self.encrypt] + return self._transcrypt(plaintext, _raw_ocb_lib.OCB_encrypt, "encrypt") + + def decrypt(self, ciphertext=None): + """Decrypt the next piece of ciphertext. + + After the entire ciphertext has been passed (but before `verify`), + you **must** call this method one last time with no arguments to collect + the remaining piece of plaintext. + + If possible, use the method `decrypt_and_verify` instead. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The next piece of data to decrypt or ``None`` to signify + that decryption has finished and that any remaining plaintext + has to be produced. 
+ :Return: + the plaintext, as a byte string. + Its length may not match the length of the *ciphertext*. + """ + + if self.decrypt not in self._next: + raise TypeError("decrypt() can only be called after" + " initialization or an update()") + + if ciphertext is None: + self._next = [self.verify] + else: + self._next = [self.decrypt] + return self._transcrypt(ciphertext, + _raw_ocb_lib.OCB_decrypt, + "decrypt") + + def _compute_mac_tag(self): + + if self._mac_tag is not None: + return + + if self._cache_A: + self._update(self._cache_A, len(self._cache_A)) + self._cache_A = b"" + + mac_tag = create_string_buffer(16) + result = _raw_ocb_lib.OCB_digest(self._state.get(), + mac_tag, + c_size_t(len(mac_tag)) + ) + if result: + raise ValueError("Error %d while computing digest in OCB mode" + % result) + self._mac_tag = get_raw_buffer(mac_tag)[:self._mac_len] + + def digest(self): + """Compute the *binary* MAC tag. + + Call this method after the final `encrypt` (the one with no arguments) + to obtain the MAC tag. + + The MAC tag is needed by the receiver to determine authenticity + of the message. + + :Return: the MAC, as a byte string. + """ + + if self.digest not in self._next: + raise TypeError("digest() cannot be called now for this cipher") + + assert(len(self._cache_P) == 0) + + self._next = [self.digest] + + if self._mac_tag is None: + self._compute_mac_tag() + + return self._mac_tag + + def hexdigest(self): + """Compute the *printable* MAC tag. + + This method is like `digest`. + + :Return: the MAC, as a hexadecimal string. + """ + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def verify(self, received_mac_tag): + """Validate the *binary* MAC tag. + + Call this method after the final `decrypt` (the one with no arguments) + to check if the message is authentic and valid. + + :Parameters: + received_mac_tag : bytes/bytearray/memoryview + This is the *binary* MAC, as received from the sender. + :Raises ValueError: + if the MAC does not match. 
+        """Decrypt the message and verify its authenticity in one step.
+ + :Parameters: + factory : module + A symmetric cipher module from `Crypto.Cipher` + (like `Crypto.Cipher.AES`). + + :Keywords: + nonce : bytes/bytearray/memoryview + A value that must never be reused for any other encryption. + Its length can vary from 1 to 15 bytes. + If not specified, a random 15 bytes long nonce is generated. + + mac_len : integer + Length of the MAC, in bytes. + It must be in the range ``[8..16]``. + The default is 16 (128 bits). + + Any other keyword will be passed to the underlying block cipher. + See the relevant documentation for details (at least ``key`` will need + to be present). + """ + + try: + nonce = kwargs.pop("nonce", None) + if nonce is None: + nonce = get_random_bytes(15) + mac_len = kwargs.pop("mac_len", 16) + except KeyError as e: + raise TypeError("Keyword missing: " + str(e)) + + return OcbMode(factory, nonce, mac_len, kwargs) diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_ocb.pyi b/env/Lib/site-packages/Crypto/Cipher/_mode_ocb.pyi new file mode 100644 index 0000000..a1909fc --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_ocb.pyi @@ -0,0 +1,36 @@ +from types import ModuleType +from typing import Union, Any, Optional, Tuple, Dict, overload + +Buffer = Union[bytes, bytearray, memoryview] + +class OcbMode(object): + block_size: int + nonce: Buffer + + def __init__(self, + factory: ModuleType, + nonce: Buffer, + mac_len: int, + cipher_params: Dict) -> None: ... + + def update(self, assoc_data: Buffer) -> OcbMode: ... + + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, received_mac_tag: Buffer) -> None: ... 
+Output Feedback (OFB) mode.
+          **The IV must be a nonce, not to be reused for any other
+        # Memory allocated for the underlying block cipher is now owned
+ """ + + if self.encrypt not in self._next: + raise TypeError("encrypt() cannot be called after decrypt()") + self._next = [ self.encrypt ] + + if output is None: + ciphertext = create_string_buffer(len(plaintext)) + else: + ciphertext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(plaintext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_ofb_lib.OFB_encrypt(self._state.get(), + c_uint8_ptr(plaintext), + c_uint8_ptr(ciphertext), + c_size_t(len(plaintext))) + if result: + raise ValueError("Error %d while encrypting in OFB mode" % result) + + if output is None: + return get_raw_buffer(ciphertext) + else: + return None + + def decrypt(self, ciphertext, output=None): + """Decrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have decrypted a message + you cannot decrypt (or encrypt) another message with the same + object. + + The data to decrypt can be broken up in two or + more pieces and `decrypt` can be called multiple times. + + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is equivalent to: + + >>> c.decrypt(a+b) + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the plaintext is written to. + If ``None``, the plaintext is returned. + :Return: + If ``output`` is ``None``, the plaintext is returned as ``bytes``. + Otherwise, ``None``. 
+ """ + + if self.decrypt not in self._next: + raise TypeError("decrypt() cannot be called after encrypt()") + self._next = [ self.decrypt ] + + if output is None: + plaintext = create_string_buffer(len(ciphertext)) + else: + plaintext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(ciphertext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_ofb_lib.OFB_decrypt(self._state.get(), + c_uint8_ptr(ciphertext), + c_uint8_ptr(plaintext), + c_size_t(len(ciphertext))) + if result: + raise ValueError("Error %d while decrypting in OFB mode" % result) + + if output is None: + return get_raw_buffer(plaintext) + else: + return None + + +def _create_ofb_cipher(factory, **kwargs): + """Instantiate a cipher object that performs OFB encryption/decryption. + + :Parameters: + factory : module + The underlying block cipher, a module from ``Crypto.Cipher``. + + :Keywords: + iv : bytes/bytearray/memoryview + The IV to use for OFB. + + IV : bytes/bytearray/memoryview + Alias for ``iv``. + + Any other keyword will be passed to the underlying block cipher. + See the relevant documentation for details (at least ``key`` will need + to be present). 
+ """ + + cipher_state = factory._create_base_cipher(kwargs) + iv = kwargs.pop("IV", None) + IV = kwargs.pop("iv", None) + + if (None, None) == (iv, IV): + iv = get_random_bytes(factory.block_size) + if iv is not None: + if IV is not None: + raise TypeError("You must either use 'iv' or 'IV', not both") + else: + iv = IV + + if len(iv) != factory.block_size: + raise ValueError("Incorrect IV length (it must be %d bytes long)" % + factory.block_size) + + if kwargs: + raise TypeError("Unknown parameters for OFB: %s" % str(kwargs)) + + return OfbMode(cipher_state, iv) diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_ofb.pyi b/env/Lib/site-packages/Crypto/Cipher/_mode_ofb.pyi new file mode 100644 index 0000000..60f7f00 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_ofb.pyi @@ -0,0 +1,25 @@ +from typing import Union, overload + +from Crypto.Util._raw_api import SmartPointer + +Buffer = Union[bytes, bytearray, memoryview] + +__all__ = ['OfbMode'] + +class OfbMode(object): + block_size: int + iv: Buffer + IV: Buffer + + def __init__(self, + block_cipher: SmartPointer, + iv: Buffer) -> None: ... + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_openpgp.py b/env/Lib/site-packages/Crypto/Cipher/_mode_openpgp.py new file mode 100644 index 0000000..d079d59 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_openpgp.py @@ -0,0 +1,206 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +""" +OpenPGP mode. +""" + +__all__ = ['OpenPgpMode'] + +from Crypto.Util.py3compat import _copy_bytes +from Crypto.Random import get_random_bytes + +class OpenPgpMode(object): + """OpenPGP mode. + + This mode is a variant of CFB, and it is only used in PGP and + OpenPGP_ applications. If in doubt, use another mode. + + An Initialization Vector (*IV*) is required. + + Unlike CFB, the *encrypted* IV (not the IV itself) is + transmitted to the receiver. + + The IV is a random data block. 
For legacy reasons, two of its bytes are + duplicated to act as a checksum for the correctness of the key, which is now + known to be insecure and is ignored. The encrypted IV is therefore 2 bytes + longer than the clean IV. + + .. _OpenPGP: http://tools.ietf.org/html/rfc4880 + + :undocumented: __init__ + """ + + def __init__(self, factory, key, iv, cipher_params): + + #: The block size of the underlying cipher, in bytes. + self.block_size = factory.block_size + + self._done_first_block = False # True after the first encryption + + # Instantiate a temporary cipher to process the IV + IV_cipher = factory.new( + key, + factory.MODE_CFB, + IV=b'\x00' * self.block_size, + segment_size=self.block_size * 8, + **cipher_params) + + iv = _copy_bytes(None, None, iv) + + # The cipher will be used for... + if len(iv) == self.block_size: + # ... encryption + self._encrypted_IV = IV_cipher.encrypt(iv + iv[-2:]) + elif len(iv) == self.block_size + 2: + # ... decryption + self._encrypted_IV = iv + # Last two bytes are for a deprecated "quick check" feature that + # should not be used. (https://eprint.iacr.org/2005/033) + iv = IV_cipher.decrypt(iv)[:-2] + else: + raise ValueError("Length of IV must be %d or %d bytes" + " for MODE_OPENPGP" + % (self.block_size, self.block_size + 2)) + + self.iv = self.IV = iv + + # Instantiate the cipher for the real PGP data + self._cipher = factory.new( + key, + factory.MODE_CFB, + IV=self._encrypted_IV[-self.block_size:], + segment_size=self.block_size * 8, + **cipher_params) + + def encrypt(self, plaintext): + """Encrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have encrypted a message + you cannot encrypt (or decrypt) another message using the same + object. + + The data to encrypt can be broken up in two or + more pieces and `encrypt` can be called multiple times. 
+            the encrypted IV is prepended to the returned ciphertext.
+ """ + + iv = kwargs.pop("IV", None) + IV = kwargs.pop("iv", None) + + if (None, None) == (iv, IV): + iv = get_random_bytes(factory.block_size) + if iv is not None: + if IV is not None: + raise TypeError("You must either use 'iv' or 'IV', not both") + else: + iv = IV + + try: + key = kwargs.pop("key") + except KeyError as e: + raise TypeError("Missing component: " + str(e)) + + return OpenPgpMode(factory, key, iv, kwargs) diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_openpgp.pyi b/env/Lib/site-packages/Crypto/Cipher/_mode_openpgp.pyi new file mode 100644 index 0000000..14b8105 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_openpgp.pyi @@ -0,0 +1,20 @@ +from types import ModuleType +from typing import Union, Dict + +Buffer = Union[bytes, bytearray, memoryview] + +__all__ = ['OpenPgpMode'] + +class OpenPgpMode(object): + block_size: int + iv: Union[bytes, bytearray, memoryview] + IV: Union[bytes, bytearray, memoryview] + + def __init__(self, + factory: ModuleType, + key: Buffer, + iv: Buffer, + cipher_params: Dict) -> None: ... + def encrypt(self, plaintext: Buffer) -> bytes: ... + def decrypt(self, plaintext: Buffer) -> bytes: ... + diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_siv.py b/env/Lib/site-packages/Crypto/Cipher/_mode_siv.py new file mode 100644 index 0000000..d1eca2a --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_siv.py @@ -0,0 +1,392 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. 
+    if the message comes from a source that really knows the secret key.
+ Additionally, decryption detects if any part of the message - including the + header - has been modified or corrupted. + + Unlike other AEAD modes such as CCM, EAX or GCM, accidental reuse of a + nonce is not catastrophic for the confidentiality of the message. The only + effect is that an attacker can tell when the same plaintext (and same + associated data) is protected with the same key. + + The length of the MAC is fixed to the block size of the underlying cipher. + The key size is twice the length of the key of the underlying cipher. + + This mode is only available for AES ciphers. + + +--------------------+---------------+-------------------+ + | Cipher | SIV MAC size | SIV key length | + | | (bytes) | (bytes) | + +====================+===============+===================+ + | AES-128 | 16 | 32 | + +--------------------+---------------+-------------------+ + | AES-192 | 16 | 48 | + +--------------------+---------------+-------------------+ + | AES-256 | 16 | 64 | + +--------------------+---------------+-------------------+ + + See `RFC5297`_ and the `original paper`__. + + .. _RFC5297: https://tools.ietf.org/html/rfc5297 + .. _AEAD: http://blog.cryptographyengineering.com/2012/05/how-to-choose-authenticated-encryption.html + .. 
+        once for each of the components that constitute the associated data.
+ + Note that the components have clear boundaries, so that: + + >>> cipher.update(b"builtin") + >>> cipher.update(b"securely") + + is not equivalent to: + + >>> cipher.update(b"built") + >>> cipher.update(b"insecurely") + + If there is no associated data, this method must not be called. + + :Parameters: + component : bytes/bytearray/memoryview + The next associated data component. + """ + + if self.update not in self._next: + raise TypeError("update() can only be called" + " immediately after initialization") + + self._next = [self.update, self.encrypt, self.decrypt, + self.digest, self.verify] + + return self._kdf.update(component) + + def encrypt(self, plaintext): + """ + For SIV, encryption and MAC authentication must take place at the same + point. This method shall not be used. + + Use `encrypt_and_digest` instead. + """ + + raise TypeError("encrypt() not allowed for SIV mode." + " Use encrypt_and_digest() instead.") + + def decrypt(self, ciphertext): + """ + For SIV, decryption and verification must take place at the same + point. This method shall not be used. + + Use `decrypt_and_verify` instead. + """ + + raise TypeError("decrypt() not allowed for SIV mode." + " Use decrypt_and_verify() instead.") + + def digest(self): + """Compute the *binary* MAC tag. + + The caller invokes this function at the very end. + + This method returns the MAC that shall be sent to the receiver, + together with the ciphertext. + + :Return: the MAC, as a byte string. + """ + + if self.digest not in self._next: + raise TypeError("digest() cannot be called when decrypting" + " or validating a message") + self._next = [self.digest] + if self._mac_tag is None: + self._mac_tag = self._kdf.derive() + return self._mac_tag + + def hexdigest(self): + """Compute the *printable* MAC tag. + + This method is like `digest`. + + :Return: the MAC, as a hexadecimal string. 
+ """ + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def verify(self, received_mac_tag): + """Validate the *binary* MAC tag. + + The caller invokes this function at the very end. + + This method checks if the decrypted message is indeed valid + (that is, if the key is correct) and it has not been + tampered with while in transit. + + :Parameters: + received_mac_tag : bytes/bytearray/memoryview + This is the *binary* MAC, as received from the sender. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + if self.verify not in self._next: + raise TypeError("verify() cannot be called" + " when encrypting a message") + self._next = [self.verify] + + if self._mac_tag is None: + self._mac_tag = self._kdf.derive() + + secret = get_random_bytes(16) + + mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=self._mac_tag) + mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=received_mac_tag) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + def hexverify(self, hex_mac_tag): + """Validate the *printable* MAC tag. + + This method is like `verify`. + + :Parameters: + hex_mac_tag : string + This is the *printable* MAC, as received from the sender. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + self.verify(unhexlify(hex_mac_tag)) + + def encrypt_and_digest(self, plaintext, output=None): + """Perform encrypt() and digest() in one step. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + a tuple with two items: + + - the ciphertext, as ``bytes`` + - the MAC tag, as ``bytes`` + + The first item becomes ``None`` when the ``output`` parameter + specified a location for the result. 
+ """ + + if self.encrypt not in self._next: + raise TypeError("encrypt() can only be called after" + " initialization or an update()") + + self._next = [ self.digest ] + + # Compute V (MAC) + if hasattr(self, 'nonce'): + self._kdf.update(self.nonce) + self._kdf.update(plaintext) + self._mac_tag = self._kdf.derive() + + cipher = self._create_ctr_cipher(self._mac_tag) + + return cipher.encrypt(plaintext, output=output), self._mac_tag + + def decrypt_and_verify(self, ciphertext, mac_tag, output=None): + """Perform decryption and verification in one step. + + A cipher object is stateful: once you have decrypted a message + you cannot decrypt (or encrypt) another message with the same + object. + + You cannot reuse an object for encrypting + or decrypting other data with the same key. + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + It can be of any length. + mac_tag : bytes/bytearray/memoryview + This is the *binary* MAC, as received from the sender. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: the plaintext as ``bytes`` or ``None`` when the ``output`` + parameter specified a location for the result. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. 
+    Synthetic Initialization Vector (SIV) mode.
+ """ + + try: + key = kwargs.pop("key") + except KeyError as e: + raise TypeError("Missing parameter: " + str(e)) + + nonce = kwargs.pop("nonce", None) + + return SivMode(factory, key, nonce, kwargs) diff --git a/env/Lib/site-packages/Crypto/Cipher/_mode_siv.pyi b/env/Lib/site-packages/Crypto/Cipher/_mode_siv.pyi new file mode 100644 index 0000000..2934f23 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Cipher/_mode_siv.pyi @@ -0,0 +1,38 @@ +from types import ModuleType +from typing import Union, Tuple, Dict, Optional, overload + +Buffer = Union[bytes, bytearray, memoryview] + +__all__ = ['SivMode'] + +class SivMode(object): + block_size: int + nonce: bytes + + def __init__(self, + factory: ModuleType, + key: Buffer, + nonce: Buffer, + kwargs: Dict) -> None: ... + + def update(self, component: Buffer) -> SivMode: ... + + def encrypt(self, plaintext: Buffer) -> bytes: ... + def decrypt(self, plaintext: Buffer) -> bytes: ... + + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, received_mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + + @overload + def encrypt_and_digest(self, + plaintext: Buffer) -> Tuple[bytes, bytes]: ... + @overload + def encrypt_and_digest(self, + plaintext: Buffer, + output: Buffer) -> Tuple[None, bytes]: ... + def decrypt_and_verify(self, + ciphertext: Buffer, + received_mac_tag: Buffer, + output: Optional[Union[bytearray, memoryview]] = ...) -> bytes: ... 
diff --git a/env/Lib/site-packages/Crypto/Cipher/_raw_aes.pyd b/env/Lib/site-packages/Crypto/Cipher/_raw_aes.pyd new file mode 100644 index 0000000000000000000000000000000000000000..c8830afd92fa25df2ea6d3f5286240b21cd55465 GIT binary patch literal 35840 zcmeIb3tUvy*Z6%vKvZy0!SIfTsilUFm8K@6gARs@ikC_S6jV}yVDM6w8e)z|)JpAU zrrk3uD>F4K5HDz&c&WUUT2_vi49yg;yx+CY3=C*bzo)1F=Y2o_=UpF%z0W@9?AuxU zyVqHZkvUri)@>bnK6pI znv*;-Qr~Po9uldpOCF!bg)&E45g)m3jvSOrq^5N*KlKn*`bitX& zMKkm4{hYh1ada8&bvC8QmK)MHR+7MOB+j4!P0Pv4YULbmTpq^x#>RP*oIhDZDqXwK zP@Ty3rE|V=?x~7u%`DC8<_z_xCVjsxyQ5|gx9Nq>CY++t&=`HodeOCL{cD?EB}NZi z(NEJXrMpXgch3W_(!o>S<~*A|EIh)d+rvBB^r_)C<|528qF^1BXwDt{Tp;r-^D>0V zh=`nXN%wu4`*ZT91sXR;a!W*fL204GcQPlO4>3$NGhx|X{iM@b_c+rSnX|f#-;0^Z zZhq!js%IEAXu!NDa?UPib8;8{K8hmhR$a8&ZX}^OK6k+B^+`+8r-!Wy*W!Fke%TGP$c`6X?GfrifdS%^BrR}-R zyhm<3%aq%4E}&J;E!Rs+Y4};%pkaGW_cf&o$Df!_*SSA(ps@fBn75UNJO)&4`d6hP50fbkv8%(v z_0@bdSyy2X*-D#_l`=;RYbg74$91JcD|EgJ56HQKBb#nT=^VZ% zDX`^AK0A-q=liG}>%My>arj#28dpY=w^6j!w=pHs>Q>)Sa`Gs*`Zjar*0LHj+qgVg zT=nJ7tv+7Ta(THP_@Yj^BgQApUFc=g10Q!6a^r~@dWaEH%D6hpJVp*2EE5=M(+`w} z47{J)^^b-7=F%wEKyF-k0&`dzVv#cpwv#P8DZC?HhzTES(>WWX7s|9m@c5)KT@G5B zx5=^!D-Eg5ZKYeYOTDLlC=V}w#6#|97dFVIpEe$W@6YTRw$IwXdZf(@caZB`kY%@J zCx_BXpU~2fx~fxAp*BZUP-%$9k)CI3C;8L8RX(>*m~p&Mj-$f3Ey$LCG|XluZ;lGG zne%Py7&1cbw{l=EZ^~w}<}>|?(gifdX1iF#VQ)F)$qz4}i!DW|f^6AcU!!d;%da#< zr=fBZ?_gdJz;m?ZQBJ^=+`w%dpU_y|wp`zxT@2aE4FO^aObZ^#+$ZU7AWRB z-vY%?~0xQ|3yZ9?z*ON-ORi!xv91@!#Jqw zOd)gXw8#CRQpK}`0-c~Zu{_a$Y{?MCgs z&QH~)Oss31@41doyiUeg|E4v*{kS=2+h?-YOxA+gzTG%>P6UI_$GOZk1HsO8E#(#xbqh7W$4oD z3vygl9oyS7%jVIhG~Yy-fb7E3KTOknKj#8;aSMqYO3L9bk}bY3@Y74pI9*srCG5fc zP6k4vsPdg({%t<*HMz?knj2b^1(TJs*}rN+!dSygRMg=`U)si`!s zncnp?O0+o#>^?L1WCb`ca0C14?G%Po-S?(<{njN)kWnXWUc2q<%FP6>Xb~F>@!tV)j5T75?IFC968dAbVLV$?In#9l`Rc1Q749g>@w7c(PqnyDmE=-LagC7 zX>=)1T3U;QTV&C`0A}7l*H*>(rP{`qgcmVawi%mb%7&xRWj0ZmEw@qCOu4bv}}D!dR)YK8@#~Ry_9}%24<2Bk{cVs`Th4_ijXirR{QT1ex>9 z+f1)o_53LFA*=qCWkzwTJ$bxk#@SxUsaaNik6vuvX36d06;-3q^r~gXxs=r8XopXD 
zRE_P_9zdKV5{z%&W>QoF7&cGe3QwZs!G`~GHYSHX3e!->0CVZHt8#^ zTXnHoeB5oD)*q{uSv{&oKKHibmaV>}*~aE>bp&1|Yg&Ii*p$u7s7AqY3O@5A?`U+? zIM*O^3Rg7D%%pm+B3u4XVYV7YHbxaDW=2@Ef_n?HhUmT@admc~|5o4SWU5F7zORt8 zTqp>$cg!vf+v+=;Lmn|FW|3RVRYlfY_gg#f&k9vFg&*y5G%GMWa13W79gT*Q$dhd; z#;_erI(t)4yzowKePqmeoqS`th^<44%cQh<_Z~9SGu6*o>gR0rbG!Qay88L1`uVo{ zxkCNiq<+4yey-x@Dz5_CU|+1D6WDtDZTe-5JzLH;)6DA($d+@+H1kg~>-||TJUm$# zZK=WL5Pi)GSxd1Ri^o<{VA~oQ8YJbpfh)Mh=I|#cPhBibue{>NOg z<}Jp?Um8-4?($fb*-`e~+!>kg&_Tv42(x_v<2pt^$IeY9_R+B3p2*v!eVFX-kv9F2 zP+RB28Ri(?D{X!5C?35)U!*bLeR6&7i_v#6kRV3qJT4Pk<+|;T!(5&DeT;e|Q~kYo zs6k%#(JC?wR;j$B#bbBLk=w>Hirn2(`hYoD_8ME>Mf#T|{Y#hrjg$WIz>WTOQ_qn8 z$zwD%iPXy9$Mr=r|L*?s0N%v`Ro>U5>bV;4ksf_zN;Pm#(B83G|IGQXh0wR5@~ETtNs(I&4;&Lt_h0HKjt4( zV}Er1kN$E?O|a+hDwAWAE3aDuV$A!c#2=HNJBlPb5S(UGv66xT2mOU=acid=&N&=C4<8? zg%*8g?y^MY$y^-Gvd(vgQj)W5f+{hs*(gUvkEY2Nf^w(zIZ;ZgBTA9*~(fKFohP$Wx zsXCZzrNJl}4Y$fwYGjNx#+*lXkqlC@rZxHQd)1kqK_oVhZf%wETJ)7ScE@SsWwOGY zIjK%+cTJj;uC%t9ozc7rx<@JF4T`#Pbf^cf7M|5n5@}kq#a*4~kKVH-iBX;L47Nn+ zbJhI28^}My9IEH2I*gVWgDIOdN~o5WelMFeQ6;@+xa*8XUzj6G;ci?e5IyQ4bE!0= zsKTQx;#S^Y(KDr8JTT*-GG}N8^;CF-B{qGt=OEF63VK#TZCxmIX_u-^Y205aL?x}2 zDckIxGSLFPXGapFI%CSDeg7zZVP&;t*;UdnVWh>LrV7nxoVR^f0=AmY65$;`63=a<~%I1+?_P~uf6A&?H zSQ-`e0`<8m&B``mftx-{d?iZL+@EfW^=#xlMIu;U{vtOstg2?NR2%OSs*VeYLu2-YVQsQToOxeYZv5XvG6- ztV_eJmB)8;HM1zAt4t}-COYV08i<;$lwq2Sl}RG=L<5H2-pq2T$Q@R-^w^CvIn-yp zt?DaL^~uvS`hbqfTDSpbYSmSM86`YWtco)AQrFCSUDYe+*wNM6bhj-th+3Cxm#pBj zZId(fY8iKvb18h8Vi@YhIihSVYm=(?pDI@#cmGSuRU{qxA1l{Bqn3YBx%N}ze?z&x z=W?sQv3su18{{z`M;wvR5d7T3H9A z?ErBowRE=~;BGt6-F8jdHd*2=BP_!$!z%bT#qacY{w9C?O_O&J6~aSgxSNKGy9xJ_ zc%FRdNn~~0PU3zF;DA>#@iqO(OGX8s6N6ViDdkC$?I03Ql9(pqNhM3vA(W8=y)4wZ+< z<(D${DrF3cA$~)3C7y2us=AU2wXr#iQ^<9hZ1pZ18s#YSH%ZUb`kbj6VC(f<4ysm} z&VsTRG}TddSn-UhGh+KmEoDPt*vf22$GBgzO7*ykd(|1`Z!)Yrqg(3zK>STNbjRuG zZ&KZH3YL@!qPpUwD=qOi8Jnt6oxC3?v$;}rWz1%6KoiwF8G~WbsNTuQa0)RCc?Oo5 zG_7H&xPF@gsS>@Tn)Dxa01edbQ};MXoEeo0Y$b+Rz|l z$_$HE&AdFizt%Iu-y~1cm@}89%i63wXT_dxXll-iD@+-db5EOkJ5%dhEhc$!|4TKi 
zFl8vzjcSHdsDhfQIl}}jb2r^EcdkvflA4vfHOtg21~pUNFViw(#oVA}^vZ)jqGpl* z9W^U=1pJkn;UTJ^W@>55d%+D)fVrqJXE#%`xSOe&D2g~G#SXq<%5JW1l~$&VdzkSm ztK8=-`ZoWc|4ur}MU~I{X}yb zH?b1fOGQbHeL^!fYB#xd7~Cw5>TC5%KS~r>_>GB8&O$m3}3wh zm^UcftcQ7n+9X9MEmEd#|DxRAv>JaC?{8N9h(+IRyuUCc6`Kpf{iMuUV__Q3C%3hT z9=YbuZ5yy{dh95oXKx#NrYe)^Hf{;J`C|8QI5AvSHC;Bgd%Daa^%;(#a-V>@g%^U8 zRCxo<)T*nzugF>`^Jl7hMSBg$nBfyJ_RX?2!_d*Za`lpT2M_%!;_hXPl4}ezqN09P zRxG#OF8)jgDOHKXNjjo?IGie~m&-o+FVt&exq6ZPD~FRPJ*sY~m%Is7Rxu`AEk3r0 zP=>kPL*3jSC%398Rbp;PUzU+nQm=B4(;z%f|EJWe@LyIh!!`8ptC!&#`={!a_wT8f z%jNVxRIk5qIsH%6%k6Xe57o=?IUTIvb5a{mna@et16KVzs}4c{>5NLACn>nW^CX3a z=SfzA%kw0KfA4v+C<|6L0|Rz{*QkdnVsu4$o@jP63y-%QqtUk{w2}b8+|N#WJ#ag$YO`3#FiV)MiLnioqfW`;)wOv`=54LPE!)ZQGni`Yl=Av z<|s(-f8Jp^PcFDm=iO63w(wi?rgNDpXvBR0F)M}pjl!S@cQr#DF({+mHOANqWX3_I` zQc^0B5w*io;BR&GppAv|rG5&j{5j)1cT=P)PXyk^`*>MpJa{_kqpDMF8$B#<#Aupr z+-RiAKW@YSKW?Lk&p&KL=EP$V(x7yosqv5In|r35@#@#Cd04V4`29Qer2&s$n2Q`Z zXt4Zt9KWZ@y6)~3qi;8@;d@tS^MCcPfvwZs+nuSRGLCkX`C3hD9`v!`lC;(U_}%@= zvsGPLtK$L2)V&%P`g5JSthzf>LuDKffavalrZo=-FS80W5{iRTg%c-tgb7kq3q6P*wWUsg8$YG?ixMFgI zf-F$0d7IVzb(Hx)l%sD@lw(A2l)l$$-W%l@3_>=VH}kzlv`<Yv zl6saIN9kax#iPYML*3mi<|&iWqd|V7oPYbH96iILq@@wz7V{n&YzcQ9gIiZ<&vSJ3 zZ#p{TUDiFi%580nG8Zy3p8o-h`H;miq65ER8D-u`;~lY7%^PnVUzaF7&ON@%4J<;q zisLJ)IKHCFZSIaT?`AlW7V|+GwDQaIMkj2nshc$z6QvJxH+ZF?WyZ0}4eqblpcokz z-D5%zMA4G@5HlG|gK=tvyu0Nw$e}lDaG2)^8~w8piiRW@BTOnA;n=?z;doD#Y5dPr zMjlzZg%?rB2)k9^Ys~Rv(b1bc!>MI@Gvh4ZH<|tqHOC^$V%~I~hsCjUCW)auYHHOB zwny6*$KttMp{CZgR%V=|pnwVbz7ZD>pB{^=G`;sdeKDhlxt5Uye7(fNf;nBTX+}CapP^zAlOR06BT*Q;%Y) zdaL5w2@)R~G5nzdC{{~$N1JKQqe1z{{cDs^Qrn%WqcV;u!rBlhIw06W$i`ZOCDgrc zdG;1p)|P7twmO1*&ca(WvOyl9re%eogX89DsN+2&Hb)f$~KR6>nU`l{z4P zqKX}083V-;5wuR*fNG1?Tp*x|(hIHTonQGiObJl#u1*ZBBV_W5js+VQ?+I!r*13 zJQTYz0c#3P>#sDd`~?1P@a7{t>d9*CQ>{8X;&#{4XUTp^%>hQmO}e2l z;4u%4{>eOiAq(HCZ&f-uOV++c-)}5FS^EkE(m`44MhBHrsHJ+dKB={<+g;D*dgB?t za-+ur;g>o*Hd1qOMcp)?HDA=PTO4bQgP+#P6n=i6#E#?xqm&g5dYm+3ROJ9iZKxNd+ zx$8S49;g~1G>EPTf@;WbAVk+M%QY}^Vgym%h#&nMmYr-|mTTg^j9t4#^AbiqRYji8 
zMr^g%JDG>=ydm6rD{lXUgPl74vf z2ouEUh%6iV#SNo*RrvvT^RlP7wh}sEWp6O8p@?rulee0OUbQ-gHjL8$h%%qEI?^%j zu3OBztd7ZiEpM9)TTOuEf~*c3AT^q-*dnb1B6YoMyd0X=Fa>49H6Jv`mzakJIEJ>i z>Iu9V9kV(TBCL9BaFltUMc;09#CEWlc_YGI;5kOCBNCg>uEr_)UtyT$vo{~5`J6FG zQu7~H^94tG7skc#BBRvUfNrI+MPX{{1Fli&U&)+0h7M&Rxt`t8N`tbR^VL=CX=RU&-!ikZ zHr10O!|GvPhDEW6*k$+Xs;_>RIvhp2APhp*Vnbvsm+bT79C-3jg z;;*6(J3NZR zoWD}CK%FcoKVe+lE6;s#1a;VSFP*Oi$Aq_(@4d$427bR*W{q0()n|)V`rsuzePq3!LIZ+R~qR`FT1XP z!gajZm5y|!?)F6!zTy$cg|%zk>J_n*yUe(87hyW2N^ z_tjmuO}CExWd3U(9*%x+ee*F5PDVXn^ZM`~^X@G0&Wf2)ySQiWYtQW+x8>6OD#zoW z30bsc!k4EW9kA%&17GZGlWiZmx9QR7Rw>)&SqnF2J?xlwe)5!|FO0iyU5g(c`SFW2 z{bm>6@?O}s(JQ}xeD%FYMuqmS|I3r@`ahRo9yl^;TGEF{Cyo1T=KC|QH(cg_Y2h8V z^r2&;=GVJ+c;Qpy9~uo!{Jn)ZKmkpZZ+y2K3uVgqLgb3y9~t5IO6uqRgPPuVY0eLeyZYVt z!~4F!_UZX~+|!@rZhFLbxSvnjzSxA)kmu`#U-|so=T1aAcJw$fX3|pUs?WPSs?OHy z9q;&1;gmyXrY`SQz5CP?rkN$%8p|`9UM+jp9qnzu)oQ#l{lW2lGiqMDQkr8L-0}Uc zjlXGgcZ*J@>PZKNjcxkoLoe-)yF2{ew$%eZ95duu)A2K__bh&?!P2MmruWjuU$*Jkdj+*y`4=zTfiOnz2`ZUOVb+yYIGt zKdO6B&U1Aln{AmHR;zc;CHXeL?h7Tl;oAkiUET-0QFId;abk zf4;eE#-nevJpAdw+vdI3?e#D3ZT<7hlVby)iGTQ|9Z$rK?3sIEW1amAoNGI^9dWVX z$v;0B{>m5Ai&CaO`%T(AZJO`;xc17z&M$-pz4iDKXX>cVhaUUvww=``KahGp^u^O& zpZ3ds`^81SCa0FJc;eR$P5pO;{Qj%+`1%7crS<;$?pIFktG24uw^Ltl_rXWqZf|$% z8(Sh4zIs>Q=)U%QHavOt&Fu@0t}1%V9J1f%Z13k!TP_~lIjv^w)3@Atq0hwJZ-4L6 zXVr=waeWs5e8gw>(qo@I9MfQS>-@D%l6y|;KjLMdlXJa>-}ThxOnZ$hZ7pLyez4J@ zX9nyEI$<9C?SwkLzI$9(o&BOEVV3pyVE0H0se`Bnq^}uovkzNa)P2ZX z03aCvybb_H0Dy4-APN8s1prk5z-0h10RT({05JgII{@$%0I&jp*8sqS0H6T?s0je_ z06-Q1CWgpf><$2LQ|fU>X3J1OVO#0LuWt9ROe~0JsJK#sh#70I(MTcmsg>0N`B!&=LS_ z1OSf#0Dl1R6aaVw06Yu;o&*350l;bia1H?c1OUzhfG+^RNC0ph01N~Gg8;yf0N@k= zI1K=P2LP`CfFJ-c2LSj10AB#`IRMB70K);mJ^&B`0ImRl69AwG09XnDx&r_m06YW$ z&H#Yw0KfzQ8Up}X|FZzVZvY@20Av7wQUK5q0DJ=gIst$K0N_mk5C;I-0)R09;5YzS z3;>=60Gk0o7y!5*0MrKn>jA*+0N^M9xCH>)0N_pl5CH%N0Dxlv zpe_Jd2LNmUAPE4>1pwUuKx+W7008s_0J{Oes{o(|0GI&)4g-LB0N_gi@G<~+1_0~; z06hUf9RRQv09*tB9{_;q0N_~w&;|h11^_PrfX4yAC;;#o0GJE_LIJ?10N_OckO}~P 
z1pvDMfD-_`1OV;^0M!7%Q~>Z10Js$ZECc|f0l)?TupIyt0f7Ag;CTRW5CFsifC~WN zTL7>M0Q3O>J^Gywqp0l-NBa2Eiu0{{yEXaoTE0D!>&pcep01ONd5AQ%92 z0RSxkz%~Hz3jp{K02~1TnE>Ex0B{cg*a-ku0Dv?Aa0meW0RT1ufDQm45&#qcfaU-o z8UQQ;0A2v#V*v0o0LTXbGXcOF05A*y3;_TGkZQ=o$Tvs?ayRk@(h>O*i9}vQmLfBd zKalT`E67yj7}5!O5_t)^6A>fhDWn5359xtekxj^9rmFOh-;5 z@kj#FADM={gVaMlL3$&BNE_rA-CFOhUdzZbN=VQjlO|6fy{DiUc5!Be56&12F&!FaQ=~ z0DOf3a25mLT?~L{F#rZ(09?QTcp3wsIR?O041n)20H$LA%*OzT#sCgRJ z-WUM2F#uk}0N8>7a2x|*5eC3141j|e0BtY;_F@3E!T_*h06dHVFc|}290tG-7yxT9 z0B*qm7>xn28Ur8{1K>#vfCLPHC=7t37yvUd02*QdEW`j9iUCj$1K=qPfKM?1Zp8rj z2?JmV2EgqY097#ns$c-Thym~h2EaKCfR8W$)?)w+!vMGg1K=49fMN`QW0O*eaa0vsTD+a*(7yvyn06xJ0@WlW~ z!vHA700_qbcn$+#2L`|-41muu0A^zVbi@ESgaObC1K05%MOl^6hh zF#xV$01UkS0dNEZ;1~u#0}Oz*7y#2S0DLe2hGPI^VgR(o z0C*4sU;qYy83UjW2EbelfJZR^=3xLhFaXYA0NjNE@EZm|7Yu+0FaWw?08GIE_!0x) zBnH4k7y!W-0JAUvW?%r^hXK$U10Ws)AOiy+00ThB0I*;HJc0pm7z1Dl2EYOgfJ6*{ zbPNDH20(WVfHy{s+A?$M)RGq_O*)hN*kcz0&z&p%eB8KiKTJzI9JhJ%)3Ff|32YkP z?~EDq_2YNkF}U9K>&Hia_uVJIzWZ*U7bi|U9kqS?{Fk46vPtB&ZH}iNfBbUD<;%aH zdfyYr)uroEDtb+SRVYPFYUXE(C9ZauVZ z{rVl+G-z=5N2#gbpLy`X)mN4+d+5S|0k8Mz+<9W+x8HvH^~WDS{#dJ41MWL^?1xA0 zymQ2JGiTQMbKt-?$L`*}PS$Sg-+s$a2n&mBa`o!5!Vf>3^4miXt@+~NhxhgT>Z{xX zUwkp<{!X1T4)^Z8a9>Q!^Uk3|SM}btD`#9$(Py{+{`=-%R;=h(_sW&uHuvZ;>$^UE zUL0gLk6xIQQ)^dL)SRG8mrnS3d;83oHtl+GZ0s^?moAgnzw%1+V{gBG{~!JPzk2<( z*M{dsMoynIf4=W~&pgv@!^wRP;6?(^rLXOC>yFe-n|nn_s)4}LJJY18fzK|x{l zOG|&L`NtpEChXbsWz5c`0(ND@2FR={i4jwUza5$eYh}MMpnV-On#ze0aF+=V8Ov4&S)(mDIg^KmUE@${ufy z9J%T3r=RZh`l3bc?)mxWz50|X=h`1X{!NdD+XCMzRs6gzT)0@fX3d7LB_?i+zw54t zFAW*;e$l#hDNoOub<5EF{1;j@YIN_qrAu3M{PD*FbxKN3ef<3McbIF{YW7)5%AWKC z2i|ean^*O%g$vDVUV5obv&N0PJoD63Az>#@Y>WQ->!(Tw4O%g|Wy{z{j~sbp{@l4$ zN|rBwV7kLm?Uum6`Uk)KGP&Kazkau+d-s`Z$Br#3uvng4^}-9SCbenvU@eoW@mH%> zJ$5xN?(J_s`)tC!b?df9ZN)vw@A7HH_BDS#L43maMDm%=rvV@N-}jYl0T?9!;|##K z05D1c#q z82tdoZvbN!z<3d0j0PCB0LC1EaROlY0F3JZV;R7h3^1AljQat`s{ms-z?cp&d;vx` zfYB6Sqyvnz0Am!um;^9B02ti?Mi{{O1z=nQ7+(U682}>`VB7{U3IN6z0OKxzF&kjC 
z1sKl)j6ne7D!_OYU=#w3^#G$i!1xtlp-pfUyf;YzG+U0Y)!?5d<*O z07fyu=nXK20gR3SV+g>=1Q^c&jIRL3a)99k8215;#Q-A_U<3jT6TmnMFs1^GM*zkS zfUy8z_yLTI0HYzm*a$El1{m)Hj1+)z3&3~*VB8BZS^$g#0OJ(ExC3A`0~mV%#ybF` zD!?!Uj5YwH3&02g7~25GQvhQHz=#DHZvc!c0OJ9GQ4L_!2N=l!<2!&c6JQhpj3)s` zD}eDJz-SCG9s?L}1B?j(qcy-71puZ3fJp%0F#vE50E`0wX#ij|0EhqpUI1VW0JsAH zTn7N(0f2V_z(fGB9RNHD0JZ^u#{s})0Pp|+=mh|Z0l<3zU>^W@6##qy0G0rN*8xBV z0LTLXp8|ku0N_~wa2o))8vt|z02cwk4gk;r0K5SJegOby0l*^wU_St82LM6@mSfPnyDHvo|J{~G`Z0{~Y6z=r_f zApr0&0Qd?3d;tJD0f62BAO-*o1pvDMKoJ1=9RREV09OD&4*<{y0GI(l4giP(0G9xO zHvpIh0Ac|^7Xa`I0C*b!^alX10f0yVFdqOs0{~tI0389qp8#M309XS64g!Ft03Zke zlmdW10KgsquoD0T0Dw9G;4}b;2LR6j04o5v1psUU0B-?+CjdZK0FVR#h68|l03Z_p zBmjUJ0H7ZL_#OZ>0|2)JfCT_xHUQ`e0L}n_wE!R(02~GYBLF}R0B{HZ90dUF0l>Wg zpgI7E1^|8lARGV;0{|NVz+M2b5&(<@08ayeMF8Mu05Amr90ve}0N@?~FaZGM0)TG- zKwkjx4geSr00skq^8nx^07wP^j{<=A0f0XMSPuXu1AyiLpe+Ds2msy$0ILDO769M` z06zc#I{;_`0PY6>p926L0L%dZ=>Xt90PqO_7!3ey0H6v0xBviZ0)RvSa2Eg=0sz(l zfLQ<_9{@B0080VDj{u+q06Y%>Y5{;00B`^R%mV-m0l-TDpfLb=3ILn{0AB-uK>(m7 z05}2w<^q7_0KfqN0s+960N_^u&>aAb1ppQR@B#p60{~0_U=;v}0|1`^fVu#HaN&8v zg%-kvw-7FDLAdZ;!i6srF1$>*@EGC32*QOE2^a1lT@E5{`x8H`tfNx2vMBwQFnxUeVT!o7qG=Mye`hH&8o!i56}7k)vw&`!AUDB;3wgbOzk zE}TcWa46xzb%YClBwRR~aA6qX!mkMz9wA&-!n+9*!i6spF3clb_&wpmNrVe)6E1v~aN#n-h0O>THXvMBjc}n(xNr^O!p4LP&k`=o zB3$?a;lgQz3%v*zW)LnMO}Ow1;ldt-3vVS{_#5HEp9vSXBV0I&aN%==3%3w1>`l1v zGs1t*6E3`saNz@l3tuE$*pG1GBEp5GgbOziE({@Dc${!y z8sWlM2p6s*T=+WS!fu2M-ymH0D&fMugbSY}T)2R6;ah|YeFztxCS16aaN*O03;Pf* z{GD*&3c`hp2^a1rT=+2I!q$WflL;4&AY3??aN$#g3u_QA97DM95aGfg!iC=wF8q#g z;c~)-C4>v>5H5U&aA6|h!iIzkdl4?&L%8r9;lg0TgABLaRTvyivLl3`QldPHn-@5pU#rG@zUh(mY?^=AX;`5bQg7~c^Rw6!F@wto7 zT71yr#}HQjh9R*CiS>xjSz-?o+mP6W_=&~uEwLPl`G{X#eCOgjmKcKD2QD!J ziS39lTH-qr;}Bo8#BU^~A~6&3i%Yx!cX@)ukR;9^F$Relh`(FnAmV$MIElo3B+el| zYl#&}3`Jr;;>Q&_y+F6nYasIz+IS&yD$)U;peyu zKg3-ahr2KqcVQLWh40`ld>nUSJ=}#OaTorIyYNNag;BT*U&dV+$?AU!cVP(b!c({l zx8W|VfxGZD?!qy+3$NiWJc+yTOWcJQaThMeUHB00!i~5K8{#f}A9vxyxCwfE>MJKTkX za2GDbUAPN(VG!;@Kiq{ga2FQiF0|q8Oh`aDr+=V-E 
z7k0v3NQ`sPIoyR$;x6okyKp7$!sECL@4#KS2zTK!+=b8MF5Hc~a2oEy(YOnrz+Lz* z?!s)`h1+o#w!&Sw7k6P7+=Wwc7yg91uo3RUiMR_p<1QSGyRa|r!p67@58*DXj=OL? z?!upO7Y@f=n2Njbcie?<;x2p}cj4=}3-7^QsN*hdkGrr3?!pIf7k-Mna4zn`B;192 za2FoMUAP)|;bq)~RdE+K$6fdV?!tMv3s2xK48>je4er9(xC_&87iQot498t~7I)!R z+=cyd7v74ya4PP?CAbTl;x7CFcj5QA3uoaj%)wo_0C!<`+=ZKP7p}lvxF2_+FYZDw z+=aDq7rutOFdlc|CESHYxC@`gT{skXVGG=a>u?u##9deicj3pl3(dF-Kf_&^j=Ru- zyYMaCg==sZHp5-`4DP}(+=bD&3rleqPR3pMDDJ}fxC={g7f#1rcnj{rgSZRZ;V#^Q zyKpV;!UEidt8f=i!d+MkcOfzQx3A(Z{1$iNy`s17zhN3kJ`&I2be+#6IaBFxff_~s z%1{4g#_u!UP+6i;9^^`II4|DK{||`2tAC-cG|ZJoy!jX7`j`6Lt4Cancy#l6YMT1n ztKkz;rRu!FhEvyF79J6yU7+k4&KYHsjla9f{IyMyznvO>m+Dd?zsoAuL_~}lJ%&8T z;xumCV*r1;tcOLeiKtk{U*keADwIX5>mnl#OfeN=d~Mb%eWq3l6bUy)~7oN?X$ z3T4K1M=6Wtn<#B38-I7!{dafUCL71{_jhBZug2WAC#wZ8`N8F7W+@Ak2jW$BT?bXh z??P{sF&Ot6zWHU0i<9oZha1VUzaPgVGczgX4+2(sMe(CohuI*e$)9$4{NmXA4c*lB~{4^6| zF)xNBe;D~x>txVbGKd#@mj0-$u(*|{Iu%r{nCy8 z(3cLrnyGe;beRq5zlfy9d4JB+XZqXJ*IV1`U#zN+{;uLTl=t&lE%iwo=c}pXUn$$9+Az+09(&&7738M{p(jB+ z=b-ihM~!-`YO#F3WHO(C1+IGOryS4X(@@G7W94Jdd%Sx4X}y`p>O3hcYWpAcEAmOG z66@`{rL^X!eAcuI`B!Z~>!mCEFKipcwxq!XVuo-Y{DKl*=nzrm2b*O%P%Xc8AQ#{BQq$$TvO%?BjbRrrWbJl6$1_2mbi$=-*3OOj_Ewtn{8)siWk9MWxy^ z_>Z8D$(g|^8TPEi@q7SLHF(o!RV{}@^gv8WQj3SF+WN1xqtUWqOYwl4RH#Q?VF^NL$4=EiuCDWcf zAueS?vL%xNBuh?f?qyBMOdQFPx;Q3fWJY4flpcxW$7gD3RYYS`633@JnmjNjCDU%B zIxUCZT9Zd+jTw`i;qK-AQ3Lu$#hBZ-O&UL5_WAhu9`Tv!$)i$6r;Lg>CXd-_PI;z1 zDc(LMJvlxlb#z*MQgWs}BW+6jcq-Sl`?Lwk6Ec&@4el{uP~W)dK2hyDL>P76qK%?Y zX`?hPR2!XepFiO=#(+cYQhhCPLr3kB_Ao5w7)B#MEZVx%BX)8>1jS4$xF)j2UWf` z=}zoRE9IWHo7j1N*aLRTC5`7Zo4llUK1;}py|9$eO7fm|nb>r0n@!Ss*j@X%M$!&^ zPLr3k7oV%-J#8;fyG-o0&3qyj80j(cR`QZw;uA+cA&h6vCy9J9X{?$4Njd2@?5zdl zJ?$*9zp7zVt)^Vk+I$Mhd)iWBZ#5jiA8e#t(rbh0CwWPO218`*q9W2ge1_8hVp4e@ zNgyw&yg!8TEF_h83&~3=?*)>VRBZT_Jb7E_WlTXefGuzHuo%*^BoBgan(qk`1Tt}U`MQt!)k+H(KpLCZsz phb^DHeEM?cWV%sqTamD0>}ne)fWaeqDAmeXMg9uH_3pt z_3O5~-&S9~d(XM&oO|!N=iYnnxo=*oZ{NtW7-I&cREn`qKzi&v{}Y*xvGJGg7|)){ z-8ZFE=iWD^W<^t22!;a7Lv<}eeVyMQkc4GEAr$coO@6_7%OasA(BLaAEX=cNO)uE_ 
zqqzQGdyk}3{^2{0WTX7-%Y8=~;G`G(j_NqS@5q-pec#bs4)-3-<8Tk>KlJOw(JYi7 z`gN}cuQ{TYck}YXrur4s$GIc&R58|YUpAATm{ObOjj+pveEs-~*d?F{Dz#=Jpqb-s z7`BS*8O!FRG056LAY{vdfsu(LD2QmmS24l03G9EjIQ5sYQ`4~N5QlGOkgn-(qy-+w*51iYi z76`Rhj+F7(xty;#070@53nGymY?}Gl8M`5~{CURr@~5%UbZI*`gIH1c&ee>aj7$)_ zy3CF5ccoG%Bl?rwVppGe_`C2FkQbvTN;(cmbEC&~QVC^~q95p_yiQtPVsvLoxAHY? zi=nb%knWJF3{c z-KpM~aRnI49T+CJj1a9ng$<;ST%!aqG9{13uY)C`Q;*Qb!Z7mDLD>QOhL@D7z)lTJ zJ`{aTr+gLCa(8lzs>F??#Kyg%yi<%V72GjsD8Zvhga6UWA$RdYo1p%}%l?WqBITMIMNOf`!ojQ&3?w z`PVR+^23i)sWH}fGg*aLeJU%O(naQ=(OIZgc7UqA1Cxs#r?9G&UB6GI(DBdE2RfF< zr)5$WJ4hWV$rw##MeL(JmS*%QO-Ic&YRyP)e5npIU`rOIWmA@qFHY-_%q}}bT)CU3 zvK6yociiQ@t2;e!d_B|T2`>4x{HiFwgG~}KD*p^4xaA)Cmm)2Y(M5py_YQvxs-wPr zk-7m6mi479e-DQ&%6n3lyMd%E5-;}d$Tcw9SHc})YU&E2#oX9K(G%s{&xg$v3+5Yx zAGe*B;=buv^Y6|8`Eis)*@;CLGro!$6XS$O6WD#myQwT2y9vaMvms8BcHre8^%%#4 zLcR7FbAZuQ_82EaR+7~S$1KLITV9K&+GOEizBmZBPs=^+K*H~*|!<5BA91M1@7;UhYLj|{W zg(!pTmV3!8HcTH8gH$Lf>!WQYq%0FqE!7iE>DT^*CeM4<9qG9f7K`!;bqb8fzJ$h~ znYHEUjPH@E72Od@jQWbvOLcL|GKniyRV>D;%qa^)Nlu6}2|sZm$K%z4T7H?zRRXG+ z#jYViECXIvWfsf2#O)X|9<+Wr3T0zmi6&nfs0UnSk#=PUmPR}4c>@rklwDr!P4F(+vhD|)4(vc!mJ`bl9s$dR{ZsXl+JF8My~ z2WoevT6dhX4gM&3fwBl?xXpi3$%a{pcA+<10X#$cwkx^f{4HRV|GL1;}$)}33W;$ymYq7RL&yX|29vxP8!U@8&Op1P} zi=4+@hbZq8<(`y95Bt%wS}MD(9&P6~llzviN%;1qx_qqbcq5V-#%_mirU4EA0yJf} z+PYDBnv24)+Bl!o%EONWQTM-ToAG+`_Amej(!!~pmHz7Gz%0dLA8(z zd-;GSk8eP;a1H7nLJvY)<$Kt}Og*rET9`SbO!-F)=breTx+*dwtdfHJW z#&0bav-*K%i}8Xscf4S?7%Q^6D)vjo@=XQqimp(ed_zuFm%Abnf-`)+Bzd&#fQSQ% zR!ULy9xJ<-usp2GY(PCcLR^lgCjfN71zjlY6ecXaB{NxGY_xy54q39J1>3~~Jl3Q**Os;6Fw zy6Dj$@7XtfJ8-7FfQ<*W(H9lgYav-F6u4(f&!;Y3I0ChC=+6&?4J zp51ij%4!Xq<+V8BJr=B9z#3YzDT@hhA`qZc2JO*d60aIDJqUw1ts-^$1ISZvcK9uz zSbYVsd`?`loPcPH3;-9nl!g-R z&Jin)hKALhO8Zlt2fpJ`_YiI8cqRGe%(@qSO5NwNfT`~>w_z?zl=YdmIOVRYvO`XJ zk7NCaU-C6O*1vP3&mVEh2jr2mK1Y1Mt}4IRw8gRhWRu_LiW#g``Mc4MK|TY^=frBP z1rd)JE&N9-3^sM-`jn|Mk*#A=No8%eo=HBB_3O@pB+VMt_Df(?{C|39e2+zQsve?B#DsOUl4J0lKhta&QU+nsg zAm;ar7?nW5gK$~*wcU~1WaCdE9qrBOF>VHuMGYAL1(@Shw;+{Adxak3{U~CJ!MG1t 
zC#3qFmk&5A1|r40CF{_?2@l@FU3~dQkDnNnHalKpx%t}rTSzA zZw>5}rB&sXjPfoO`f9>A7#5F2Kpz3Ha9@-3*#iOKEo%o!0b3+Y99u| zr+Ns8Q+C-L(KF*$aGYXMQW-Re=12ds7#dC*FiDzF~53P8m5AGSeL z)7Q_((s0X%z<9#Sz;JaX$GPM#F)Q&d8oz3%Yh`zvm^IMI`@h{r9@@rFAJS);o$|Xq zl!t05*HdEQ+85}4(m)EwK$kDxp}J=i17!k zKZLfJ!c)+b&mFJ*8TI7zybJa!Kl;MERi<@}qx%RI%^d#~;U0nGhY4S5gQ?1o9)|VT zEwyv}w}jVvIQ}N#D{49ZJHmsQA*Ia~O+xYWah5twCF)WTqaT`0Uq1ou5!5|w-3SCL z1^A=ZHsDw#z?*Sf%1Qu{thLowJ`ZHA)q@cRaEN2TMKs|7T&)Pe*%QFoh*f65${NAS zO21EnDgk{bIWnk*)nR(OmvlU`hv&O_-pBJ^vh+)oy-3+U${-I>XGQO-E41|?_r^Us zF{@9^IuK3hiCw^j3SM>~{G;hTy8GSvrga5oSM+%#8h$J04a3)V-uu`UCF*toqHdtc z383nxiVkG~S9hAJMBRF(5_Kykjj9upAK(r_EZeK>JwXxO&Nl2Z9&e9%5*34g8ucfe z=$`%;@h5`#g+KY9_>)1tPY1a_8RY(CkX(tHkv|zEe=^AZ$sqS9gWR7Ca(^;Nu0+jf z!3~l>8RY(Cko%KC?oS4}KN%!fqGsez2FafcerA92S@j(ACx^%U$>9utayY}E9M143 zz(@Vb;S7IrIK!VD{(s<42Dv|h;o(~_Rg{xI86iorkbPX@U^865K` zhyPdo$y())`xCc3xac;xl%;gq%f_nWQI|sJzbm#<5#;8NWd%_a`SrKQKg!NsEc{D_zI4aI1C5eac?+=9GKoU8!AH zzChI+xKzbVu4s?JQE@7?)h(-CiSkoc5rf3*Jq(w^q^gcn()k;4SUDdMqFn}rSXsCf znjIA{hK_?ef5Z{5O1V+39KvEWi2xQCbl{L2Cq{eBh+HqC`lyyPW8oZ1q&Z?#(M5hs ztTL5`1A)v>2f{OWZ-xVb=EwU7P6U2`9eX#7H{}j`^(3E(p3Yqb%R5xd-;1}nu>1l5 zytl?ZF14r8=`EyFNWVwQhDzg+CLv{>iF9Ymd}Q3CG9OtVJBvd1gmHL#S%iDi z*^+84^^TL8dW^0-cxPBGJMcUX1tR)wRm$<7!Jb zyih?)yhd5y`f@0eFg@5McYzMueerw_EO38?I>?7LxJ`q*HTb#)4f9mFVhvuUK~aOt zG}x-aZ))%{4er$7s~Sw3_9LxaRH;&CYq0gYw1f(4HTvBeT&qEk2Gi0HYvso^_^bxo zHF#QsMOyn;X>g-Pe?^1o_DUu*itSk%;$0grx19si@_#94cc}dgX)xU`T+LYz(bM0q zki)a%6mBa+nyS{I!|ku}=Hd}oo!Z;CS1a7Q!{zCk8HQzxpp0jEAHHNXJXVF|-uJ&vims@cb&BNt_ zXVbV`>shoKF4qPcy^>UQso`=PK^xJ5rhQjLYnRrrTDIPE&pi#xC>l**bbK5ksov^x z@b-GoqTSBh>(8XssB!~m(Nwu1&^F@bTxqNN{Zab+t5Tm@rth#kq^oM%SwLA0XhmZ* zXv)|M&@$$Lf4M|H^#dA%T9>8~gTi#tIMwEv4I{`wOjEin9Ois&Ko*)k@{WBvzU-8&trUT zQ9IuR{=JN{4nIF+&(eF&!)GLUvvXM9!t5Z`rCY!+A$Jn#`&gIs^a9Vrd)#eWc_Q#j zkkWiAUz)+!8Fb8W1HLM_Fq>UC+r%ymO&Tf+POPQ&N$#|Vq(V4QD z31cx~ET&ocL0ztnhwCM$2{nYa#&FVQq4G4gg%O9nS(lu5w_dGEV@pXO)}gb*(`5Xc2Ly^y395* z(}esW*@E<^hoBChP2?^#4rO`twkbwEGdAQg`^EUqkJ2@aO<|b$b7|LRvuocgV3$kz 
zpX4zt9Prp5V6BG z*E)&KUU5Fl#blcC?T#NwAsut|EO)@n%SS-_i0iHLGs~HE{+vE*FK6sIq_32Z^-1MY z{5}>W8uUK5oLT3BOAM?)cO^3h4K^C5fC-o~C5KHpHjPbqxrF66OlO9gFR`uD7<&<^ zpW4jPvz+&?;N`_|p2xI)h(A`wibop^ZJbpX`ec?ru{LQ6fkf?^9c|&!py>-5@zo|61rhz%C7TxUfhNZfYrdWYMx&glp-PD`~ZGP#@D!mL~{oMDIwP6sKyo=ZjJat zYdpSCV<6O0=dZ^oA{a)9P5ET&t_C(WgIcp9kW9PGa=oqM{eLKo& zQAx#&i?CamcXeGj>}LoJys?a7>>~bS@z(VOI;9cBfTH^vtIOw~d?GN>+)#*=#x!*cJE_ zI5otnGjW_u=bW`{E$58YnDZ$$W_@an@=vKjedDzw3$qZbXIC=^t740wnTwU8On0It z{9i>Y_HHGcc~%=LeIBQirt0%)?P%=38DKUuHWP{NL$d+fk=EkILhvEP^T$Ac7*Ieg zUIIG7TM>)51E>Fq`8Lv1z#jyp?_vgk6Z~EgV>^LA3|MGE%n95K_$<;8aDqpXsIP)a zj7>u#yacdL!y5oMX*j_rHGC`JfQA!%Q^Q99J$PeYgxHi|8xpli@IehHco>QF90FWf ztm4gph6@-w0Xo4FB$6R`tA;lK{>O!&U4s}6kiPLP2Tl<0hd6!};6B_hoSl?1XTtSB5I)u4fffM{bQUZ8pjGGx3Q+(WuWSFbMA>hTp z2@2RuCBO-Kk?g>OfDMRgJ-{hGA4Z}%Istei?s>JK)Awx4kd|V+c+bY~rnSK7{RiD8 z1;`VmdnVxo>5fP^LAuX90i0klQUU6I@q88w=nzxDW-$BN(sbwG*Md*bakXV4c(t|g z1%T&1^R91gX%<%bLSclXmD6UH+NKFUe|?~#$-lgE+Txl8Gs>q4xViWn>Y5R3R8Cvt z3s1ZLy28A96cM#7YhELu0e`r1S|sGZHeA2L*HRar(b7~O3WNiV(v12*%e8gkmeN%- zrwIry-@@oZVT<@6Tz RNX{eXM?UW+`dc3Ve*=9Pdk+8r literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Cipher/_raw_arc2.pyd b/env/Lib/site-packages/Crypto/Cipher/_raw_arc2.pyd new file mode 100644 index 0000000000000000000000000000000000000000..fe06fada99cba2c15965789f5c7d64f8d39119ed GIT binary patch literal 16384 zcmeHu4Rljgw*N_!lt5AvvBjz=(FCaz!9sZs3Jke$12>seD-?88Y-<~!W53LYf*(&b zpoB9~beM7U&2vQFQ)iy?)bUa9<4w|_OGDOv@P4A*IudZ3m^xDenLCZ5YL4C%!tC zy)t_DMcXyr-4|7^tPfaQ{LL%;HI3HVnx>}ap!F7?)gNlI);C$@YZhA@o9ld23kt^A zGpZNteBhzHe@z|8CgXv-4&)>M`n&!68R+=8`}b=^dH;dQ0{>+HXhDzdA0z01D1YMp zzWrL{pLjo&q30Y>^G8K~QGM-7Y9l+M3KwH_59G7p^A}ZTu|amR)tEPS0=pQ9RYfb$ z2DJ!&jfM#Ed5q-?C`Z{w0Axg5d8`_uYNags)4Yl}-Aq*N2CbvYEsT*}Sv6yu(9WrA z4P#`pr{*y>;S~6%Obq2SMMpDbPR0x)^Ji3<8uYCS0-I^YjkQf3_B#AQWqruyq@ zf;Ehp#sYF8k?bW%BQ7UOrmA90Ap=z)lAXnhGqzojo2C_YWF)xBQ7Un^G0O9VPSz?;#$7kJaMhChcc_+ zv$>3Y9vY{_1@@6=UjkQ%*^ZF#Jyc#fDCmZ6@$>) 
zSn!(EJEM{N5}uX12#^vjT3Sk^U?E7#Vq$DvEOcpVnJR8Y`6X)kIaGesNJ_WMsw+9C zr#Ez^_Kbq7txbpKriu;F#Cw(SOct~&Jeit^A~ag;wDmi!bSsmWif@{WG_{C&^}Y)ciR=%!YbXI*a0OhwF~!+WM{I=budP$bHfR)e+2c{ z#qKRoD?3r8v?`HKM)a#tkh&4W6qd1u=j2nTL*Gs{V`Ql@G=2>%5uWtve5l7TjA)>2 zk8{I2sY&3D_n!Y@_^>8*8>D$+;AyoJ3#k(ucPV_Q61m>$jW~-V6__8zu=F}HK~6Ux zWn*6j(IuPpjv4rBL@DvR}hiYB7-(&1sv9WI6IO&GlbGmmMP?WHt31BL0LpG`$e zy%8XAgtadNh1K}`Fj?xUKc>^UqQ^!S6~Oz{qHtOhx&lh4qgvS>s`eR7uBDQbRCtr;2}y@bY%PS0M*E zKDidfwHsdd zU+QCYMF|`1so$wg9@;8Nh?t*x7NHo1zR0I=-6X_FbW1S|=#^Q?U}0HoP%*z$un7u7 zpH3&=EJ?hm85pAVKzeUg>yAooLp&NdH?wu&d;g$iZKE+fe(m;Dn&|aKbpeHDp zVU6~ub|MHOvWyRZr3sxSLWjb4E4)2jbo5dhZ>9Zu?zLBBR&E1@?;sY5SiV$OjP+ij zBxr%LHz79Dfck$0EY+!M*QZ*AC`^j0Wf9LMOR+}j{kEC zVoLN!y(q-ifMA#|noT9pGdmjuqkW=O2;~AH)_OEW1r!QNCoxvqXS71~x+E1MSXL;J zb<_-XM1a!vpHQn}uUNO9`kA0Tm#a0fmsewluPB1|f>x5tri;wbiGu*`GSEkxNz^rH z-UNflc7+;!a+2Db6LAYHyw3#78={If^((QB|J2?MTPgSLB$F+JL!UYmS(WGo4^sGz z&RGL8Fb=0JG5|uLlZFyb7?fEb`TNzCO6yad2eIN&_eQmDzjP;h|caZ zc0!MipMd2JkxIKMRH1|memDaLn>08#ZJra$*RWv8oa+2M7QARq3z75mFP=DwGnMrY zyRt)r&6n(}n770Og)-N-U`LR`NsS!Q`@jy20(KLa?TGPAjw)Zx8oc`w>ML58Ih|T` z*|yEE9v&SS<1)rk_fk}8*Qd+W&0UTZoCIrr_2WWw_z_YRDfk$`Yx&?K7xbxR=Lc3p zVnQGQ^_p%ae#ojAyA_PeN{0vQ#fhsDq3gK*Wk`pUhIajCFy!PB{m;Nj#}n4z%y81$ zu3wKVrWgeefZGnKZrRo=&*}{o3r+UE*?pmcaKQ?cIwKb>1EHBMp~G+xC+YA`OBKaC zvGsu|i(RR&fDpD0f=^xPN%L%E>9ykifVi(0_dVi%MBJYd_vgg@7vlcBxNj5p?YO_F zNuUkvi+^@M*@yi^;YSsILRr@ltX0;17`*vaD<%z-M+@T$Xiy^Luc1-f>7X5&V3ts-SQKtl!f|WrssdEkt6RQ$o;+<=I&JhK z2A7~uSYwj9(WZ+wUyNlQqfP1zSuz@p567ERk7aI-y|UNKKc|5;Ben*=PowkEZi>C| zwqozBxBXYr23zWN{6UGZngd zLm!!eC)iL_J_QOmq7 z504SDd7DQgy+Mlw>>}+OIC0@A+mr9&2fVbx&D-YbrT9UEtW8SsLk48?WNo(;?;}XB?6j=01- zC0mkc(xKZbt3syu!2+2C^H5CMhI(vCIV0SmGO|P7dJ=(=lp0*TFMe>WZ0zYdD)A1{ zHt9#59-c%ypoBXrw+_p^Gk(zQ;$O-}2%@)YS2|@!2l_x&Am`osg%W=Aq!^pT_qoFZ z8uPlT*h(e7%M}geg%8ek=pK^zJ7B1PygPhouI#Ws#JhmVkvh9YcFcarG2zFwqGfoz zi99#nXEe8Z?S;Y+vbj~Zn{bEyV3K&h-Ut(?@}!=AiSHchwj~@%iMLC(ol^Wr0eIuP z4%1d3L*1y${D zEAcp_;1FXeW9*h4H*Gxzkd!2-dr%TbjeX!@cqmB$8|v;Eu(e?vZIW>xjoQ)cGPia~ 
z@qYM50?H-cHPqwig|Z{IE>v%er{IT&jFPd<5htI4?BN1O+-2;-fckjnP!COnm_OcS z>x>^le>!Ai8;xl{DKoe3g(vhJh5OLFTHqM`gsscP4+0V9k+=35_wppUk7=mK#e3mF zEDonEe1o3kEJ= zksT8r5U32VFv*U>2gm^pxp*d=Ly0z?6b{1MRTmMn2`3?w_v>l(s9q93Y{cL(iiE8_ z{xLkn_&&yJL4K$k11z+~IT>9wXZ)~mkOanH681;S7F!#Z0r>|RJbs8g1P2Q85Ccu1 zFa#zSmdJPtxe40PiG|_fJ?`))Fo265?hsxpB=fd7Mc(Tv@~)KD=L3jI>lfsiw?ThD z!UUW)Wsue{)y0p3T%8XPWRz?D)f(6O1zMEo>4+>WhE-+{Xzi1p%5F-+T=31NB>wkYDsS_GkP zc!e!Fw8yqr+o?6RO4=^W!9HyVR+3qUr}DjUOjV*A65dvwwhIFBfw56j6s5A4++>x~t9!4L)fl;aMVV*>uU-q!}C{ObDY`f1GKnD$=9|J&$qd+JGPW3<2 zYIX!_%Mux)1A5-m_Gl|x4O&{PSiwgNqPjO}6tWSn6%#8JYY(#8xK?g8Y2SogZ@dqS zb-*Ya5gxi!4;1lO$XFz89J;iz_))X5n|D%#NKkR7C@z$+shC?4Qc`21x^-$>G3pPG z+1g01OUM;VqEE?8e=tDo84$u4DI#IZQ1wlavz8 zfBX}gYhyou&mBI7nf?w!3GYs8K;gOvwJ@F%yFf8qq4B8riI5$bJ8ZQR8_Km#YW*z&vMM~yJO5x)F;jL?&b6;28b6e|=pOvEWn z07INWfS|A-BE$k6Alju78S(|P6v1Kvg2mNZml`ZAB3Rs{zAeqg3gTq$#=&==%q1P1 zLKyA-OC`j=n9Q#=Auow-+jPGiaU1w(9-rqhJxXu{8=~%hS7g5Kv{Nh;nY`p+I8b5& z?$>CzCc{ZOG?L{FTrv=Ex*v;LinvV~Et&Y0`E8=ui({@;x{jY)&WFo6PUN4<{ChgQ z1xN1;=GN6FG>0QY{P%j}Fm}hbd5+l|MC^u*wAfsS)s=jbVjg-{k?p@J94AJ9SK<6 zk-Lh^BLN!jZLr}9{ymKM%W`f(=;jHTzioR<8tRd4ZruW{=~0|ExdveZD>6xrOtC9D zkIRo@=NV1@*+b4wYhAlgXM0-+lE3F1IY!b3_`WN$5DSE4)R3l4xcQAZsS3}`1~YhP zbe_%~$>*KrJYc2e+#yA-GtAR^A2T72yZK$k+PB?2V4-CjS3@)+bY!7vYr88F}2Fw?_(jx zmFSYmZpR*J$Gxa&-ZnuJeiiZ91s2O;@rb8HU~M@p5%F9mu$?*V&4{N|U~dU5;+Y|Gc$zXO z(MM@Mk(ZaXXRO!}@oX0Yc)ZD4wkzU!U0}O%*zSlYCa|}2*gFx=PJwmju%3vArpLQ% zPY&A~@$3@VyE*K=h^Jd%zs_OrM?CKetT%`4i+Fkk_CXH&Fyh%Su>CpgK*ZB0u;1jc zk0PEU0>e{Yc5DYDo|M22<*>sM&oO}=$zi{Zcm@UbJAp+!U%0~wt2_La)BJ-5{90EQ z!}|_;4W;n%$x7L-)WU=GT6HHaU;KCOo_l?_TCx@Y4r~;kc~P znuA8nt415{8}w?E-ZC`Paby2Web_iBm7R6Q6Fzk9D=5U+KqBg(F~V>IxtmMPVXWpb9^RH|r{f7c6)yrQvvui5I}wrLEp*!DZ;E zm%Cc{l9jM%b*dLX{g9?oSQ1T$8f6{#&BWO}X5JL%ap17wH`m{Q1)Sei2T_@!O&NNB zhCZ30uV-j?h91dK=2h#Tm!YK@x*$WVGxY8ZeKbR}rh6rm-<6@qGBoe%tb|I(m8;Z{ zq1n>PNY^)BP!k1o3SrdzMRzw+0AAM@O?w- z=x^36`gCaRFZ|E{L;3f=_2cvQ+;&5eJNVt7b>FNj{+DBIeNvP8+OA#pNB+HH^A{~w 
zK7RLA>ulR!x$|V+6*o>=^4PQ~%f9@{;SG;&)qixUWktiQ9edgD-Z}W>bC=bQ*UY{C zP~_(YGiRLtZ)NEhUsFs~J6_s7_02zgxbV0C`qMkcJY!w?^}X>I$S&D$^a{W`>20}; zgkQ7inn{!PSGWvk%uBWjgQU(|afW8`Q}nOYpHuq(FBwawf7T2w$c4}pJtL|%iRKl7*O zRh)&-*v8B-urZ7BTc|D`uh?G5eU9`-rfw4am~o8#I+H&e{Dnx_a>`E}QLfQxm~I}v zYC12Uoj1eG&hwA&D{MKtn)Fk@mq2#b*^C{9KBA&8i}ETo=6q(xSj-rUdAhMhGg`w& z--2?3jv4&A7HxH2X{O($ngSgwm@%3aOwX?vP3>Kb5AHJh1DMP|B3VmSs>#!^yb9mO5N#Arx3p-A^swEltVpw+<&%uCM?M=fJ-9zv0WShx1pM5I`Rpvs zIZWT8tJaq0RUE+wflrYgX?{w{HP0#~avt)0)0}yE?@H_-d%H6@Fg;)z~84wIQE>b%oEryxHGa(^QL5gfI+0yXa4~ zyX)Ab5oqN~zpthaAo~NMi&qDNzQ)S>MxPYG0DQ!;NgBC6P;(1%LfBh>i@(OddVWnq zLx44F$$jf<8tU)#E%w$2f(q2Jb?B|^yCt+@h0mYu<#%0+u6B9LrcbSFXrO(#s%m~! zpv6~Pzr4P-N;Mv=H3@hiSXUKX-QugNZ(815Rp$!?{mrYZ8las14$$ap4EVq;oxfs?Kv*E&9}4OFQgxzt2afALytL!W50{+d00<$L6xCfNy2haxqC&)TLrp zRomROynY2NTh&zK_czmWUU54}BbV*!hWv1v&3 zJTe1xEz(KwM4!NUx(@tLL9I9oKL?)ZwKxmw7BWWvje7)X2KY^&Nu=w+6Mei8kLlol z3R-}#LcQQEpsyoUgD3hC618O-&)8%n;!8klGJGBAPcl5w7c%^dpuHKM=qDL|5VQh+ zf7k^5MAsmZPNJJKJkbM4)XzT9TXFs-z5!HsE+}}SB}jx5y*9(wfo?qyb^!h&=rN@2 z;ECeBk>D=_-Hr48PT+LrJA`xuJe~c<(%BdM$aC_@Gc=v0A4am{98B~Dq?zD}CXf{H zBhSJk&&hOto{MxhWQZ<9+6bO#0O=|48$lP_(Rc9Gp!b&GIT5_|Quy^Ig z;OYMi@8P^!h;u5Pfi-vzB75pUtB@`Oj`w8jexzQE3-8Lrb6g*IdY+@_uBRYRl%C0m zCrZy##1o}wpUcQ5pfizv0REfnf6D?I@qPhIVa@41pl1}k%lR`n{t-lvh~nMABd1pq zx!H49H8xmp_xS@j*3OwcZEERctFNiHxvsux#hl4YDi=(dIoXP5ou;~)1{`tbOkV8^ zOrHClf-$pe0s&v+Ee)%!P|y^ZGdbjMx++k+($`oMn9^8Z>u(M;FAq+sZEn1(CeS$b z_Gy!?IKI{|$BA$0$ks^JK6uBJ3@&TfYL*D=V)(+JMeF_Y)Q`$Go^C^NG=H6l9vCJ4z^lVXJBr`ZLDk@sc z3=%R$k|<_)6q7g6iy0XmBDB`m*I_ADkGY(>N^Q5SPE~a47S?G1eg1CaBMe~In~jes zWPM}Z0FwXukv0jNAL)>=iLBr9p#BjR=q(RqO4zMVLBAsD$>G6as1MbMJoy+FlB$78 z4w(n2cr938rmmVnFAN!Ht>Aih0cc3#i4+77u7+V6Boh}a4SS!RZ962Ze z3L3*0D4i}2h80@JCCT;3{CYc0Wx z$_*PgQOO3lwAnJHO6(|oMG;$~TUY64idZKfg&8irY*`{(0<+&llT8Fb&nqN(?nEVm zjdT$5NGYUmC?ZKyXoRQFU}@p$VwNGEzKF%-PJs@XIMFH~-m%MNZ$L?MAw9Va8p;WP+>^gpv!z6J@uwf%**5EHeidp23&`;elu9 
zj|Ctz)WfipLdihcOoccT>ia9|yP^75x=3lhw<44{cJdY1WmTFGYRPiRXql-NXd0g63$;wDZ|1%&>Ip%})M-y+|XHfVO5OyrF1*3eURI&dHml1~5^yhc0#m&tOz z$>l$)Hg&Gjhv-vON#zvDa8POo&Dd3_**kDKp7If*N_O?TTn-(tgFc{Rv4KiP&aebl zid;;S%Bd1gM~~K>dK3?YmQxiiCE6K&6o^clCKHvcobkJr%7!Klb}gzUO3+ls!)$Q4 zv;1e3sOF7~XEb?CccPtmh!gK%kw|E=t>6R!QA*swQGj#=0nERD<7zM++H<@V3!sKw zE@vzpf&%TjoG}LwIYUg6Wd+*m7+P1%>A2h?4Dr$hu!f|~4rjZ8&twdkZy3DhSrm#l z213kJVS;1_fWiqL1YJ7q6)^A&gi8xy_0bj}nxNf5Af3h&C?q+6VaC8s#8$UKlp_Hmhr$(7pOB;ObSluFIF+3VdD_65Dp=7 z7?%U`BgbnBav%pMYUB*B!O)yIiS_}uoDXXt!h)Y9Q4q$?S6^0W~ zD4(2IwuoY9puif@Xi^9|(;3@D-yw?%mKAQI2;U@GXlh~_h)nIT?O z&&1h)v-yTNy9hrELxu~q_XC3cqbr=wD+2ciA`>7|aJ;P&$Z?2-nV(q#CR(DPGQ_3M zJE3n(q9JDx(9w@fTmgQqfqV{8vH&U|r)21t1Ko(h^2Rf0Hvli`fS^zI5=_CRS}a+E z0#Sp$EK4d-U$VWBiowz5LYz>yfE)>C;aW{J+^I}#!Y*vV>BsfggF?w0J4MbvMrmq8 z>jAZ$t7xk!%Y%H>)LrHUG^EXq3bwjkVUwn8H4w)rWQV8?B!6d!Gf4g$6#7)UmV#Vz zp#(KDmWdRErYszgrshuVWxl|XO@^D2iP|gnnh3Plh*_q}1^NjkGgzaIvdfSJA+vOq zexXQ=$kc%oH*lg<&UiHtjn|vyM+jJ+%9R^|(zlq!LG$6?h}x24T}mnu8-lafPe44_ z_bvfXc1@w3CObolf@78AJglS}cLAbU|MWcCL};8+eh#fQI{hh5z(3?Ew%F`cm2j5P(>`3ILG^0(lF-RazF@DWu22 zgtMRo_F%Y?aRA$6QNG7C3=4InB|)f3;_+bpQ-HwcI}ZWswIoM$HkbJ4>p}(zb>x~# zyaj`c>1Vhmc*Z1CoLUK}2A-jpD9F&ehNqjb+?{TVO^I=O0;eL84&karEfP2tiXa&l zTQ)smSK+Whp-3^2E?{Ac5vBuisR2-p3{%q*g}+zS$V$Pr>?-h!ZgQd9JC=tm-iwVokE6(|)Vod{u~eTGV^PANl`kSsm%^c2($ zGK6Gu^Eabj$-N>a4cS?O_FO{I0`~F%*x@}H5WN7sQOL>}bkGS00kq42J~*`D8T=Od zdT zGX|i^7*1Qr0gwVYXeiPWE!^pes8O*}(fU-(1G(a%ZS6?1?Ogz!+p7PAw#5*@sP7+X z!;oFbE~oF{5k-7CS~36ijUC9H60wiOrp35a>ZW_US8} zshDvr{451FU*un$zOyLSgh$X~U`OCe%P72bnh?+?P2kM{Bp-4-#>>FJIJGS5k8FkB zc?3GOsH^&>7d+RF)#2+FLEC}Qq?9IiQ*7?Mbp5Y@(hFXi(4}jTqICV2z&NJ?czW-8 zMP1L>1d!+z8w=_wm3YxJCaznF!>E|(n8EB@GO|SCOVCb$w6sjCl$Hw!6)B)?2aNl% zgei8AmN83dX+XjhLq#fJ`5;xv>sQTls+O3NnppLt>LvP8{n=2!VHa_a+xUFb;j1R>Zu5WC}&Te$W^o=zt3spLJ4yjPL;7vy~pdEZCg50Ljm z8A7>I2TinIPe2(k|Ogk)WKj;X%1cysRa1bGY#qEdAvTz>b$YvTq zW`i0SV#ahW!2HtPzT(oi6v+dec$^Wfekl>!g#LKm{JFVVM{`9}b#fUinoD0(Bt>Yl zVK-KJ?Gg$`Ipc{r8jGR^<#3>sZdILxNFR3uK_>+5zc6KxbGV9whn%qnv_V*r$DU4- 
z3RHO}wQ(ol%m7o~O3wEe)MZK)J-L-cVB#AQk{*Fv6K*PSxKm_;(JG&eQGGfi_-SJu%G1inVvcukyI zhdX_vOMPHZ5`c#TZ~~2~?Mcr0!5aCZW&a;gZniZ z0~QVhfP-S{J4J;eA|P6zmh)$U0Sk;gVE#Pa%4gUAxcwg>1^q|tNBex|i{nE+gEHMS zIA7sGp3L#@6iN3jENErYg)Nl>nRg<*6qR+S!>YEVxmiO3=#+L#apE3i*HMVwk#=2x zQl?!buS&aP0av8mNJ#taLO3q@LGjFp*Y2DzJ!Nmu5{HU^m~J!wEZ=&{j&!vDhxryp zMkYh1|E!rM`6%C#xfO`cZtB3vGm_G0ct#evB@ypn#IhaGv|}h77z&yHI**4VGgH{} zkGGFM9S=wL7*0Ge5@`P?;~`(Rq0w|0-3dYVGr@lD6s|9N3y{>Dh=xqi$nzo_UWV~o z;9*gK-cEjE*JfVAQFVq#4nFvcJ8>U(YQyQS-*M`7c*i3%(1$we6I}WQ7u~1l@pI{U z9yw%=24aT_*{MQyb8YD$u}ekoQ6YO($bPQvM0F6^r(zsXAqQ2+VGz**kwYrR5fyS& zg&YSFJrFsjVw_MRCsoL45HSRiQz}ND3OS=f&Vq;uh~%pn1uEp63b_CxrXX^jWWWuE zeGw%u{w0-25s35xkwO)tScR0RkSibp8PVgiig8til&X+&5MhBxnTl~ug4@m@WFz#A% z^mQNt8QSBKO60K$d7?s|g2*TksaG+csgUO?@1?2M;dJS*LBhq`5(P>tC|P=AW=d`lm*Xq1o}%M{&1Y9t#&PF$`UKRvIQ#|Adpd9t$e5*~XwIibH~A|A0)sX!09 zhb*Paiaa({kSbP^Dj+NJ*sICN`XwcHw!c$C{MG(S_9hhF|DydxU_ZIU{+j*dTK|{r zNBQP2*iR?gOOzh*zVAO2!LPZ>Zwas_mXAf7UWc*-@vXIJvr zo5+n>H45b_?7R$qS%tqY1QbZ<6!Ghp+&PLnErXwXD&H;){BJ5(6 zQcTk|(3>JdRZ1a>dBinXu8=PZ-HRGcU5Vr*GFcGCu2l7OF03{_EMQzmpvD3o@py(Z8=Zl^F_pEaq$Q>(ru<`6ovcJxC%rojjP5><5EEoz4P)Q zxDGgR&7HU{gNKyPzZjz5UHOD5obb5P=j{Tbocj?|hnErJE?k16@w67B6`j~jS%2VUl9lk5Rwb_x zUj9NQ41a$p#L~powh3E>CiR z_tk~;^+iMx@WF@wJ+J8svrUxHS_xg1aF!A-Qo>9n+@*wtO88I-Ta-{^hC(k>2}dd6 zWF=HNORS_^?Qjtn3S zvLjqpm>nyMV>}oz-uA-7p-1BbW(4^BVd2xT{-Do+pv|~1$3zx9$MKp^p5ehk_A@F@ z-IQ^Sj;4C&u4@!6GBSB{a?ttc;F(wU5B_LtD(>ns8@Ypu>q-ZieJEPU41KH08eSIxIXG=2=+rZ>AK?aslf zoJn)ShI&_9?xkux*V{b zQ=C0xRxoQ$_oLBY!cKcdxHO$`>=NBP*{RQ@uaQGG*lgwTlPa_?%sQ%G`TXLyaqP#M zmh{k*({7nkuJ{dKEqzuY-{p)kQm$1b49RV?>@f@lcmyT$g|6+8QUX>Z)U zs=cpW?USyOQH;5P-^$E9haQ|vJN)^yvt7XQe&6tgwqDDgU@qr;XJqHBjJ{?)WqFU< z@NQ2&?sL6YB41*8)o%8oSf7b2j}4d}Ugmf0+{~Twr#VI~`J0_T8|F*RzeSw>I&S)% z+g1rLytWAW?Rn?jS5;+h`ZE4uYu!`!lIy#;UA_d#uD-bW@EY?iMG!aqd-jsbQ+?hI z_IOykVe^{IBlA`rewA}?-Q3FCt19a)N{brbPa=N^zX5Zne&>j9=X>l78O=2t7#hLUl7cvhwq<%gf_rcY>^wzN7zUUpj z(UgOv^tKHIjO|Z{_Jx`{lyA{PR z^_o_*WBOv|m9xLA&8PO4?!UDmD6DqDhdglkq~PmbGfvT!mssV8Hg*Ot+~^!qQa 
zZaqJLSHNIQ+>mrSxV^80?%^O=8HTr|hg~54wpU%FtrrFCwrlee%X?4siclGxD zSz`;d=4hrwHO4F$&l`C1w6(URwqO75myhj}`XC zl}FJ`i?}&#ZkLl}qwy5{%)gnKMsrC4vcjScl^eDdtQi}`b z+K)bUS#$g2(EW>I$J(u@jQ!9z>G$79K6+R@!hZhgvb}YGY#9^C+iw7W)8SHD*331N z?8>^~@NB5TE2rFZiFURpT}&Ma?+IFGBPUYzU866l>wL^K&L5JnXcJ3xEMdyg^rPqd z#BF(OsI{)f_y}>*di1SFeMH7Sta)>-8|r#^F29ky-E^$qf^HAB#OJqcKfiME_{NdV z3z|RoPN@xb(+;jG3AsGhwAWj$8iw5nC%3w;1;s2kk6hzPmxY7Wu=SpGFGhCBZhtb* zXM(Na{kTI(Te}Q2>?7eodheH6_tRYxEy_YC$CnGN=bSX$R$#!m8vp9q zdaqGmcfYsNG21(#t<`yO@8R_Rj@<(iio0v=?Ke5kxJz5q_rp!0eM(+!;GcgkEnB$f z_K2m&E#)gIZ|m$zc2A4Am}2fHRNH(ojK2Hk8>bCJYY#7eJ1BW*)O5={b4E;{=0M5V zx@JSh_CT9`6fcSumc!5Ig) zMJ4}n;;0!Ba;@b;_p{;say-vX6WUD+Yc^TO`B-dnSB{Fg7uYTK}H zv2)Otv=QxbR?j#2uX}BjWE|FYVA?Q)`+XWpbDuokm{s5xzdQMZZ?LFuPkD)S>&7v~ zJ(_e*-te}u_IiQWthTg0ANjVm$?ui?s(s*V%kQzH7QLx488`F!nGl)DOhecDE-uWx zXK(ng4j$Oam>5x8Iq~{u*Y~car_arK+uLcPkY;{^o1I`*UFwmNI%n+o7oI^$&aJsY zx5o6(n4Tj|`?@o)w5Wu|Sh`kxJzRH1^4OFUYo}nBrfdI(cRwsX9%OD8)w!_leuxZYXZJea_z4{A2 z+7Aq0-^OHc+wQ8(x1Rg{d>FRl?mU*(-8t`>>@#=IxE&3|Jgq-EnvLPJHi;*in{d*I zOTE8t@Ho2W^u{}N?_>Bwb3&W7&tFQ}bb(qKd1$G6%5~e51DZB+9l!ZBjJ0^UMd#W< zi+E4V_XE|v9;Ob_`4XAe7-aG&3=78}>?$+}dy|nnV8{2-6IUKyF4L%Ac{=a@`vr^4 zZXRg%A8{vZXZ%gqL3267vcEIWruVNz;qV7+zzar>^zvDrA!VM~Kll6i4I3-Z&)(Wk4`ittCN^wKxwpET)x)VaG7#&WtvrOj-bXh8c0~;}=>4Y#S7? zQ`Y{ZXGCzH;#0;3*>+FWZchw7%1F9>t$stlxlh-+u!D42PJPc_d}FZVL`s%^LBmQt z(PPWD9aj8!kr9U^S*3f6+)e%ExM>^dV4p5g>J}T88_->~ht5{2SJgxhh z9{y$f!q9DtYj5h_T9dPL7NaI@XqR)W!_y~SN?5Autalh%$S5~dneY?S+{9!4v#?PYeRJYiM`RbWT*vqXTKipE{oZfF zj&F6X4sXTpi!Y=vFZmSeC0=#1%%@dy>TJ!|F@gP?79>CEO(Yo1Nq)A$f6c0?QDb6! 
zPchf{+*>JltXBT?knY(^+Ol(a&_1K={bEU8_oZjFAMLNt)wT*{kFmR~)@Pb*<;50l zi=3=+hTo%l_jW$pwD^(DgX{*Q;8za)*4Gt# z*>^22p7Q4UxyO0uGjkVzZJOVE>P_tzwCFU-qVdi;B1hJuwTt{k&r{F-<{>&Yy{O06 z{OmjNKHjfS@BXTFXOVODW%^-{S#Lef+8Y9o*O;X27}aG`#LLoq(U&JYopSTipy4CR zuGZPxZFyk4EO_mz&jz#Wu>j79n~o!IefWH5L|pN~o>o_;WzRk~!*#^YiJSq;?p)k@ zKI^wtM)w%8^h-;6S>0cF87rOJbLAO+X0G|g<54G0MGc=b^dRn=5Vih%)%=}T%ZlBF zZ`$cziyvo<>8%rc@2W?op!S^OK+iD+`(^z%98!Ni>tf5j&$(MBSq3l zi_;4~TC>~^P|bGls%+g>5LhVnnA5n`lkxlN`_n@TKJDml`E=ySy{Q}ak8cd}d3)#W z(9(x(liR|h)kja&`5edC|00o+dTjaO2JNlues^)2_ARq<(*yhLnlmj!CJ}?xMjpDi zaq*tTl>08XZd+O{UUbW1d!McOt%pUY?oShZNSikA(7^hJ-QDZI9Xd94>*sZ^GcMne zRroqt4h+55ZfV^mVGYGU-X}ovnrE=FZRM;x&+kUk7zL5<(<}mWsTW#8XpHQpk{!O8 zqndjR*fdCUOZqkDcZ4>tP}y6r>vKOXhBtYJ$&eDgWJ%zg^>+^B*TMsbrY+iP@+ zXHTj)=)d-G#iVXBk5!Ls9D@9Q8?ojSb6s)ff>$>ex_k+_X+FvB;>5Tm&-3<;aUNI_ z?)4Z?Z0HhVsC8rq>qKPTH>;HJP;*UfoiSTZ))n$YZat3hJ5t8%Ro8ZLIw!-g=Tb^l z#kuFF<}R7DqmD11I+CX|%;nyVQ|I^Hd?h-U9P;^yacIWa=uww~Uu><)o;lRJXI$}- znBWN?GxQC_zFnrjoYz&vP1E)DwHz?k=S4c&sVerw>m-#<5P%ey%>WMA>^eFes| zG>h`|=FpxCJ*x~|cTuk!L^ant)o$*3F=w+Nq%q#kZbP5#PuAY4A8em<$E%5B=ub?XexUZ=KqWiYY-!Er(7wNv3TY-ZiQswkmCH(8kA= z?q`#FW_k)trd*wOq4a8lj>!T$jmoF|Eel&qzIh!e$LRZlo?Wsy_p$22Fj~0ryuNDc zCl=kho;P^g{2PTXMqYQGbGimxIOx!Fc&x=XrbEH}n`7Fp!7`>(suj6&7ZBc(jz7`H<(-UD_H@m?u4{XKNk~WU7gXOmq8EM~_+!IFFDOo^=EvoL zUV{7wF0EhQbEvw|;ws*Qm@1))-vR$a_H^j~FCvCY`NqmWRd9fl1DsN%6~Y_L4wt?l5&!z3!c?X(U@-%_%MK@|r6BtOfv zwy_BgoekeZ0u4VAVb*Sw;nM{-py2z^pLy`t_@L@1o;xXr|I8!hJb&UL3d8(<=8Xtxv-ykq+AT}42{7)<@cp1CQ)oEXx4%S3qpcm7+4{E+((W>Bo1b|b4h(=Kk=rK z`YJo~ycKfQKk*cD^}sWPKNYo(SA2)6`VQAxsG!mJxSr6j(zoDw466s8=?@+o@!Elh zzOPlv*&`kX??5Zi7!>n@rjh&>mIJ_Ap&sJ|Ks)$rD(j%wSYUU2@2mPQ7E2_XfFI!d zSC)+}5(xm_W`If_X%piA*#)LyMhq>CA=btiA(|M&UISy;8P$L6rTDJ#JjC8$BaDnK zq-Pc2S)FJK{Q0?totmc+d`75auZ8JM)`&rE;nz^?4#>4axuLYA3~(Ko%LXOg1#mMc zs(M8CfugFXs8cX?SNN)|hX&Teo{sepb*(pv=@NkSBfEV-)~O4IeYR6T(w50;o)o$U zMu)M`VJvhz-582C1=9|KHd^YKmPkE@8lYyQwA+TFua4>4Yh(I$8lKvyzrOInqf&n? 
zOy)NzHZYS2Qq(Az8caIHL<=(sf#R)+nLr;Vc7`z&1`T{yfH5ffm;YTsX&BvDHwO6;*^e#+NxSU8E>ATZbDA1P3(>|ny)o3^X|(ow)WPwH3HccweH`FlD8m>9Main`wqorunc zVu*N{qP{cTxlM=v74c)!;e!A*Ptrz+B_{l#Iv+}yQeS79qNjoB!My3&X~k#;Xn1yK zX<=RMyJKBj^;&f56}q#R_e^k)3K#qTqzSa6e4@HSX8^{*uy``FDY$vSO9Wm{Ctl)A z4BH31{7$?a;1vSzA70AeY5uqW?sHFlvN*ymFh(p93EiUM10MKq2#*j?5FI^N5;H~; z6^y2lA0-wgVAyk1GbLOkmIOxlM8$3)ptnArTR1ANTin^N)=Y28V}+2m33W2i~#(eyljeUz`vl z^bd~;jrI=_#)?JJ3H}kF9K&3&NMU5G5U?q3lYJ(5yL<3$?QIk~yJ5j#Q*Rtb#>it1w|;j8%lKm93R>T)))Ae^L>{v~(63g}f3R1JU`fTsLMit3vO} z<*F~7Rk;!6Mc9?Fp-iRLb_GQI|GWO@cmVyim@O1^9!7uplMH3i6b0M@=eZZa-v*Eg zXE96QBRC1pWXXV||BbT-$|=Ct11y754LE{XCKz@Z@NEF~8E_s3+z{Y-DD{9N_yh{- zORp=24S<4hOMrn&JOtooC63@>C4LNGwGv10wGwXu=n4P+zy!`b2qr>7IuTs2#1X86 zg6ymZIM-BxM*vjsj$zHfN6-=ql0k5i5)T1*qzCYP;j9I)8Hxko2*TeDkoXXQH{g89 z1Agc88#?PmFo!b^oVO5^Kyd&Z!DUczz%v25^o3_Sz&!zehY|pI=d&3) zvstq+ObmPkouH&4d4OM`YzExW0`i9?j0NEU_Y8q}0Q?xhPc~pb;OP8jWT%)T6M)fB z>VXe`dx5Qm;s;}azrP@#O#%Q%&%fwdf(i42AbO5KID+UI0pSRu^ZX&e5yYYBLEHbk z{wp4!kk7ZKuw;IgMrU5|=ilMOe+0pBs0)A=YkqD-Qlm!4M@BH`3q`STEOH)TYi%=t zDU1q^4hfH%?L5H8dyJLC04AK7qe22BV2gAfkRXg5Fxo|5XH;NptS~YtB7q4CqGFu~ zNJLR1V}rwlk%6&Rk>SCj=-B8`u~l$%aTM<~v~mq|O>>#Kmbvk{iMikrPX#}0N8*mO9hN(7b~^0j?EGKtssE9K|9{x5 BFEIcB literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Cipher/_raw_cast.pyd b/env/Lib/site-packages/Crypto/Cipher/_raw_cast.pyd new file mode 100644 index 0000000000000000000000000000000000000000..7d4b4aac4b2ab05d4f4d06bef7fb2f667cf2194b GIT binary patch literal 24576 zcmeIacT^Nj(8bCyLNe;RQN-&{-WF@&QAO;lZMe*tEr?ODL*7WEJ22kVP zqbC%y{n68LB>m|Vc@jQ(qCmpOWc!|nbx)+BzUN`H1de|yuD6i&d7=IxNXI`%rPaVJj)EO%M2qt0JP0;iE%E2%*sO0!znAUINzak~jZb=#{X=4E# zT>nQG6JFpd9?(|l!VQ^>#x5BLVC2QXFkcgHfFBQJf13y-78DaG22ciFENC$ivtc-L zP!tq21}`XsE*6HBm_&&g$uTBDBhtA5%Akvd?g{Wc&X^(L}fRU|KimEfs28H;g|eifqe zIeIkcW_e{e2oYv0V_13#Pwonuo*F}3pzQ7oPJpfbW8qN@@g9kk(3S2w)%xXj}pG!0QEvH;s>GuSU=Afa2Th7WSlRMeFW7Sk@?iqU_d2x8{EZX+X$F=Y&#^_ zfVpUFn}9}m=$YYsH40QWgX4vBevuY~Z8+ask1-#tX2INv)G4rZapEdkQMkZE&kZMP 
z9hoH}^$*5CfXT7IQv>uYp^c-r00*gV9HI(MqZx;&L_}XyF<@4cVnym@s2ky}7Xpr8 z`5^zW+`ay2+fFuLMB11l(1TR^;>10Xda^{H)gOH!NT2T?eYcRlsK4u*2i-)KsUoSh z=ShuA7Z1!ydIQfYwUfd#KsSxlZxo(FA@UhHLqaJkhILVejj++s3s8(2lsgl32zJnc z$tAsno?S{-HK7w497G>aPX*_%)nl-|#G(I@LwsOX3%MXGt%ROd1s#Q7;T+9u&VZO? zU8bbkBrgLZJ-q_%mDH6`K_&mW|v~1aO z7bkAw`~?gSpQXungZaT}Uh~NbVmT8%#5b6t^oAI^m3oygh9$}|OFG_z3W1#{wVx8_ z2umyp&M*k{7==n$o)l>7QWL3vHv&z~e4s!eq+I|ic#U`nE)(Yb6p8*c?Ht^+6}pR? zQbm-QDInAWx-lD2w-{){(|aMRgtyRIB&c#?#k?T{JRmd2F$wD26#p4?BAAm$ub|KR-znDRB zXu*jOut;L4LTz$Ll|&<~%RxZ^^Y7EK8BCYVJHrzv-ip@&_2xn_=&D6-tMP7Rnss>$ z4^Iu$gh+PL6sh+b0*!Fg#QBaYBK0- zF0{9zwqvk3VB^#Y#0eRFl!4#`PV1uL)>S+;W=RiDKd;OKfidqb#QRPjrK?YJjtEzX z`^pImV29`$A#{g2EZrSqvW!JZmz;1jRPrdKLZlBiARCA+*nqOd@&;rj0$*B54C*vg z$Q4)QgrR_Ry&cqFI3Fm&d2o{u2i7&t4@3JMPft@4Bwx{jf;j0A-h`zG@u!jcof4x; zhA&Rk;6x?t{|0Eh9(oH14mwN7EhG$WAv`#w$q+()Nk1i$LU=0RY~B(?gT40#(1f?e z(y7ABBr7;pGR`xSddFcv#Q6ukfNBbz)2pwdrAv1x&!B2UT^NftmUX};@qn5zN?ef# ze+2^C)tL!mAvF=7QJ{g5Wb+E;lEkB!;MA{_xaGp9FbOb|2I-Gu^uWq{&?+1bePQUx z>IbMUN7i{J;ALhIJ(9{s4oIyMeWE8){{jYx#iIaF6+x(;18|EP4|fXbaWLTu2*HHk z;6~;Xutk>^x-7!5ASXNe~+IIYL}4ns_STLMlY z9*I+^70?$n$`E&I46gu=2`gaXf!P9=9(G|J%#&~tkWp2nZlVtu@s|+Ta6xu6G(u`h zCN)~91>{U0NHPR%WGl(^C((elE)b#yl-eMf2w|W+_oW^ZmR^M#AsKPQ`RPavGK3_u z3wI%{WUiW?f$S`Y`5EW|=ZP{LfIe4L&>bbv4@@o4sp=K zk4Pp=Mqg(u%cE?WWt{rg;I;DXV?&ZxVMFMzJ0 zaN;8ez!s>9e34pC!>wDn9paUW)~9$L$Q2KLt;KyO{iE*_WYK@ow+sRp>Hf2As4z>I z)%08kqSTT3*nzmiPVR}|1%|PcKgE;*o02c!^XG!6p%yLHdnamZHC?m(cE zQ)WOnz38QUw1T5zDfE?xE|t{QdE!jYfv@}oPw8 z=y>4JijD^Hlsdfh1p`;C!(mhml>Rex%cqsc%qOVZfIGE{R!L0;1O+*tng|%Xx17PV zOs!(TGYwQ>ilHG0utMOfa~Rv~VBH*}NlMado8A_qoT@w%T3P(jHUQAf8lZ%X2iaWe zO%*aHCyzde6K07O)eM;6=s?@x?@OewtBX7xK;DDMdkA?yN#4`Q`+D-8LEbaT`(g6F ziM(%tdmg16bcmlh#QC%2C~3xtk2vuaPp_t@<-vgP^vCqHV}KN`f}3C#U!<-A{c%2u z))@6dDa0c_O;AEA4pf118q6?W$lcqohmp-d4x#r#9=4K#Nm^$Z=@ekyUa+x-{1Q6F&vL1xILLBHKc<^eD^Sfgz5 zK`m0Z0&y@6WSa1YQG%g01L@K4M+;{H&e~=U_$a-aw+i-VUNm@!7lBS=I7?S*a;)!h z?a%>j#xeSY3ql!mZ5HpK}^O2W|)F_O71 
zM8Whgvrg`jaMp$Coq&UG3mIV4#5uspgohc&nP}*b;2D&s?Lbt~wJ|omKuJ|pVwb^D zo(dEf>niTs)P9v{25NOc3cjk)Q&+-jxaFWc6%&=J+0Lo`3We|v0t0{2 zXO{Bq-RK35mh=Lbsin_pu;c4X z0CC{EbOVR|K;3ZwaUv1E3q-CLS91b(>sm~PQHnzZArQC$2;Y0rRH%a9gCLVR0=0A) z8nBgdU>uqpffiXO%1VVaVB+xi>6Jh`M=()B$4iC4B}3K*Y~?`q!a(DLF600RvAt|& zmL;;yj+teOh5>bJG!B5%kZ}X%AWg^|*uDqUI}rKWsEfo_i9+JECF+O6u}zdAOYnh) zd`l!4rBMcGECM~!K**t$AOCT`!hpe~t#ErJ0|pamqyyQ^wCTuL<}GFgn_0?c-f|%N zNiS5fiAu2@4#Yl^7Z*$?aREZosu~84Q0gPD=R^t3Dy(*~iA*H11BB4Z@ z6lo11ZeTOZnYTFb;JxxAaM(oOgNkvl+@li!DXdu!Aq`jaK z1pyPx0z1JF1JY4xrs8TTn{Q8JOF{PSvIbOdO}Ofx2X^6iz- zP{lGQid8`tF42P%N_q%Ia{^-pN0&&ZgA7avfh^Lzfyrj0b`;qh!6fnQz^ex`M#NPj z%|z@mP8*M=CG1%a)Ak# zEEF(CtXmw=15?UOQzYkjVASQzN>Hr=Tf1Zhz#|-KJ5f3yn}b%Rc)cMaS}PoW8aeB~ zS7oVKm$)lVq>{?0Gkn0H?!$-}`GG-YLQHf%gZ>IuF2PR4~ zF0oGW;Bd*71OYu4`k{D+*&>Y=55h_^s2OCy2qqhm($l6Or5wRzEvVu|KB>o!*l3P= zb0E{9@txF=YtV85*O*CMG6}V#GUN1-BC%9IR=iIhR)lOY7gOht6Q zlNLy_kVrB^EYM_*3_%jhm=L2?l2O6-h7askc0@kOjP^0u^?$qJ_ngCaI^YIEGPhz+ zp$(2kS`Rv4Vl5F?;1tR#sMun!>b4GaUZs(>-%qK1K!J_Dn~OBQl%2wi3cQ#`q_nW7k%Z1Cic zN`~@af_U+YPo&@5s5r=AO2|C~)J%~0*$9~n>Wq*NNVRC&LCdy^Tn>EkuJsS^!e$OT zlQ@zLB;K1q4jk=N1J*kcr%56co*u;8ElQthd^TfXb$rk!%PU6=W?lO@s8cL>8M_!Ds7%A*vFlp{Oed_B-f6(qLNv76V8hTtFI|*f;~3 z$R@I;BTndPje0TxkR4$jIFh9W63EpJds-QrZ#NNm$upA!PoJ#cjcGH)nkx|-Q3n%g zHn8i%E^wGk9R{}1fm~o{>gN)9jbBr9_yHj0mArJ<7zuVm7mh&7U`& zu8N*lLfAg+rR*haZ=a=0$XBHQn%_z?TY7;V1>Wt@3#K^|LP*Ew$-uL{1K|g6e>+Os zX^Q>q(idQcofKICM0xs;lbDp7ffDU4`&WM{IWQL zl`IaNtvHAHT6pT@$&-%h?_%hVgq$P1TcQ^XVfVf48tzEkbu_xe7FDzRo;wi^0`&aE zF=W+u3S{Ov7{O{Ub%f_?I=pn_Q5}uSx@eBfvMy;y>9VwHo-Dnfn%!73kT?J{d1w<^ zn{wdI53zB54jWtr+ZG#wirBY4$3G+Vhdzwc1)R(Y@@PrI|9KHX>=}@J1|R~jATaI3uL7J#YprH zY4Alam`kA-P)gfqie-RH(F>fV$qKra{+$=~Loo`WDaD3%2gMQ;jek&S3F^Ryw^icH zfsi5tKr;iJi!#Uzwiuyhu%#R_*wVm=p7~t}HoT6x4Q~;I@P=UUUzE__y&Va7E7!tK zec8_;s@c7m42P)a4Pz55nv1`iOkXocx*qqh;*70#_Aj4BsHxzDy$bxP2mr0ufg(-k zDbfo7rndEPq^megP*c6Q!|&Pe$IU&U}vGmNh{8kfL 
z-REzYv0C0X)#lB`UXKYr>sC#>loPwG^~cc>%Ncg7?>Bul+^RXMcjDP*L;-@b2IJMs~Ace57O{vw4J9;m7p$Yb0=FO^Kd^h=1wq?CO7F1iy7>m+lUlbQ@J$1ul#W*!ZqleX; zQz>h3)@_vBrN#tA*_MQhG{U)xB{mL0YA z)m!!(i}L)Y5MAlJ3cl6h36}T5&ab$!>U-4k>POPb&#YRTTKrYo0zOZf*=*3bXvf`| z7t85aW4@1?yKC~@@2<;hiA&cMw4cxVRdGY*t->*d`BtY*9b|0X&u93VthQ>4zcYUR zW_H&;!GqIUJJvQ%`0Gv#MPBsRt)9TemCdZjkK*Hoylv`qn8~xyI=}yp-qY(IS_xkr zRC~@3(QEj5F=mJ7)YFH~jbGd3;!`zuN6Z$lH|kf9^vFmr_%KoCcIVWQl*cD;2xiSF znw%pVH(B0gP5Gk;vui=)j_lI!!@d6US}LmZf2UM?!GG;@N5!ZQZk_kJ1}D;wUDrMr zg^z9eq`2W&yo31(QA(q!|KV(iYkUyi_s3zjyKO~r`YGO@Nch8Z4Vu3kKdJw zujtEN_j$kFK8-bY&OOzOF#6`FHv*HrS!LaV153W?->})zC0DH?6ear3oc-#+6o*^d z8@JrPclO5D^TnOFzrS}IQ7b$v*E&}=W5_m~dZs5;F#Y*`m8fk8)~TpoSr;JJ^y=k> zr3S7xjN|97w52@zMvM#2F^T73EYxcoyQ6!o$p`5ZAC<@Z7QBj;;syT9x{w~f z?at%al-T4rDZ$or{rmgyIj!M-n}?+R^&)khX!p4j<$p2yr@ySv z@?{^|S2M&Z*4uhlCzibGLDR(fGk-pM7uL5=_xSabXVwj!BbRBvrDUge=HpSc;)I;- z>bEVgh^FW5>@O*^d3#}`VD1phfbVPWhGdMVKT4)J4{Z{?Ub4JD;i=k-+}IH5!&f$E zFWL4@WHr0vtW}X>qv){p_P%dp*E@%QyEc7g~qVf0j9OZ`+}S zp_c`AD%=y?l;by}wkf_|H?->DVguLD%cdXcESYTHKox#BXP%Dxe%k5`FD>m9r{AFC%uwy}Yf6KhuSHX+bvp3?i zoRy9jT)CFA+1qhNBY$d^@3a%u*jx8`D;=C?Z%|-ei+-pTJZWuj!b+(ro?WgY)4JT7 zzBd|5q-~qRyYt=ZH_nvH(2T%5ZC~eJYkG4x5%UmEm>V9`o-@bQtY*lab!xb|uQl~g|w^AyY2esTAJ5r}kem{cM zmLB}Yh$*^Z9JJl*R&8XWV8YPqy{8V+e4lBz7%kyE`R%WMHs{5A+y5lSP~4nd*!IPi zqu)>8YP;{)w&I|jw9jYW7u6Q8A3EYrX8~5`JMPu>z?jwAdI2Y%;3+|8E|#zN9cFO6 zwOR3_R`}#%ew@m^jtdtzpOm7GnOvec;mZn*6w%Nw)lmGUMvP0NXxf@Nqk`>yrndF& zOFfcYA?G zbAw&R&>ad9B}bkI@8=C0N&CU=v#LMuICa6-1+61$z52?F&59yQ^hW8(8D^9@CYmp; zR;}1{t;6Y>rP{YQw`MI67CBv;bB1u+?Bru_xXM~n@56zj(J7zlz30N`OzaHU_ELkG z<*}{YbuadAk={5>aiB0R8qhBk) zgYH*PVb5tdxUJwe8rrtCucejc)34K7JmF6@K(P%$-wDsew^oALsyPUT0 zo(?hMc*VxJu3}m(aUE-MaEX?MP1f@-<&^E?W}8YKs7y%MJzXZ-b?No*-)x7R>5Kim zJ;*Tfp0-V~{Mu5~@fz1z^EaLeu$P&e zUOzfI9NmuVG5JX`^U56!^&_ z#XafQ(iy`3&wEZ&#yf19>t0?UrC1d3mnb{9)tj9&>VrfkY zZ@LmZaya1*Umko4Ydz&W?n`CP+0u>7Dccil-TWAFjn6F$RHcWwaj#w&(r@C{cIsK2 z;BE4lo%5Z7U7uqf%<9eNjL_XzN7IhTl#8gElU-hV+a%r!syce&?&2k#k#AeJMs7P` 
zs_S%MozqsUP2o3=xZbo^l}gRF@0z1iSD4C>QqnB29kab(^UVScgBcO{xTwsBMCq$I z-r6?VOZp1NpQH%+_~fgduf|#rHLyJMEL7t*Pkq^_4Xu}1S~_3%k36zwsv1_6IYvgB zr7GX>YT@%dKV7ZTKaZe%Bqxxj!!-H=vzKxVwv`l04#4VIykJT}I`{J6+&GW`>eH?jr ziW8w&i7$WiD1OmvuP)AVDN%y`y!_bBrbm*#D!;7Zt?!a)c_&kRVtDIk&53^{@-FB1 zO*qAHDeqppLf*t9mpy!PPQmM5I~$#GYqciQzm)lXZJ8I?Z(i0AN*|`5o2b!T^-)vi z=#G)tsd4JcI^O2~Z@CIffBq_RengAcTz_3za4bsITq$7hjE5>S?iMuOzMoNkR_Qs# ztWT854LkUvb7jP*J=&)AcWQ^HJu6*NEO)y3pu-(6`d*?u!-{g-wBme?d2zn1!K&sL zhYt<4c)c#NAmQf%tYb`FndppO1%3DDij%54v)7Eq8rol{^hWPe*0}$1i*yd2 zQBT`qn7H$5;^T|^&rNt&Z5Vj?h4;iyX2*oKC+oJBcxSdx+~0J<=E_!F>@Gi2*ii7kIj(r@YePr%vexcU)9ht2RwokFU?mkd*dSzvq-lA=qx?x0cG>+2RixvXs%7;pdc!QFVP6n1Zo z(RP>e`2{Hrt#gzNU$0)_er5VE#+dX3Pv)+?p0~re7e$Sw73sDO3Ec{RZT{B#JnZzu zVEhngZ6fnUJMD3HzI_Pgn)kc6r`tFMGQm?Fk=U;fVV5C*?(q|TZ zn?G5e=kxuXKQOOP9Z}M9OZH2`(q*r9chU|`e>rx}-G_JLgY6IEBX+1Ax<9-6*R0`c z2ZJgGu*l`h~jUs`?;^ur>g1Qt4u6L+^b z492ExJLz6LY5NVGY1S`i+iS>Lb?%8M7j{3Y+0uVDTsP^^VOF{PzJyk}<5@@VE?v9$ z=*7{xra7T|JTj9*A`LIxo0eAM?m313EA!0_{cD-a@^6^v*5Z+;^*3xkaU$^Xb@%q~ zk?pH~26y;RKA~7@G-*@EsDnafUXt^Vb+oG8H(m*HD5?iz;u{l7#w}`b-K{O;*BE5K zJHY&TW74X^a7P)RUF-FAt*dTU7kiZ&pA;-}tyubHezAh?iyIMJ8?qNy&M7JXsJDAb zy+qf0b;8AT-uduIXEW+t&#T$4>5>vLWI-r{vz@btjG0 z!;bHezO&?)a>XgRhd)ej>d0U1Y2~jfe&NG(-Rw|3%qd1tM>R^ne+6e}$voM+S-xt0 z`@40~%su5)jBPh}xcu^?RiB1D;9~rgScXAy--#1{1zX&RS-iLj`{{mh!OOfm3cB=E zwF{fIX|>$OVY8;+4KnZDSiC_?He+*j#?F_^d{=L(otZkXs^I(Svgxs*CARaVHqUV} zZw`1FW!bXOBWO$yO}_Mqj@jJHA&bLuGEGxv)X152?_bGF9=G916N@SL$kaZ8UA9yy z%s@V4YxA}*y=Et9)K3Z7*G1niM|K32?K=G`vgB>_`3d>H*IIT>${u?5;P)vHXB(gT zrcdws@#*l`tvg=WC?)DUpToCb!_=nCicA(_AJr`FSziy8d92k9)}H9 zQVp%}!qJP5Uf-#eZdx1jKt`!L;R-KeVr$Tly{hrdqoTy+gQ5&$LMbXW=mnYmw_P3m-*L!;P&*IR;_S`o$jLIGj-1%lw>6g1KjdV-= zc%J$lP2!E{*+~vX0rxq1VW`XxwO#rzy6f12eAb-r1)KDKX%z{i_MTwND0q&%6S%pk z{qtO|{_rE&58uj`D(pJ4^W+t)isirJUHtn#ePkt_3-Q{o^p42RSIt<|^*ry5{06U; zZ4ZzAGMr=lI>;X@sh6T|I(>8Hm$>Z{rc#wYPEMaMRqUr*on!YPDR)?F!{ST4lHEEX zZ(r*)38#N|zxeJro{SB>;!4e~+`>C{ zJWq7cv%4&J-|O+l1xNkumcP-)0$YRRD~4S2eA?d% 
z3V(hYr`-O|ZhKnU5t)Li!TuTrU)){>&HPw8<@Tn>)QDkQQ$PHgx;o=~Q(yao>UG~M zTYvbQJ>!2VXki<^%?^4sZbX3B<%({P-pdOFbrg?nc;cO+Qo5(;wX+$mKv;Ka@gkFl z8=gljKXor?nr_V}qg!9oY5Ql+xD!IBgwYtn(Yen;iTb$ty`Or^rM{K7B(U7>U%cZ& z+djN#$iuw*t1c=ieV_ZttL5RdS6B7eUW&J~>OJSvBVIpmE54NGcR){JLCNgCtW6o? zU;25jIgljvq+C5Ng!*trSinV>Ee?lz@`Ex1K9rohpAgz>+TrXvb-&s2$psUm)05I! z{ac--&i2H*2(4**wCoIIvv7w(PDHedp?v8rpK9XszV+)O{al9$RO3vZ8;!ci&EIlz z_`@U9i$-azQ`vH6U#^kUxrT+W4lHzRbR5?5@zDm4TaV{_ef2344S9ymG zqjB3cXMg>O6UP=4?ARf)SRb?(U z`}o5}qt4tP(;NS)H!LkVh7tJfUfqJ?zKn4pGP6GQ_tK*h_vPfAa&k8a>+nAGF7|n% z+36Jxh9Of=(5IwN`1Nvl@XpJ6j1?!oWb4;FxG_c)GSR^^rdamChdqI9Uq?G%d9Nu) znN)b&?5g@NU16&H{0!%1Lt;D=mbDxg#V&PnbU#mfHZ#1by`wuzg`vIpm|}NA?aR<4 zdt0m3wSK==Gb-L^?aT0-EBk{R-+k=NWbWuW_xu;#FPb9rtlRA7?kqVapLLI)XnyK` zaBz+3r5x%M;livu_4o3RL>=ofhbIdivbrzcO)dIfQ{I-$ZmB;%LEgIb!_Kd3YTq1r ztZYYF+}-|)Ejp+#+n@A2N#&;Y>i2txpSZQU@vzx~Ux~!CU2|-OaiQC`#jiQ=O>c44 zu;;1R&C~jm-5&b*b@q9ft1a`N?WwGz8nR?zc~Dh!t-nplq!;%#Bs4ea7zKML`)SK1 zF6*;OGaEC-dVbfd!vUtg`)Z~=nCx_Il(BU3ex)yz@%gLMcE`8A%-Q%UtSHMYO=anc zsrtVnBR^|fiw-XTzLCM(=(u@&Zrh_t(<?s1x0@TIoP_B}ej3+i5{ne}*ZKS#>C9~^nebLNwl8%vg5dNax^Bx}X( zRO^Na)?u9kG6%jN7O5pfR^7 zlRjln%r$eP9Wx(B-|jm+?Q8VHUE>Zqcn7>a5mz1Gbq;!%FZQmm| zKiu|C;KLPVr+NO+1mUyfh<)@@q3+ayIdKdK^yKj%^NwMkmU8;vn$@g3IVk9LakZWTWA3&k&Bw)aGr ztqq>g7WKB+@7npi&-C($*u#6ZHs?%T|Kxn*F^$XH%MWx#3_CXC*0o!9Q|FGnW)RZ- zAW(YVskr6Fv)_%MtkIyIViMsk|NY~($9D`nmwwr{KrPXA!PYgQsUCNa2am&jN2GV2 zp1rZj*E02M+gRny1j{hzhBpdIzG)XmM^A2^dF%X>PA|^si^RtPT3cniZ-#|g z9r0ZmReU3w8oMF&!`rcr%4Qlbeu$o4z24GwEITP+^5uSh+^C_YZ!itLA(w;`sd=?0 zR8>v0Un}fxf6J?1zg=s)-2~O-m$B2jjnDsh`gH2~XM3JaELT!6j2cnQlFvz_Pt&_v zJGDN)Cw@-;&v*M}mK;=93^7;67d^X5*uDEJ$XxigvORmV?Whs0TxDib%v?LUtZy~z zy#2$APX*|(PAW7}F5a$hmpVIozI$$p`Rcb%j}KkKJo!^-A340q>HA9EEkiAv_GH~V z6FReSS@Yr@o^_v6f_Huonxj~#o-=dt=*YI8oWh%%$|AVJ`9D^z4sckqe)snKgs=1v zMp&=U6RSg*;*yW&{LflEAQF@|szf=vHC~lz-DNo8v(m1upTqaMKTX(=r*D;&OSrSZ zq?>1bUSvA&E^TH*&G-bZ#X4PwUo?D}Zn-*tgl4R(ot1C;A-;_VE!Y^HOWAyX>ku|ckTTv@Lm`jZpIX`QXt-BR4@m9&Ti^_F2lQwUTvKw_j 
zj=s!w+>M4pz5SO<)dTd8IL&$eu-`X%>Wm{b*{hDI?@Q0v>G;N{a!t8Xk=)}`hhk;N z?A1+NW0Ge6@PNJ^_j7T7;SZnGkENf(v(JCo@uNrKCui8NJezR07o9`*y6S&a{}>rP zHrL>0{(*SsI{p4(m6J9k``z3XvAXj|ME%;Z4O7(Q*d8C$-ajiIf3CxeeRx*R?yA1& zZ;IY-3#SA>?Ml&5Z+&lfJm0tIXUv){AL!CA8T-qc^{j)fo)_94?e;L+u+qBrV}V_O z#eutr)$hnSj?5FRd-dVoLyJ5da-bR?;_j#8(nG{22F?W9X6|v z|K7EJ(vzA-rn_}|+@&AgQ&(+}t$1@W>GkKrkU0q#t15E$Cn>M(+H95OI$_w5?929A z>)xAQA2vld_qOa)qwTFz`6+rg%ey|EzTB%m|LLkp-I0F5A79J{8oL)a%012U!XICF{m_y^;m*JIY)JGl^MJ*!30t#@ zOsBi042=w!)i5?Ddq-T`=GCQNb#xZ5-ch)r-euZ2!&araMF-Y?PJZXFdM0q;n$8F= zHgcO8OEoUM1@n8VG)vk^`%GTzV&8!Bcl(dO>C(+?Xup>;r9J1$(Ev|*i*%K#o4YD+ zQ`?Q{^XKx7Qsi61MuuMh_-g1$x|ZXH)w#WGBk-1KuCX6DMq2Qvp7S-qX{pMS>EA3D ze%fzkC%0zz6dU=pA$~JX%v#gzo2o>d(4J`Nta?O0w&QZWn(S;n{UI|`W0g__`xn$N zWGIih8gw{Cp{3t!>j#;VIT1wxbI0vg9@{=9>C6Q;O33@p)Z7b#{a=Vq^!tE;Zxf*( z8nL9{<_~2y6u2zWj~D(gT(p1l`viCaMc**e zn+!GhMV$C^NG4`x{z1WjL!Aa>6Z?7aukrR!gKrc4od*By4fFUnnls7A`a6x}!~aHe zBl*z%j}AAIZ^7Sa9weVH++nB^TRcdfMSO&wuSb-89 zT%Nz4Gn<{{==@Hd*6&lhNINY5(q_n!v7e`OLVuA}c@xglG{zO?`p zV}xRu<{ugp(JX;BXbvWkH26(Y8ImpTi>8tM1{Di{iC8=azr(?3@E>|6%c7%EGnOR& z&ep*9sL=Pfe1S}UcS_F;W*+K5+x3S=+Jxx;-9>>$3XOu%>;TG8Fc}2pDVY2+fC@60 z!X#x(fh!wDp~+x0u1u7)uap@D{-s&gLdr~4orbCN3E|KA;O6s6t3iD*6P+u9~jRd|;P<~4K1^|8(iiTuF=KwDn z)Q;*`2er$Ol*L9)qGKbu8f|J(!+eokG|mse^H3dZ0XYbgHq4W9qtInBI*fr1W1w3o zMp2YyFy%?|n6iben>_M~F6frX*ggWoMvnwvgA4>IQWQ+e4WJqgQwxCNA&04f95oA- zD2h52{8EIeQwlLz#6yl%7E`bQf08;u-y|wV9|}5=li(uf zER@=Gs8Sde>`sF3q8x{!1Pg(rdB~FW0-$Lj8puWMgX@F)Q2vcv76ZdVpacw(i|VmZ zcoMn)tPk!p5Kki*RBY%Zb!;eQDCvhuFt14%jCG_uHnL2kN3CsWol41&C}m$IH$^j` zD^W3}I)xHil$-SwaTyUF`9^LSzQeo?#_ZlHZ6eAofL9{2>&IFXSnVxpX4 zBK*;`IY#if@fh|JwRnYcc`<%ro)K}O5drX1V5(Co?xDkQ?t7$PJ3*hWkbM!zf~47+g%}U!^+-VETio9wFR7zW^X&pAgkO zKAIO8?hzUu$c}~q1R@O6r#OU0`z?bS$;=5|#`WXI+xvxuMPrfDC~QOh!a`REx^qIK zc{qr}(!pAXz-2MP!GYWXD`z{-o8!n~T9^ccK|InhA0K<4=%_&d(4bI%AF=bmTlzqc z<^}lh;-dn6LL-7AeF6fbdECf&pD+-PK4BRi7#E{^7t%)~N>WBy=M zB%J!1SP(Z5PUR^W$Oz?uE5fmcrOZPv!!TXUCva7ue+)0s 
z=Wh)fpbzn3m|s+=ad@;bgp#p8muDOj=oe)iW^Qb5EE(57+TjbNJeZchvHZKG{U1fI zS;A-tf`2L-_=4BKUXJ#0DJZ54i97nm5YhkV`hUj*Dk>Oe4h7}olK_T5$pRcfD^)nZ z0iO=g6$)CJZUDQW1OWaP;KHF8wia-2fHhDu0IvhcfZP=EBREA3!}bAg3Gf<}Hoy^l z2RY)TAUJmc8f$<|z|8D!5j%4!F&mh;7=&X4;bX51QdjC1gHl&X%h5B5Ql>32(FaiX#nqy zg7E=Q9Y7^5_+SX&2*UpukT?h6yU~z?03GE;PLLOU1{~!_5m00x?;4yx4$du6j@b{z z40sUKV8EA$07q~l6c4}$=ZJ&z$5?2;1SJD_5Nw394{!uKpqxg1bzmN#Tmw7`AbTu4 z7Xj`DkO4X26Tk=OgD6itVG1^*oDN_Y6b9fZ2b~KA`P>8GIVfg8hySm@%Ag2gyzoC3 z3EWf|3LH|6KoHd4K{ZI{A#9^mmCU zM~C@?u>2<-zDkSXKt#{RTmRlj)~4I83J+t%26Cezy|Wo_Zeli`5g6eg82~?=u^I2_ z;bd$%o&mXjM1Wry9Q8Ki;{&6|+s;r{nC=%H9T>hWES>=ZBBE`^$8aO2Mf-;YhWkYu zhll!eBcmgOc*g#b;nV!0!%bq%$1@;}3k`zI$7`@QB+7I^7F7NV8~>w8Eb%)P>>%x+ p%0bP8jDt}JR~-b8_=@S8xn{YRxy`w4xx(C@-2Z4#{XaSQ{{{Otiz)yB literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Cipher/_raw_cbc.pyd b/env/Lib/site-packages/Crypto/Cipher/_raw_cbc.pyd new file mode 100644 index 0000000000000000000000000000000000000000..b0a111340912ed43a4b839c371c8e1a7b634aaab GIT binary patch literal 12288 zcmeHNe{@sFl^)rWEo=)p#3BkLK~Ci2Y=UbnHZ?IWQm|p16^#Ohut|t4ioq}yjqSwp&nH0f%; zJMYPsN!rtM_VkZ!@{Z@-J9lR8-1%na&V3p$zxOd#$QUysWwVS8fYRd@??3&zjN<8Y z_DyF$oOYygz~DVn+1eS3JEF13`dFab5e$UG5!ta$a>Np0M=0!Qx@)DQJJK%ISS%$@ zUG>rfkAAt}pVTwCRC?xfXNr)2>9x_*40Ps?M^76>`RJLsLjK#+(*!+tx=9 z9ANCVGKip)O-HgLN8!Q(- z$=Joj3_dtm-to?0HhVEqa52LN50#I81D*o)dGcI!-*I_i@~lCwrq|5m?+tRv04+0~ zJW!oczkt~vGpTohSofrbtVKCWbkvGU%lp#SuMdi>9gTSZ{Z2FQm!0Lj|6!-Yvl<;Z zxN^~}TyPyy&qGOX+S)sehHApZ*!C0Knw)O;3x;Il^?wVYweMxThfEe^$OJWwO1)|M zF7+qVko!GWmRboBPuW&Gh**|^sO4D6ed)x_>RPS31LdF4%CDjFv!$eTV1c%j+;!RQ zuc*h1vDA9AiN&hj3{A=jo~&oGlPkmO92B8byH`25kJes`@?)+PdKJqL<}l{T9?P1J zVL1wn#tK|%uH5ge<%;aA=gPxQ4lEp=2Qqu1gsHFM{TSg)xZDFUYTeu4RZNegeP8+! 
zi`LkUCbj-dcDUcP2?gqX7^XnRk!&m?qZ6M}BN&-lLgP08iDcDz7t~`IrRczQ+`auL zY8ALkCuW~YzG+Y<x28wo5Z&85!}|t6Vw2``0+U{cd}|5A(y#tM3sL{83vIuf$JisquJq`6bdkCvo4jn;*vu7E8Ai_mbIDMXvzBIOtRidsCY-Bsq8}10 zgx()O3e=Qe05bJkf5>Jhi@uRxWWnmwijrAF;x;I)Lo?TLYIXsT^S(=1RqDb2$!5{< z*U$$#mZ#_HWNpJ(GBGW+#(){99jnOI&Dwr_ea?ol zBG)M@RWdY{n=l)0&jbDkGP!vZ>zO9c(V|>de#w;!*dz&)`i}s?s|+bW<+MO176Inp zKl&Azj`r@A+6Fl3I+V424&I(CuVihX1(UVOB0Icqnvv1Ia@@zWRh=a3_hJtv&(-g} z2AHWf%r^#KyqA{Zk(;sR3o$|Jcah^t6Bb>+=_~|1MSL=i-DlcDdByZoBK@W&_*kmx z2G1cHGTi_P^*Us-gQKY>o?i1K16WKbWS?HH%sf84c9)q+GE)bqnPEUZW!?G;qc8 z`J509M==Z)K}Z(kswW)XD_0YMZmyin+E&g+bLHRY9B^fuu?JGE&Ng6}wRxxngI&mz z?aq3b;B|I#1*KkPm|&^J^pP-1nQGS|+E$LNZ3dcUhmzTXhrdsg=YQb7+}t@K$&Ox@+q1Tr!l-6D?{6;8+8A<5n$IWx+$_PTmOHfk zYm{$xpjkN|9C2_Lcvo{dcMbBr7&0DczX=)FWLMnUGzjLllTYRSU3AvSPP@KA-z8^;Jvyo$fD?pg znVEdgkhn^?4z3*G%23vJ_F5Wmt8yz&qHhxw;gGs~QrO zw8hZL>PwU_7J0b_D7!kW5mSHwNDgP;wmxF|cT~VyJP1NrqR9RPao{d z$2krtS}7IDEl&1%;!3bCi$Dv+GPP$kytkiJhXAeXIB+TLJ(VctU$h-0UnZc`>hv_e z2npMMQa1J;Pn4;b&;aM@e}Uu%OlvG<`ZcO$l2tEL?TlD+t9a^pr~5q&aqBq>P?VFJ zr+yG^(W6m$<;cx@!87$`Y&@8qT0ZU>I7)~2y%1A2Yx~ShaLO|(s`1pW`wr0sqzVCs9)ZIrvxu0z&cP0C=i>r|8S zif8M1SeCjxTQ4k=!igs3xH9fKxxdv} zmhkbU%V?PE1MnPj2hR)<4|un$_L+Y z@Y16kqjFI2;J7h!dnR#@V)_AUCx^{Lrk!94NrCB`;5?Ty4!J%#>=-g_Ll#qvf=9s( zpw`i*n@%(}oJiP(Cg;eak%T2_S&ve8|MhD@XlAP^7?u%9Cl8eS$=`{i4~#3l(BwlU z933Q{c(PS@;EO(6)hphQiuX40J}%yGiT6K?_jBU?UGaWiy!VLr0N%R|8T65vjMaGG zAw-NGI=$fbxPs_rIE#pxLbt_e07FH|ge4l|UWIs?)+9m})5aSyDi;E7Y?T&e6oI4o z3VIj1t-bF79r!Y9??o{51=TxB5_E(KZR5%sJ5Rn>zM*8G4C2x6Q(5Y6v$wz9xrVkF zLU`Hf^C}sY7c$oDy2KmCV(pK$D8*iEPsB*CaOLO1W;3?ogS_vQTehlpAn^45iO~^%<3yB z9)JND%5R&j&Ep>BpjUy3Ud*C6RWTCy1MYfNedq#3A_qD>e*Xh&GuoHa8&|;hsvGdN zP5^jYB@|BpcwspKh;Ni%2>>nFGy*^)7y+PwlmL9%DV*CC7`{YiB2;M8fb7ZvsbmC} zY8H9~kZ;G@y|-kobDF4wQ?et68}imYW%W8-08YoNMKL;CQnxN9&=JN0{Fe#&asa<* z63d^d&yvNryiHg(ydD3}B#`hN>Q)r$NM4#il5A99u?}Z$Tsoq2#&d{z?#HK!2I;S+ zKq~y}2jZiXEsBbz$Q4?O@>-xMuc48yAk)}jMuJj$ztk}8oOHcC6%U

=Mpn|m1FYN9>r~59`qUCIpHll#)HG& zn~t>vGj}Q#*tJW_5!!;7FEtqot#YY#4`^~^+*>&8b)lO>_l+JqkAu2urIkXPE1t>U zQ?esh6p3tJdDT~;z`cs1G_IW8zT8c2V(n<>JNOjL6Fbz^kUS;|;dOHLwaPF2cdl|R zbigGjIHqvGlOOh`EM6GuRhpy9DmaD>76=#Loe^p^Iy>E`788P&t^Svs4Pd@Dz$vi z6*SlH|CRdOn*MQ}=Jdg(OnFG2yQ3h-<|gmyLYj?)r=E=d3q0m4?8d-$%xbphRlIJ?7C=Bs4vHqe~RSzF5< zh3u;nvQ6S1gzi>qLUfnm#Lv&TYniWIW7O2v20PZH3i9;hSk00Z_~~GYhw9W%mEB7+ z_Y_%+sK=+uL_OaWnNQSPGgT()wM~(=ih7+>Wujgbvd0XtPru)xwMA=8%R9Gh+0woa z!(eWVPK+Za2c0b*&AutJdo}y=WvyDhHB)3-y*9{tsGm*RJy7l*tVYuEbRXtJzdE*c zgpmpvg%rAsb^&7xwfikyh9AC>za2+1>DK0jrV#@We=K)DX=Qd@kcv!9~gMWkmcsU>5>;7evO zwnxuj4gLnCTsh_ER7Pb}C>UP%}EJkKoFpXL2ihR?kzZ-Gk zpzDugGXFpm3K>yDfq@lZ(hU`6R?&{sTFffYM@3zE)L=6KyM4@N7+^(IN8nM!O6rQD znmw>(fr(jXltu{)vY#HJIa{v1*2GHo7mW)Gq2quAG!)3cA#up748g#+q#D11@9M(-V}Ab^G$Lmi=@Ujq+lRY4w?+x_z9sN@fYJ0jRxaXA**?C*kd z#y-uurS7-{ZuOECt3K1(vb=fzf?7@IH7p35B0)MSJ7SVVQ4Dm1WI)l)J~hevCAOIP z<5H);LrjvNOtLe7FcR(vtp~FHa3B_oZ1e|W>o-uyDYR&V%oR!D4WU>h+)Wd}7EGxT z2}_%>qh*4O9K2uJBn1<)(gN^1K!=gmfG7H7 z1#S$$KMiWJ;a?uWmxI29)CQjD>qyjB*-XafA`xE=8qoQ6&?j`B==XGfH|PnSC;D5R z9|!f}-ye3|rqLeoq?70lohNz*iR>H!ebBD)U7*Hm8S8?aXf+bmA$qsYw}bxcEZ_k7 zZcw`89{^7jzZ(etR?s7eZx29DanwUdZ-LK`5h=zTL85q)XbN$c9X!#mBGrN?x*N$2 zK0ofuj|nL@JdN})>JWVk=?U;e3mvc%JjHX*BMpE*0QxH=!sIQ`#g+I@f*ikfuysf$ z$WBoD&K&_yl)fFlh5AJ4yMlP4^i4oKQHtMhMSY@nq%RTw@$>%>0fYFy##ZQ=YCOgK zS_pon9RCr-IE3_V{P@&Dkz2HQQ+JnRgA|J+9BiCBzovGs10M&G_E30zIsj+#cvcXwW!!vlO4Z_|uk>MS*x+>R#8i*#QONc;nneEPQ)B*eP`f;`6#g!B`|7 z>5%6IBi*+L;@veH=FfE?>#waP8TvCV&g|zRIB~Zta)eoPWw*B&gz|; dcJ}PF@2cHZzw5-VkzMMp@m(KjeE!M@|G(r!JpBLw literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Cipher/_raw_cfb.pyd b/env/Lib/site-packages/Crypto/Cipher/_raw_cfb.pyd new file mode 100644 index 0000000000000000000000000000000000000000..719fe5a2a31b940b5827919835ee0d5a7daee15d GIT binary patch literal 13312 zcmeHN4|G)3nZJ|CBq3p#2pNbdGT=mFV`9iq42G;TCc!&6kq7}iDo%#X3rtOB#+f&e zuqdIEv<$DarMuc{YrBQ++1=A)Ewvtv*xe=uNTNg#Z3V40Ywh|%CGP60?u8Te} 
z{Py=gdt~Qes+@mm=HN%5ov(EceFAuDpmXR`wZ3!kq{{CadQXMjL+4cZRkgn5w}~MI zbjxpd>hQinjgG7I>Ok{m8sowhHPkWY|5_H?`pAN&RNXjRXv@o(aXBjl=L(J6I14ae z#XGTV4bNaKOJz+$)(HkF6ITXnLQ}0%3S3NUoIRDA?)fszhdMVg_5r~L&=_ap3NuE! z+rOBx%O{u@Bgar*sZLa{D`%`QoxY$;nJBc2;N{H5jiyX%*Vh3|y=<6OS;X%XeT@Cs z3>IXv87LP_#0mE))0#1!je0Pk4932Ml6E;6>n#gw4b?R^ppwSCCan=rC&`OMBhA27 zoiqU@#=X?!L)QA!hOJG20>Ghd(o(?S6n3@od)V7 zc$B;VES|Ks*a(={1IYQ)8SCziT`h0Wn%hvnRI6V^^{4VkXwR257jjl8yPuPf!<(I5 zO6&%?Fb9&P6FgqYL_3!T<@u<>poJdk@B#8UxAZ)hj2_AS%zVaN$_t9=1?W}P(O8C~ zmrHlr%ef@lE4j4Y&Y=sN>#qK%Xt&5maNk3ECcUhOGRmF1Ka@=0LjUgG9cHbw6J5%^ z{mNj%)Q$@I4lGmEj4fWBMU#$QCx@^yIhWRNfF|NaXO=)bmXU`69LJrzkIF^B&YigI z)%foW@;zuR^^fh-CUFx@V$TsS9ps7IY@UR(Fwp?}@SJt)RRcNQ(zx^yOw@fmW?5!^ zuZ*!B*^b`PGZ-RyqZ0qXu(gg$rpvK<4mIZv)Y{9*I%5Tj^`{7`(uo8(aD=YMz=hVN zm!UHGho36SWYu@ltIY5|ttzek0^J#XeIUKUrL)*1F_Zi~D8VBQNH217kO?P%{k@~##MIH>xklRn zCmlnI_3sf}x%8Z3{R$ApDyrn*foYkH_Lc1pt`u!1Uc!St6hB@0)Ewwcw!+?6eD+i1 zio;jK=Z!Fs{9{mDs)f@fOsBx$Ny6j3*nOsLL`$X@2_#IlrCLiTa1Pdh=?ZXY)B#f= zFf!$UX`znwU^g&+cQ0+Ox#01EOaqNP$(PLEEdf8_$+V@I1}>TJO)=5&Lo7o^5~5&T zm83`KnsuZ=Cznnt)-{)*yYzF4{|==Ndm!0pZ-NdLtBY!|*kwHKw^w2c9{Xl4q1Gb} zl3L1PK4ON6D0U3dwz4VKndqhr#FdQgKO^IL@46#pJKI_=oz_fXdJ|_Bf4XAd>!lCT zRIB=9B99A&7^S{YvCdW}RaeLpb@_^wfs%UpV!}_)RqA(+$GPQa=KH~povF}b{(i&QdD;&o_in9kw)_O* z(b%Q(8qf%v-`3cf6}@BG@;5>3H>d+r9|Zqu_!)x#1Dd>)tQ~0A)f=NuGv$azWXk~{ zW2e*l%j?0BSL3ELsXxV3o@k}BMzj~|8}vg8X4s>n@<9YaM3&j{j}5U))X>4DVJ;0Q z){icw^)}jXlRWkYedo4P_!jL>vWIt+`l@T)p$ZZ6q3rt+n`uF#zXVTyR+Da$pH!Pd zvHCiHqSK@Q0z}*YmM2WBDB=%2O?%B!+rP_n0|ojm2!ryyXr^nRKn`ofH0VnJI(BL| z#3D3kdk7=gI;%6$l}RUKo8liDwthu^4Mu<^i@RTq*)hvA*x~XV^u^L?*5`=MR%x*e zT6VNpLZ*920r5fQUCR#Be`rzSFaVJR;hIMQ4x1jr-Joh5Q+NtOa0cV1`T>r|zMlHq z8EaW)+6h;Swex24<3Mou-b29n?9OGBlgGS0d59n_%Up%AMo-BzmM2{WJb7y&Hy#6; z#go%JJ;~|M@h`#e>>5d_0$ipO4e9OpQo$Wsd9+wAOK z!gAr4S%4X;n-=zK`rds~9)M~c$DxUoexRnLmrI&&qh z2hQ>)obU}+crRchxvXNfKqe9aI%OaaNgGerja%-ALTc@t2K@or(`Zh_Euiqe>Ck*m zk{jh;s>k@l_G8c$-+7Q!wgC=3krUZ?a$Yut-()q#-LaX-wnza8feo~jcz+JBdOb3# z?Nr*InjO>~kG`)W9HKA9<@CMqFnG(PhA- 
zT&W%IwF&blKrIK}DzL=AMS>FMx4?KB;glujjc9e3MLW>q@@N#|4aa!j8#bPIjAK*eht(Q!Xp>=#PXTY=--C{u!9-T)$1Mj1SrdOuInB#HjchupYEJR(s`% z+Nu+=LRFG|Wcf(U95-)5turxi0{~gJn37=;nRNVMzL(;idh`Kt^e!_spb?G^VozL3 z(T#YLSBkpS``6U_F7^J3dOxk+A64)B)%#=W{c-jFpX$9A_x*-`jL|=ttMTq3WQ;C4 zy%6@egzRQeLB=fIj9g;{dH_Sluw2EE&N+Z!@@nge*-_53-qolk*v0kIe<5|`(kn=^ zA>GpTOIRJz%+mD)5PF8{`U!C`6ppec4-U1>9w}hsQn=7V4^kefLw&4j@GR@d-!j+L zvlR+Ppe-ilCCx7^q&;e+38Cw4_1$k;x~d@1E!~put~wf9>Xx!R()0DwibA*aD_TxI z#_XPpCO8;xP8M8SUozm9hUK$x3CECoU`0OUqx%-Lf18(#a^qQd)oCuh=(g0Hc9$G= z8;3p8A#Ox;k)Kri@c5@r%U5@!SF%o#j(L)qmE73Rt3KFj@>KOlMV`#8(5b`G7LHlO z-{^hU5;Dg7oB%VVz81^dgTQL?aSU02+>)Q-*uYr@cIXyciUl|6N537bP#3xd)M2Mq zwPO{C0Vl6|CbC!R<5h>ESMZW|E%Tjte9gQ<>8mhurGHm8R|eGD$f(;8%qKm0N`5cq zk_R@74k2KXH$s)fK=!HH19pIsi(PlQ?)2W#KarW~beiQglO_j*Gf<+poiRecxSj)@^*jU3DIY zKkq3St~VZW4AmQldHge&TOxC>P->-KPqHA*-Y-T)cQP|yxA*aA3%tx7AL)gybbIGs zZ10s9+k55b+j}KU{LEB)yOAfT_C8GZeo?pg+zah}m`;(G=_#V**KTP@{@4gFH*CpQ zwN>(&r8{MMsL~_B0^|e@WPi={UOX74n_i<+_FlW*wHD^Ctd~YLb0ZB^w|fnpuVn7) zfvDzQOdM^y;|b>;SlMq6!`!gDXm9tZ`(DJLYB*TtOE>%ydJa#TU&FI4&SeY2x%3Tt zCoJJYLbzq2tEU|}V#CAq@S8LjsqNmHG1-nHN_lY zcUQd+w|T$5b;8%;7cn=F7}A{~`G(&XTQPIapXumHG~7De2~!>S+ER~?S- zKoprf5mWA=86t{U=F{rgyDm#8ON%CXx_5!IA{9<<*4&F&6Y=C`&A*6O?nVen1(ciB z@N!W=`96gVIGBe53Uhs4Ksg+d>u`Krn&xFkl&&})@<`>Lk~5-3k1jCu=zVI; z@!$;-9&+IeRE0AD&VZOx>Du7kFzI(Xc|pWUdlb^+4&GOsbxX2G`iM&(s*ZRDdQu(n z_drN`Q$1d_biGV$$yJD5BxWP9Ww5b(=^aehlT=YPZu$D7Kn@fWg`C5gT#3i{TD*00 zC$i|(8IrXcF0C&_LPJjZCDialjg9yV)A%!qk5d$Ng{X^_D%SIP#KIwZ21pNl0Tc$H zZO7GMOvkE=wpl+NIEgAx$zh&|<{)rg#|LUupOe3gZXT%yPb7H0%X3RFD|5zrAPmcR zLJiYnKfOR#MB>a>io6(BDe~d2x0rl?s*HS2rz2_gMUSDIlXkp~OQ&|%J87;PM%&-T ztDiKsO>O~ak6MW=Bh_AOJa9JExEwMN5hSFgNa!i=@+8f7o>CO-#jvy%QKG&{&OvjM zRE`81lGGlKp`De;w|Xu2_ep)=Lm7X&zJMASyk1-MZ*+K0hq+Z6|0*5U>hLxlw(4+~ z4)^J>M~BbrP}X7Qa!uY`9i~*hT&Ej#DC+Rr)mn=m>M+&*yL$Z|9j5q$I{k_c-_zl2 zz5k;+U7^z}b$CGM@6%yQUh!qDlj2eeHWX=e_yU+}|9^!IE=~Tp4pZ{*E?~+<{M5Hf zGjJ!OU#9~F=Q~}MH0eLXmE+XrccJLQ#k!t!cwxQzq)^DEb3Ws1J(^Vjv4 
zn&C|~jUM{m>cs00^&70cEl6WKix^u9nBv*X%h?w2{&9kbgsoirrdt-EZ>V-U&=~B& z=)^56FK=$y1UaB{h^|?USC=&|YQv}UZYN&D6rNjcciU8++OBB|4_=FF^HiSNE^&TG`# zotVPY+Kqs>pT=p|zCWkFzn2LbP2b@gFs`m!^$YTY;1y2tFjL4No?)ULePCl0Q{N!j znqA0@SQP%LohF_+v4R{t1D9=#M%8)@=BGaT_i)yv{saJe!FO*vycSmzcn<>VybHc+ zQ;Ul)49PCB<}j-`jamKK%(^6tSt~9XId{4CUGo?4y^>3q>RZ(NAAmoUMjP<@DXSu* z;SzjC%w3Yha#v@CsV|=U*o$cQ4oVV!N!K9o+?kC1t4_}Xeg#UZp6K~$^@dCX%e)a^ zmF8x#xl1f;Ze;dIL3maZ$)|a*Mcb-bNFgDQpgNb;84U(Y7PDY27Ocfmkry^hGq7nJ zQJ<5^aw3^wV^c=CKJRjaIg^=}Ok?JXtcGbc-a>rv)a6HE%ui9qA)lZj14%6GU?|98 z1%8yqY*v6V3M%r$2CE6WJHV`l9+pLIRDEQz+={HQHXqDoiHTWe=7mWYG=I7XPRu3G z#PV*;9x?SA!x`m_EvU<3b&Yv!&U?`9OJ-*BPiK4PF?I;0^*Yp}Z#r$z`j4AN^#0Bq z=JccgOPOrO!RgG@n9KIy95{sXNUDDtZOApT+>KesprHnJ{B1dG+k07T&YKqWpMmks ztP_Jij&fS>pH3TQ<*-@)TsEt58k<#N88?sQg%_AImld0M9br+$h2SI}6@3xW8wv{%pBM-Q)%vIy3a7CeI^4m4=uL^l zC=>@$SJ`r4ux~lyK2O$aaI@oT2v5YH0|-5m?Bhy|O;m~}xh(!tp8sAgRtBF22JYlB+@K|f9y)7s!> zU(nww`0Lu6g)lW=5oi^K2!7dMS|f;mwAz+&lOWc_A`u}dHbg?r0z&!^OpQXMEfDmH zLW;${VRT1zA{1F6_-L*T5h03hY<1?XF(J~?AVgY1kv3nj8LNn484gTER%ECRM131Usf|5>jS*j@qsG_T8f76Pd2_(m z8rUYR@dWVN72;SoW?L(4jBVN^L{hW7xo-6>bsk4WnZLD_&SS5)#v2U_&4HFcvsY6d zv{eLtRP=krjhQKgIAedu+Jv^K0Bl{&>b18ty6fu}FDch#&SA}% zQ>d9v+LnkQFt&<$xB8+{A%gZx8RP{-sHKfvH;H=%b_4T9h0Wd;)nHzlULo@~hk`AE zP0+G8=!--`TfM%>rY(3^t@YFvoT?InTLO_#u#JqtmP~083JUGm?INj+5ptLPo3b!h4j)tt$4v8BZVfuNLIz zGqxCo^5G?bT`1d;u@d|)^0_y`e;Ck)T&x&;g0~_U>jF-{iTxJJ3BHCx^Be)ZyHLYh0W&W}-UB|tViama@Kzo713o$zyz6jo z1JZANDuENkdjb`|25=bpWi9xWmvp1}fv4wjl;4GryKD!a;0~0BsSRK%cO&?f1<(`t z1h=9526%ctMtRyBS2Feq_yo_QT#g(lJ@=xV>#72`5PTIpG8;mwp5|!JMmLTlD?g_1B^SgZg}1 z2sfKLW6Gh`l=dP%ei6tx>YADm3ji;?mfz6c)@s`#M50KisuwLTD_>+ovK{gVf}5%r zt!-RUTDi!Ej63M}wcWFj~DR771P-ZQd-j`J$z5 zf#ygk8fp}A#Dn@Ak$cvJ{pO-HYE$hg%kXu)8*Q4Zqx5B=G*Ti!YI?wvdL-kG^`XXei6sqwm<%*YtaMN(D9_5jl367N6!G8-71Hh14N z_F%!`d3y}f;d#v+oe^6o9NZZ8b=z8ffk05UZSdQ|(SWTpV5`4+m90D2=C3L#DYWaV z_Z<%2_2GSnp-g(`$=iocA@6#g4ZaWf$^aYuK$Nqgrv*KC@NEH2gC_*c5#{YK6b!1! 
zx4&S};fF(7-YD`bJ6k)bj!)`QQv+jdx8|{)`{uP|c*fZ|wxXPAv)OD=Y#OzB24Jbc zx1swQp2Jw4AWcHH4FobOo*dQ!R!t}aKAP7kO-r;fFkgc9kZ=oQCkZw&b^!HEJR!!& zhRH=tG;e}_8gd-vwW6VVK{dLPoj;AgO7{22pq)G$FKRONUGE1l@z~I+s&Jc6_A$0& z8c5J7cu|o3xZp-r8XFrV1AzczX#4<~$AxZGg)~Od$0n3eoi!N_4Z28QI1+9}R@kHk z2%QyQ)wj4q{euLf2DQs8H)-h|gkU;qg3BGbt={f(J#2{8eeXgLEq#yTJzz>8Lrq90!PJ|SuTDQvfZQo~ zQhFXpJYfyk2$XmM(xt_W^(CX{rPpcfHk4ngm0v*RuN0BeS|}ygan-0h9#49wk?plyxy8<53!8g=YA=+q^dY==lAXyeYha9O*N%4;({#x9CAXGn!Y+tZdsFIQ z+;p-ELf504!ZNm4-K~q!=;+1iAbOTAr2ZRViC9GdbcSvep#sNrOYUu%=%uJU*j#)q4)T_Xm_gj!f(Z5YdQ9HmkvMMK$x(Qvuf zd)KEj<0kx>2G8bEPAdP-l{YX+qNenBVFXDTP=3S7K_;94vR}ndGI&i+m=>k58k^$5AL7`d)Om_mK zp&T&n*0DX94UFHEq{(#`Ne<*1sOCw!QgYV<(Bo3BEkiVLrQ}N)A_NOy+cbg@75%Cu zJGxd~O9pgtV4o%@PvpxNv8j^#$DpO+3 zPyaKPViAgvl%}+ms z^=NEXdKL0mHov8jb8C`g`RT7CbH71UNHSRb>)>Zt{J*K`%jnt)zTRY%gw9BZwM>4x z6Uf-BS@P*Mprlvgr4vb>)>IzvqP<48m+KSsU0TdAM@Q2Kun1yhnHhV}5S=cT4z3*L z%7ALkf&Iv>)+&;{NuRl0w0z5U7mbH^lw?I;w+bb4DU4l)MVmS_`b*H#2Q}@c^uvM` zhSmG|V?96m10dS`w>)He0&}r%=;t)oEcL0|Ous}vy$Q=;`b%KbEl`#YX_*4p%MWzy zl^sxvr9qoRsKK_at{0+Ac{aK+_O4;`jp^qx2pGxAzN1k)S{We4XF?WTr&e#Ge7?xb zRj{(7-4ZlypG936RNt~}HPwLuzPKEKvILR68sIV0EW8cE;%LHND8U|#m+%8@kNtZZ zuVbuziD^4rE!x9dOCAJ*&G(B0j0d+bp;$cX-BW}Wq*IX4AbU)9mtebtyj$irhG3AHlFzA5fox;z+$Lcmr@DzEX^!}iQ}SF zFikFmnCpo&70^;hItj7SK4TQB`v$2Hi)9m!_fa*}5J7bBxsz%Ydqv-E)Xo&`xr!!% zy}SiGe3KR43)oC9t6DA4i9mpM8R$dM#uE+WmOEgOdON2|zYl(@&4qOfD7>#2mNzH3 zG5t%ijo)n_hOPMa17xywaOjDc$i@?M@@e@^)X};-b_Sv?G60sqwbYeZ%FLa|!lT+u zrTM9igP8G0OrBR4B5;=J{&(L;Ol8|_=X(v~%+V_w zg&DSUQkSHnYZTK1;EoNN2TZ$w7)gQYpMbefq-=6+Y|u7fx(!(jF$!)4wg+6p_2(X` zcOHqB3r+UpOOHoOVkH|<>Wa@<2S6iRNx`s;NIG_))JyA~*!qAtl1ofYV8YfxVwW3$NZx!#`#QT(ZKO)}05bvaTzariT#QQPv9>n{8Lke}ICSx_;H-w1MO}iJC zJ+2_S8B`H5D_2&ch}`lTh#_m%v(yd|*ewErZV0)Fc4iRQs?{P0s`V(N9-KFnQ-~y? 
z!P0vsO~1YoOYby+9JTbC#Dp|R@iu!%!WM_XCxD%bmrK~{DkVBwNy?WwAuWxh;wNw; zNx7<8N^C;1ABO!INq00l4@HX*T&3q=IcQX-d6WZ^@~CG)KX;}j%d#W|-Ticm>&=A< zPwb@2@)cU+-3hKL{ZgWA7B>!X=aJ3D;FFxsSYpqSYGr+;l=yO`+c?yq{I=2flE?Ts zcRnka8l6vCVm}6x$9YUAKLPkUw3_KPYy1k#-7W>e;P64N&Ww)I)9r5Bp>M$u{9nhv6=t`4c>J)+hQC=ICGBpL)ojFuJ-k(`x$1`qShwH@3yb z+}hcW7Qc77a!qLiE`5|xxurKHR+=v${5#m#cT}!u!2OY}flbE_X?e)tcJ8;_-tSTS zv0BtCzvc_}dz`88QAzP4-g_EKbg}vEJ3pP^XZ30JTtoI0#>BpHF18EU6N~sPU=7TW z8H{AsVDyW@*dKmo^lc0wML^gouD}J$7s?U#NLauzoafM}%=yT0rabBphEsAL4Zpzk zv2+}z;Y?hqO2&hKqUh%>b6xDy*F4`cw;tX8)GhzNA}=Xx%B93r7&00=p=I*BcaZ z4IH(d5J=raQkLE|l!YVqaAh5CUU2;?wjz)7h^224%9ID~JHd$Cz}|C^tyeze&bKUI zok?QG#}JiRzETK$-&!!yg|jPh#Sc~McX4CJm6JF^;QX;7XLN>ecbt)mpQmb-6mINr zY(W@>ZsQ(~*pbI-Jx($OJifv_N&$?dq&&)#bSXI`de>{G(~@L->lYM`Hzw>jyq<5& z+_!@j0(`+!BycAL++y?g&~4};xB3+1B;!jw_PQYo#x~eeZxB~B#dIZ6=`rG#c10;j zWZC!%+}Cb2OYyC^qI%psVG(L7E%emh)rdIbG^$r;!qW%9h-@Z2y;o$PP_4HEL0mx{ z({HO?H1wSO2^$B301SSUd!74+bty?{EN@gEBL_jpxN=xsjnGz#BZ7&qs77)j;qq|> zj)N;W-~JvQWWqDud-g&09EDzDM+kn;w3p!0n`48NMP>{3x|RwO&UzYnmfrj$qDd6GcoANx2eR-~ic&D^FAO zKSO63$l}Vbmg+0V5x~7V^Rvh)xAF$A-4%r3X*k0NTCFeRSccytB+xLr(@AR!O-lEA z_d0}zDGKYGsA}bDnk{QlcltaSpL{ALC6->mjmISA1Wznp@NeX&8UI8xNov6}BybW1 z8UIAUA0tiY!YS$w z(P1nfx1$f@@Hw?aYcCq9R(utOYDFjB`p``NUZx(;9<2#$dcYjv36{(+u< zK!*o(__Yk5u6MQ$7whm&z5Im?pRTa-Y_<*4gk_*>ftGJM4QA^9bHQPkR!_*S!Hixk zS5qFMXTDh|0q(*|z0HAyyAOJ5sque-$E>&Sbk(*+T6?Q?czU_Gx%vq4uTQAd+gGbY zmk#e=KGoiT>6Wcn)-2Jt6V+!kCceF}cL%7>YW&)C5nzU9udZh8pxr(}>k;2k*f73T zo%HR5{X&w1{aGdPR9CmQZ^WK}JT^;KwQMDRx>)8Wo~Eg^>xi~?3e6+CDYRz6w+%FORc}kP;M)n>xB)c%dlK?p@?kA+zxn2y+cuyZEQ#KUeuU*# zyT>i`4Njq5C-eVqar!-!6y4*;3(VUoz3v1eCph(ti1_`WCe9Smy`pZ>rO z_};~i*@UMBv^xQHnrIWz|Ih=|vguYcqkz@gmd~t<^O&_}`tcL9weK1p!GJ87jvrd! 
zTlD+`@b6^h4Y+>JtI27aj?V}R7n@n(%DfQCB35U=0pFWQ^dl}kgTM>VVC-9Zeg^Q* zA!W)bKR2t~fXl<&%kiu7S$XWN#TIr}c;@l4(2N$+Pwid}Ugr$Pegu64MO#+pG#M;; z%!0mH&=*ThQOHnWUyV_8a%Ssj(u#%d*rUI((9DMMg>yKbC zKR`MT{R9m;29|?CHdt;4(y85 zbur4x<5A8~&QQ)iQqHFE8$MQl9`swFe;V|cuyK5kajp%4%!GdEML9z``$#$FW){Eb zGwe=%o`=h36KvyxJ*(`DonK4sg_~#P4Ks4tjN2{4CCQ?uc{B3ZjJNaHw6+4sSy;>G z8G9J%29hIPvRUL+&^j`-m-89r7tP0yox1+h^Q2=!h84nIdFT%!Sm8Pn=OhcD39><{A$Ez>H${E5Z3oE3g~Su`B>2jr%3u+@*P>HDT;f4I9d;FJ9s68oCb6VWrl z@N&P8+S(NMMX3)AhNfJ zx@t>A_IEdTcKh8Cbihv-t1#4eMtmEP6UtITI}GYUbVSi$wfkXuOD_#!eZA>t+{E;$AW%e%Gw{nm!u<5NjsX?OCvw?N7AZuke zF)uJb3oskzgH3!2PUR66M(G83``Bjiv@#iZ6`=NfocvPA*RTu4x#xe9*VTLm{zMCu zmLk4FqVv#Vz+R+VaAF~N7vlMsLBAW&hFH83bb|CDLoaapFT^*H9t3^|Abl5c1USL( zlp&l1em7u=74atUQou)$jsqun42kM0p2^sJB*H5JeLCI-c&CmN{IQPT4|qh!3BIP| zg{e&UQp-*Jjs7sxHXrLgr)I_%}_$pwe<-+f})%dROf(*RDHv d-LtE9chl~*yHDWG$hgs|pg6T*i-f9lG(=3qE2no@Jr zjPQ13{@$_&rvn=l7jXC%*Mu-ZsS1V`s646IYQjnoGsW;mZkgMj3*i=cX8aKAp}8Ry z4iQ5ttmgw2Z&50=AdbF=+BkO@%tD{9g}S&j1U_!RYMY(!zCI7}u5&SP$vk#H4n*3b z5LV9aTeB*^Dum^Kjud)^|BU^7?1?Rl%V@Thg&a@NLF_nJ;TzZ!TQ-D|*}Yb1$A|@T z&Tl1(5TY(SyRW}*HDa9;oE!D~&f)61*Kb5&3niQ@)qVF3b&JMl{^PNSwXI#7Ic2jB zKDS`n&z=t9#erjTqoWOLUmhJBdvT!V#T~iP-3|NSffCebm*ob39Oa5nw(cIA{?vsb ztUOJ&+%wdx?@@hTSoE|@8RX} z$XU7JY0LH@{U9EwT(P}*er{{s`OhF8W;{4j5B?wua>csSnciA=`_opLMf^!9JL`Io z;jBCBu0w=I$n|-&y&g38!4Wi^_5tK_JJTu7T*()wz50FRw|1tZIGM^7r+qI@!q}F@ z3h^R2hU1#Q1LpyUGxHvHF7e_sm3^mfQNi=I9EnI*Z3{YCOJ{K>lPgJPw(eHjvFZ2GR(xOcbvO~(6+A!LWtUfG`%}sP zm9w>uVIEGoGFN2RKeP`VaH04#CojZ2(L#4}c5oR}!4P03I6rx;lZ*FjY$T#PTyGnnp zW#*eYix1pO!;<2oxngZ+aoTZc);9LoSl#EI!IOatJWzv&29IJ(u9(MDuXtl~4(B3j zd;gAmCUX5t(fc-D`#djvbX&4Fzf!F0hJVY*P1DSrg{5W1jpCHnp2nm)GtuC} z4A+L0Z~@K}PV?lQczULw9cUbzUW;UL?^a}RA#DC9WO3Gt-^M8` zedd>AV-@MolF}c;ZqndbYT)f?Iv2Lq$7vgYmfP|YuBy@ld@dI2CfdaLqvD#e>9;Jv z2^`DU4UW|gWXm(oPRy7;3(iMS#wAZ0zA}YJpyfbgTst;>2{c>7feEcoQ7!J^LwP+O zjcnWdSG-@(kMi}*2QOMuJXHKnuJ|)NNe1dlr|6J&7N5ZLGI0^$@n5n3<5;>s?$Kqr z;*aflP`vxxwxxT*+NcW7A(c54mDTQ*LWV!`SqFV8xN# 
z9L5h@Jlqu-|1x<8uJf*j+~~e2*NV8cqaoKin!5)tx!BO@bdc7{RJlz1C|)Alp5!F& z!-e|t9?-XvV#!zUz!GQjz#4Lc3;&e&CIb`3rhgCM97hu7{`n z9r*r$i!wJ{_XWf|%7}iY7qFDvV10Axc?)VV7hhYc{Vkpxc;P&?J6BAf!8?y!F;{<} zF1KJ;zOi-WU@r5I)ADFI@NaYNb?5HXftUS#r1U}aPAqN1E5v~lO3T3U)cu5|({o1- zOe}pI;sM|a;K+`d$J|h!L>ku6Hal&7^0-4gK7(5w2 zuX9`1^Yt#@+~gO@D||xXI@w>kA5T48f5#2JmKr!(mv654NUpdO@Bj08yvv)fEOzp} zL@y!h`4Wwc15S4Sd(7dr_iMO&K2bbB8)vz7ZDwO# z8&AOCuCbqIZmRn<3UDp{4S5O9u_n=8Ay^2Bi6L^Up-QIOo2y4%) zTZJoZ;QHL^X}2T58^C)AwGVDOk3Xvntk^yUFP>}9Yik-<-g)+YnXj}p=7!(Xl&jr? za6)c)YN2y@>VvouXkN15v3yhU;;EerM*Aig7uDB}b}ks{!_C-zGY>r7`gjg6lDtwH z2S3^z{(#YBT$dAoHM&fjcG&s8`N`5woZ8mMabBiuerjN1?mtcco2T^X;W(_Ou<$RC z2{%8PPj7yFU~1_l81S0@Q$T#cyvBy>{uO0A2G95$t9R(V4LsL}OB8r(!=4Nx-se6$kIlC1qKKQ3rMazJI1Lw~ zWZU*na#r1|ZW-b_ci{1f9e6)kiFcW$({c3zm-CVxoBmVu#G4WC{?HFM4!PlugPGfL zLfV^iob(SlDQ3&!bqgV`zNt9#^~1T^(%1A>@~P%MI9It%_w$rpjSIT`z8vL-Pk!Nk z+86Ev95@E=_&fo435<9y26xow7Chaz-=3*_e%j-pXFR-LHSd?Jyw7n(hwBV(D-AlHnn>(gFh(77!D>(D@TbDOa9ax$hoYwyYPS_a-FC5D(9GQ>``I!q>PN)g_ zQy2CUPOQ88m0#d}J^Dyc0UMGy40{KD8$|`hzGpWls)A74h~zI&H`KI|i;O z*8K^p2Y1!)tlNZu7x~t@n-I0Vv?I#TAKVq;XBtpE#1PgZ+K#F{?WgZ;U$A$eNu8Sa zU9fLp+TgT{pk}w8yb!=gb|!Dg^0=EDyuU&B*){4S%elimwk} z_DX3ZGCEzYWB3sVr!P6>*Z9C5sQX%^`YB`6*Uj;2)bf>)Y`jI>ZZ;}QFP~}!e1F5a zz;n9I-q^>cAC10v*mPSlz8EaK%x-aU=Y}`sy`9AubtaF+dBY2NX7gkPIHf5DI6ZhP z{!mlrf=4rdgv%G1_074?vwxl&UKeFfX{QOb{{UslaN}Fl;V)UXS?MV}%9(BL&5MU> z35$jnr-*Gsi))D4p~cEyG_*KP{;r|Lb;ORL#S@6_LyIToYQLX5`}N#{*E53)c@u_n zo8!Z_H23n;Cyfm5tL3{pZg}6jBfily_J))BtIua9KA(84v2*lGo|bXv^P9?4ozL=V z{A;Q_x&K)K&L>|ae$(?={ATi8ejGor@}&HB&Sl&G%(=ApeO-y8I@~YmaB(I6jRHK& zKds|lgopX!bURJ}u8TJ8?RG^~!rDq$H)U5lE}0fwa^tS2CBwHB&`sxH9D| z2d#EJ&GqYPj$cnLem%Xzucx>B^>nUZPxE*^#W#bv9`qFWD(281 z8oWBLZse=uU;#&aFP;NRJj$u1r6ZtCVT;cYJN$Hh&EX79 z9fRjJ{lSFoeA(QwGt~s(yTu!~M16gwp0DbMudhDdYM#}7W;i%Dwmlq;o3tD8GjHzf zefZJ()dvx^;uD1}hw!v24c72}E3;)85TASDZW7tGw+&6s4PTlX8y$S5{zklt4)5br z@RTg}%~(?iL%5q;k{TNsJk)gK&+%4+Te}u7D>WSJ=dyFS<8Iz)a-P?Y!J(k8gJlPq 
z*^i7)dFSw!yZQ2Cvy>k5$}Rk?BDViavKaHnP5uk=nQdF1L_>bkhG)_+4*<_QKJB)A zClNoDh`)z;>+l|oFnl*5d_NIC9miXTzkv`_(ojlGVSE{nO#El~kP4i~CH@RJrhZ(O zJMP9}F~<{eFDQ>CVmji*ehV>P4I-3(JCXioBIY_&jyn9nhT>A6roY2Z789#1VW*Ud zrCLyX^XYhWIp5G0j#w_w6rYHvwdupvA+3ywQ-`#ChSAU#uASkTLt8jM!`~U&!euc$ zV`vK(&aioC3m0Bmd2$FZ@8w%mU9LFH%PiOWaA|9aKlI$s%aM2R<2OBD`M!gjs2iJ` zE^E8I?F#;+)W9qFAl^$$_s&2YUBUBru_3QuURfQjFX1-+55L5x@xpF^-tU*JetpUC zN9uE`nA_F!o>TK$PF)k9k*hc&HS_>v#ma}ZSj~t zHt~JH;`kO}aqZymzZf!`x8mXDW5$awo7wj6_|9Q*e50_quIll+4^L{ItkZ|K=t+g} zw4p7};7FLong4-@IP=eXh%^73hdA>;^blwMc@LZS586Uz;z}Gj@;c7pbu+VczOUo# z$~xX!S;twGbuV)ENOG)?mL4L~g%Cz>j%(mS) z&-kEw$}T(%59pk?Wwz~zgHgWCBJp6H7*UYfb{|j=Uh*C4E%=Ekv#qnal@H0>%(i^< z9L$fGe~inU-+{5S`Ry3no6lupUT*NGsmzw^abuxl$eaTAo3Cg7uU^k(I1a4%0zOpp z^?4YlT)-L6=W@=|a^`C}7iu~0)N&SbIr!P1%eeu|u?MtuWJo8t-5#j+Av?1}c4CKg zUb90wuS0%53;$w|(ZE~cN9iYRV-Mgb;;3!#^~TICpZWzpoESMUbvu7pJ5aYBPanKN z>ZP8~^-n)HHr6@(sb)TNmlWTNyZHW@ONzhWvZVNI+lsdLU%jLBw?EY9tnsTr<^38T z)#i%d=`22`Ta;(mmp?=8MZn)#Z2mfe%(k`^?~!m};jkx_Z>ZX8Dj{yKm+(pG8Jz1y ze!Io4P$-2Pq#m@J8vGe?1Y`))%UFi2> z@AP}Ig?=w~0q(`d-Co%3gihhFxSg=i9PBd-en9yc&k|3|cXWPm`~6sae(>U`^YiPT zn96y1<8A2p`?h)!e94%{Q4KlAzTnV+TZLlz=7chxcI@>hqZW8l#dUO zwGQjQ>iF{NjEc z>`!!zcrc$s(Dw4kPw`TgKW;m&KfK8B!x8)x9RI#3F1+bqCIh|A_EN=^V@PMM9s^ z)|XC1#?5?PvdT=yFZlz->-;SXe#baJe%ePuua8gK=Suna;|J0F z@5QI5!B0>5hm^rr>+=oz$s_TT3s1z3QV-Ts{X@@H$i>SmKlxh97p6W{t1PJMeyx*_ zWP9;F>CWME|M=0dvG(E#_@S$<_)rc%eU0vKYAfD1T59MhK8TBTYpuP?FkH6me{IE=Xz1uT|&xz#q&Ig~@ zJFj!#CwSc#FPRsgtK@3@T(u-t^;>%u@)?-8BoBA4ikDo#Q&oJz*D)VYNIsCmH)a35 zUm$R{a)bYd8+rY<5bt;m_BwtCT-jsMHRpQ$CmGmLe74E)*>0L-Y8a~1oQoh1(or2eGz`N6YPpPl` z81L^-n|^$o-h=CI$FFYmGy3M{0y3TW1a5dn^VfKrqo2<^ip$cu;rG^e6sL3v?!#Ne z86z27U!$dT+opX1F$$yW6)e zKE$Wrrf0&zT6|8DJ|kR+hEwpP*i;zs3%7Uwf8UP^09z-yhWL9>0?eGRl#E@STLHoePq#4`!tlI zjMaxLeLPv3I%4S)$iU;6stOA`;& z*pk5mc-{FV{wk}Z`1F$E(`#>AJNSBPVCubiE{3+VpQwHOKzi|T<4MDdQk}z%`~?~> zyW3kE@Q%?roZiC3yS84C#@;yi>WqH;a;}D;ZN3ThPuwHO4SZ^onMto4|Of5O4SW@T~L*pFx0iMDm8Ja>z!4p`k}51t5TDO 
zy0TTN$wOWEwZXWhP8sT2RF#@K)U~)OHEpP?y()FoP*+D)Dl^peuBueSP}fCOsiTLw z@EO{;MI1BKwWKO_>`>RmRjJ0Iu6I|ZrVn*>R;7*`>guXW9Y56dyH%;Cp{`4+QYQ>` zEv-tOIMnr?s?gqU9uS-MML29UL`QV}XRhPzPRViE= zmsh25XIM{$PH@W zw82S7Zh&*}CTDQQksH*#X@irG++f0+Hh9aC8{kix-sBKYIdX&gH*F9dxxu72ZE)(5 z8%%!F2B#gV!QjYLJQoIEY0CUj37^5YjqS*d?Pwc(ts(PAS03Vr?v2e~cjX&Yn-S#fK+n%yVH`^n+*&h9o?zKmCul>x0 zQy$Z;_O7#|Sv?v%B_y<2JivA2{x^yAFcmCi@cHWOw!Q2K(ZN?NKucXhU6 zn%(c+f9FHx-)vM~zwq}@4V_yXn>+DoWPMlbQ=RyIVN>VXd%7}q)K7=NjDb61w1xL>{h&^NI7L^>)xkIZcncT@x`zUYa#xJ7^MmMY@6 zxA@&6|L(7Ic-obZ;1Mf!^cF9}zjNtYSsHrWIduO_`GBK1*>avJjXHTc#Y%rj<-7A<#@m2cO2t``Gxbwl4a&O zUc5i9Fucn1OC4kNM(4LW-sSk9<28=YJHGDN;~4kJ%?Nw~g%}UE8*k_sk8%C~EM^Z{ zzkQC?eei+(xGkjlhkV>pA&od4u%{7wP66|*Lo$s5*3iHjaQ<6LYS@wHE}b6L(#Bf) zpqYnrV8Z~L<`rcRHIN1qJgQ+|_TYeTTL*LU%Hs239@elwH5JRpBKBjRA>&jkDuhi@ z&|W~UKpqOH!X6>P4LRx&-nai!bdxV|s>akO=!H%<90A#U8*#E!FGS-ABT~lw9 zvOVLPdNizOKEh;dt{Kyu6&p|W(KyxFn9J|Vk zybQ?Oq-w|-=xD%x?8Rp4r`}e}p7g1Qbk!+GA2(Q8%y|h~2isvof@?rZ3C^dra9+-- zTCcIS`kLHp>ryKX1q;1l5uGdjn=w?*?$!t$@>-?tY&iQ*cvJ!H}_bmp+;p3t%S1cs z?ziJ7m*4DGML)RwMB}PJ-u&K>+;qw8rsF3vBb1+lxcgN}-Pcc57W%E$Ir^)W>-1Y| z)9D9KSe#oD>o6`4>fEpSSz>-5fv&rsGF71JS!mq+I)3Wz_xS;~ zKHBA$pWWye7K93!V}I8S#VIonbZ#}s7-s=>_uKqh5Ovudoy*U`xcNDObJw+gZva46 zIt$qkXx#lizg2QHKUYr&w}PQMfYumiJF1x9&~^C96T5TMIlnXvj?P^-KPBo!>iTvx zUP(HaAA|bq1hNFXbgIT(cR!C`c?6E++LV!>8SkL=v(hp^)RVZb`OWDW=@x4=?tYse zdH3`DlW_r#&gG@cxcQlM?z;QgfPSEKsWyvo_xt=PG{4W!>3tROvzfXHjnmIO=NED| za;%SyQ`ArlzokseA@|&cTadxEcejUHKtZGIW)N11dM z8aF=$ard**xE~)EU>hpA*?kAlc$xALDSN_ie=4%+_3j6%zEbYGC+f|guuBJ5ib#K= zo$KaD-gIxktm@qW?z$CVI@jIr^Xrg&PWV||-i^zTL4Km^nm@Tb_9r*p=Eq3NtoykT z<6NJqj?SfHaNYcDYr6T_P+T$y&^a^YuDc%z)7@{!Z*DD_eyfFDYc|f!;loNxv04 z8dpj^oio-hPMI9&+;n7&vw*th*Tl}H%MOvqdYY+oFiwTs{Ay&}bjsvF)w9sJ>+VN^ z>1s`bbh_!_rk_N}jMp1yJF4&-x(+{iVs~yjM&l?@jr+M$GbNMlt zAF@n9=~QXKXx#lge&ua`9Y44>Ipk-?JIL>5b);V%RAKX*(;d<+)@WQUz%@V7E3|$P zn~})+O0~_;V$Mf2-TX|6(?|kz&Qi@{-2EtU?z;IoJ-c(WpB`;CX8os0*Xh?}>`vKq zPr+rnb%*SJ*7`2LR;HZ%v{NTir{dgo)iUWii^yz#gysjzZ<0!kyB~$l&5w1EYtz}2 
zU)^99LZ_Mih0f_$QXG*u(7EXtjjL31I(J?3Ya;sDhB~XA%MYUdMAK2CI``8^4wTM9 zrq(0i+(W7F&efIYV;S zMZqK2BPGC1H(9HapM}^^<+>GMI`cD-ZhpqtA`b8q%e!$>=Hw@07ia&7kvPyfBIR8t zC3nBgPcB_5bRzvghC<_{yWi&rNQX#0 z;)r@8c0=PzG3Pop+oe+`2RcV2&i=@QIy!e<^Gl`9D(V!S%O8;{Z}Wq@?taSDf%3~B zZhi{V-LFdO=7$JbqkkL=W<0WwXF;rCK+~x;D5tkmgDI-4=F(LdweQR*eEIIv(G$P~NvKWoKKhpfj zBG8xlp$2jy>wy>zja!CE=dQb-M%DwROBL7r5~RDI3FFe?4yYE?8&V~QPQ>o$+;z@R zjU|T8O~+`Qn(fYAH$N2VMCw54MB|K*!k2S|;-*6u2TC_dDdX;k$GQ0O#~5`EBUjb@>qo(}B{NVLiT#yB{g%uA85B))T1%r4x-aV&41|otq9>9H@F0M%>(w zz_|ORP`CNvE^m=V1y2-=mnj-^D-;C4h(Un^wgRRxM1-w?U*8D}9Kj2&BQP%Ai0snA zl_ExgL{#Wp3FJ*TS*w!nh62#I6%>$gt^~OBV4HGL&}59Y8)rx^KM}h){RKwYgppyv zx$C6V-H&;lODDmmWMo)y?m8)T^8=Y6H=T?e7#SiGcb$~n{S1strwFPsG6WHaPOR+C zkx6FQV&Jl^TKk}fC&Ry61QmM0wIz{L57f6-2`N3UxKV|B` z$PkgZ`6-CIAChzF5J5GD$gtp?zkbJt!O^+vn4kH??%Z^Y#!;Xe%x~zr`I(|lq^@sA z=|VWd1lN9SZr(yvG(GR`fF(YX5y znjcvN`Z7P%Ku(MdO-0r?6y?qDA?n>vBMC4v#3*il3gYf(rE%$SLq`p^$dD>2bRu>~ z=dN>pYAi8yZaPNe)NFU|y7{3IA9k>hOV2RcIw2)P&&B$Ag*iu)<|{kr;!{O z8N$NN{S?IAFU1lOe`K4IaAJEm)~cDh^%zW zaAbds?gu(Izs>KfvnuKkr4x;lGH-rwNN&1h5TNQ=$a!s6VLnx7#H3{7W3Tz;zLMCvXIjZdjkNn z(pkuUK;!O5!ny0_=j!R;mNird(3EkOQ-$Bqb@<5>yK~byzcdVv&RsV@CF(@#`gSy4 zNjjGwgZk?PvIM(ys>WS+KaXE|1dim|l#!nq@1XUw(lS8Qlen(=&FLBG7Hc%_ew!b8 z_XDXzRez#$dFe84ekPr}?tU8C2Pj>t&0^gBK0gZ0@AGqdUj_VZrfx#x^fS--g`ABX z>!YSn=-hOS##v*#bJxvJJ9Q#;Ryvp8G8}Dw6U9xZkv^dOCMji{9jUut$1iT{v-#PD zesxnZT*l?+;JP-CY^=G z%}+tx{j4*hz^bZ@|{>fHeDx)oqL*WK^)>yUg-_*q=ujmwY0{xTcry8Q*WswpC?dYhjk=f~)N zo1fhLHovLfl=Tp$6OEHHZ}WRYa?>SSSJktS^+4n9N5Z-5zJ7L>h3mIk=kTl4b^6U} zI{n}Yi*rkAeugYCG@S`?`Kgi{WS>gr{>{N&|bIvvabottj+b0XK>Z}V#bte4%>0Evc z>aP>X6715c8h73OJbvX7IFf5qMt)|zgVxVV%K%YN;=1NHr)Q*FtkJmpZGPn452OxN z{fW-yrOUYanRM>D`)On!pmeD=i*fh+{3tZP&(G<774Wl}x(VZdoMJlX*JPYcCUH;k z%s4k4qj3nV$L`#9t{%>4&j#!Rl+Hrq=BFU;ejPtc%+Kjb2=J?$ zbNXp^U49YM<>zds166PHYc%eDv~sS=*@K*R#zg1R=`wD9HgxW~`)Q;fD4m7I%}+tx z{j42Gh&cm_{!5^IzA9m&;vue}nlGcIn_s5$X4T zLL+qUy8Fd!RnpynS>};={KwC^n)iX z&Mm3=8M468bSA{*r%Fzw?xN5*73OVzph}3+DN_YHHytVCETHawohfl!KkahMZ^g!` 
ztC#8Wlb3htbT9{WZo19SiClNT&94QpUUo<4@^dh5eoo-rbsfJq03a)!h3p43?tUbk zyKa82o(^vMmuqCk?O)re9aZ?1XJ>K$*LafDLpoLJ7>$F7yI(7#j`>}p&JySHGiBUF zCY`(Pei~VVT{>0c=BFU;exDy;>x11K@}p8rh<E>rjoJJC$bCzlrDirI+K_Hi^e579=-hSssj*lAOI*~dnoy%_-j$FSGgP}N$Hf=Ou1iYM%>nC^Ro;6>ZW42 zjLXl#b#I0u>2fgVeny(Z{cP!TTD|*`Bd47@(YYmGUXv+%X%Vfx-+_@XWb7pUM_dt{aN!T?9#!NBGQjG7!92( zfxPKr(1Ft30PeaKU^>@X$$IlM#+Ky(Ke4!jrF$Gpy^ zlVDSl{w!-9jgvBOejpR%<|iWusvd%K*GZ|nUzOt0DMHrRU49W2= zJ?jK$9D-7upLJOdLUKgd44oq-F25-p%@1^)$8Rh+IyW7XaZu^*_xSd^{bDf&)(kYVzog)%wf8;?Oox862rBY`Vb&AgA&yp%{^MkwYe#+E=^2;D@ehSjv zuS)9XhX`3?{EzC@fm!{-c8b#BH*_6-^2F}kbd1JPpc?mMUgy#&krP?3Z%5;mq;vT( zm>;rCK+~x;D5tkmgDI-4=F(LdweQR*eEIIv(G$P~NvKWoKKdbqXMW8S9 zLk;9a)&nsb8n+CS&RusujjRVsmnyFLB}jKa6UL>(9Z)T3Fr-QjorvAhx$B&t8cPhF zn~u>qHQSxLZhk1ziPVA8iN+Zrg)ip_#Z8AS4wP<^QpVj6k8|^9ef_Yb7)A0EsS8mL zjw`3eAb!+0H^kfm@a*lrIue#_kS$CxgqA#fN~U{Vc}j3LlD{$?ig~TEbDF3hN}la~8@^P8+#4 zI+jXzlvQRnfaH61tYv8itBDuMHnBD? zS5@m(KD{wp0%FwKwCt@FiCdCOV!a`Nfw&dX;(lgX=v+i8_nT-s%bdq^6O(gF=3H?P z@6EhAIw!e)E#56HJYYeRV`_piakVbSbKTn zy##k|$v95xpC-B}Jz0dfoTJ})xe>YV%#~yI^9l4%A)TWHS&j@0`OhB}IqIK7T0$-4 zcdq`dCN}~=g>$8hXbKie(rAoa4HTGHRp7+9mz7D;9CAT4n~Gtu`gqOdwWx}fthcY~ z*e6nNR;U+ixd2y`+;$o;+q zn?N(T)BV)>%>_8RMGLY?sIs4-9O_arNmrLj6V~fn?cJ|VmbZ3RZ}YQ(`#G^~$cTtz z5Le17%*qmq)R`AE6m_olNK5xR)-!Kr_p^fE35h%|wGOV!Own~URH-b?CQGc!k{T(- zMGWa@MqBxCY%bCO-xGIXc<|*3}yM;@DmDTStW6*>$yG zCu;<6xv?iT538+>>-5p&D8I}sWY+fTn2WImxULR}nRkt7v%Im^t88ZX*b=PPrlnsi z@;1gfucbMlq*CKHx3Vn9X8mk3j4b0E^jIVDeVoaHG> z=Mc{!w|kUjH%;ax<9kvyyH%+teM%Zi(L@p|b3!PuMYFI{9W_?RGHO1nA=j6Bn|EBU zs#bn_d{=E8^AI_q4eY2e?rO|+wvcMb7N!_NOCCNY)4i@dB{&<&Uzv`@Ok*=7Pf%tp z;V4jrb&}sX3*{%Lja-{?>C98m;3dwJhu(_(c7R&ULialblouzKl|-7krvCIqN5UXaboEOl0CRuzkT*M#bRulAWu zw^_6_=DoQs)my5+OWsta%v)!SmJzr4IjWmil_grr)$)_aNk?)RIS9p^tK9RPfpx4* zw2kfNH`%MKI=0DAoqmpvHnVk=!tR>iP=(|wq|@?BVIU`9C^z<`tFC(;OQk!?Dl;2E z^1V9NvNVI$#EWE`SR0qCs`V$vGu+uDFNyW?mhg6XG?N*Q93FPr5lObE-Kg^X%ul(uaxFg9A6}jxpF8 zO;fI_mNZVq9wIlay}a>v2;8|P<2YG^q;yB}SR;yZVA{DQned#760ueL$p083H^Z2z 
z8MA0A-I3fALaI#3FUfVUv;UN$8rk|(EQ5KI^lwZUveZnetb4EF)FTp(a*q4bAVG`d8KZ#O8O8oeM2M%k_b(LLz4}X2>a&a3oJw zu-+R8<)^{xYfWPH(pBbu zn01cTL_}P(nt(zDW@QOQ)-zvLHIt@cwaw_d*Rh^?Gb3c49r&G)$m3G!R>)Obv7;Jz ziWY{+5|@;eM3G5HTL6eQXB2%>6Xakfo{k;jc1p)~)?1aKHF7@%`Zzk0GlSin?CPAT zLUI+-$&7I#IYE9g%aW5~G~MfzvB7laWo98#zE{U=rySd|-VwyUs&nk)HC~}Q<=j(_ z=~Y_}v>CS~DLWVfOm`HppRDe8jza%-zq-T@!?=y+uu05g4Q(#(%~Z0dwX3YnWQ|%g ztGO^{IJ#nKXHp<&IxA#KTqUt)t>l%?i1iNfa?3f_;#$%;6?=%>aO{3IcF6ZcXUT#G0J9Fij{d@xbTS(_9L6#!}L;YJw=hBmtAR)g>`EMrOZ-pet zS)QVF4)GjvyGL1e(_~&Uz9&_)Ta|j!r=+12O(dZ*Cxr4^Gz%-$QDb#1qvo?3a($_{ zdB^3dYUQWLch$x*C$o*+NW_T>$Su&JF;C8KH@Ir`Y|C~;3#^_a6Ve^6K&@hn6PqG# z6?fA5IklRw9~)IH3|$hsq@*N@OghR>qRp9e7MUQ2j))2G5Vzw2#bpXW#ffo%V%0IB zv}0VruGYvG$L?xud4%N#HoxoMP=0T0)5;CiCX$~5Gd2hnqcfZPpqa=uqvy38OPbC$ z&dq5RKAT#xK;J5KUDMOZ=`{&eCWNs}?l#knGV6tgqUMCv+d5Z&_9bUIb#2C_Gp_=7 zv&1>kM2$8e)NH#a&B6iY8}1w`Dr6?|tLCkq7+9m!*jD9QmF zHGF&0P&p>Cmt#<`M4Dg5i7`_#dBHP|^7h@|#reIleeF0-_T#l;ONcTGBs$6Pyg+%NGj=}=a0z-0qaT6ls6x@UemDflDCf$)7wb+yeF`2x;ED#V=W-Z|;PzBOJ z)+Y5H3lYK2O<(DViV&KXOSwCIFuiR_0k>512fv3Iaj&ogDRi{y@QY( zAL zm2jj4L==Aune?nM9#|hM=m`oY(efOTv2UQ5a}w;91foiuh%e=p7IPh7D#7ZBB-qUF z9I1d^bpUSSL?eL)Zd6DJLIn&hphbBbXb%b1{ay`CGHNYDPQP;^IW~;=#(+P@@beO$0-7MjR~@kA_kZ2H#b5n3&b9(FVk~4+iolb1JYJhVhP2TW}BrVpJZDkxN$q9f;9W#;qaCqySVbR7B2zIz#6~hRBq6Wc0;c{KeP` zk%=rzt}kG$jmuH>8tGoATy>1uu|+;mRY=56;3$lM78sJ_i<=N3r{E?;sJuoJFzJru zsKur%h{@y;vp_&hnYDzYKov*>S)VK*XP#*y0Gn~5F(#y12}epmMDe$fNzV%7f%UP1 zo}ge7Ezc1d`v!_RC&6w>AgaWP_)=bJG1md660Dv`g3bKSkqX#V2jC`7G!kgwMumhR zRKU;zT9mhe_K;BB@72&Gqt-Iy^gAb#W5bAV3`jXK)EJm_9f(Mrh)s+`oJFck;!u7N z)=PIJ56ozD=3M2T52}C;^bSIDluIQ#6(@~@5DP%19peIawMG#lW(_$a1{lJNjBz5i zK)@hGj#QjT9t5s48ZKp;|nfEsO^uw zd+%%C|A)0THDIdoUme_W=yN~(-kc{x!OB8ymHxe(j+ybf)a55+T~Al(vp=@1;gb3D zeiXT0SEaxEiJ`Iedr!V3{)dSZs`Tid7q0!M*BUR6|KY;KD*du6{-S&%Qu;k_<8g7DggvQG`>Qkm;&XfZRx-zJGWMkYWbH{cANqf;njD{ixGp|_ z@p+ROLp)FXtCxZc@p`(FtjFgqK94+q++saGZ}EBL z`QsMr@qF=ljO$}PKA!kI^89g&_4s_m=aJ`+Tdc?DEk2L)TV_U04fWx?IcJ@E!qJmb 
z=g)oXX(t{txn@D@*{8qr?X!R9Eyo=-Vd1&6&NzAcw7LuCozpy{accUcW2eNYEkuO| ze7>bG#+a!gL>QycP>7I63}g*<#8z3UL7uhnLa|Me_kv%wrm>chda1u0tmS;6+{fo* zE%W$_5TTB1K`pid2d<5yd&#{=cx$L2$=d0DR>nvXGSSj+je2Cgj& z!u~{ZY*>eS(NfWr$NcIM*P>2yM5&teg!;)-kIEcQK5O$z=X`O_ zytW3fjpxx?s4tS2s@CLlXzl7NMH<)P>-zBLPhCl28ulz^D|{dOE@ncQZ|gGhTO8{T zhQi@m*T2W|O^$Dv@^Gd%6#cJHH2C@=$C-{TFO5I0ixw~MtPw^SSil{DhgTX=z!#_Sz6y!1?IU zUVQ2D<|`Msv1b1G+*QnF$K{r&E;l}>x~1cCOI3H}_?+rij>|1q-F4%0s_R9rVHoS{ z<1yH^#cRxX^G84W(KXlLFv7?_KaRfq>gFYF>bGlL?ke@$lgur*y1nCaR<{qiyYZ?& z`+A%Aw&y~)C4hGupYcPT-uT?kg%s8!*T@<>->u9QkV`xUcoahzdOn1Qc??#^hcSeM zILCn5);?sx>+xnf`& zDJ+oB{8SqM1$g45RG4%PaB@0Ko;NK_?win?s!xadzI1Qx%9@tcgqkp6ZcWS4)9b_Z z{G>2_&BQQ$-h?oH?$P^>dyBmYU3qK>JF)Yc+ zR1IDn@jOd4)`!M5*q2WXjp)-jx1l#Ry$-N?5 z*0fl!$>DDNm$<%%{eJdh|K*d!e~R25^*24qlb+P1H}=G$!Yu4f&sKVpUx>Y#IUHX# zPkQ3(Ds@y%IBM?XmgM<5VPtmQAslK5;e_+2gtPL^HD|3kz2>ZWr`4R5 zdP_~?!DIF{cufg4$sQI@wDa%RaCy##387)lq|ku#-Y|FSzEd;lkjYOBb$Bdqds_&v zVlVkIB>A}hyat}5uQp6xgL67B6;3#%4%bp+n3g&wOz5?>7jPa}%Y!&q&w{^< zeew_K;+y%3zEvMtv8KC!cJB>qLVL%u75({Defbp|d%OEq<=1WO@!ZDVL~7{`OM2Gj zSHxCy;%i&H{o4zOaNYx6+v4q=CDb4{Gm)#`AHrS8WfQrX$c@l@m~F`GKIOR0@j=Ju z9n+j2Ta)8CjycC`9Ito0)$uOJ`yIdIXx&2ih4V%n3%7ZW7h}!I?JCbNb&Rzeo!{zs zm*azu*El}!__||{W8CMP=}YtL7q9Bg5A<~}-q^!Cf$&}CIyY|o;6U%Y26|TWf$QkW z_uYU;oXX|v`tk#-)?e21k##+5aE9wH>$z@K&zkk!YdWr9-QCObcdc8W@9qoXrn+U_ z`ClnVH5YZ~7Z3FHb@$|#_HA636cR?_J%ixYnh8 z-TknIOVjTe=-)pTT3)&9R;^$6(e7oP z>-zIKv9Kg)hq1e z;cT6Oe1Cq;iu?_|-7D7htlh|`Qoe8F4J+28c?j)ZH|6|4HL{cs4a=Cp&1rS6^&uj|{`vw;slm^ZFsV^8<>csA#G%0f6T ztmwYJd-XuRd&M~6ILybo5LWfBo3){T7A~b(tNZe^uIpaaJ8S)%S#xIjaaHMkcx>^o zBuf_XtgPP98(-1?d*CBiM~|=l#@N{0&ptit&(97ae(mM!F5ZE|+af=XLx#-bY6z_y z{_pm`{TbkYv1|_Zef#hX0$zu`@fpMOj>h{r@)rOv!Tw{!OMwTlKZp3oz~94r_Ad~> z5BLc7dU*E$BitLap7{305N04hANViWS0X0<1ox<4Mtl%B>p1j8+yeYA_WzEUIEI~l z&N)7Wi?B1!0XKQP8MxhJ;v*jK0Uq+0IIYRN4ZvRPGjR_{{EX*`U-p>zOYA%k0r$g2 z?2K;#HshW+z)Q?wXP)>Wk2eD!JqhOwHG6wxc`w^|8Xhau5Z{Wu6ESf<_V*#q0wm!ban2F*{`%;4yB->W8?oPrd;$0v 
z_94UvfiGge?gyBo!+x&oMa-Y?_;cHpc-)EnnT;`#KX)-E^5>`TA|~F2J%acD|62<0 zzzKgpUwYxyh-eW3$Nd>KKgKXUq5cg7M?k0cFUPjchBmL zYu5E#yYS4*mcMJ({4*omo%gI+wH|NH3(vfvyZ_7!-#Kma1*`h|yEk03{)Pw*dioch zIndX0e*fz0x;L!qpS5A#>b{Nr8`tJ%t=_od{8jxMW`B6jnGtTg)~&^z%H_$~*y?R@ zFOb8m@jrBGsUGi8e`mv;O?O6j&b+ht&g<{Q8QEj`Pk!O#p}M<`^Epz KXX^jRga04Fm?=vD literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Cipher/_raw_des3.pyd b/env/Lib/site-packages/Crypto/Cipher/_raw_des3.pyd new file mode 100644 index 0000000000000000000000000000000000000000..f3d0546abfb242096f1f549207192ea91a028ace GIT binary patch literal 57344 zcmeHw3w&JldF6kkkuAR%0X7&gZ~-}(0*-7MVlWOPTY@o>ii6Fg1Y#Ro1}Afi)G?&HkubqFuNX*(2}v-I!Z;?4Lw0$lXwNy{cW17~ z#$>nYue;m5(*1wmIp;gy|9|h?nYkm$&~fFhp*Dn&#y&CQf;ccH+H< zg>Q~~5AZeLjU(tgAjf3NPPUrMig@rUA5FTJ4{UV2k; z>`ScwNOs~&zfw$leCtKSV}vhRv+_F5@lHpyxGjWLpBfWB{?(I~$2I%I$o3dT4G)h1VHF zDs18d6>m{0G$D_E62i2YJ4nnzpR0wsxHtqpZtt|s%y(X&hj`ulFmTB{c0Uee+9E7( zX7{QU`4u62@lQaZXZZKn&%>VBvbc<9T3N{P1ntL;a}}P%p4hS>49)DaLOVt*kaO-% z6d^@jc4l`^_e$hCCpb6i_v3@rb*|lj!X`>MSE~Ez0d)&TGynF`!`fD_&YZYO2cMfa z>6g!h@JjCyx#8i4)vpbYjJ(oY^UAi|@Xm(4ye3eeU6Sj2Im#8EYTh|A<>~W7$Q2(N znerf#ktyF7+;Q)?bQsF@-5-4@H!^((xy|j3^{~yo=V&x3-99OV{-ONXdw4lKa$0V1 z(vm%(AH)NdE4DVy&26qb_gUnF%=?DwfgeOcu2^>p>CJVwJ!6$wXUbh|$XWd?R z9Wpcm*X`NXdcfRAhtP1+N5JK_r&FA{!WSpKiK_#vOh+-9$`vR5Bqm{O$zp|gksQHs z&E1CcfWw(_4?CB5af6+)?O{L68FrXopQyvE$00Qpcj!1jis?(;tOq#*tG{FqavbLj zKhEd4zGB@x(wpnPiUn-1`#eTFwp)w?D?4Mci+~E zv9{dis~V%rvFa=yyOAlW_j96e^O=+zT-vxeSNvXE^U%nYU(N@C2WNJ!Z&l-5Xzh(x z@uF8?2^kWv;C3e|H9dtM=%ek zT%IdFIx=OW&v#B`zUw&O^2&SbKepQZ9M zI2_gYW-32^E6a?vPv8Y6vxV2_jBSgGyBPOoP}I>sl(~Iaa4JkZI9z@C z#)jPH8ylmR%k0vBrM>ve=AET(^x8C(*|Y=3(i?brzIpdRYhyP1tyJIqjn9G1^nVxQ z_PUP&a0#VWOuK0)e{pFEE}YW8gQfIk47tH6_wqChPWsOXrQ62yy6HWw^ei-#N6>FJ z{r9P_hrYdT8k+Jd&dwgW`JvKfxCpbGMtaXLeGyr%xI5Q3H{=_0#T}(T*D~`{+lvp} zOT(h#qq$;jdvVfHXx1|F*ht-%pT(1b3*1+OhX#*gQ?8iDQ?GbqV-DvcYWeWCd&YA8 zi_!a5Ui&;Rd~}<#H@{J=>x935=%z_#&cf2PY}?3=&2{Y%O3z?Yotda_{zpL6^uDk3 
zD<1l1Ae8#ixNrI;-~$fhvsi2MAvSvrT>BI;WL2>jBCxD()}7iHE5F@!rfiQ+DhL;ij?8Lwo-bQw%N` z>3co(i8egN3V6T5h2A@UJD#2?X!|CQOsNG~+`SnLE`&{g3l?Xs_(Pns(w+Y_GE$NL zI4OM&c9Z%>QoU!R>1^1VAE&JsTCV?9Tveq9_*^X3jkSsMhs8A`Q*N1u6F8Eu>l>-< z&6a1JnV2zu8k~=yj7uIjctr}2K-0d-aqY;IMbK;w`^GdsO|`g<59RfEG_oxpUiM)< zKg!oLAG~N$@%7@5a>ZZbNzz+aI#GwTz4!#4mx+r2kN>j0pT*MsagQ#|6<@aJLEZgN zVL{)X{iSEl{FBp>&D++S&-Haq#g*JKb!5t`=YT7=P0ej?YZ#fb2dFrdo5lQL%Lh9m z!`~!s!*$-#kQ?3;<(iQ9LNhjaJfB^MhyodVEYnJSlQ9mY#!%Tt`>DqN_q?E?HA zLM-{}ZCK(A9#})JZ~h;hi(~5@Gcx6)n5gn<@Gzcn_uxen?^0Jnz;!)1>92wJ23(Z6 z!Md*@-&T(3S9%Fc$@SGYmR_`^26OSXmD*q8$$=NnlR9(7^mN{NPVIZm-$zOxHSgHc7Q902JEpV*7*E|V7@eLyv~O(bvjBIe zG$H513($OATX+Gw-TKA7ZbIFrp2ffoOE5SYKd*C}*Yfo) z-#FDTk~jE-!gaE@bU&VYxc-jndn?s@xGvvZ@sV6{JKq22@_3gvUQul4dx>5`*77AJ z-^kYkTtf8fJl&)w`39Wq{7=Z?wf8&VN)KB1y3)5)iWBR{`8CIT{}U3s|Hy2qdoRrW zJHO4RZl-nHK;4PJrH}K4A2>1=?67E5hP0P#U0$w~^wA26@6!Mg9FY+K*-FSB}^-b*gc4SuyT`xXvyoQ4Y#*|PNx&Z>LW z{sFFY8y=t7f%lW;c$Zm>Pmfu|rMzTEru+gu@n*!kKlHzx?eq5_>^lPQ z_&fo435<9y`nJ{Q<~`HB*Pf|-e%j-pXFR;$Ht#p9yf1J?2l3vA3z&0PUN#z(M6~bN8fhr^WM|%l!Fp&*au(-D12WwDj#rwQjC!L%MG)xbu)~-vC-y9 z+d+IAFQ;Mi@k6E^*K-3ZruFooeQH;3__-)IVOI`E74h~zI(gf<+j=iA)_oq;eLL#6 z*KI_?i+pq4O~_ha-4^BN_U(x9nFbIKF@)8~wxViR>nXci=k4yDs!ol2&fC*Fsc+JS zP_vtlpO3&tb_Q?A^0=GpyT3vA+_L&23|7+u0=S9Wc?ZsE{PLk{2nc4JVJWjd( zr!$)_MRM;@LkM{E_{^As7P-y5V0(|+isz9ICYvj+E#Hpv?WqYb#ZU8Dwz(eJ3O2({ zAkz0(+K}6xor?QxxSM(Vj*pTLM43%nz_biru>Y@L!z)Rq|4zW>$MIg4D?U;><2c;# z4SuQs?p`hhuL1R?qe1eC{s#>$eQ!^N%VXH_fZ?Vvso6@9?VHSNENWODawnnesAjFXV|G zd~GIGWfWoeJBfDXhgactlTOg2xEA0w0^&KJU@YAHK!);Y!H0pZ=@d;F>6NVk=FkeFVy)!O176 z!w}2*f1cU&EhL#O7o{>=YKHgJ<3(c2f|}uHEvb!@Iy_Ues7lAgm^c}W1aAU7+Y@%6 zs#PzfrQ?o3`@!4!&=@h^h(eio<8aQ~+PH9_mSMrbLM)-Rv1MRk4P$m-q4*027N&_W z7+6@x*fy|m3}frS!m+v9pXN?~J2&s`Oy6<5P1LE!^KfzQwP%hS>f2Mx`w!fbe`s5L zD{AO~(~Zy22R+}j;`2QtKHn!EaK2yTiL5@~<kk*gMd{Yemhrf@TClg;Rar@ED{0+?9j&)#7hHS0Qp9rxXg5z zFCFywac$tu-;V#=zs2J({!rJ5AA9jI9eaT@>12G;i1-?5Dy;$WFLLZW4fp9Mnw1m6 zCk)=N^6~psM{$3y_%nP$)`{W|u5~@|6{}@18^4A86(H{1c@5zQCy(g8rup&GGm{VU 
zX7V7&cYHJXnmh1fRDLu06XSSw_?yY6!MxL($w#5-J;Z(#gXOH{TM2#;WUo2fw(_my z09M4-NK&P?a$e$$?-l96mWFM8=S=;BFb4z?00K59u zf$_P)OHw1leQ(s?hmOw!8U3f%CkDw-45}v1~t?y|M|NCSy=8v2FC-9jq z{ZFAGe^QE9kwG2+UUT@8)BmGH{&XV$3G&T@yD-A=iZyx+UQcOuhDK&xl zr93j>U*d<1z&tMDXMr*Gl?J{aBDIQ7z&%UUka4Suz;fmiT;y!(~zorX5Lg0Fvo4S5Cg%4%zV z6(7U?+t>IKGXKj6x?ftf^6f=~pQz6bzf_m2eJ!);LwE+-Wr`>HqosA|=+|D`5Q(cm zW#NcFHt~Jb!juyWYx{oxm5|xA84oWXGhTez%$5(tcU}wQ z8?A+PRgc$wcvACZoifm`Cl%6@2l}7&Q;0MFoToVR&wGk9|AMDD^Z(!}&isp>Hty}S zh0MT}ICSWBoWbj6M(JE%$LW=I{7z*Zr&ZQ5qq2_oR@O1IvW_z=>o{Ye|0OKxqGId$ zQ*-m|0Q-02bn-^|_4-U-8ZR>sY^vjtD{>K(!9onH{hbJD~HL9ng6l@bg*tdwYy}Plz9-pSO%WfS+hYctf3>x#f$$ z#t)Z<_D$T%58M0dw&Ll7H%q8+fKHsve z<-=ERE4}o*eoh|03h4PC|EN7z{84-HG2KZ#%f7s`+>Jyxq2EK2+0v3)G9PddG~uwX8Zt@JliMXkP>&9b4v|I#CMrx-j!^Vjza7x{RzJ}*3&lDa8BVGi- zYxJLS`)w!2Z@_gge$W#W|8Bi}q1?+=@k03Hx zGQcB_kGFdMD78nte0+GUby)n=sUHG-B|g5p{P=#;1N+S%=o9_G{&^4M!4J$Y=<3f8 zi;BN&t^B;}=Q>6_m@gn{d2Q$ycqz*twH4PNUSz87(+3yWrwiz~`VQ3fVZ(lI#UE{z zy79huz)!BO2KkO3HnQ%(St)0bkpI2-^fdVC;YTaIiuAo%pKtJUQYLvKww2anJ?(>i1)MKGqVJWi z1Q#z){zJYhRo$<(^OIm}@q_91!LuHFcx0rtcntn_qNVsyuJ~qdc<-h(oH$AX;JGf@xZ{Rc*9~|C`H}l8PV0do>=qDHBCkruA zFXAtaz+1ZDe{?&38ir@^Lu!RzJ+~KkFB;w#-CDdj-CEq$yrX6K!E|f$(XGXucwV7* z-?QiEhJQA-@455aikrUBfS1#@=1rfu^Aav%CkpCYP=Rl<4Nq%nJRWE6{9w$m5o^L_ zF$3pwu`|N2vMyxbC5m4Nm*T_L#S`0_cegY@)K+{Vxg1;J<+yeIS6bHO7W_tH2hJAH z(9fn~Z8#8~E3OQO+1h+L9?1hv7EcsUn4gH>_Q{%dsFOAIU+rWyR!$a96weZl-yg#0 z(5w1qohY7dUT+7xvRaFmpMkU0*8BvYKReUS7tcsuV2M@n`mJ2G)z>?FRo{)SX2XD&$YrjC?Y~}j?5jXPsZ7+T@)qtPR;7w#IE;76nwibsL4VUoB`Vb$W zi;K5B5I&1LpXOVh44+nfG;H9Hbds;6Qg2GUl8U%CekFZKaGAR=C%psIqu~>LA>}Ll z<_UP+M)*LQ?+*2qkMaKgwDILz^&VXJef;uBpV2op7QnO@pUMqRYy1x1;q>{ut+*te z8~jjxTX8~%!hLv)m_C%j^)*~dw`|-KkmDtIfE^z?8(8cx7Rv57F?7p5QhH9s5mZp_R`H)dwE zH#TGrugT14#m`(0zo>ykQ|9mm5t%HQCNhh`WXUWCvzSa57=o@`@fIB&Kk@AQ0e^q- z#l|~vg!m~3Pebwt$fMscBmeov+QtHYjn9uU_`%#=f9I#Ht*fmWpPtSdz;EzzpP%^_ z{)+CgHvHA$;Ief5^S6bWEw|0uh2OLm@53{vwr|hamf|g+3V6p`klB(x;*?<(JcfdK z!f49cojdSXb#29G78Rdaee3GJw^O|n@5OU5w4DA#?c@8>3kN43 
zH@G0xJ~;Vyki6_}Yi__hM*Cp8pTq^5&r4&U-1p|Rp7HqU#gs76F?ZjD!38hiGgDm{ zXqyXn-|N%5Yx{;kwb$WK4r>NF&Z(km2RhEJqS6B$^Qx%2fsXU4s4)W_^Q)+_10Cac;1wkj$!&~ZT()iBU; zVHI`wKnH$?HtKvGG0?H7iaK(j_`NC$m&TP<6fTVqRZ+M!uBxJNXX`v*EUtgiffi@_W|Q|`oz%q_DbyveoB zy>Zgq{KSRDTRwB=QoQ(#n>cwecl04-eEQpg_oPF(ryj~Zm`lCu^v4{!LCw21IQGyD zYTvcNaffbzbMP)_aN408)V*ti;}6|n%)2%?;m{56Cu#3;2qzx8LH)Znhz{Lg+`Be7 z>Cg?vziWe&57nS=Xd<2qeQ!+7{9y?{gKrtxmK)jD()U(F<`1uUoj-J+-1u$YWw#Gr zT}V8C+KLyY?MAz%WAKvtmLh((GOYXU;h&}PPcU@1y~FOdC+yPA_KqvO`h;bW2BnK}Go+-Y~* zojLqM+-SEA0pmWqV;3-Pv)lFn<1V{nKQL~xFUC!FM;C9fZ+_5zrNF!8#IF?QmVc!X z|EOG1e^ZCQ{q88X;IG6UTvU9lGzn)V{xc%{B|87zOlDnD@jhJx8x@!Q-*NWAIP;+xwOWYDM?{qP++=KXl=zdkwE_8=0QMuXRTq^TQ8U z<{Q~lB?%_<=?h@TUkTu^y77k_H+N+DL&K~0&V2%lPtZ~6Jz#E?xUC{s@uwe)i}B|Z z_~VaM5x>2~?-u!Yo9%;>?sy1~Sh20Ecq#s!QpfVrj$PKN3BUfu6mm50+&H%m|E@7} z+i-ChJkG-Zk=w2pZCNjPeB0v@FBw16W4p)Y9`hb=_4pN!Lmr>>xZmT%lJz^;V|+4N z9be_~MvtH08&_ETlJj?XjMe)cKk0G5$0OYT6~{9jU*K_z^KqZtw7@4|h~wQC7~WMC z8uq8AV)EQpWufQ@+pBR7l@M&N7%{uMS~Lqi&Qnpp$6 zEGg#Tp@D_71W66ksE#33%u2;NCUY_t@?6KXx0}!;Nwm&qRpeV$S1$8 zksi#)8MSVB=mnNn#}I2+2iMJd+c}@s%w7egS?m#ZwyVcZy#_nZY5^gOJ;MIKF3VUG zqI6BYP0IGnYwFRko_vJK*jzKFIV(0qdAJx|7IR((t%L2bf#4cIiQs%% z3+LsWs`VOMtFOtuwl1~OP_WdyhZ@6Ws7pZKNmc z(t$-WvOdwyb@PKa-5Zcqy&HgCw*pRUy8C^8Ou1&hqJ8RD*m{5pQ> z?)Ui-Y<;xLEkC=_uUJr2N{;4TDF=uA83{ zbw=v?_Gq|5I+h=U`s)O;1iN&qhFy0*k6%0zkHp%<$WO*QX#K1-1`+iHu4{gCIwRe3 zjfUNC^MiLk&p#0t;L)+XbQv~3Nyo0cpAG0olrGg~G30)94AH$}tr zljr=3oQ)Xkqo!2RvFR8Mv&MGEuA85B>WtJ`=~#YaINJOs3Y$(NeTed#B+4*5Qg^?O zU)a`X^Ro;6>ZW4249m~Kb#I0u>2i>Bzs;}Fbobl*niy6^>a-IxI+jM4Ve_-0W7lnd z8RF_ zfH3-d>d~>0nxCm^6-_50UVf^?jMQCJG)#p&*RQA&Wag(#9#J|=47<((>h9N>61Mfz zZi}Sfiai<@rJjzNYZs&aTjWG!x+DAX4`pT+RbMGLf8+!r^@z3D zBJ7Tt61y%49JwBe05;uZtxA3tVndbdR)Fcu&qTWUnPZDMz|UCT4HJ@+pAow-`$x=# z5gj8F?>Zr|`)z(==~AIH(vQeg(J&#g`4LGFo1cgnQS}fUyG}^m{i+m}P7$)kdW!(U z&>1VcV-Vs@XG)B3>5&6O!w^Jqe%3`C6p4{xv*;L7WQG(V#2JbpvLqhr${83vT@ zexDyfIz;LbN7OT7w`f=tIoGM#E}b$lqGM#j><=E$qhr@Kzf|h1qE6AV{1Ksen;+P9 
z_fw{hD8CHC=BFUt{i>vHeu$7Y`j2CQ3`h3kSpZ@9ExHarabkCDI!41NP>uUBuVd+y zh#6V0Z;ys6q+|Ipm>;rCh|;Ojg3&MmvCWSNMOb>^CPscJLW=Ms>RW?jvc&W&(+I|~ zu^0`zKhpeQk?70(Py;a|>k%$F94dM%E)rmny9JB}jKa3B%Ii4yabBw@8&3 zIwN+Ej$P;c)L3HCvFR8MQ?uQ%>*j|dosl}CbVkF>K;g?VQeo2}3nNN5Nt9vt!{gZe zk*^FN94Cf$F9qdJeZCsoeb;oW!U|o9J_9Q+F8#?9Z@=?VP@pbPtmdI zkcAOd&%%hC`;i!SzZB{=KiuUlvaH}l!EhO(F}FfN0Wo4y;DD`wX)GecR=}_C6bl@I z3$7zEEZuIIIdXT)yNuqbk_Q?p$4BRV`Jo6YM&glrbWB8& zeq|cLI5rleVfPm_KUgICGC$Nn%otfT6|7+>;?3_V>fKKx0b*nkqpiHQOD#Zhk1z8L1;mXEe+V6uulI6*e8RFrsvm zL>YEJJdVv@@b$xvVid{GNL?{fMC#G8>+&O~1NDf~nHlE#m?|Z(fx>y&2RJj>a2=7MCpu%3CWw^8xor?830lBEMz^RVfTY@ z?7FWX#Wp4VR_hpkmAX#9Sxu)OIAL*Yq~>SJ0*j`T5SE`RF(Y*s6%A7%Z}TIngi$(W zs)&wF2W6NA)ZMQ$C2Z@bU2gfU*f4eVGF^V+@-CeYa)^#ixA{4d>+ZMtwE))3?$NRQ z91NSE6F7EV$L|dgz)EK!`ym>3KM2RJo1d$vgIm_3Ism2&vz#jY7F~y*II%l6o%2h> z;L)+`=BGrRk-EM;8m^Fz<;S4@I)N;~E}g1j*WJ(K7mvgvu{JUClkpB(KP!zvL_LA) zn%|txNVi<0VfWko;N6c%9jf{>I+m9%!{#UH*md{Q$UcbDrP?fp-S6|G(EL6>r}tIB z&t~eTXqbNToL`Z%5o3MSlqxzl9iw5^*zVYM^V3e9kvc0K%Wn)vo8LrX(`lp+QGSy| z8D>Z7?$_}P+xl#NcA;P0R1BA4`8l}m%}^v=4s!0d`8AsEew$wt!>UM~c49`y(&#d5 zel~RMy3H>m{V0>pLc`{#Anblt8usJk0&GJix3q~o8RU))tj;&qI5>Xgye00Z%AyqWb3MW7P21Eu=_zccHP&{ z?y_+GR_hpkmAX#9Sxu)OIAL*Yq~>SJ0*j`T5SE`RF(Y*s6%A7%Z}TIngi$(Ws)&wF z2W6NA)ZMT1FKp|lU2gfU*f4eVGF^V+@-CeYa)^#ixA{4d>+ZMtwE))3?$NRQ91NSE z6F7EV$L|dgz)EK!`ym>3KM2RJo1d$vgImg?Ism2(vz#jY7F~y*II%l6o%2h>;L)+` z=BGrRk-EM;8m^Fz<;S4@I)N;~E}g1j*WJ(K7mvgvu{JUClkpB(KP!zvL_LA)n%|tx zNVi<0VfWko;N6c%9jf{>I+m9%!{#UH*md{Q$UcbDrP?fp-S6|G(EL6>r}tIB&t~eT z82{rG(>cE;<7_esJH<2O*mR7BA+R31W7oNUq?*a1W9f7lHZLn2yY7BAU>`*3EHrF> z3c~K!@w3GIoSuLIesyz9Kh3VoFJQX-oXvDZ)!Y0U4Z9z$9BXp+Af}x;qhslG88$x~ zI(FUtG}4bKorQ+YPeIuItTgP$#|7BNR@nHjNVw@=z9gUy71;F#)63PEMlSdBU)Kni z%UySWgZUG7>A<2G>Gyv^qv+Um_Y2voq`LvLn7=Ze^~b!nEMGr?&j~-Z@^0A5Njr93 z{^asZH-;nU$LM}U$L6==_tjYyb%@d#4HJ?#zc(Z{T`~Zo>RHHoM8oa};n;OwKfB99 zztuX1U!|_oZ&uUk2ToWV8>#u3vcRJ0B!uOsO3X;zMMc9@$lLsgDq)mPnJS`V(?J<# z0d@E5ObOfiX_s4mD>h7By-b&%xV%fJgB+q`(`|lE*nX`>ENb+xkhBz{ 
z6As6wV>GN5K$@S?bAF-tXzVfUlJvFqmN^z6

(y{!;aOCgj=@<>O zBPsXm%m~~1Y<_m3U)@v;mtpxixbDqRBwY@2?q{Ys+|QOir`5Y395Lv9Z`N73|oH+!tQ6KVLv`Dz&5tRmeq)8xD0uS*s@M!O?O0>^sM_44VTMZ zcYoIW3A=P)QH=DX4MvNOMSwS52s)y4HvqeC1(?otR-Fu?aD{X%KL+zdmI+ZhRa!6_CLp%?5upf658TAa4@F23enfq1a7>n% zeq|cLI5rleVfSY>KUgICGC$Nn%*c8~j1~bhrbm z6&fs3C5Fz3-J@gIIX^X)SafVUM#I!>ckH_Pp-5+>jwqecFf&m2a*R~ibjZSp(oGU& z*!}Q0HhNbCYcI**$ynoJ^M)cm(o8I%jljfXx+N@Kiod`>LV?XZP zl6g$(*2G2W$s*)sj(*4GM#R38E9dOz6X;gRF-pL4WMWabLXM>;CjlY9N_orVek&vZ zXFNse9O60Tc8^$g(_~&Ux+hh$Ta|j!C(@!6O(dZ*Cxr4^Gz%-$QDb#3qvo?3VtuK% zc}HZMC|za)y95J8pkf^1f@)LErjRV?gX6RP{Y z+GjT1X3^5fdvjZ=w^V{V7B+vKNCKSz%?vvrli?wa4C3W-%nr}3gN5HnyXH+Irh*S(IV(ml#5 zGaEqiy*k&jG=tT|i)5Qv8^u-CdX-Oa$d-T{wKgq#Yem9Fa!IVWh`>bHifC~^Sr$4L z5aoUoO=p?ocy7kzoRT?L+{1g5S4YPT@fyo(QZwr(-5ixU)f|vK`}r>VAh~*Q;6~kJ z9BhrIiL0t5jH%ee$PH^RZ@ib_&Mle8r2c85i_()t$jco4j?0aReJ5AW+0Q4?KZSIR z60jVZSmZx{ROV6t9MTB2kl(TTvzpid0xBGfGN36~C`qFsVl_}=UR41z#=Wdeisq0D zqS;gogVo1tF0VyZtYp1?Rp&mDdb2{kSjz?2B1Utr#kH!@{1uPQ&v~^eiMxR*cXict z+DUk1OME#)8oFafeG_{f>!o`nmOIhbh)S7tmy6~~!nTAmSX5ibEF_lhb*v{JH!@@f zTV!mos1|IXF4irPdXf8m2{wUdaHspJ^P3Cs=oT%=CZWoH7UfWvib=Y?nkgV<1ITvFKa9tgclXs2L zW;w9dt88ZX*b=PPrlnsi@-~J!uhAS`_o%>rDz$p8Nu`EuZm}%KX8mk3j4b0E^jIU| z-#;6~149DLa>AXL>7##HQK zm!A))}KQ!Ztrgb@QsSL`%6^e&U$) zNDLzfMIpy3cb+q_j+KeFvEBS8dzDqkHuLro8-Ituom(=G$r_N-Jrc(nqaX*;j*TSY zoQe{;Rr|>Q7$P@|F;X+KXe!+!u@j0^Ny#tCb+5Dkl%g8h`cy1~dCH?YSZj_dvEEvN zD4)wFM2!@!5p5LB=6>o-HMnjCniWj>t2&pFs^(A=tF?yacTN4PYJFn!yT{Ij7NF(& zKvgLtXEJ1wQ;6_LoUCBIH&B$H2CtLfG3(^F3COZ6X%}|cbjam|^@L6^%AY78mhN?| zR|AiR%pf;gCK{@xH@5Dsdt()=p{7=)O;BE%DyhoJk@X&JGS2TLWp?o>6LG>MU0o_o z6z`yE?r0;`vz~HE@a+rpXeQL`tGa(xWW^K$|0qKB)bhq#^S*v@*ZvS^Lm zPk}y;9*N0dHz&I~W>g`u3h5+cn30%4ej#It2{D@Pbz*EVoxIE}1m$~m&UWJ1mh~P1 z?5mn%AFuHW)roUYIj2``InZXKD#HvRztcW%i%CiQO^r$~&VD|D{w<_qlz`>P#G?K!q+{vH zNkGW2QvRDs_gf(WIO8cw=Mc{!w|m60n=-L*X;YaWMQX)jemiF5H$|4mMWR?1n^fYu z+$?on!TsitVg>4e!Ri?)VsqKB*3QYv`d*!DDRNm2aojc5MsZcOs?l^@f+0rbsI_U? 
z(^hA8!&;wcib_;w^LsayTD|pDq54&3bHC0T$UI>+qT3O-O(%7~qTqhbNk5{tz6;--j{Q*cv6DqbT2kaUm4sKq80z+~bA zSww^wk~PAkf+|2GvOZZrOrA6&0ye{phDb=Y5{{G*k;UJNB%M`^2iC_5dIADTG@c_E z`$iOUOn}`;L{teg;!C_}A=eR1Ww3fi0&M1Y3@Tt(9RfFDMgxHcZd3>u6qPWwgck8O z&>j-1`@I^PWYii%Ouu7BVr&@kjRBMciy8w-*AbBkGh!3s5VN4lAWr26V7+vY#DR=9 zM~+qQd{6~+MDL(TjB=@r4uuKhB*X%uXpeCLyIP|HGO`vqB1f?>HLCeCAA>s8XE!^IfavOH)Y75J@KaNZzC zY}7p}0Dz>x@TY6jAYS=#EV?UL;eH(mfK(h>^hADjV0C>VyungsCtcbuM=0DV|Hv= zAE+v2#LmD`F^g!4MPht$Q$)%sxG5qPuaN*qx<_KvViOBsGI5qHB0>zw8sSkv6`&DW zpDZ9IPnr<{n_)&nB&1piM@opu;%`Ng&ML+O>th8y0f8hM&k>A$BMLbtz-}ZWs)QNw zC0?|U>jFHuF0M6|k!gftxU+fj|Q{Dg+FQN|;(gi+CGo4++)%UJXq$YKfdPXeI6`4Y;-KI<8OMy+0yYlX zF_1YR2V)$fbdLb`6)aK{=P|DJDrwT;VvK899yE{&{8r35Z;&H4>K+w!bEEzW`=gb=_Ha&XK@Y?MV4Ia-}(s1XB56N5!!W*m))M~hOB2H#b5ket=5(T0d` zA1uh9_9*I#z#lN9DHpzIAOhHQbNGu~p#;80RB9^WMIwD4=3|m8%NkLGtqB3G8 z)R{VFWD2IdgV7hc_=~v}f{BbJ))!)|jpC?!jdZUQSDj;aY>^LCl`>*y;HVfOT4IqH zU)&UtatdyWNX2U;0Fv&J7`52M0+>u3kwrv^Az33lDyRZ9BI}a{#NbW;76J;6{ajK~V`)OK1^q1MMN9y5FmzNk*+P#PmC6B*umj-xxqSu&6PR zbR7|yFe5f04lxU=4B}LN0M<+QNF2y$bL3d%&IeUMNAwPg#3+}_=unt2PC_goiuM>6 zu&XtSkdd{>5jlcIc)=KE#1`NNVNrz z3@I#@d&8{tHMe|r-bF~Y{o#-Aee0+Hd38+$7yk552c7oWfQyvd9qo+tj*OTmTsJo5Z;i}kEf2=RH0k0aJ2*W>fZ z^T#dLT z!LM4=Sj$Yk)L#zPa=uXR<(cYvbr%V($@o4h;p^3SMhG`+&he z>*b+_Y!tGXDT|0)XTfs75P=U&Sd0JWkT?+FF=8W!Jfak`maB()u_tQ4Lo<)(py{#s zP-|Y+tB2-e&KlNoKCOXk%L1@JBQZ9tL%nFJXyP%ydc?J;(;QK%COxKp{KUgDhaWlR z=wptX7E-XL@V`pN|IQWp0=)u|gVVTrM$`o8$gO4pa<2g&RD(Kf_|LFIqmEq~qiF&C zH5Ym12WyPc-=YB9AjXHoZFfxGr3{ zw4E36oo9zIGv9fAzH!}puI|nYLI73f8k?HJli)sI=B^K6F&-%#@60u;R^(SSo<$$d zPnf})rlysvuMMFIn2-LbNx(0_4hW7Yzbmvz#E^1M~9izA_)o)jl zTWWQ?M{!oS2i$(V>d(C1=H2x|2)72{PQ%OpL8mu9x3eLIyTMImjh*jiay!5!9)mgL zcD)$F!#oD76O|w|B z9NZlhPHQ56s4WFCg$44NpGxDu0M9rs6~<+x&6-M5YECLWACx} zaLrL4#$Pg~i{3P9H=yn_*njTcRmlGW`z{iJBiy3E^1uVbmHfsac%Lj0qVWLk7o?nLVK^H7Ok?%^4Ra%^tIO9M8$g zA;dlY=$H_WKNgQYdN8JH@Zz`_adLf_ybAl$v0*a$OrG7)m6}qA^Kx&PlG+-^utvu^ zCXAnr^Qk#8-<-OTIRbOqp=q6|hB2XG>A27^r#>{yp19|vv(n+L{Mb;pYD`#;W4Q-= 
z{<#oN;@JWp4&ZSuBkT0sbMAz2TE4O7v{k3noHpm=n$uDz z)J)!g#GVGPDWPW6spLw=DpY(@%q8zPU2DhdHuXV zUu_t_3Xj8_R5<3uI$Vd7!=%&^VN92;y?`Fa%$yLS{QE+*>Z}mWnG>Q^Q%J6z7n82b zCPZGpN5GWWMz9ech1VvO>N8a>;_Fyr8e9W*UOyqiYZ}g1Bk(!cJFwGH6P9(a_{6eR zojuJnyKYz&THBU1FYC##=*}sRvOYg?c1z9EF?sa&?E zJKwuv?WOBKv1a`$oaMSp*I&0{{i?N{tJ89=75UDX3b)iQ>T$54`-09DT+W2!3S{^d*(EphB;BNF7#uy zt)18OUVCk4cf86Ev|aMMZSBpoXRcbimiHygmMvV?)780h&FVEPm)Xg~*_sZ%C%2-oU3+zI($B%hsZK2=5Q;I@k4dBD-wiC71s0(nTF@v*t8e&!fXitZBna z-f^t%?(F2Y&4Sx@J6%U0__TE-pQQ%6{mWpBQ7 z*(l*C%*VPAR&=d7b6wAwxRlOZ*_}W0y3Q3{XRe)f=BzXQxT^GSJhpgPk|pzaR#vX- zim&K@+4#uS(c`PXKQc1=OV6D7C#Q!HzjpFRYIe;(q+*k4Az7;zu=7m&Y<_^u(v_rpc(%x^(# z#659xBZ_60p!!my4Bpfez#sc<$`$opkU~fR4ydO>8 zyYim($&>L}4K<7}V_%A#@lEWjkcU%3_#fEw%p38!1A75^@}89Us;^-GB6!9V-h=fc zzZ-G!X;?q<<%pM^iTiNoi0fwJT0&kx{M37KExmxxq=+AB!g)sCg}87Qt|jEj`%K<{ zE}M<(7d&Gx_IBh2#M)+Do5&jwk3I+2HS!4Y>~ll75qTEz&)Y({4LR?fAH&Y;#gXXU;t}!oBtSRV&uwEp`5>H+1%#dj9(-jX!TiPfzE%Yu4Tn zp~3o|`KR`FuRphE<#nCwR`i^?Zq3T>4LuuH=g(ZZVcoeade+VS_^eYS+%BzIjXR9X zlC!bZd*faJ2U+94X=<_kOsD>?hP$TT72P%CuCBYTzYAw%m+^Oe?e4E_{n`~@U;g#$ QzTWlqf755`|H*^@pOYZQD*ylh literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Cipher/_raw_ecb.pyd b/env/Lib/site-packages/Crypto/Cipher/_raw_ecb.pyd new file mode 100644 index 0000000000000000000000000000000000000000..767cff117d8dbffa02e6dac5145c18b90eeed974 GIT binary patch literal 10240 zcmeHN4{%gPn(s;GC6k0d3dPBgyuhr<%z+5!_N&O(o+ z`d_-`C!Ux#doWvnq&zmb8}v;N%}En{WYe7V<3{_O!2<^Wn)K5KEKNUaz_~_y;lm~A zAAl}=c(w&U7&PhY40=_leKU>mRWoX7VyxqlJhtV>^V+g9<7~cIlrw!cD+Xtl$!)zF zkZ8uXWBMkZ!&n~SC|Ss&AjqjGIjjw$W~VIpB5iWUZ!p*3g>uGPs53s72>KX%)fkWn zls3l5huh~dHk;_LD0|UfJGr08nD!O2#I91L9tH4)>G+VzRIPacGm41OtK^QL5@ak| z1Qr-<8fppZq~e3sRi+r@8hwHXv`4|LvDUn!h~S^s^pm54$|-m zd}GLEgUJ*lS3yqdia^k4YP00v+sVbp*z(ErHxWN(l+I4q+&*Lae2Me?T*fZMXYj$n z;?9o;b^Ss-=R$@L9xXooLu3lb^TgST{u9dL#2K4XL7$n4Pi#uz0EHP(9InV{4`a2* z1no`$*Gp-G*Q|~ZJne$e%Ko(L$w7lGMJJxT*W=(x#Z%0a4|>GIJ`g-g{I# z2PJ)JSKlx?YH@+FT_<-odVIc@ZHc-$WnjAcU&nVyaDpHcv^0eJ(#oCMTLqv#LG)_# 
zz~U+Q&4^~6a+U*V#faGcbbNuf!4wzKo@cgy3?sEOi0mO@00BfH>#Nasy{VnZ2iB*@ zmuaOAXi`t|L@iT1TpiZtq6veR`qd+cs6(^*Ph8FQtIj8&tX_Xx7v8=OTbm3V%kieU zdatLNtBR+Vs}FiOd?D7~mpKR}Ogo0}Zt^qvhj3w_;<7jW~LR={B7_=qU_xDP>*R8VF2$5->$c`GGG@^Ui)6+ z1Dp0eNUNE#{iYGOkrBI(arH1yt`q%9Uum)h>%$#u?=}|5*Q}1KpJ0jlPsCk|-M^W` z*tUFc`t&&rky@@NF4?v;aaA~rJi_7T!l6b_wK4e;-MtM=^<)wp1VZ1>&;wpm{}nFN z_WoAauQdH|a+4F$XEr5tTl{7ytwA^M3F>wpF6aFh5mnle|JHR395V-2re|ezw*#zx zT@ey`ZrnF9V%4M(l?CXz*fO~wwa$hWs2(fH%Ifadomm^k^1Sa+tD2#u?7?dI>hBBO zm&vZ15zn-EVzYWl{Wq?j$0msj+K=D_zdEG8!zn-}f&l9uIQ%gH=hlEmKw3I`_ zdM7m58rzYp|3G=)t2bc}q*^^~@S*Omrxr|hF;8@OYGH!kvze=C^{d0=mTD{? zF{4COc#qPy5_R_sbkm0tdd`DCrNs-}cTaZh#5G(!Yc7HCAblB>Zflfu~lACjBnaO(MD#^TAP(dx3kKin(`? zAHi0`hhuvGN--FtLJys+cG05*VAs^K`e0fv#lY zWAN)KvzWLfq$VX$>XsbaZ zZ9ZkP?KSDKeC;t12W-ZG)CbAG4snL$|D>ret7{wN`qFXgG((e3B3}yu89O_vzjik` z+A4f3CiSPB%9CAm)+nA*YlB{-d0N0+o$=L49bP-a!2lvMU75zKO&J3Nos@Vf)c^gjn7B`|7 zu$4kqce|hy7XdnDppOodRMWU?4;<3y;WX&;a0HF!L*4?4=$i)5J5oGX`?+z9Kj#@C zksXK0Wg8IC6L%t!r{;VgDUGMxMp zaqrRkRD-RlXxO#Ce#eDSSZYq%JxxV#K#yI$49`1~t)6M|7M^ha0Kc8|vhiiQt1g{q zV@gF`TV4)R=G8?B%n}Y<{tS02af^o^wBhh2|8mz~55`IwRUwKKp*}HeYfK6f(6M~* z?gcV{9M2k4{L>}l58gnwV)RCpZY}og*sen#6vPUfiU!el0lEwcI)31i_Qs_1GoY?R zAC zG3*!;_5qjA5ROy#FX-w%$Cr^t%ukl<}twJ25_FmFY-H zN&$WZDR!<09a)nN8ESV@yMx+uxsHbwxMe~awHHx)9#0m+fT^uC?+MX~F^?U)^7~F7 zoy49%C$*RT65HChhE5Hxp4qk1NBM8V>64$}YC4H6YDd7?Z8T=CG?>EWm$Sn7iF1al zHWk|j35VjspK`u>Uf2CS|;MZi&g#Oa+6s;aJT@7#pv(|L?Jh;CSgpAHT6Kw0Tj-g@GRQ+UmWT*XwL zAy++x*J8-kPURVLzA3y`Lyk}78FDS)9k9VZ>phoZk7CfIJzKYK?bwKEFkhK94_RsV zG}oK@rtsFA`X=*QO}TYbc&1z%c)Mxb9`oHd`|e#OnKZqBFU7c)Z)?a$AA(0u%ofi} zyi4FsT7yNzqnx&z=3uT1Eu-=F>;uTYNBi*e)q$%_)y7zinwighU(dcn)4O{c*v5Oa zr+Oh!)&?G3f-Rn56Y+mn!4%n5ZU=KK1tdw;jqb@WL64t+42;fnSH3 zZ6|u}q;{L##_Y@SsxvE(&06GQv*ekhCDE(fNI%)V2C_?VhguJP1PxnO<+RvvuXkZC zF3iPMQxvro*jT|vv^(s~A={(5Z8_DJ-PJayojDg3FlS9(O972H9}kX}{#YqvzeU{* z{RC|}HkJb)+e#d)qyx1zpOs*YlA7YE%`L!phnU+o!174O@JAjitjUX-_P~}!0&~qM zijptLekugB^E4RvPy#0WcEk>~3sx-!eG{q=HH!Ve8}$IH4|N3fZt&YtBd7_~r%*{l 
z4z9Fz7LbEm0#f_Ns%UQqYid{?=#bjw-l$^WQuqoUQ-ZP*h(skhsDvV6qdlfXqLWxH zz0Ki}63D8=C=}<}vVzV$k+B|1G*)>EyzL_5a}w_koPm46`?B&)IrsnBzDnt82u78- zEHy;%0}X%O`gh{_Ba!>#(G~G3hGZok>{=7v5(;TV=m1F3G(ulH3`QyMy6&%p#6y$ZXyhwKsRLvPs<5&9W5i z0F(WixT||(iqzd2>Xzzbn1DnWE3-9*V!@4|3}Jt0qa2ib8-iV3F&4?CC=3O=LR+QP z{!mQeP{;aVZKJd?zG;&rXRZ9Rrd4+|`Mot&9bH{?P6PrCfml>(4|Rsx1E%xftupXq zN=HEHjY@$~xHEz+5>wv&O4%kU{6I*K zguBVzY|#{nNLcE@K2*p-jLl~OsYhy$D^g%e!%U1va*PF|p~~)9B?70iT~;dbcW(tw zuIyS^xvX@j!7gV+&E~ zKDG$35A`$P1fRuyq67GIfFkY!F90Wa7w!V~8yKVig?Jow5%4{L!>H?k6MUuwPe{O@ z19ak5&=0&A@Kw|{-~``CrLm^XWUL&O@Cv}7g?9ixZQ%reV&Mk>Pg*#^|FrONz!v;& z+5`Orx1*9yf_p5S;4e_g&QZYoaql3!3($TIAaH^esKh6DmxXr#{{1ZY0sI4iXHf@$ z6U6UI1HTFIIPN!x!KZt}J*Xc79|ydEdJ;H(a~k)(QQ-8Og6?N~VJAVlyAe*1?p=fv zq&w10kSF*8>Z8E_?~4t&6TeU3vZ=c1j$>S*zre>|1Q|yg-Hjig+R31nF6-&;5+9J{ z7>>=l@`Y8^Aocx>#llWVY!IoG1Q3@W$onANY%Hq zx{@Bi(4JkS4s-#ZUIRb5?OU0vN>UDKJt zXG#`E#W0KpNu|QDe1PcUllQ-WYlmW!UIixD3H|F<`4sVWEC0x-1Wvp(Zl*LOh7%SN z8yhF%gi1J4c`PR?mLnYJ$BBsxm)IK{8*nwEM_ydEN@tInJnLWx8Xh&@>}mJHLy#)I{%!^PlyVOL~Uq$eZKJ72FX40C{tay*9h_)7dU!kU0#E~KKG zGCqbeI_7_^;vLGQoutmp3gE{TFKpAQWUh+m6qzl$nJYd}WCrW3ns-I{5`?4{ zGE>W-qEb%BuoX4wLN1?wilT7u8306P+9kM`(v2a5R8STIQ);1XobrY~49e&j{=}%8DUsjp}e1Rx|*E><$cxVT~PURLnJicUpL26)rYbSLQ<% zhoBNwSq41`3Y1f!o1|qp3U^(kw0x*C4tl0EK>eqHB^1^#{Xjf)!w?$aRq7eF0P($v7UBa8QCgLp z>B@fNgJFyHc!folGs=Wr2<7w|;Rw*xkQK_Kebj<`3Kdn(|JtH`hZbf1pyp(CO}Tz%AO%Liq1jB!t789Vr`4fZmsf{M zi4ruFNiZ6G!Hl38CE9T#=QA2SjyKUx+{202ut?-|sTvM@U0it5d$ZT1GJ`F?biZc~gSw|vTh8Wh6;)ToUE?_ey z8^#+tuXh?v#q0fG&QoE4l=~rv6GE7D8T98sz%vo9D1_BVpNH}Uy&8cGx=^g<)308b@?HIV7g4F(&k*aB1nopr?( z;anF`LClTB2`Cj4WymZJFnov@k21ErD`;79RBThIrYcpabQT;%gBLVos&?!+0XXqO zJp}Yd*t_sEPCFmkZ9&brmB?kdLSg}}_*kge=A=?03p_)_Qn4|}5rw!T!e8_!@k}3% zI)4l0MI5Nc!iyR>I1g~1h=uct@YB#`xIlUz$nd^3g)@akVEz!K0?ZT~Z>xmtXq1JK zpI!ng+M-Ft!WHhjplwX9t6~$-&^J$94tA{te-5)`9u$C2$uzzK`6djOH=aqq4tU8| z2=bKAKowlU;3}W05glmDwuJQg%3r`0gQ2Y?I6<=p9|>yVjAlCSRwgs!6}I5aqsHq& zp!}ti+UFmkG`FGkfaK0r*VR+zfj?^Qs`P_AxXq1fGR>*5Sx>ndGDj$6gQyNTe|MN? 
z;QTkJsRo zoEfaqCgnwNg5X)q6(1?`PNeI=iR(B~s$xIyhx+T!4J5=|U(L#mM($h2<)iTkYC?6% zzAh$($XHIyyK)AtHKwp6 zojwBc%DLbMl?#DQV*xW|ygH)~_L2bF{Cow71=pZnL(l>z#hnIKF``yJQ}LdXI7?X% zg8(CGn^q_1f|f5~g)6&2UFbT}`W?#ak$IUtSeX~WjH8Dj11QQ=e=rx*&jAC>#mfLt z7D33~0C1VU1@08m;-JE55CVHJ+{k$V+hb9_&lC)caHTJSsU}at!;DV=0-Ns~1gKY+ zxT4rx9+Yng9wfq5U?KMx4=84y5}4tc<1KJ%C7`-^rcsJG)95OmVaD}#yD76EMjMIU zilhdFCxcofb}N*EGc2}ke#ooBVS_?5#Y{1ei!DT$0nAHXfI8$fwJ1^Bdqu6X6imyj z0=pQd*2(qoF2avXo@{<_KY}XDU$va)n4HLO5uj zp^~E0%1|LVOJ6)A4K;%lA&G+g&8Ss!uSiQrdX}I)mr&P$y*wCpcwaWmUV#2+%Bt8* zkO_eR+GRi<1Z{YxsD-&63?k&>sL^a|b!&X^TY$psGXl#qGI6T%0=bQE=T?HP@Ff?K z$)>}E{u&c;@XVfiZm7CD2B6s#qAg?qaDjZ(6-5aHcY7#pQm<6BKGowuu6U?hH;k-% z3qZ%ZwEv=RF-&08_BXe|;uZ3)Fn0=xA`!1nNL&*vX^E9dq6JG{kCMd7g+vw6!n-2K z^rDCi%a}U_OIo91CEgh{uE_8z$e|JM!Saj@f3A_-7grds6N164TSlmu?uEJ(OlIpI ztgC~`tlZ-f=te*C{u9JhoJ1~ungW|IvMRBzbSPH8kt_^@aYbvUI|#=!&{US|<)av?c`{zgM~@pY)s3$M6Q02k z6E`4}1vDRsZ6GMei90}my%$Px7MMN>Ck$bgfnka`kc|i27HxYRy3D{F>;31Ce z`bVT#S`{<3l|<^8sZFG+WXn+&Vz=werLe*|Us7pn{DYP+X~v$H>h0Cj0-$UXNgs{y$(Z6wfg8JSk%0CNpm|4Q82;nP-5e z2p@)507;0Zho{arl==;&8zi(wfQ!NR?&3@{}7>Y^U{4zof% zBVe|GCHQ}DVvte8_hXH6+Su;jtp8xJK%>!12FM3p-I=w`YkIo#$;FoX+09m~tsu*Sli{6$_8wB*s z!>AZ1M80C8Lcr`Kf+yz^Z{iNq*7%J*D!2hec%XvjIZ&qsXow4NvK$pUv@%)rHYjLC zC46N$xKzR?kX(_~4agg>2Z}gjH#Y@T6Y(0wa0JB{6751)NiZ`FJ%2EJ@v*PxV`5MS zoUJ!;UFis&5oSmly@`4N4d?YiK$bXD)ChXvnE?&TBgnys8o-%nPGu0H#tg4dxZ)*6 zUWXG8g_(0{Nbq^oY{a+ow~qDWwe`umG%}wBa7B~S(CDm)9~Na=WG22Bh(+Lk2>Y`Cw*{2j(aO!QJ86WA$DsT1g49~ zQV$q1mySk7wYeHeb3u6r{Zh2@)%NTfF zcs4>VL~ReXW!{jFThq9aD4>ppvoZ)>1VMrsg6Zlr&5rX8&QBI_S>6O3#Q7lqUd7Do z!N668xLYO86qdqvZ1jDW)*;KxGebGCniqwfEbk1e0JhR63`6oPl>tL8Ff2p^U;L*C zlbr7XIFuSr^a>`OY7Atp5O>sMPi04s|s?kgN|AOkSiur-QveXa;&({qu$N z=Yam?N`-B5$qUpVoV(5+0)PjH?V=VqjDk&5OuXmSWnI8gai&iWKHwwle7IX1&h-3( zQ?J1pveE<`I!=9vD?Z}VJIp*$E;G+3hg^G**{MZ#X^}kwM<#IW*5-fKBEM*neFDdE zG+^1QCG6KC2eil`U||5uK`r4|Epk|k90e95U^${C9MdAlwa7_eVFAktEg?^foYEqv zfyE41^0kBlEpkSSoC6jMU^z~mks@<59`)FO|7WjL@jXbF$C$P+E{ 
z8?f+!rBO?GszsiW2owu(0*;V)<;qJ>;9v_LY0J=TU2-u60P-GxUIm)xWmxq>B14FR zKcZI1Gk_;$A+d%JdGG`x9t$V#!E+chuM)C2Jbej?)HUcQVga#kI4}vRv?T)GP2@F{ z&~3B>L9J8*gY_+ z14$u@U@85uj@E0|k=c*y2nxv6xL&=I2?Y&A2zgZkqFBWHx*ChoniddhDFP~zQvB5) z6bpE2DFUimihxj{m>4Z+UU5bjGV*{p0D?tEhn-sEpEU0xfZu_0<@Z9~8dMdW5^T+6 zBPh|Sok7Oa_?Lb`-5G7huz?&6H3u66h zB!OzSoGj6}r4k;Hp+0pzSzW-Z;%!2w$pQVr6A6ix8UaSoJa{RsR}}J~fE2Nk6aijQ$XiX$tRGxrNBuiE#2@vq3#oU{m~BaPv}o>L;sxqr{zA8CFS%n>!!4IqO$Ay3<5{e>z+>$%a;ttE;`&mpZ z7ow`*i{N2{N-XC?j&vftV&qaRGIY^dI!o(Ph+-je6_zXbi$d?BCJRp@bqVP#h+gI@^HYKKS%!WWGIAweN@GT^5j9j(OgpDEC95FIaO-gt`Usks<% z9cG?4m3)#yx1#TPQ9X2$3cA5zX#<>_7OKxpwJ4mMlBh5Pe(g|SLPCc$n?*WBZN7AY z8eyahbcE>wDlnqo415TI3r<}1CT=R>A*JIR3Fr(|L|A|`6)2v-?;}?P?=Tu1=E^z? z2tLCn%$L?yBc|TO!^*^cZhD86j3&$=9Q0zrs|ElV?}QX8Q)UR~{}igCp+!tB6Z4>% zD^r^UweWOrT>?+M8ePAEZ|7^OhHunwSch_J9XLYZ2cT1CoOr&%hYwG}oas$9kKomu z4rGqz=IlCrh$(& zuw4U9rm5xHYM@X9r)Z$IGYd5N^%{6W1Fufka;V`w4ZTtWwWXglc@qu4tp<+Pz{i^M z2u*&G2EP13j%{}=1^oc0g)KwWdDhVHp|;(B7WxLO<+f;`Rvt{@@2;N>9d8~y`EYZc z^z)!o54%)x!EZ7>eZTO|#=PRw~Zn!U8=ZvXiwCr>GMz3s5Qb8u__K1Xg|YDo*WwMZ6R zedSV@f8>7hDie!g!GuY6w8sj|BSneqyEbDS&)pkzI4O8qaKInd0Z-Q-2-qLG8K1;A zQ-;lQy&90mI?&&FTKS0zrJ&Q{W&XMAnxqRlnY}#T|7={?^h^5&ymPdWnR`sDuw1u% z8?WHb;b}Lx(M$ZC<|QuMV7nP#?RNO|nAzQx7cQ=|vW+O$uL)VR&+23F!3DTe%8TjK zr>DL;Do<C^XqdUr|l5|4~Nmkw^M{v$4i>FW7Lu~cYiPvfNS9^FzFHDg<^Y|qHc zR$HT+-i2&8n%RJ7Po@U{XZ`1DS4ec zmvY$EWAfFAbQ74nl@0TZT;^e$@ z4bhtlbPH*D zltzoic` zk28~|W#_DnyJ|midAGW#E)U=B^}Jo8T4a0KY39L%fN?92*i4No3%q(}`YzR@oK7wI zo83RK@)cH}qfdSsJ+=C#U2>z}R*9%R@2vN#s;o^P$J}eJf5cmKZMUHF$57?v#)^Aa zIj<<<#35g@7ge6<^?HEMy}AvX*JS-VXVszSIk(r%uDrRbvcbCa@Sv%~SDjifU-j|v zyVcO#(?)4%D=rS?_qyRjiOvTIo5`Li1&m%z-6zd{v9Xn1Yuw*T5|)?u{`50@ zy0a>LV%nwYc1QejSMS)jWmExUmR?$HQ~bO!!hXk3+Uv{f`t^^?ZPD9amHXB~`Rb{Qolntp>y~AldU4<8_|(fyOT%WATk4FM9jSliRe^_@#irr8 z@v~;94qhC&>~YQny5IIwL-l<3_ej>lSLL^wPa1k}*b!qR-xgZ09xbZl zm`=C1M!z1Dog5xGPhow|O8@@*&TDSI8?VcFZu1aVfXJ94|SeR>6md;W6!t?G=q0p zruhSt7i{86k0eh#oN@SUuf(knSd4YGroR%$?MK|W-%Dy5z@0PO{%L(T-{sd+cUX)H zoY&gH8%UQo>S^2s$F 
ze^Jt32V3u3-#DyucKgFQ0b?CmcM=aS+19mF(CdkVX|qdr-F36*G+ciVck=H~SGRun z_SKka#AXJGlu*tS;p0X>H>`?_`wPA=}s_@&Q;Jk!o? zv0n}~NAxOrzCm>MiK1-&&o_rIK5DC4NqJT8RI+DE^!YTaK#9)g1Ch)<6))X345~Y{ z@KyiRL9tV9^Q_qMA$tAfqv~I<*gHZT_Lg_vcrK*b@{2R|Mi_N}*0h9^t~=w!hS}M- z%^vK(c6M3$%U=eD1|OJqV0&!pAIA<`65&@{&UHN<)hEaI%oK^!lt}*Z6a9zZZpwCS z%eN}*YUsahOHR=8MO)f7>|N*{x;1@hd!pTwO+o8^@3h1;(!5{#V3Rw&o|fi5e6Vp# zL15CJ)VGtuq`kYVN)+2Rjx6rhY;gR#zk|JBBVN1O*70o2tJdbg=c>!jA-~&xNf^H1 zWv$uh=}%6DE6t{}JR3TDaPl6%6g@w%e;0dPbY11RYacw{c$S_#GwW4Pw{a4>)pbF3 zvSm%FPuj9sqsBD)hAwe$%?-UVvTx?p97X!4U3sNNC0zF6wX$nbhAUD>r5#&45xX!| z|0lfbA=%MTGt6XZpZ=MifzRIt7{Mq_ z34P}6&sVZ7O{(h>>aM2;u#)cu&E$K{d?Inac&Wz4V@`Ivtvg2*^ZJ>U2Ya;pk<|ls zJbBu7NKqF##l5MGzhr%nzT$Vz{i4>laoB>kTRL;?XTLcciS4{Khs(G%>kWr@>eeZ* z!y%Zj{X197ks|IU**GgRemZfX=cf%mhu56kSY7`nUNk5t;)VX%3u&9qQ7dB(E~cej zb3AU-yiwr#IpFCi>w8-bt{$*X^0j@_kLGu8*+7GjF?mg)X7?kpDE#j3LX*gsnYlJQ zzl<2S^3ZanZo|rxd3WB-TVPqS|3%Qy>MgsHDm?qo<`2&P!a1GxN9}>Tt}ltPrfzK? zuJoK}5i`ayVprc|o1Q$EeTW_A`orV?-H9ny?K0))6K$d zw^tXPZ4DpXN#L9`-#U1E|KMH9_J`f0!+I5;Fg3|`dZcr6T*P7alABi>HuRbOXsrh? 
z)R61e`}FyjCOeO%Z80u*y3$Dcz_x9tohZq)^7Hyr8$uZi!&}~#Ka1Pt?f(H^xZLLR zPbHR_9)AeJMj6Od_FE3b#tj?#x&2M>ebc?|_075BN8Ps=&yKBQ1z5%ZNRL{^K*Tx%PZNN;&U0xOWsHL$yOaN3uu*}I9>Z`WJurU zd8rS363HgBQXg*!TC-|$?8x{@Cpc>YZm$$S(7E#HpyBCC`qDFa=-y7*`(*OGu8U9U z-{04et8W*<8|idWr`Hta%JVJy);U{7v%bXk?CE~G+r!;^44JLVa-&{H+A8jyy8lJj zb<(zq$GH<0Umr)A(aHCNLid-8Cv|dcDxVtLtZd&ZST-V2R~%e&Qg?5N?Yu564a?pZ z?hg#cLKoh5xSRd7Q`mEtKI`j?{hZBa_suERdaV;E-UtyyliLsEqstUvZq19?aMxu;<_`g{d`9j>{IsLaFF(7 z#`%`pA9A;jx105N<*TsXFPCl2FU}}@XV3N8Pqo~$yRvnAK}ey(XI9fTU-oaS?@SFZ zc)zo+?W1ACep$9*-`O z4)$w!x~FTy=YvN^ZTqn9_somc%JNBWw*4Y*x7*rxPF_O^N(u;;|1LDy*tT*;^^;pM zbap|^n{?}tTmT>~+Sc;q zZ$AedUFtlMdS-aRicX2+ZQE-Nif4{5KM=I`Q2F>SN}pBt9b7^Ke;T^xJ!f5U*1YEx z^F2O>S6GdAIzKLP(UZKrBi;LzMEO0yQ=WDXXEA==$vqZR|Jg1rD#A)n-(cj{NT`iZl9E1pZwq=tX^ z)ife=RNU|jVU63WvZoL7@19uvYkb()cbUc}vPqq%KAU4MZJ)l-?=a&UpI`p?#MoIQ ztDI90vnjT;BaeCx+q~h#j(lA_C$?g#+laWO2RMVr^z24yw~^W$4Q7YjAF=M)faGCk z&RL6`AGxOIj7hE-f9i_UpeqHvau${KXn1$C^lV8d%8k3jt8+Ys4gL{=O z_Y7|LFL9;L9J1!}0>}56;|3gUI5j!DZP=N&UaGFQTepj9Z3gz$ioh`|U&X_PmP8;d_g3?kzB#p;we=G>iU3;#(5_@VuvE!#%L89wd#`U%tj zp7^&lHTNuUB!^bSE%LY?an!<`-gDsROI52=xAgw_Zqgrp-u9X6^t0pOz3X(Ymro2` z(sJ-*vwX^?A?C5=303K9LpMIC^gg|$dzP=*Y~tlP=SnX>H87j!q+9t&v~_-K$!EX) zS1{(@(8m|7&%CQTH<%t}I;Xb|?bw1F*YXBzpL@N~qmy6t6TW%yxdSdOhela%=eQKi ztr*#MDfe(;ub>)hWI*qeRDDNLN--&9!m6h{g z+Rt{@`XhEcWfVJwj6F5=abm@jhpQ(&m6i-kpEZJ8&DfP@zoKhrVM08$w~PMnO2-}j zHh-{ZWJHgB@zlyzl-Rk_WD7-qfXY|HecKQV{!&M zU(kN*j1J`Z@YY)jd=8j00+oEbT+kuNzu{v1aL%D^f+0|!gIH*wmi`s~oAmiA|G$Z_ zH2QHg&{hK-BGmf-H^uykZIqY482z@?0N)*u-(quPVi8jVybc0?Uuj$i2h4mZhOPZd zOM*dEf7@*zg}$Zc4u&EO?5X~I-N7L&VkUeK4S6_J#_YW&z^6-Ikc02Tzo$(>H2!xq zZ;}uHo<{Qden&$&3=908M)C!JNAoB7BEP4ReDOeInSy?r@5|AgL~~P}=gylqFFX{w zffd5Xy<|V6vM{cCJud*Y}U(HwZ9Zk*G05lf76lb5L{tjLH9lpIpok!o} z`$D@K+d83sz5p7$ovNkr5DkM?(f9QlK4(NiXR7EuQwtbxDP7UvRsK0AI3v+KkCybn1NX{z2w{*x;;PVq*Cmr8T@ENgzGXpc2pc{|s!mpKB zHSo1UvV(bvt}?(4U@S*!@|^*! 
zAL$(cyl$Pr#2^nrQkMxjz7(b|#)Q5wp)X7)!+4551=A0Oas~}!NNMrZU>ygI-VPLF z8fNUQj~P4Z`s$%SV9+UM49qMXlD{5i25p!*vEnIg zI@qoNV^i`mUBpA$Ll-k}(v4T^1G+fVF{Y_uJhBDSA6*Dix*1^IWX4#xa3idnzajJ$ z^3Ydp|4F?ksP(p@>tJ-?@rD1dumDo(Bn%^IjYedC70|K}4dkNoj`@yt{+7%Xc)$7} zq?!&gQT{xnMvcrL^BwDawW)eeI8eM9nD;GxtjAh*L+5zY;7$%KUn>?3W4&QujEgLc z54P5Y&sl~Cs`c%dr` z+yI#@AkT&50Eyod!#H5y!oe6e2J$l@Ero1NEGjPchqAc%4n@An-my`#pdUq|rbv&55Dbf*1~)$J*Bxl* zftCWay&Y)j@cYJIpcQtY?FCvP(Eir-pWlDfR=G`^AdB`2iI>Tx60bP;kPrSV%6&u= z$HmQ-$B&f9hM|EJ#mc0~81@8}OpKDsO1psi36DxW!1BGqa+Ml@lpNX&Dx508#U`*~21S5QK{BrGZ-DlABCJg}BE z&=X|gL9*m{Nl;X5L|jm~Bta&POAd+#;TYzD#YkcjB!Erynh-G7-`hvz=TO^5{8zqg4jX?u|Ie*6y7b{7ERV_m%!?501kR(YGCYMQq zzAG??_7ES2g~UhM#U$9lRI&?`%IqQ~A@O$6j&_cAntuII4*yX}2E)>kWjL}*SUk+m zzj9rl&Z&yHrBZ4C!-3W}BEP5uX%I&vtz8Wf{Xf_L91Ebgxf~&(co@BzlL~3UL^a$B z@mwR&w*%xrEM^OI1jj=xmI^rf-&kuPodA42z%ocRfFroY48tx0z8#=38{%QWSpd&M zY5*L;hmcTPM&=l10}0`_07Eo*IKa&s9Kk~x{0P7r4UXXN8oULdZx@KsA(lZf1rm~p z;Cc;?U_B(HX9K|55ceTG8X&DJ#Q1Qc27v%FuFyQE!7d=04CaNKNwm>+7=(zyl2%;GNAm9k@ zg=91d`_J{4EkJ=C5bc~;%J)@K%uD`nwLj6}KLTMmlm$b{8h}4u!$%~=M04g!qzMoX zx!X9}JJ@g}v0-uHQL!`KZ36s9+PTC2*kK7_k&>8@1iP51Flk&uT!hRnEG}kPNJ5PLTt^!YggsFa5GhRT*cuWw zNGl66KPvdYJxwg4Uxhc6|6kjzxa%alU^qsc59Co?v;_v$3Zmj=# G{r^A7EB`G3 literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Cipher/_raw_ocb.pyd b/env/Lib/site-packages/Crypto/Cipher/_raw_ocb.pyd new file mode 100644 index 0000000000000000000000000000000000000000..5694a13179ae9fe28860fd602e68ea53c5b23fcf GIT binary patch literal 17920 zcmeHP3wTuJnf@np$>cJefC&bwWzeC7D)DfJDhkw@%q1r{(Fg&lw?yG{qO&sORT+hFEcX6OgPeM#s(ni=i=x0IT;w6v*^$q_C&$) z#RG=G@x}GqLJ@mcxN~c`somb()X~uywQmX9!?6x~sKf5PX@kAJvn5z*vlTk@s{Mys z6M27ceI;`g{q(P1u^|2A3k9bbZ_XvG0*GBh>o0;V;d(bN zThsFxvv6oevb_LkF!9S{jmWAM%7_co8se&9>^+oOvkv{C!d;A=BiYE<2-=zcbu&g| zcfifqCDY)A$_VVMct>@c3+-g5&nr?H4ep2ndu%CA)MdJM{W_rWpB=ra47W5zn;47F z0p!9#d9TEg^>e{esbynhG(dGY=o;4G$ojb$OICJi8TmCf!boj?97opAMfHUv;bvfb zOfWXQzN)NDP+dGOl82`1l3X6L9mCLJbZ(h&X 
z+1NaJaImEH?ZI^VY%K5WkUV&_Wa3L;3gnlSGZnp~(QB2{hG+$y<}2?SqJ;zGUb1qy zVkmV#W_{9}x(Pz)PjakA9U(a(AX9B`Qux9kPnM&Q+;_XfD)&VlC34>`huysi4cKM% zY(Ra}c{KF~D(OiIJ;NxNikTVPGZyzcT(0jKl(pZ)>SaRj(>Nb7|1}cSg;WxmdXmwb zQqLD4^{xr0#en2Q*_|Q@+f5Lul48bsld)x~JGAU}*ef;rBC?+@qDo~{N-pG*(tDmu zjpjq^Nyn~Fm0MAhIwmWtSkxh_!>L6uq0#by`ph9JP^11(R*eDGcIYa`-02f(a}&mu zkI`hFGbyXLJ49KHI#$W*E{BYKM*@X`E?XZoy!Pu08Ll=EW%An|#2g z6?UPB*f5kH?lW6qNZp3ZOt`kJ2W$3$d}ngv4K$IcN-O6K+kLWXz64hTblL06Gkut} zIA0>GIz&^s1GC|B-`Q~IP-fn^KhxydYt(bT~I`PeMtTMR{`gP=Sfjy0YIUrM#w9i0LD+ixL&iqqMB->9Tn!mOi4S^LBlkCa>Yn+cI-! z-zckRv?(yZb{#7JzO?_;@~==ctA}DySqYY-mD=)j*?it9U%A}pD@m6zq|~Img7h;V z<$9vluBBg~w9k%WCGz07U3Nlu`buQypnMRQj2~*h90})4Q?lS4#PBy!7vL$f{PqZv z>nMqtzhekptRSZUONF&WekufI@=R9w)Ga_#>v7USDo>m$_qEYp6Lpm93-lc# zGpx~x)M1bym}S25YeQ@>g4G?s20?qts%5v?_T>biZbEoKQ5Kg@y?&6=`8*x1#3(ykg--L`GyuwQ`d@_y%(ch>U)%&iNs>r5 zKs;k!hO>c>IJ$5Um0%CXiTeSz$H9TxTN!J;*1Q|87TX~=+n#`e&G&8+#%Fh5OEGz@ zVW0>M(t54CJXRmL;wj;K?ov5%bGd9BfohQx#od8K@w0MYsiS7~iD{L%uM$KAAiNE`1xCsRi-w9ZkCOHt2D zv3&V6Wt+f!+E8kFy4T(SgzY&QHT8_fic@c)0QS>gL1ZqNd@Ny(A={9$WFzGs;y$-g zPCV{#{Ti3J`wRsr>Pd~M??PF$Xi}d$zU&}$mUZ6g%xRMqYLAFd0FX zb#h-XHA5ZYpnKrU)GFUAdgIj3A=-0QtpxV+M(pr)W$<3edUDxxnSeSG2+%Hr`e-vr z_@;!1F(6)tOpX4{60J2Ccnd1LuNWh5O~}U75BN6zRmTX%O5S~#hU^YF^mI&QmlK!f z6a6OE67NpVL$pN$01A}pQj{U9y!uplLR+b{KDBw^D;{NE0v)0(@p5+A;@?sBDL62- zJwqEM&ZP6Gu-~f=`kXI$)#u#1r#hm+Huvr~tAiaeuR5wuIgh#%9)quFSlI90eKyn) ztm!j3d_~Wq9+UbGM&8<2?$L27Z59|z-0dPF?CgfkQp4#;h z>ML6JsuZ=DGHnZoUM+|e`icfo_8^KJF{ho{=I-sYy$4k|^vhB~`7Bk`XZt09?^~ev zT{^DWJ`~xB9G65QsNOIl5B}UP7mdicDmw)a_REH@8;aecn!khG%CPl_c^?#V@;>w5 zLUX@0WRI>=hV4hpaU?OtFgyrt0J%oImydZ@kHyM)O^)$3<1w3J+X}0z@6tOUXl5%Y z7>**6Rt}dm5a02w4~jE+t+@`FuyqhT{Yx*Yx|~M3ho2wh=Qux~=I6)x`CI&afSbc`LzvN$+DAMWO#Qb+C#?C^wVhF+f|-wLEAWxJK4`u9={183mLA#x6#nA5Hnhn~WT$=-}fop;@Ice5{Jp?79>^zoQ zGld-n3!@R<=He?N+uJmMvu{LP%_CgIG3GqF=Ow4}O(6=B)prqYbGM^DbGN$@3b~!| z=^pY#Rb2(ZnG_azaq~E6hzatEiF8sq=MKL);KHCG>^2@1`n}Iph>8nYg?=ui{J3v~ z;ANuX=5Vz^;#%Byg7A-uN|lD^NnDHjMhU+{R6H7Pl(-i6{fO`@Ma8S(CW&it-wT8< 
z7Zsm|ntn6;=VD$SBOfD zh8IX&i~CLzE{V!I4KI|q7We&xaHpu;py5Rl*W$jH2wy2G0SzyfxEA-lO!&t{rB=gj z64&CspAvqxsL)gh{nc|MuEl+?5PprQ+@#@x#5Gw_Xo?JR-zlM|kI$JgknoLb%pfN= zju1I@lJuZjveJhazDg&ZE*zlaJs zJhWek3OPKqUy2Gj544o1ki$cp5EXKGXs1Pm93I-FsF1@$J0mLO@X%ft6>@lJZ-@#x zJhWel3OT&2{I*0;cHw@VZuNT}$0jJ;l0+0ULU(xTi^5#rNb22-s0epBy5wiP9~}HA1DvaT~jAIPeBpb#CM<}8Gtpsc=0 zqO7kvD5wN$)ivBnfu8zJW7gc z2=uk!Q2=iCHrJ|C0X$%=3#g6O8s#1ES+5a=)N_F=-kpB_sj7`AN9@*({=6^0(T0mz zApw5lyDcX;+H0{;L2#tOfGw9Z9BsHTfZ<4k0sAheINEh#0L76812$jIakTZq0FEOK z2JFC`Ggoa|_y zh5=+p8VuO3Ior`@4FlMYG#IdFbGoA)8wSuFX`nlrGXpYQCG)a26E`pD@WZVT9*#JWo_8 zgaS_UMYxgUMp2lV46$+<-(_|5D;kZRqD5L^Tb47SQ z$MZ#n!Ybf2RfJnPZWR>@t$@=^5njOY0#Tvx3OG#^;e{M86cq}wfYUq?Uc~VtQK2vk zI877b#T+jd6$-V0(<~8g0lSKF&j?WPl3b}yO91$*XT+p@}3cY0IHz4%B z_lVG67kW-(y$SsutDN{ettT-rfEz~v$2+*PD%qVrsSk-Ml=jRlv@kB|Qz ze!T0y#*d)jS=bTiEZhim7G?xG3oinlg%yF$!ihj}weUq)3qy;3yRJA;+T_bb2FDN0FjTk)jI^$%O|$2eW(Ni)2ld?V_p^HX8)q zU(Q8>Uhcj{p``Qm8YPWj@&>TwUfdm1ETTUBUy2_QkkO8Z6>KLO#0~NX@#Ftg{P<75 zhaW$Y&5yWYM%?%-9zW*9j-sC;M}ICgihc?k{khC2`YC4g=Mtmnr-0F)%Zs9)qD6l$ zEsA~$75%xaDEcW*^yiYI=%*mjpUa7&pCUwmE+vY73J?9cj41jkHuUEbqUfi<(4WhP zqMxEde=Z%0ehLZwxojxd_6u70O(7v)N=iQnv9>oY^!#ErLc|*RKM@jE%Ia~@ z;kD_fC2z4kZd!qG(6mDASwQihdc0=!^WkxK_Y;ez!$HuGtP&CNcZ!)udZCv&8*X+t z&^v1IPVZ4-9>IH7=-q`EN&(eAu>(?z6c1{V;-DTOUht~(O<6sid9ZvIqgzW)m}%dU zm7V1!Lhn(i{9#}351<{2qpS3cT68DmH`8VJ6;skeU%udFO?kqTH`|*~yk*);7bW+t z((k|$E~~6QpS~D{ekdn4AjH3+JVg&Jpx`~!hVs?N__anK#OW%z_axu+7mA%gt?qcCz|mZZ2;W`Q|G*<#Rwv;KQ+y~ zyPbaE;7rc;M047FOz5u?0_q7Pg7;_RZo!N6oWYdUXH0sFlHRx*4`-`v#-tmVyC@lV zFIooU93=h9Wi87m%GugjaU<_Z1l4aQOG)d_ceJ+YTz@U+#K zEP6^Ef#Qzm35QNw6Q+4Zc!2wC@@IL4&kFs6$#GNRpyM-cu}YX8AbKZ4@3d=GH(rfD zTce&x(W?q=xE1=6fWGVSsd%gN6Swl}xqy1q{T4G}NTV0IRhPB4xz6qE_!Wq$HW*yl4Du0J@pKbz1)bmK$-kn=B4AA%pzZH z1{JRj@xqnfiv<$4C&y^f`nuGOc&b&~nA&zqt4YLjFVsZq)8IV1dllY?C549v)j{AG z!#~OIzztfphjsaiF3;+6PC&C?sY{nG>vh?s%RRb$RG0m_9MR<&UFOy1UWP+YuhHf0 zy1f6!+123U;C!JGDP1{J3a;z!+h{hw<|@lxpgc20ck6h@vO(EOCF?9EEl_# 
zFgXXS;rZ%vF`jR84n{ffQFt!K^KHw)>Uq8{oN=joU+Q_jZnD6KGvAeyFT1AGj(hLD zw`B`1gC%|Xb%dkMjv6N6v`Dea|r|smh`$YKf`=M)WZkd&pMGIJ2OFk=G zX<=p3-0`0pA!{q`bPhg(g%3c0A4%Sq^?7v$!NLSwi-2npq@pfEfq@llf!%6i*08C|*qA5k{T2;26SJ)> zU^dB8S3vDuhIcx8{gHCUeuHB#>L+Q)GqAim$WkjSZNX8W&q~oosZ`QsC^KX14zV)B z0JBgYJ{}fUC|SC+{-7@_%}khA)J5Y+{ih$vOkWl(pfN0Fg<(rqJ-kAMZNa5fZY~o( zXYI;wwA2yqBD~zp3Z61|nHr6C^oBlzFP>*%^Zu=n6~0!)%!iBFqYlP?j^oSKj4ejF z+%y~vc^UGu_sRR8Z7;bDe)CcIuB^3}o6c=7*M>EmRd&GwGb`M0nbKxUyFMC^#Wc6C z!OspCFmrt&{y>DWeC%yIGVSH0(H`bpEtOq6Ti8vWs)m+eb9iT0 zRMSISgAq*)cFe$$Xj3@a(AgCXH$_989Xx9!+Szp;6b(0ZL}p6G=r44FtZrvbM=08m zX_tCL+?V;yr#Q;Rm|c$>32z0~4NT6$*4&8k0M?j=-3}}XY_{JA@oDa;{MfWU+U9BM zipIi0PbWS wW_Ojw|^^R8HzKi1JqbL;DfhIcadDp@v#!qHe$+s2OVp^g@ew0UF4 zwx*7jwqT2IM{}@?vin1A(O?*V!C~GIjQ&ouJ+C?#^~A#AU`Mnr+}Rw&*7Y~$`e3*{ z)BzXCKT6v%7(p?%-gI*;7~WYI47YZM+nYL?aTPIKMwl(Wu=bi3 zwj>L!-xdxwwE$%QO6Z22k!Y~JKGYs`M{of_(%2G%Hxy~wf)vjj2yF>Bg?DE#W+^*8zgPN}k`t&P@lLxZOw(iLnD zwT7A-w86t@Edd^hwlqX{b_E+k9j%>Mgpp{tb7w;vDyKhVYY((3?d+Nvy&=f1XAP0ywuV+d zVGY#1a@Nq?+0h!>icxOpXbOirw>LC}x88%tB3eo9f-|OI$33BNXGc3t23whvqq8Hp z14}$g!^hZVtRc7~*c^)n8*&Wu(H`Yvtf?!sygjlUZneBQ99_OG*wnSWZN>5x%k^tK z&yK$Wjbe(jv#g;JYVLxke(?LB8G++>*hgDmNT)M@;gZ?QC~m=S$gZ;M{W_<~OkAU* z@LCW4n6iQS*p1NEvriyuu4ilUuZ4wJm<3rgi$d#U9n8*l;h%lFj2ETeuS}+$2OC%p~Q zw1BZQ&`DO{Ae`jQy50i$t%bl){{hG|I95R?iRVRJzY_8|?k~K+DaN}SM+KjJtCJv*MJnEWBclXk?hB=_T3 z1D)hQ;0Qn`c>u>Y=-F{K#n+EG>E)#+zYYCy=p;vRJPDoTNgQL)v*Tfkoj2h)2b|<9 zIErgDY5f@b1D)<&?#4lL5rupnhX@?c*4SAbn{h38zQ*rC8==#^DBUyKk)I^pPm)fO z?gdFFN%w7!Lnrwdj$)Mk;P;1&fC142MI@{{w-&nh;P)NFo>;21DEw$suq@$&&4PnOGr8|R>rPqJl zR=9>@q4q6pJME~TBeHgBEZlKjq_i#3Bkwik!&7X^LYkwlOg-Qf$L0D;Eb0p-G4=R9~_1 zz!mK2IY+974gMol9X;WMGZv3-jtBdkp8EYRRa3^FHfFifb)MedD5WPPo7$&@_%)5jtbA5EK}i>TK~jP zCQcTEe&Q#x4zD_;(Wg~Kjx;^}0I7%!Tx2O=?AeJnAT{x&Zgg08w z-on_q^$M@LlBfkr%iuglESqRYn954fD(_2|IK zrE`AitoxAs29ykDt%GA|C?`#f?K-}*#pUsQ&yZ@m?ytbK4n2qWsL29?Opvn>8qA8T z<(KAwdJ8K{t^$i^?CYHbEUN%y8&>jAHhGi0Q4@Ef{zk3-DylzSPD+QDYD>veuk3nJ 
z9xugG8&r}jWQQ4=q~ko*z(g09#^i;lLZ=SD^!x!@dp_w$Tq^cUmhUcP%&QzzOvkVs zRUb_ixwBll*HyKSLMsf3J9-YQ2iGP#V#Z-6IK)o-|=9>XX{2kvptu9xI$ zVCRot^GfP9gZv3dOS!4Nnh~qWh=&hz=^#&UaQf38N4g#J!_DjOQYXmclcuG&FhxV- zN$WEEZx%E5V5vJh`3Abk+^(c97`C=@$y9;SbGW%|w8d3Nvolqx*uR9}DjiRQgM~2o zU2x$w=_hcR{N>*(%1qT43aczweOgsYF(f|>rS)j$9;aq!;c`B79;-@z{tt?Rjz5n+ z(6M+$UPiIkfHkCurj$~g^jzw(wxCDxCbayR)-pLKv%!EFsGF+H%PRI?T$i_Ds>J;Y z)k-;<%6`m-$9rGkzFdCZ)b&i0=k!Szq@Qx>EH+8fB>w}P;Fm_FA9GqDmlgr$KQQ?@ zn2z?Iwb}+a;Xb6;KZ$71r56?Z$AKtzQ6$sYb*5qeRq&Z? z$9!Y(rTb|q9=Qo?z8DiEe*+YkTCnKSrqf{X4B@FPcAsfG(UR%M1k$D!#8{H_0Ow$h znmzyy^*U;D0HdiKHC?Y`!`Kas-=C$;wSZcV8V%I*3}3Q5aWnX7ztNdz8n|TnRGx{3 zlNg4KBt*fu8pw~Hwd=`&9xk0w>}#({Cg z@Vk1rgj&BeMsBIY^bs>gM2-6pZ7Zi@pND43Xi6!1;5#&Vf&1>s&z*A}m(FNYV0!(7 zQ2uQF-d7htPNp{Ik|IwD4)oIIQ0((nqgow2-D*?p43w1RO9(%+K*cj{PK|z<=vF71 z+4#tWle>YtTW#Du!uMmyc%c0T5bl|-xV2>j?hld)SSg&pc@*RwL}KPQ=3rtCEh-yN zHGKtrvt)^4m(bBnBt8tkoyU?aec$BfxW)AoR(X>R?FWj7Qi->BDO{yxLho3tND z@BLcaQu!Igqp7RqwV)9;|6XGo>$6j(^5;P8GpHR>8wCF*tTP1vM>TzUT@ONTFq@=C z^W?Zjl*(ZsQ)dd=%Xfhzufa=aQhSQ2Jl#uYjp%ad8}uRtGwjhx`5=NIBFp^LZw<-0 zYUtq75iX4?_S09>cspDhB)_X&-?_aMzC~;jb^B6Vb*vAnN<OVyp_A3; zi7r)Xu@+u-cUz;TAUPm4ru^Fapy^*AfVKEM0FeYiz69{R>2bUbs>fl%ekj2ijF-9& za6FC-x82QH_cGHCEVX1m4_TfDg2VR{1d3nSv5a!_WMH@)5u|&W*OBb--~2=C_q>%n zbEktB9|c;%GZh2=OvMX4UFq^Q9upnX9Ton@k$9POySaG8-Q(nR#{oqv zr82eM#Xdz?8P;V9V3E2^9XZY4yH3cXaIJeBekmV(C0WWpZGWC(nd(YSW}xX=aM-RB zqH%CMSs|ZC1DvP-36LK!t+9;h7ZA&(s-Gk2oVw=L@ys_}p5J1KJI+voBAw78_5Emz z9*xqAM{e2=oXO8&`E+gH5Dj|LA;R(m%=&8xq_x0V-iZ^w-Hz1@*g;EHv0I@Ni2$84&_{Fsk5YriB(N80t-z0F8&TVm2<0$?>FG^CI52kS4I+*Es`mQ z6TzDrGqj{l0??^a@a_dNj2O>aGW@rV(+}K8wxahIiLw@BzHjS+*XAV3TFXb!_9iqL zH7Pe}hkHxf@;ji`1HY`arXC?hY0EFccoyN5rLUXN>aIy_gT%Fo1k@Xj@{ylAdHGR} zQ8_7ja9*FgEtkAYGJO})sWJ1YX%CQMQegTLFz@-CQ*20$IY&)9L1Kzg@er_KNF8mt z;do2q@uWl5Yyz^@lHMZfVi{EOzjZD(LwA> zk5YX*zUYiete`|Ila8|wR;>iZe>J&gB0Lk@l9W^y$?bO;%v zmrgH)JuV@;8B>rkQ|h(?4PdBRbIB47@vPJHX^z}$>-89w8wocu<2LCm4ka#~=F&yz zwhk6!&JoM3gKxo2^aV9|76>}>oQfynwhAY6H+)s6w>3)CuDHm9@kq&b$5T&1Ro; 
z*e8v)A*Z?-fp*$&ZO;0ngD^;3WL=1stIa%9S<02H^_h+`Tm3rZ9?$KZ&z0wWh{_XG z@5@x+2wrV&yLs9=^hrcJuXNBaX=c)-_WSWk$v-{96n1K$w5x(^~Rj`1@-g1 z8)y0N)+(5;`_Cs$wvqqN<^5Nb_uoYfTlL>GkazPRR^V*^sm}Yn+QMgsGvC#JC#auy z_TR^5{O6aRga3G={Kx!9p7ZXU?|TRT`EHgq|GBg8;l2+}zlZzYB_BHBljKXT6ML}i z#1J2PMZAwoUI$NIux%+-m)X=B-uRqYia9A}Q!5WR;hxMLKU3`MZR9cDmXjW=9vxRBTQO?vRd69a(;FFHY z7PQt{s+lT%H?&HK8b9@>Uc2Cz4tdWrqhC5EUg?!Q=C)9~@y+A@;$u8C=D*>XFO<7O zs>CjBwjq@TFrRNR6kEk|>ob6 zg8J&cP~cfdSs6C>t~L*ag^iQ_Z{pLhfbEpK!FgD%M3l+PYr>=dl^3p41B5}sv4jJi z;;uhqxfh1|rPi3Vu7^u)ow6C?q^S-EC^S(>F}M%yXh6KpT6d2~BjCe_?>~Qm9=Nbf z8+65O8oW`5x9M=b4n-aA)Zvpl9M<8u4rLwwL5Ge;P47}2-l4<1t9x~NP=`yS7~(YGMMl8{|ehZ zn*M1W=Jg@O%zB8QzoVc)=b>ooM!5zBPXihKCwR=4*_H3YyhO9xp~F1?68sa{b4maI z5V7g@Idxd0!@8B4{dqO3nmhb-@8Ow?>jJt9arH&0&QrzMhXM0ES6v<30^Z+U;`OWd zAau7<8>YJq7k+-mJqqq$YDlK8F4VmlBB0GgH?P4@2hCoRX_(Esn|PjCJfAAZXY*9K z_F24kRc^yDTUo^7ml1f=1JQSUdXFeOpgHIu4#=hDW=A zO@mi32l(L|cSVS4he@|KFEou9fcnSs_fsCwW_-%jZb~GmI)C4mzn5a2xby;By?=7i zEP@8(u^FCf6Y<~cfos`ZyP4U=In3Tw%Ir%^n7w}P#QAHrdxh^~?cF?=scVb$d=~iQ z1+)R5el0!$R={fC=8J^IKwc$!p?#cx?xV9AV~Odx z3Gu%T-T-)y74Z6(Gxl}x_7(6R123!lA+N6xzMt=C-uPGJ8nL%I7!#9mp*e~luJB&~ zCyD2eM(M*TmA0(+OFPSIzIz}=0GAQgu>n7P(X7Yyj2Z;LhK5N+hRf> z9O;f?YbC^ZbX%Yo${G6z>l6AC0eSa@+?VlkG|;!s>% z+#>{Ii+h(WUb0vp*Jbti4%s2qjS9bWkt!~aOTerIyZ|@ zZgJ|mFKCeX?>~P&94lnuZMK3<6%1K>{rTI~3j2jDiq@1t}AC-^D~^;I#Su|+6^*8m1}ybJI# z9VhrL9p49dT*nFiO2?-G+pmHxAJ*UiaMDR|w~iA$g+g{t0N(G=@LoXU)r|FmPp}4s zWC-4=<6VH?UI0IUzYmb^_=kZL#P0?w{vp64$ZrpVPkGfRP+kXKm?IYElayCJiagA5 zs|KF{UI(1uFp39wVeVI$7gE0XGRgyxA$S61H*kV)qC5qh^1H90JPrJ5z+))nla~Q+ zslr@=kKa1j8kFO(6Tf$;-y##h>3f&H9li{Cg7jTMI6?X*Ae@x>773icvZXj-&QNGw|M5lh+fU?L&(ZR*|Tgn~$-X;CsB zxh)au5&D9O#eLyWJer7hi;F|izT1L{zS=EI7CDgyhP#nDtuO42RNa!-1>$l8|I+C? 
s?SE#?du)3gdz^b}_Vn)=*yGq+x3^*M@x2p!<-OB;|I%Ib=RE%Z29YK&hX4Qo literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Hash/BLAKE2b.py b/env/Lib/site-packages/Crypto/Hash/BLAKE2b.py new file mode 100644 index 0000000..a00e0b4 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/BLAKE2b.py @@ -0,0 +1,247 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +from binascii import unhexlify + +from Crypto.Util.py3compat import bord, tobytes + +from Crypto.Random import get_random_bytes +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_blake2b_lib = load_pycryptodome_raw_lib("Crypto.Hash._BLAKE2b", + """ + int blake2b_init(void **state, + const uint8_t *key, + size_t key_size, + size_t digest_size); + int blake2b_destroy(void *state); + int blake2b_update(void *state, + const uint8_t *buf, + size_t len); + int blake2b_digest(const void *state, + uint8_t digest[64]); + int blake2b_copy(const void *src, void *dst); + """) + + +class BLAKE2b_Hash(object): + """A BLAKE2b hash object. + Do not instantiate directly. Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The internal block size of the hash algorithm in bytes. + block_size = 64 + + def __init__(self, data, key, digest_bytes, update_after_digest): + + # The size of the resulting hash in bytes. + self.digest_size = digest_bytes + + self._update_after_digest = update_after_digest + self._digest_done = False + + # See https://tools.ietf.org/html/rfc7693 + if digest_bytes in (20, 32, 48, 64) and not key: + self.oid = "1.3.6.1.4.1.1722.12.2.1." 
+ str(digest_bytes) + + state = VoidPointer() + result = _raw_blake2b_lib.blake2b_init(state.address_of(), + c_uint8_ptr(key), + c_size_t(len(key)), + c_size_t(digest_bytes) + ) + if result: + raise ValueError("Error %d while instantiating BLAKE2b" % result) + self._state = SmartPointer(state.get(), + _raw_blake2b_lib.blake2b_destroy) + if data: + self.update(data) + + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (bytes/bytearray/memoryview): The next chunk of the message being hashed. + """ + + if self._digest_done and not self._update_after_digest: + raise TypeError("You can only call 'digest' or 'hexdigest' on this object") + + result = _raw_blake2b_lib.blake2b_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while hashing BLAKE2b data" % result) + return self + + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(64) + result = _raw_blake2b_lib.blake2b_digest(self._state.get(), + bfr) + if result: + raise ValueError("Error %d while creating BLAKE2b digest" % result) + + self._digest_done = True + + return get_raw_buffer(bfr)[:self.digest_size] + + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in tuple(self.digest())]) + + + def verify(self, mac_tag): + """Verify that a given **binary** MAC (computed by another party) + is valid. + + Args: + mac_tag (bytes/bytearray/memoryview): the expected MAC of the message. + + Raises: + ValueError: if the MAC does not match. 
It means that the message + has been tampered with or that the MAC key is incorrect. + """ + + secret = get_random_bytes(16) + + mac1 = new(digest_bits=160, key=secret, data=mac_tag) + mac2 = new(digest_bits=160, key=secret, data=self.digest()) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + + def hexverify(self, hex_mac_tag): + """Verify that a given **printable** MAC (computed by another party) + is valid. + + Args: + hex_mac_tag (string): the expected MAC of the message, as a hexadecimal string. + + Raises: + ValueError: if the MAC does not match. It means that the message + has been tampered with or that the MAC key is incorrect. + """ + + self.verify(unhexlify(tobytes(hex_mac_tag))) + + + def new(self, **kwargs): + """Return a new instance of a BLAKE2b hash object. + See :func:`new`. + """ + + if "digest_bytes" not in kwargs and "digest_bits" not in kwargs: + kwargs["digest_bytes"] = self.digest_size + + return new(**kwargs) + + +def new(**kwargs): + """Create a new hash object. + + Args: + data (bytes/bytearray/memoryview): + Optional. The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`BLAKE2b_Hash.update`. + digest_bytes (integer): + Optional. The size of the digest, in bytes (1 to 64). Default is 64. + digest_bits (integer): + Optional and alternative to ``digest_bytes``. + The size of the digest, in bits (8 to 512, in steps of 8). + Default is 512. + key (bytes/bytearray/memoryview): + Optional. The key to use to compute the MAC (1 to 64 bytes). + If not specified, no key will be used. + update_after_digest (boolean): + Optional. By default, a hash object cannot be updated anymore after + the digest is computed. When this flag is ``True``, such check + is no longer enforced. 
+ + Returns: + A :class:`BLAKE2b_Hash` hash object + """ + + data = kwargs.pop("data", None) + update_after_digest = kwargs.pop("update_after_digest", False) + + digest_bytes = kwargs.pop("digest_bytes", None) + digest_bits = kwargs.pop("digest_bits", None) + if None not in (digest_bytes, digest_bits): + raise TypeError("Only one digest parameter must be provided") + if (None, None) == (digest_bytes, digest_bits): + digest_bytes = 64 + if digest_bytes is not None: + if not (1 <= digest_bytes <= 64): + raise ValueError("'digest_bytes' not in range 1..64") + else: + if not (8 <= digest_bits <= 512) or (digest_bits % 8): + raise ValueError("'digest_bytes' not in range 8..512, " + "with steps of 8") + digest_bytes = digest_bits // 8 + + key = kwargs.pop("key", b"") + if len(key) > 64: + raise ValueError("BLAKE2s key cannot exceed 64 bytes") + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + return BLAKE2b_Hash(data, key, digest_bytes, update_after_digest) diff --git a/env/Lib/site-packages/Crypto/Hash/BLAKE2b.pyi b/env/Lib/site-packages/Crypto/Hash/BLAKE2b.pyi new file mode 100644 index 0000000..ac3bf57 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/BLAKE2b.pyi @@ -0,0 +1,31 @@ +from typing import Any, Union + +Buffer = Union[bytes, bytearray, memoryview] + +class BLAKE2b_Hash(object): + block_size: int + digest_size: int + oid: str + + def __init__(self, + data: Buffer, + key: Buffer, + digest_bytes: bytes, + update_after_digest: bool) -> None: ... + def update(self, data: Buffer) -> BLAKE2b_Hash: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + def new(self, + data: Buffer = ..., + digest_bytes: int = ..., + digest_bits: int = ..., + key: Buffer = ..., + update_after_digest: bool = ...) -> BLAKE2b_Hash: ... 
+ +def new(data: Buffer = ..., + digest_bytes: int = ..., + digest_bits: int = ..., + key: Buffer = ..., + update_after_digest: bool = ...) -> BLAKE2b_Hash: ... diff --git a/env/Lib/site-packages/Crypto/Hash/BLAKE2s.py b/env/Lib/site-packages/Crypto/Hash/BLAKE2s.py new file mode 100644 index 0000000..9b25c4a --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/BLAKE2s.py @@ -0,0 +1,247 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +from binascii import unhexlify + +from Crypto.Util.py3compat import bord, tobytes + +from Crypto.Random import get_random_bytes +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_blake2s_lib = load_pycryptodome_raw_lib("Crypto.Hash._BLAKE2s", + """ + int blake2s_init(void **state, + const uint8_t *key, + size_t key_size, + size_t digest_size); + int blake2s_destroy(void *state); + int blake2s_update(void *state, + const uint8_t *buf, + size_t len); + int blake2s_digest(const void *state, + uint8_t digest[32]); + int blake2s_copy(const void *src, void *dst); + """) + + +class BLAKE2s_Hash(object): + """A BLAKE2s hash object. + Do not instantiate directly. Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The internal block size of the hash algorithm in bytes. + block_size = 32 + + def __init__(self, data, key, digest_bytes, update_after_digest): + + # The size of the resulting hash in bytes. + self.digest_size = digest_bytes + + self._update_after_digest = update_after_digest + self._digest_done = False + + # See https://tools.ietf.org/html/rfc7693 + if digest_bytes in (16, 20, 28, 32) and not key: + self.oid = "1.3.6.1.4.1.1722.12.2.2." 
+ str(digest_bytes) + + state = VoidPointer() + result = _raw_blake2s_lib.blake2s_init(state.address_of(), + c_uint8_ptr(key), + c_size_t(len(key)), + c_size_t(digest_bytes) + ) + if result: + raise ValueError("Error %d while instantiating BLAKE2s" % result) + self._state = SmartPointer(state.get(), + _raw_blake2s_lib.blake2s_destroy) + if data: + self.update(data) + + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + if self._digest_done and not self._update_after_digest: + raise TypeError("You can only call 'digest' or 'hexdigest' on this object") + + result = _raw_blake2s_lib.blake2s_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while hashing BLAKE2s data" % result) + return self + + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(32) + result = _raw_blake2s_lib.blake2s_digest(self._state.get(), + bfr) + if result: + raise ValueError("Error %d while creating BLAKE2s digest" % result) + + self._digest_done = True + + return get_raw_buffer(bfr)[:self.digest_size] + + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in tuple(self.digest())]) + + + def verify(self, mac_tag): + """Verify that a given **binary** MAC (computed by another party) + is valid. + + Args: + mac_tag (byte string/byte array/memoryview): the expected MAC of the message. + + Raises: + ValueError: if the MAC does not match. 
+        key (byte string):
+            Optional. The key to use to compute the MAC (1 to 32 bytes).
+            If not specified, no key will be used.
+ + Returns: + A :class:`BLAKE2s_Hash` hash object + """ + + data = kwargs.pop("data", None) + update_after_digest = kwargs.pop("update_after_digest", False) + + digest_bytes = kwargs.pop("digest_bytes", None) + digest_bits = kwargs.pop("digest_bits", None) + if None not in (digest_bytes, digest_bits): + raise TypeError("Only one digest parameter must be provided") + if (None, None) == (digest_bytes, digest_bits): + digest_bytes = 32 + if digest_bytes is not None: + if not (1 <= digest_bytes <= 32): + raise ValueError("'digest_bytes' not in range 1..32") + else: + if not (8 <= digest_bits <= 256) or (digest_bits % 8): + raise ValueError("'digest_bytes' not in range 8..256, " + "with steps of 8") + digest_bytes = digest_bits // 8 + + key = kwargs.pop("key", b"") + if len(key) > 32: + raise ValueError("BLAKE2s key cannot exceed 32 bytes") + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + return BLAKE2s_Hash(data, key, digest_bytes, update_after_digest) diff --git a/env/Lib/site-packages/Crypto/Hash/BLAKE2s.pyi b/env/Lib/site-packages/Crypto/Hash/BLAKE2s.pyi new file mode 100644 index 0000000..374b3a4 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/BLAKE2s.pyi @@ -0,0 +1,26 @@ +from typing import Any, Union + +Buffer = Union[bytes, bytearray, memoryview] + +class BLAKE2s_Hash(object): + block_size: int + digest_size: int + oid: str + + def __init__(self, + data: Buffer, + key: Buffer, + digest_bytes: bytes, + update_after_digest: bool) -> None: ... + def update(self, data: Buffer) -> BLAKE2s_Hash: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + def new(self, **kwargs: Any) -> BLAKE2s_Hash: ... + +def new(data: Buffer = ..., + digest_bytes: int = ..., + digest_bits: int = ..., + key: Buffer = ..., + update_after_digest: bool = ...) -> BLAKE2s_Hash: ... 
diff --git a/env/Lib/site-packages/Crypto/Hash/CMAC.py b/env/Lib/site-packages/Crypto/Hash/CMAC.py new file mode 100644 index 0000000..7585617 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/CMAC.py @@ -0,0 +1,302 @@ +# -*- coding: utf-8 -*- +# +# Hash/CMAC.py - Implements the CMAC algorithm +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from binascii import unhexlify + +from Crypto.Hash import BLAKE2s +from Crypto.Util.strxor import strxor +from Crypto.Util.number import long_to_bytes, bytes_to_long +from Crypto.Util.py3compat import bord, tobytes, _copy_bytes +from Crypto.Random import get_random_bytes + + +# The size of the authentication tag produced by the MAC. +digest_size = None + + +def _shift_bytes(bs, xor_lsb=0): + num = (bytes_to_long(bs) << 1) ^ xor_lsb + return long_to_bytes(num, len(bs))[-len(bs):] + + +class CMAC(object): + """A CMAC hash object. + Do not instantiate directly. Use the :func:`new` function. 
+ + :ivar digest_size: the size in bytes of the resulting MAC tag + :vartype digest_size: integer + """ + + digest_size = None + + def __init__(self, key, msg, ciphermod, cipher_params, mac_len, + update_after_digest): + + self.digest_size = mac_len + + self._key = _copy_bytes(None, None, key) + self._factory = ciphermod + self._cipher_params = cipher_params + self._block_size = bs = ciphermod.block_size + self._mac_tag = None + self._update_after_digest = update_after_digest + + # Section 5.3 of NIST SP 800 38B and Appendix B + if bs == 8: + const_Rb = 0x1B + self._max_size = 8 * (2 ** 21) + elif bs == 16: + const_Rb = 0x87 + self._max_size = 16 * (2 ** 48) + else: + raise TypeError("CMAC requires a cipher with a block size" + " of 8 or 16 bytes, not %d" % bs) + + # Compute sub-keys + zero_block = b'\x00' * bs + self._ecb = ciphermod.new(key, + ciphermod.MODE_ECB, + **self._cipher_params) + L = self._ecb.encrypt(zero_block) + if bord(L[0]) & 0x80: + self._k1 = _shift_bytes(L, const_Rb) + else: + self._k1 = _shift_bytes(L) + if bord(self._k1[0]) & 0x80: + self._k2 = _shift_bytes(self._k1, const_Rb) + else: + self._k2 = _shift_bytes(self._k1) + + # Initialize CBC cipher with zero IV + self._cbc = ciphermod.new(key, + ciphermod.MODE_CBC, + zero_block, + **self._cipher_params) + + # Cache for outstanding data to authenticate + self._cache = bytearray(bs) + self._cache_n = 0 + + # Last piece of ciphertext produced + self._last_ct = zero_block + + # Last block that was encrypted with AES + self._last_pt = None + + # Counter for total message size + self._data_size = 0 + + if msg: + self.update(msg) + + def update(self, msg): + """Authenticate the next chunk of message. 
+ + Args: + data (byte string/byte array/memoryview): The next chunk of data + """ + + if self._mac_tag is not None and not self._update_after_digest: + raise TypeError("update() cannot be called after digest() or verify()") + + self._data_size += len(msg) + bs = self._block_size + + if self._cache_n > 0: + filler = min(bs - self._cache_n, len(msg)) + self._cache[self._cache_n:self._cache_n+filler] = msg[:filler] + self._cache_n += filler + + if self._cache_n < bs: + return self + + msg = memoryview(msg)[filler:] + self._update(self._cache) + self._cache_n = 0 + + remain = len(msg) % bs + if remain > 0: + self._update(msg[:-remain]) + self._cache[:remain] = msg[-remain:] + else: + self._update(msg) + self._cache_n = remain + return self + + def _update(self, data_block): + """Update a block aligned to the block boundary""" + + bs = self._block_size + assert len(data_block) % bs == 0 + + if len(data_block) == 0: + return + + ct = self._cbc.encrypt(data_block) + if len(data_block) == bs: + second_last = self._last_ct + else: + second_last = ct[-bs*2:-bs] + self._last_ct = ct[-bs:] + self._last_pt = strxor(second_last, data_block[-bs:]) + + def copy(self): + """Return a copy ("clone") of the CMAC object. + + The copy will have the same internal state as the original CMAC + object. + This can be used to efficiently compute the MAC tag of byte + strings that share a common initial substring. + + :return: An :class:`CMAC` + """ + + obj = self.__new__(CMAC) + obj.__dict__ = self.__dict__.copy() + obj._cbc = self._factory.new(self._key, + self._factory.MODE_CBC, + self._last_ct, + **self._cipher_params) + obj._cache = self._cache[:] + obj._last_ct = self._last_ct[:] + return obj + + def digest(self): + """Return the **binary** (non-printable) MAC tag of the message + that has been authenticated so far. + + :return: The MAC tag, computed over the data processed so far. + Binary form. 
+    def hexverify(self, hex_mac_tag):
+        """Verify that a given **printable** MAC (computed by another
+        party) is valid.
+
+        :raises ValueError: if the MAC does not match. It means that
+            the message has been tampered with or that the MAC key
+            is incorrect.
+        """
+ + Args: + key (byte string/byte array/memoryview): + key for the CMAC object. + The key must be valid for the underlying cipher algorithm. + For instance, it must be 16 bytes long for AES-128. + ciphermod (module): + A cipher module from :mod:`Crypto.Cipher`. + The cipher's block size has to be 128 bits, + like :mod:`Crypto.Cipher.AES`, to reduce the probability + of collisions. + msg (byte string/byte array/memoryview): + Optional. The very first chunk of the message to authenticate. + It is equivalent to an early call to `CMAC.update`. Optional. + cipher_params (dict): + Optional. A set of parameters to use when instantiating a cipher + object. + mac_len (integer): + Length of the MAC, in bytes. + It must be at least 4 bytes long. + The default (and recommended) length matches the size of a cipher block. + update_after_digest (boolean): + Optional. By default, a hash object cannot be updated anymore after + the digest is computed. When this flag is ``True``, such check + is no longer enforced. 
+ Returns: + A :class:`CMAC` object + """ + + if ciphermod is None: + raise TypeError("ciphermod must be specified (try AES)") + + cipher_params = {} if cipher_params is None else dict(cipher_params) + + if mac_len is None: + mac_len = ciphermod.block_size + + if mac_len < 4: + raise ValueError("MAC tag length must be at least 4 bytes long") + + if mac_len > ciphermod.block_size: + raise ValueError("MAC tag length cannot be larger than a cipher block (%d) bytes" % ciphermod.block_size) + + return CMAC(key, msg, ciphermod, cipher_params, mac_len, + update_after_digest) diff --git a/env/Lib/site-packages/Crypto/Hash/CMAC.pyi b/env/Lib/site-packages/Crypto/Hash/CMAC.pyi new file mode 100644 index 0000000..33773aa --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/CMAC.pyi @@ -0,0 +1,30 @@ +from types import ModuleType +from typing import Union, Dict + +Buffer = Union[bytes, bytearray, memoryview] + +digest_size: int + +class CMAC(object): + digest_size: int + + def __init__(self, + key: Buffer, + msg: Buffer, + ciphermod: ModuleType, + cipher_params: dict, + mac_len: int, update_after_digest: bool) -> None: ... + def update(self, data: Buffer) -> CMAC: ... + def copy(self) -> CMAC: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + + +def new(key: Buffer, + msg: Buffer = ..., + ciphermod: ModuleType = ..., + cipher_params: Dict = ..., + mac_len: int = ..., + update_after_digest: bool = ...) -> CMAC: ... diff --git a/env/Lib/site-packages/Crypto/Hash/HMAC.py b/env/Lib/site-packages/Crypto/Hash/HMAC.py new file mode 100644 index 0000000..e82bb9d --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/HMAC.py @@ -0,0 +1,213 @@ +# +# HMAC.py - Implements the HMAC algorithm as described by RFC 2104. +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util.py3compat import bord, tobytes + +from binascii import unhexlify + +from Crypto.Hash import MD5 +from Crypto.Hash import BLAKE2s +from Crypto.Util.strxor import strxor +from Crypto.Random import get_random_bytes + +__all__ = ['new', 'HMAC'] + + +class HMAC(object): + """An HMAC hash object. + Do not instantiate directly. Use the :func:`new` function. 
+ + :ivar digest_size: the size in bytes of the resulting MAC tag + :vartype digest_size: integer + """ + + def __init__(self, key, msg=b"", digestmod=None): + + if digestmod is None: + digestmod = MD5 + + if msg is None: + msg = b"" + + # Size of the MAC tag + self.digest_size = digestmod.digest_size + + self._digestmod = digestmod + + if isinstance(key, memoryview): + key = key.tobytes() + + try: + if len(key) <= digestmod.block_size: + # Step 1 or 2 + key_0 = key + b"\x00" * (digestmod.block_size - len(key)) + else: + # Step 3 + hash_k = digestmod.new(key).digest() + key_0 = hash_k + b"\x00" * (digestmod.block_size - len(hash_k)) + except AttributeError: + # Not all hash types have "block_size" + raise ValueError("Hash type incompatible to HMAC") + + # Step 4 + key_0_ipad = strxor(key_0, b"\x36" * len(key_0)) + + # Start step 5 and 6 + self._inner = digestmod.new(key_0_ipad) + self._inner.update(msg) + + # Step 7 + key_0_opad = strxor(key_0, b"\x5c" * len(key_0)) + + # Start step 8 and 9 + self._outer = digestmod.new(key_0_opad) + + def update(self, msg): + """Authenticate the next chunk of message. + + Args: + data (byte string/byte array/memoryview): The next chunk of data + """ + + self._inner.update(msg) + return self + + def _pbkdf2_hmac_assist(self, first_digest, iterations): + """Carry out the expensive inner loop for PBKDF2-HMAC""" + + result = self._digestmod._pbkdf2_hmac_assist( + self._inner, + self._outer, + first_digest, + iterations) + return result + + def copy(self): + """Return a copy ("clone") of the HMAC object. + + The copy will have the same internal state as the original HMAC + object. + This can be used to efficiently compute the MAC tag of byte + strings that share a common initial substring. 
+        # Synchronize the state
+ + Args: + key (bytes/bytearray/memoryview): + key for the MAC object. + It must be long enough to match the expected security level of the + MAC. + msg (bytes/bytearray/memoryview): + Optional. The very first chunk of the message to authenticate. + It is equivalent to an early call to :meth:`HMAC.update`. + digestmod (module): + The hash to use to implement the HMAC. + Default is :mod:`Crypto.Hash.MD5`. + + Returns: + An :class:`HMAC` object + """ + + return HMAC(key, msg, digestmod) diff --git a/env/Lib/site-packages/Crypto/Hash/HMAC.pyi b/env/Lib/site-packages/Crypto/Hash/HMAC.pyi new file mode 100644 index 0000000..b577230 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/HMAC.pyi @@ -0,0 +1,25 @@ +from types import ModuleType +from typing import Union, Dict + +Buffer = Union[bytes, bytearray, memoryview] + +digest_size: int + +class HMAC(object): + digest_size: int + + def __init__(self, + key: Buffer, + msg: Buffer, + digestmod: ModuleType) -> None: ... + def update(self, msg: Buffer) -> HMAC: ... + def copy(self) -> HMAC: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + + +def new(key: Buffer, + msg: Buffer = ..., + digestmod: ModuleType = ...) -> HMAC: ... diff --git a/env/Lib/site-packages/Crypto/Hash/MD2.py b/env/Lib/site-packages/Crypto/Hash/MD2.py new file mode 100644 index 0000000..41decbb --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/MD2.py @@ -0,0 +1,166 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. 
Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_md2_lib = load_pycryptodome_raw_lib( + "Crypto.Hash._MD2", + """ + int md2_init(void **shaState); + int md2_destroy(void *shaState); + int md2_update(void *hs, + const uint8_t *buf, + size_t len); + int md2_digest(const void *shaState, + uint8_t digest[20]); + int md2_copy(const void *src, void *dst); + """) + + +class MD2Hash(object): + """An MD2 hash object. + Do not instantiate directly. Use the :func:`new` function. 
+ + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 16 + # The internal block size of the hash algorithm in bytes. + block_size = 16 + # ASN.1 Object ID + oid = "1.2.840.113549.2.2" + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_md2_lib.md2_init(state.address_of()) + if result: + raise ValueError("Error %d while instantiating MD2" + % result) + self._state = SmartPointer(state.get(), + _raw_md2_lib.md2_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + result = _raw_md2_lib.md2_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while instantiating MD2" + % result) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(self.digest_size) + result = _raw_md2_lib.md2_digest(self._state.get(), + bfr) + if result: + raise ValueError("Error %d while instantiating MD2" + % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. 
+class MD2Hash(object):
+    digest_size: int
+    block_size: int
+    oid: str
+
+    def __init__(self, data: Buffer = ...) -> None: ...
+    def update(self, data: Buffer) -> None: ...
+    def digest(self) -> bytes: ...
+    def hexdigest(self) -> str: ...
+    def copy(self) -> MD2Hash: ...
+    def new(self, data: Buffer = ...) -> MD2Hash: ...
+
+def new(data: Buffer = ...) -> MD2Hash: ...
+digest_size: int +block_size: int diff --git a/env/Lib/site-packages/Crypto/Hash/MD4.py b/env/Lib/site-packages/Crypto/Hash/MD4.py new file mode 100644 index 0000000..be12b19 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/MD4.py @@ -0,0 +1,185 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +""" +MD4 is specified in RFC1320_ and produces the 128 bit digest of a message. 
+ + >>> from Crypto.Hash import MD4 + >>> + >>> h = MD4.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +MD4 stand for Message Digest version 4, and it was invented by Rivest in 1990. +This algorithm is insecure. Do not use it for new designs. + +.. _RFC1320: http://tools.ietf.org/html/rfc1320 +""" + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_md4_lib = load_pycryptodome_raw_lib( + "Crypto.Hash._MD4", + """ + int md4_init(void **shaState); + int md4_destroy(void *shaState); + int md4_update(void *hs, + const uint8_t *buf, + size_t len); + int md4_digest(const void *shaState, + uint8_t digest[20]); + int md4_copy(const void *src, void *dst); + """) + + +class MD4Hash(object): + """Class that implements an MD4 hash + """ + + #: The size of the resulting hash in bytes. + digest_size = 16 + #: The internal block size of the hash algorithm in bytes. + block_size = 64 + #: ASN.1 Object ID + oid = "1.2.840.113549.2.4" + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_md4_lib.md4_init(state.address_of()) + if result: + raise ValueError("Error %d while instantiating MD4" + % result) + self._state = SmartPointer(state.get(), + _raw_md4_lib.md4_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Repeated calls are equivalent to a single call with the concatenation + of all the arguments. In other words: + + >>> m.update(a); m.update(b) + + is equivalent to: + + >>> m.update(a+b) + + :Parameters: + data : byte string/byte array/memoryview + The next chunk of the message being hashed. 
+ """ + + result = _raw_md4_lib.md4_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while instantiating MD4" + % result) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that + has been hashed so far. + + This method does not change the state of the hash object. + You can continue updating the object after calling this function. + + :Return: A byte string of `digest_size` bytes. It may contain non-ASCII + characters, including null bytes. + """ + + bfr = create_string_buffer(self.digest_size) + result = _raw_md4_lib.md4_digest(self._state.get(), + bfr) + if result: + raise ValueError("Error %d while instantiating MD4" + % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been + hashed so far. + + This method does not change the state of the hash object. + + :Return: A string of 2* `digest_size` characters. It contains only + hexadecimal ASCII digits. + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :Return: A hash object of the same type + """ + + clone = MD4Hash() + result = _raw_md4_lib.md4_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying MD4" % result) + return clone + + def new(self, data=None): + return MD4Hash(data) + + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string/byte array/memoryview + The very first chunk of the message to hash. + It is equivalent to an early call to `MD4Hash.update()`. + Optional. 
+ + :Return: A `MD4Hash` object + """ + return MD4Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = MD4Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. +block_size = MD4Hash.block_size diff --git a/env/Lib/site-packages/Crypto/Hash/MD4.pyi b/env/Lib/site-packages/Crypto/Hash/MD4.pyi new file mode 100644 index 0000000..a9a7295 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/MD4.pyi @@ -0,0 +1,19 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class MD4Hash(object): + digest_size: int + block_size: int + oid: str + + def __init__(self, data: Optional[Buffer] = ...) -> None: ... + def update(self, data: Buffer) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> MD4Hash: ... + def new(self, data: Optional[Buffer] = ...) -> MD4Hash: ... + +def new(data: Optional[Buffer] = ...) -> MD4Hash: ... +digest_size: int +block_size: int diff --git a/env/Lib/site-packages/Crypto/Hash/MD5.py b/env/Lib/site-packages/Crypto/Hash/MD5.py new file mode 100644 index 0000000..554b777 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/MD5.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import * + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_md5_lib = load_pycryptodome_raw_lib("Crypto.Hash._MD5", + """ + #define MD5_DIGEST_SIZE 16 + + int MD5_init(void **shaState); + int MD5_destroy(void *shaState); + int MD5_update(void *hs, + const uint8_t *buf, + size_t len); + int MD5_digest(const void *shaState, + uint8_t digest[MD5_DIGEST_SIZE]); + int MD5_copy(const void *src, void *dst); + + int MD5_pbkdf2_hmac_assist(const void *inner, + const void *outer, + const uint8_t first_digest[MD5_DIGEST_SIZE], + uint8_t final_digest[MD5_DIGEST_SIZE], + size_t iterations); + """) + +class MD5Hash(object): + """A MD5 hash object. + Do not instantiate directly. + Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 16 + # The internal block size of the hash algorithm in bytes. 
+ block_size = 64 + # ASN.1 Object ID + oid = "1.2.840.113549.2.5" + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_md5_lib.MD5_init(state.address_of()) + if result: + raise ValueError("Error %d while instantiating MD5" + % result) + self._state = SmartPointer(state.get(), + _raw_md5_lib.MD5_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + result = _raw_md5_lib.MD5_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while instantiating MD5" + % result) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(self.digest_size) + result = _raw_md5_lib.MD5_digest(self._state.get(), + bfr) + if result: + raise ValueError("Error %d while instantiating MD5" + % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. 
+ + :return: A hash object of the same type + """ + + clone = MD5Hash() + result = _raw_md5_lib.MD5_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying MD5" % result) + return clone + + def new(self, data=None): + """Create a fresh SHA-1 hash object.""" + + return MD5Hash(data) + + +def new(data=None): + """Create a new hash object. + + :parameter data: + Optional. The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`MD5Hash.update`. + :type data: byte string/byte array/memoryview + + :Return: A :class:`MD5Hash` hash object + """ + return MD5Hash().new(data) + +# The size of the resulting hash in bytes. +digest_size = 16 + +# The internal block size of the hash algorithm in bytes. +block_size = 64 + + +def _pbkdf2_hmac_assist(inner, outer, first_digest, iterations): + """Compute the expensive inner loop in PBKDF-HMAC.""" + + assert len(first_digest) == digest_size + assert iterations > 0 + + bfr = create_string_buffer(digest_size); + result = _raw_md5_lib.MD5_pbkdf2_hmac_assist( + inner._state.get(), + outer._state.get(), + first_digest, + bfr, + c_size_t(iterations)) + + if result: + raise ValueError("Error %d with PBKDF2-HMAC assis for MD5" % result) + + return get_raw_buffer(bfr) diff --git a/env/Lib/site-packages/Crypto/Hash/MD5.pyi b/env/Lib/site-packages/Crypto/Hash/MD5.pyi new file mode 100644 index 0000000..d819556 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/MD5.pyi @@ -0,0 +1,19 @@ +from typing import Union + +Buffer = Union[bytes, bytearray, memoryview] + +class MD5Hash(object): + digest_size: int + block_size: int + oid: str + + def __init__(self, data: Buffer = ...) -> None: ... + def update(self, data: Buffer) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> MD5Hash: ... + def new(self, data: Buffer = ...) -> MD5Hash: ... + +def new(data: Buffer = ...) -> MD5Hash: ... 
+digest_size: int +block_size: int diff --git a/env/Lib/site-packages/Crypto/Hash/Poly1305.py b/env/Lib/site-packages/Crypto/Hash/Poly1305.py new file mode 100644 index 0000000..eb5e0da --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/Poly1305.py @@ -0,0 +1,217 @@ +# -*- coding: utf-8 -*- +# +# Hash/Poly1305.py - Implements the Poly1305 MAC +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +from binascii import unhexlify + +from Crypto.Util.py3compat import bord, tobytes, _copy_bytes + +from Crypto.Hash import BLAKE2s +from Crypto.Random import get_random_bytes +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + + +_raw_poly1305 = load_pycryptodome_raw_lib("Crypto.Hash._poly1305", + """ + int poly1305_init(void **state, + const uint8_t *r, + size_t r_len, + const uint8_t *s, + size_t s_len); + int poly1305_destroy(void *state); + int poly1305_update(void *state, + const uint8_t *in, + size_t len); + int poly1305_digest(const void *state, + uint8_t *digest, + size_t len); + """) + + +class Poly1305_MAC(object): + """An Poly1305 MAC object. + Do not instantiate directly. Use the :func:`new` function. + + :ivar digest_size: the size in bytes of the resulting MAC tag + :vartype digest_size: integer + """ + + digest_size = 16 + + def __init__(self, r, s, data): + + if len(r) != 16: + raise ValueError("Parameter r is not 16 bytes long") + if len(s) != 16: + raise ValueError("Parameter s is not 16 bytes long") + + self._mac_tag = None + + state = VoidPointer() + result = _raw_poly1305.poly1305_init(state.address_of(), + c_uint8_ptr(r), + c_size_t(len(r)), + c_uint8_ptr(s), + c_size_t(len(s)) + ) + if result: + raise ValueError("Error %d while instantiating Poly1305" % result) + self._state = SmartPointer(state.get(), + _raw_poly1305.poly1305_destroy) + if data: + self.update(data) + + def update(self, data): + """Authenticate the next chunk of message. 
+ + Args: + data (byte string/byte array/memoryview): The next chunk of data + """ + + if self._mac_tag: + raise TypeError("You can only call 'digest' or 'hexdigest' on this object") + + result = _raw_poly1305.poly1305_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while hashing Poly1305 data" % result) + return self + + def copy(self): + raise NotImplementedError() + + def digest(self): + """Return the **binary** (non-printable) MAC tag of the message + authenticated so far. + + :return: The MAC tag digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + if self._mac_tag: + return self._mac_tag + + bfr = create_string_buffer(16) + result = _raw_poly1305.poly1305_digest(self._state.get(), + bfr, + c_size_t(len(bfr))) + if result: + raise ValueError("Error %d while creating Poly1305 digest" % result) + + self._mac_tag = get_raw_buffer(bfr) + return self._mac_tag + + def hexdigest(self): + """Return the **printable** MAC tag of the message authenticated so far. + + :return: The MAC tag, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) + for x in tuple(self.digest())]) + + def verify(self, mac_tag): + """Verify that a given **binary** MAC (computed by another party) + is valid. + + Args: + mac_tag (byte string/byte string/memoryview): the expected MAC of the message. + + Raises: + ValueError: if the MAC does not match. It means that the message + has been tampered with or that the MAC key is incorrect. + """ + + secret = get_random_bytes(16) + + mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=mac_tag) + mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=self.digest()) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + def hexverify(self, hex_mac_tag): + """Verify that a given **printable** MAC (computed by another party) + is valid. 
+ + Args: + hex_mac_tag (string): the expected MAC of the message, + as a hexadecimal string. + + Raises: + ValueError: if the MAC does not match. It means that the message + has been tampered with or that the MAC key is incorrect. + """ + + self.verify(unhexlify(tobytes(hex_mac_tag))) + + + +def new(**kwargs): + """Create a new Poly1305 MAC object. + + Args: + key (bytes/bytearray/memoryview): + The 32-byte key for the Poly1305 object. + cipher (module from ``Crypto.Cipher``): + The cipher algorithm to use for deriving the Poly1305 + key pair *(r, s)*. + It can only be ``Crypto.Cipher.AES`` or ``Crypto.Cipher.ChaCha20``. + nonce (bytes/bytearray/memoryview): + Optional. The non-repeatable value to use for the MAC of this message. + It must be 16 bytes long for ``AES`` and 8 or 12 bytes for ``ChaCha20``. + If not passed, a random nonce is created; you will find it in the + ``nonce`` attribute of the new object. + data (bytes/bytearray/memoryview): + Optional. The very first chunk of the message to authenticate. + It is equivalent to an early call to ``update()``. 
+ + Returns: + A :class:`Poly1305_MAC` object + """ + + cipher = kwargs.pop("cipher", None) + if not hasattr(cipher, '_derive_Poly1305_key_pair'): + raise ValueError("Parameter 'cipher' must be AES or ChaCha20") + + cipher_key = kwargs.pop("key", None) + if cipher_key is None: + raise TypeError("You must pass a parameter 'key'") + + nonce = kwargs.pop("nonce", None) + data = kwargs.pop("data", None) + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + r, s, nonce = cipher._derive_Poly1305_key_pair(cipher_key, nonce) + + new_mac = Poly1305_MAC(r, s, data) + new_mac.nonce = _copy_bytes(None, None, nonce) # nonce may still be just a memoryview + return new_mac diff --git a/env/Lib/site-packages/Crypto/Hash/Poly1305.pyi b/env/Lib/site-packages/Crypto/Hash/Poly1305.pyi new file mode 100644 index 0000000..f97a14a --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/Poly1305.pyi @@ -0,0 +1,24 @@ +from types import ModuleType +from typing import Union + +Buffer = Union[bytes, bytearray, memoryview] + +class Poly1305_MAC(object): + block_size: int + digest_size: int + oid: str + + def __init__(self, + r : int, + s : int, + data : Buffer) -> None: ... + def update(self, data: Buffer) -> Poly1305_MAC: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + +def new(key: Buffer, + cipher: ModuleType, + nonce: Buffer = ..., + data: Buffer = ...) -> Poly1305_MAC: ... diff --git a/env/Lib/site-packages/Crypto/Hash/RIPEMD.py b/env/Lib/site-packages/Crypto/Hash/RIPEMD.py new file mode 100644 index 0000000..4e80235 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/RIPEMD.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +# This file exists for backward compatibility with old code that refers to +# Crypto.Hash.RIPEMD + +"""Deprecated alias for `Crypto.Hash.RIPEMD160`""" + +from Crypto.Hash.RIPEMD160 import new, block_size, digest_size diff --git a/env/Lib/site-packages/Crypto/Hash/RIPEMD.pyi b/env/Lib/site-packages/Crypto/Hash/RIPEMD.pyi new file mode 100644 index 0000000..e33eb2d --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/RIPEMD.pyi @@ -0,0 +1,3 @@ +# This file exists for backward compatibility with old code that refers to +# Crypto.Hash.SHA + diff --git a/env/Lib/site-packages/Crypto/Hash/RIPEMD160.py b/env/Lib/site-packages/Crypto/Hash/RIPEMD160.py new file mode 100644 index 0000000..820b57d --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/RIPEMD160.py @@ -0,0 +1,169 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. 
Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_ripemd160_lib = load_pycryptodome_raw_lib( + "Crypto.Hash._RIPEMD160", + """ + int ripemd160_init(void **shaState); + int ripemd160_destroy(void *shaState); + int ripemd160_update(void *hs, + const uint8_t *buf, + size_t len); + int ripemd160_digest(const void *shaState, + uint8_t digest[20]); + int ripemd160_copy(const void *src, void *dst); + """) + + +class RIPEMD160Hash(object): + """A RIPEMD-160 hash object. + Do not instantiate directly. + Use the :func:`new` function. 
+ + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 20 + # The internal block size of the hash algorithm in bytes. + block_size = 64 + # ASN.1 Object ID + oid = "1.3.36.3.2.1" + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_ripemd160_lib.ripemd160_init(state.address_of()) + if result: + raise ValueError("Error %d while instantiating RIPEMD160" + % result) + self._state = SmartPointer(state.get(), + _raw_ripemd160_lib.ripemd160_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + result = _raw_ripemd160_lib.ripemd160_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while instantiating ripemd160" + % result) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(self.digest_size) + result = _raw_ripemd160_lib.ripemd160_digest(self._state.get(), + bfr) + if result: + raise ValueError("Error %d while instantiating ripemd160" + % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. 
+ :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :return: A hash object of the same type + """ + + clone = RIPEMD160Hash() + result = _raw_ripemd160_lib.ripemd160_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying ripemd160" % result) + return clone + + def new(self, data=None): + """Create a fresh RIPEMD-160 hash object.""" + + return RIPEMD160Hash(data) + + +def new(data=None): + """Create a new hash object. + + :parameter data: + Optional. The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`RIPEMD160Hash.update`. + :type data: byte string/byte array/memoryview + + :Return: A :class:`RIPEMD160Hash` hash object + """ + + return RIPEMD160Hash().new(data) + +# The size of the resulting hash in bytes. +digest_size = RIPEMD160Hash.digest_size + +# The internal block size of the hash algorithm in bytes. +block_size = RIPEMD160Hash.block_size diff --git a/env/Lib/site-packages/Crypto/Hash/RIPEMD160.pyi b/env/Lib/site-packages/Crypto/Hash/RIPEMD160.pyi new file mode 100644 index 0000000..b619473 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/RIPEMD160.pyi @@ -0,0 +1,19 @@ +from typing import Union + +Buffer = Union[bytes, bytearray, memoryview] + +class RIPEMD160Hash(object): + digest_size: int + block_size: int + oid: str + + def __init__(self, data: Buffer = ...) -> None: ... + def update(self, data: Buffer) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> RIPEMD160Hash: ... + def new(self, data: Buffer = ...) -> RIPEMD160Hash: ... + +def new(data: Buffer = ...) -> RIPEMD160Hash: ... 
+digest_size: int +block_size: int diff --git a/env/Lib/site-packages/Crypto/Hash/SHA.py b/env/Lib/site-packages/Crypto/Hash/SHA.py new file mode 100644 index 0000000..0cc141c --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +# This file exists for backward compatibility with old code that refers to +# Crypto.Hash.SHA + +from Crypto.Hash.SHA1 import __doc__, new, block_size, digest_size diff --git a/env/Lib/site-packages/Crypto/Hash/SHA.pyi b/env/Lib/site-packages/Crypto/Hash/SHA.pyi new file mode 100644 index 0000000..4d7d57e --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA.pyi @@ -0,0 +1,4 @@ +# This file exists for backward compatibility with old code that refers to +# Crypto.Hash.SHA + +from Crypto.Hash.SHA1 import __doc__, new, block_size, digest_size diff --git a/env/Lib/site-packages/Crypto/Hash/SHA1.py b/env/Lib/site-packages/Crypto/Hash/SHA1.py new file mode 100644 index 0000000..f79d825 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA1.py @@ -0,0 +1,185 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +from Crypto.Util.py3compat import * + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_sha1_lib = load_pycryptodome_raw_lib("Crypto.Hash._SHA1", + """ + #define SHA1_DIGEST_SIZE 20 + + int SHA1_init(void **shaState); + int SHA1_destroy(void *shaState); + int SHA1_update(void *hs, + const uint8_t *buf, + size_t len); + int SHA1_digest(const void *shaState, + uint8_t digest[SHA1_DIGEST_SIZE]); + int SHA1_copy(const void *src, void *dst); + + int SHA1_pbkdf2_hmac_assist(const void *inner, + const void *outer, + const uint8_t first_digest[SHA1_DIGEST_SIZE], + uint8_t final_digest[SHA1_DIGEST_SIZE], + size_t iterations); + """) + +class SHA1Hash(object): + """A SHA-1 hash object. + Do not instantiate directly. + Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 20 + # The internal block size of the hash algorithm in bytes. + block_size = 64 + # ASN.1 Object ID + oid = "1.3.14.3.2.26" + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_sha1_lib.SHA1_init(state.address_of()) + if result: + raise ValueError("Error %d while instantiating SHA1" + % result) + self._state = SmartPointer(state.get(), + _raw_sha1_lib.SHA1_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. 
+ """ + + result = _raw_sha1_lib.SHA1_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while instantiating SHA1" + % result) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(self.digest_size) + result = _raw_sha1_lib.SHA1_digest(self._state.get(), + bfr) + if result: + raise ValueError("Error %d while instantiating SHA1" + % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :return: A hash object of the same type + """ + + clone = SHA1Hash() + result = _raw_sha1_lib.SHA1_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying SHA1" % result) + return clone + + def new(self, data=None): + """Create a fresh SHA-1 hash object.""" + + return SHA1Hash(data) + + +def new(data=None): + """Create a new hash object. + + :parameter data: + Optional. The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`SHA1Hash.update`. + :type data: byte string/byte array/memoryview + + :Return: A :class:`SHA1Hash` hash object + """ + return SHA1Hash().new(data) + + +# The size of the resulting hash in bytes. +digest_size = SHA1Hash.digest_size + +# The internal block size of the hash algorithm in bytes. 
+block_size = SHA1Hash.block_size + + +def _pbkdf2_hmac_assist(inner, outer, first_digest, iterations): + """Compute the expensive inner loop in PBKDF-HMAC.""" + + assert len(first_digest) == digest_size + assert iterations > 0 + + bfr = create_string_buffer(digest_size); + result = _raw_sha1_lib.SHA1_pbkdf2_hmac_assist( + inner._state.get(), + outer._state.get(), + first_digest, + bfr, + c_size_t(iterations)) + + if result: + raise ValueError("Error %d with PBKDF2-HMAC assis for SHA1" % result) + + return get_raw_buffer(bfr) diff --git a/env/Lib/site-packages/Crypto/Hash/SHA1.pyi b/env/Lib/site-packages/Crypto/Hash/SHA1.pyi new file mode 100644 index 0000000..d6c8e25 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA1.pyi @@ -0,0 +1,19 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHA1Hash(object): + digest_size: int + block_size: int + oid: str + + def __init__(self, data: Optional[Buffer] = ...) -> None: ... + def update(self, data: Buffer) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> SHA1Hash: ... + def new(self, data: Optional[Buffer] = ...) -> SHA1Hash: ... + +def new(data: Optional[Buffer] = ...) -> SHA1Hash: ... +digest_size: int +block_size: int diff --git a/env/Lib/site-packages/Crypto/Hash/SHA224.py b/env/Lib/site-packages/Crypto/Hash/SHA224.py new file mode 100644 index 0000000..f788b06 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA224.py @@ -0,0 +1,186 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_sha224_lib = load_pycryptodome_raw_lib("Crypto.Hash._SHA224", + """ + int SHA224_init(void **shaState); + int SHA224_destroy(void *shaState); + int SHA224_update(void *hs, + const uint8_t *buf, + size_t len); + int SHA224_digest(const void *shaState, + uint8_t *digest, + size_t digest_size); + int SHA224_copy(const void *src, void *dst); + + int SHA224_pbkdf2_hmac_assist(const void *inner, + const void *outer, + const uint8_t *first_digest, + uint8_t *final_digest, + size_t iterations, + size_t digest_size); + """) + +class SHA224Hash(object): + """A SHA-224 hash object. + Do not instantiate directly. + Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 28 + # The internal block size of the hash algorithm in bytes. 
+ block_size = 64 + # ASN.1 Object ID + oid = '2.16.840.1.101.3.4.2.4' + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_sha224_lib.SHA224_init(state.address_of()) + if result: + raise ValueError("Error %d while instantiating SHA224" + % result) + self._state = SmartPointer(state.get(), + _raw_sha224_lib.SHA224_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + result = _raw_sha224_lib.SHA224_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while hashing data with SHA224" + % result) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(self.digest_size) + result = _raw_sha224_lib.SHA224_digest(self._state.get(), + bfr, + c_size_t(self.digest_size)) + if result: + raise ValueError("Error %d while making SHA224 digest" + % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. 
+ + :return: A hash object of the same type + """ + + clone = SHA224Hash() + result = _raw_sha224_lib.SHA224_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying SHA224" % result) + return clone + + def new(self, data=None): + """Create a fresh SHA-224 hash object.""" + + return SHA224Hash(data) + + +def new(data=None): + """Create a new hash object. + + :parameter data: + Optional. The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`SHA224Hash.update`. + :type data: byte string/byte array/memoryview + + :Return: A :class:`SHA224Hash` hash object + """ + return SHA224Hash().new(data) + + +# The size of the resulting hash in bytes. +digest_size = SHA224Hash.digest_size + +# The internal block size of the hash algorithm in bytes. +block_size = SHA224Hash.block_size + + +def _pbkdf2_hmac_assist(inner, outer, first_digest, iterations): + """Compute the expensive inner loop in PBKDF-HMAC.""" + + assert iterations > 0 + + bfr = create_string_buffer(len(first_digest)); + result = _raw_sha224_lib.SHA224_pbkdf2_hmac_assist( + inner._state.get(), + outer._state.get(), + first_digest, + bfr, + c_size_t(iterations), + c_size_t(len(first_digest))) + + if result: + raise ValueError("Error %d with PBKDF2-HMAC assist for SHA224" % result) + + return get_raw_buffer(bfr) diff --git a/env/Lib/site-packages/Crypto/Hash/SHA224.pyi b/env/Lib/site-packages/Crypto/Hash/SHA224.pyi new file mode 100644 index 0000000..613a7f9 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA224.pyi @@ -0,0 +1,19 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHA224Hash(object): + digest_size: int + block_size: int + oid: str + + def __init__(self, data: Optional[Buffer] = ...) -> None: ... + def update(self, data: Buffer) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> SHA224Hash: ... 
+ def new(self, data: Optional[Buffer] = ...) -> SHA224Hash: ... + +def new(data: Optional[Buffer] = ...) -> SHA224Hash: ... +digest_size: int +block_size: int diff --git a/env/Lib/site-packages/Crypto/Hash/SHA256.py b/env/Lib/site-packages/Crypto/Hash/SHA256.py new file mode 100644 index 0000000..957aa37 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA256.py @@ -0,0 +1,185 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_sha256_lib = load_pycryptodome_raw_lib("Crypto.Hash._SHA256", + """ + int SHA256_init(void **shaState); + int SHA256_destroy(void *shaState); + int SHA256_update(void *hs, + const uint8_t *buf, + size_t len); + int SHA256_digest(const void *shaState, + uint8_t *digest, + size_t digest_size); + int SHA256_copy(const void *src, void *dst); + + int SHA256_pbkdf2_hmac_assist(const void *inner, + const void *outer, + const uint8_t *first_digest, + uint8_t *final_digest, + size_t iterations, + size_t digest_size); + """) + +class SHA256Hash(object): + """A SHA-256 hash object. + Do not instantiate directly. Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 32 + # The internal block size of the hash algorithm in bytes. + block_size = 64 + # ASN.1 Object ID + oid = "2.16.840.1.101.3.4.2.1" + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_sha256_lib.SHA256_init(state.address_of()) + if result: + raise ValueError("Error %d while instantiating SHA256" + % result) + self._state = SmartPointer(state.get(), + _raw_sha256_lib.SHA256_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. 
+ """ + + result = _raw_sha256_lib.SHA256_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while hashing data with SHA256" + % result) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(self.digest_size) + result = _raw_sha256_lib.SHA256_digest(self._state.get(), + bfr, + c_size_t(self.digest_size)) + if result: + raise ValueError("Error %d while making SHA256 digest" + % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :return: A hash object of the same type + """ + + clone = SHA256Hash() + result = _raw_sha256_lib.SHA256_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying SHA256" % result) + return clone + + def new(self, data=None): + """Create a fresh SHA-256 hash object.""" + + return SHA256Hash(data) + +def new(data=None): + """Create a new hash object. + + :parameter data: + Optional. The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`SHA256Hash.update`. + :type data: byte string/byte array/memoryview + + :Return: A :class:`SHA256Hash` hash object + """ + + return SHA256Hash().new(data) + + +# The size of the resulting hash in bytes. 
+digest_size = SHA256Hash.digest_size + +# The internal block size of the hash algorithm in bytes. +block_size = SHA256Hash.block_size + + +def _pbkdf2_hmac_assist(inner, outer, first_digest, iterations): + """Compute the expensive inner loop in PBKDF-HMAC.""" + + assert iterations > 0 + + bfr = create_string_buffer(len(first_digest)); + result = _raw_sha256_lib.SHA256_pbkdf2_hmac_assist( + inner._state.get(), + outer._state.get(), + first_digest, + bfr, + c_size_t(iterations), + c_size_t(len(first_digest))) + + if result: + raise ValueError("Error %d with PBKDF2-HMAC assist for SHA256" % result) + + return get_raw_buffer(bfr) diff --git a/env/Lib/site-packages/Crypto/Hash/SHA256.pyi b/env/Lib/site-packages/Crypto/Hash/SHA256.pyi new file mode 100644 index 0000000..cbf21bf --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA256.pyi @@ -0,0 +1,18 @@ +from typing import Union, Optional + + +class SHA256Hash(object): + digest_size: int + block_size: int + oid: str + def __init__(self, data: Optional[Union[bytes, bytearray, memoryview]]=None) -> None: ... + def update(self, data: Union[bytes, bytearray, memoryview]) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> SHA256Hash: ... + def new(self, data: Optional[Union[bytes, bytearray, memoryview]]=None) -> SHA256Hash: ... + +def new(data: Optional[Union[bytes, bytearray, memoryview]]=None) -> SHA256Hash: ... + +digest_size: int +block_size: int diff --git a/env/Lib/site-packages/Crypto/Hash/SHA384.py b/env/Lib/site-packages/Crypto/Hash/SHA384.py new file mode 100644 index 0000000..a98fa9a --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA384.py @@ -0,0 +1,186 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_sha384_lib = load_pycryptodome_raw_lib("Crypto.Hash._SHA384", + """ + int SHA384_init(void **shaState); + int SHA384_destroy(void *shaState); + int SHA384_update(void *hs, + const uint8_t *buf, + size_t len); + int SHA384_digest(const void *shaState, + uint8_t *digest, + size_t digest_size); + int SHA384_copy(const void *src, void *dst); + + int SHA384_pbkdf2_hmac_assist(const void *inner, + const void *outer, + const uint8_t *first_digest, + uint8_t *final_digest, + size_t iterations, + size_t digest_size); + """) + +class SHA384Hash(object): + """A SHA-384 hash object. + Do not instantiate directly. Use the :func:`new` function. 
+ + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 48 + # The internal block size of the hash algorithm in bytes. + block_size = 128 + # ASN.1 Object ID + oid = '2.16.840.1.101.3.4.2.2' + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_sha384_lib.SHA384_init(state.address_of()) + if result: + raise ValueError("Error %d while instantiating SHA384" + % result) + self._state = SmartPointer(state.get(), + _raw_sha384_lib.SHA384_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + result = _raw_sha384_lib.SHA384_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while hashing data with SHA384" + % result) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(self.digest_size) + result = _raw_sha384_lib.SHA384_digest(self._state.get(), + bfr, + c_size_t(self.digest_size)) + if result: + raise ValueError("Error %d while making SHA384 digest" + % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. 
+ :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :return: A hash object of the same type + """ + + clone = SHA384Hash() + result = _raw_sha384_lib.SHA384_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying SHA384" % result) + return clone + + def new(self, data=None): + """Create a fresh SHA-384 hash object.""" + + return SHA384Hash(data) + + +def new(data=None): + """Create a new hash object. + + :parameter data: + Optional. The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`SHA384Hash.update`. + :type data: byte string/byte array/memoryview + + :Return: A :class:`SHA384Hash` hash object + """ + + return SHA384Hash().new(data) + + +# The size of the resulting hash in bytes. +digest_size = SHA384Hash.digest_size + +# The internal block size of the hash algorithm in bytes. 
+block_size = SHA384Hash.block_size + + +def _pbkdf2_hmac_assist(inner, outer, first_digest, iterations): + """Compute the expensive inner loop in PBKDF-HMAC.""" + + assert iterations > 0 + + bfr = create_string_buffer(len(first_digest)); + result = _raw_sha384_lib.SHA384_pbkdf2_hmac_assist( + inner._state.get(), + outer._state.get(), + first_digest, + bfr, + c_size_t(iterations), + c_size_t(len(first_digest))) + + if result: + raise ValueError("Error %d with PBKDF2-HMAC assist for SHA384" % result) + + return get_raw_buffer(bfr) diff --git a/env/Lib/site-packages/Crypto/Hash/SHA384.pyi b/env/Lib/site-packages/Crypto/Hash/SHA384.pyi new file mode 100644 index 0000000..c2aab9e --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA384.pyi @@ -0,0 +1,19 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHA384Hash(object): + digest_size: int + block_size: int + oid: str + + def __init__(self, data: Optional[Buffer] = ...) -> None: ... + def update(self, data: Buffer) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> SHA384Hash: ... + def new(self, data: Optional[Buffer] = ...) -> SHA384Hash: ... + +def new(data: Optional[Buffer] = ...) -> SHA384Hash: ... +digest_size: int +block_size: int diff --git a/env/Lib/site-packages/Crypto/Hash/SHA3_224.py b/env/Lib/site-packages/Crypto/Hash/SHA3_224.py new file mode 100644 index 0000000..bfc9bc2 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA3_224.py @@ -0,0 +1,147 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +from Crypto.Hash.keccak import _raw_keccak_lib + +class SHA3_224_Hash(object): + """A SHA3-224 hash object. + Do not instantiate directly. + Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 28 + + # ASN.1 Object ID + oid = "2.16.840.1.101.3.4.2.7" + + def __init__(self, data, update_after_digest): + self._update_after_digest = update_after_digest + self._digest_done = False + + state = VoidPointer() + result = _raw_keccak_lib.keccak_init(state.address_of(), + c_size_t(self.digest_size * 2), + 0x06) + if result: + raise ValueError("Error %d while instantiating SHA-3/224" + % result) + self._state = SmartPointer(state.get(), + _raw_keccak_lib.keccak_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. 
+ """ + + if self._digest_done and not self._update_after_digest: + raise TypeError("You can only call 'digest' or 'hexdigest' on this object") + + result = _raw_keccak_lib.keccak_absorb(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while updating SHA-3/224" + % result) + return self + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + self._digest_done = True + + bfr = create_string_buffer(self.digest_size) + result = _raw_keccak_lib.keccak_digest(self._state.get(), + bfr, + c_size_t(self.digest_size)) + if result: + raise ValueError("Error %d while instantiating SHA-3/224" + % result) + + self._digest_value = get_raw_buffer(bfr) + return self._digest_value + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def new(self): + """Create a fresh SHA3-224 hash object.""" + + return type(self)(None, self._update_after_digest) + + +def new(*args, **kwargs): + """Create a new hash object. + + Args: + data (byte string/byte array/memoryview): + The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`update`. + update_after_digest (boolean): + Whether :meth:`digest` can be followed by another :meth:`update` + (default: ``False``). 
+ + :Return: A :class:`SHA3_224_Hash` hash object + """ + + data = kwargs.pop("data", None) + update_after_digest = kwargs.pop("update_after_digest", False) + if len(args) == 1: + if data: + raise ValueError("Initial data for hash specified twice") + data = args[0] + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + return SHA3_224_Hash(data, update_after_digest) + +# The size of the resulting hash in bytes. +digest_size = SHA3_224_Hash.digest_size diff --git a/env/Lib/site-packages/Crypto/Hash/SHA3_224.pyi b/env/Lib/site-packages/Crypto/Hash/SHA3_224.pyi new file mode 100644 index 0000000..3437042 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA3_224.pyi @@ -0,0 +1,16 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHA3_224_Hash(object): + digest_size: int + oid: str + def __init__(self, data: Optional[Buffer], update_after_digest: bool) -> None: ... + def update(self, data: Buffer) -> SHA3_224_Hash: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def new(self) -> SHA3_224_Hash: ... + +def new(__data: Buffer = ..., update_after_digest: bool = ...) -> SHA3_224_Hash: ... + +digest_size: int diff --git a/env/Lib/site-packages/Crypto/Hash/SHA3_256.py b/env/Lib/site-packages/Crypto/Hash/SHA3_256.py new file mode 100644 index 0000000..327dabf --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA3_256.py @@ -0,0 +1,147 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +from Crypto.Hash.keccak import _raw_keccak_lib + +class SHA3_256_Hash(object): + """A SHA3-256 hash object. + Do not instantiate directly. + Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 32 + + # ASN.1 Object ID + oid = "2.16.840.1.101.3.4.2.8" + + def __init__(self, data, update_after_digest): + self._update_after_digest = update_after_digest + self._digest_done = False + + state = VoidPointer() + result = _raw_keccak_lib.keccak_init(state.address_of(), + c_size_t(self.digest_size * 2), + 0x06) + if result: + raise ValueError("Error %d while instantiating SHA-3/256" + % result) + self._state = SmartPointer(state.get(), + _raw_keccak_lib.keccak_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. 
+ """ + + if self._digest_done and not self._update_after_digest: + raise TypeError("You can only call 'digest' or 'hexdigest' on this object") + + result = _raw_keccak_lib.keccak_absorb(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while updating SHA-3/256" + % result) + return self + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + self._digest_done = True + + bfr = create_string_buffer(self.digest_size) + result = _raw_keccak_lib.keccak_digest(self._state.get(), + bfr, + c_size_t(self.digest_size)) + if result: + raise ValueError("Error %d while instantiating SHA-3/256" + % result) + + self._digest_value = get_raw_buffer(bfr) + return self._digest_value + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def new(self): + """Create a fresh SHA3-256 hash object.""" + + return type(self)(None, self._update_after_digest) + + +def new(*args, **kwargs): + """Create a new hash object. + + Args: + data (byte string/byte array/memoryview): + The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`update`. + update_after_digest (boolean): + Whether :meth:`digest` can be followed by another :meth:`update` + (default: ``False``). 
+ + :Return: A :class:`SHA3_256_Hash` hash object + """ + + data = kwargs.pop("data", None) + update_after_digest = kwargs.pop("update_after_digest", False) + if len(args) == 1: + if data: + raise ValueError("Initial data for hash specified twice") + data = args[0] + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + return SHA3_256_Hash(data, update_after_digest) + +# The size of the resulting hash in bytes. +digest_size = SHA3_256_Hash.digest_size diff --git a/env/Lib/site-packages/Crypto/Hash/SHA3_256.pyi b/env/Lib/site-packages/Crypto/Hash/SHA3_256.pyi new file mode 100644 index 0000000..c1a07fa --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA3_256.pyi @@ -0,0 +1,16 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHA3_256_Hash(object): + digest_size: int + oid: str + def __init__(self, data: Optional[Buffer], update_after_digest: bool) -> None: ... + def update(self, data: Buffer) -> SHA3_256_Hash: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def new(self) -> SHA3_256_Hash: ... + +def new(__data: Buffer = ..., update_after_digest: bool = ...) -> SHA3_256_Hash: ... + +digest_size: int diff --git a/env/Lib/site-packages/Crypto/Hash/SHA3_384.py b/env/Lib/site-packages/Crypto/Hash/SHA3_384.py new file mode 100644 index 0000000..cdbb7dd --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA3_384.py @@ -0,0 +1,147 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +from Crypto.Hash.keccak import _raw_keccak_lib + +class SHA3_384_Hash(object): + """A SHA3-384 hash object. + Do not instantiate directly. + Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 48 + + # ASN.1 Object ID + oid = "2.16.840.1.101.3.4.2.9" + + def __init__(self, data, update_after_digest): + self._update_after_digest = update_after_digest + self._digest_done = False + + state = VoidPointer() + result = _raw_keccak_lib.keccak_init(state.address_of(), + c_size_t(self.digest_size * 2), + 0x06) + if result: + raise ValueError("Error %d while instantiating SHA-3/384" + % result) + self._state = SmartPointer(state.get(), + _raw_keccak_lib.keccak_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. 
+ """ + + if self._digest_done and not self._update_after_digest: + raise TypeError("You can only call 'digest' or 'hexdigest' on this object") + + result = _raw_keccak_lib.keccak_absorb(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while updating SHA-3/384" + % result) + return self + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + self._digest_done = True + + bfr = create_string_buffer(self.digest_size) + result = _raw_keccak_lib.keccak_digest(self._state.get(), + bfr, + c_size_t(self.digest_size)) + if result: + raise ValueError("Error %d while instantiating SHA-3/384" + % result) + + self._digest_value = get_raw_buffer(bfr) + return self._digest_value + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def new(self): + """Create a fresh SHA3-384 hash object.""" + + return type(self)(None, self._update_after_digest) + + +def new(*args, **kwargs): + """Create a new hash object. + + Args: + data (byte string/byte array/memoryview): + The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`update`. + update_after_digest (boolean): + Whether :meth:`digest` can be followed by another :meth:`update` + (default: ``False``). 
+ + :Return: A :class:`SHA3_384_Hash` hash object + """ + + data = kwargs.pop("data", None) + update_after_digest = kwargs.pop("update_after_digest", False) + if len(args) == 1: + if data: + raise ValueError("Initial data for hash specified twice") + data = args[0] + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + return SHA3_384_Hash(data, update_after_digest) + +# The size of the resulting hash in bytes. +digest_size = SHA3_384_Hash.digest_size diff --git a/env/Lib/site-packages/Crypto/Hash/SHA3_384.pyi b/env/Lib/site-packages/Crypto/Hash/SHA3_384.pyi new file mode 100644 index 0000000..d029ab6 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA3_384.pyi @@ -0,0 +1,16 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHA3_384_Hash(object): + digest_size: int + oid: str + def __init__(self, data: Optional[Buffer], update_after_digest: bool) -> None: ... + def update(self, data: Buffer) -> SHA3_384_Hash: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def new(self) -> SHA3_384_Hash: ... + +def new(__data: Buffer = ..., update_after_digest: bool = ...) -> SHA3_384_Hash: ... + +digest_size: int diff --git a/env/Lib/site-packages/Crypto/Hash/SHA3_512.py b/env/Lib/site-packages/Crypto/Hash/SHA3_512.py new file mode 100644 index 0000000..355b049 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA3_512.py @@ -0,0 +1,148 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +from Crypto.Hash.keccak import _raw_keccak_lib + +class SHA3_512_Hash(object): + """A SHA3-512 hash object. + Do not instantiate directly. + Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 64 + + # ASN.1 Object ID + oid = "2.16.840.1.101.3.4.2.10" + + def __init__(self, data, update_after_digest): + self._update_after_digest = update_after_digest + self._digest_done = False + + state = VoidPointer() + result = _raw_keccak_lib.keccak_init(state.address_of(), + c_size_t(self.digest_size * 2), + 0x06) + if result: + raise ValueError("Error %d while instantiating SHA-3/512" + % result) + self._state = SmartPointer(state.get(), + _raw_keccak_lib.keccak_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. 
+ """ + + if self._digest_done and not self._update_after_digest: + raise TypeError("You can only call 'digest' or 'hexdigest' on this object") + + result = _raw_keccak_lib.keccak_absorb(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while updating SHA-3/512" + % result) + return self + + def digest(self): + + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + self._digest_done = True + + bfr = create_string_buffer(self.digest_size) + result = _raw_keccak_lib.keccak_digest(self._state.get(), + bfr, + c_size_t(self.digest_size)) + if result: + raise ValueError("Error %d while instantiating SHA-3/512" + % result) + + self._digest_value = get_raw_buffer(bfr) + return self._digest_value + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def new(self): + """Create a fresh SHA3-512 hash object.""" + + return type(self)(None, self._update_after_digest) + + +def new(*args, **kwargs): + """Create a new hash object. + + Args: + data (byte string/byte array/memoryview): + The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`update`. + update_after_digest (boolean): + Whether :meth:`digest` can be followed by another :meth:`update` + (default: ``False``). 
+ + :Return: A :class:`SHA3_512_Hash` hash object + """ + + data = kwargs.pop("data", None) + update_after_digest = kwargs.pop("update_after_digest", False) + if len(args) == 1: + if data: + raise ValueError("Initial data for hash specified twice") + data = args[0] + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + return SHA3_512_Hash(data, update_after_digest) + +# The size of the resulting hash in bytes. +digest_size = SHA3_512_Hash.digest_size diff --git a/env/Lib/site-packages/Crypto/Hash/SHA3_512.pyi b/env/Lib/site-packages/Crypto/Hash/SHA3_512.pyi new file mode 100644 index 0000000..2c5403b --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA3_512.pyi @@ -0,0 +1,16 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHA3_512_Hash(object): + digest_size: int + oid: str + def __init__(self, data: Optional[Buffer], update_after_digest: bool) -> None: ... + def update(self, data: Buffer) -> SHA3_512_Hash: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def new(self) -> SHA3_512_Hash: ... + +def new(__data: Buffer = ..., update_after_digest: bool = ...) -> SHA3_512_Hash: ... + +digest_size: int diff --git a/env/Lib/site-packages/Crypto/Hash/SHA512.py b/env/Lib/site-packages/Crypto/Hash/SHA512.py new file mode 100644 index 0000000..403fe45 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA512.py @@ -0,0 +1,204 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_sha512_lib = load_pycryptodome_raw_lib("Crypto.Hash._SHA512", + """ + int SHA512_init(void **shaState, + size_t digest_size); + int SHA512_destroy(void *shaState); + int SHA512_update(void *hs, + const uint8_t *buf, + size_t len); + int SHA512_digest(const void *shaState, + uint8_t *digest, + size_t digest_size); + int SHA512_copy(const void *src, void *dst); + + int SHA512_pbkdf2_hmac_assist(const void *inner, + const void *outer, + const uint8_t *first_digest, + uint8_t *final_digest, + size_t iterations, + size_t digest_size); + """) + +class SHA512Hash(object): + """A SHA-512 hash object (possibly in its truncated version SHA-512/224 or + SHA-512/256. + Do not instantiate directly. Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The internal block size of the hash algorithm in bytes. 
+ block_size = 128 + + def __init__(self, data, truncate): + self._truncate = truncate + + if truncate is None: + self.oid = "2.16.840.1.101.3.4.2.3" + self.digest_size = 64 + elif truncate == "224": + self.oid = "2.16.840.1.101.3.4.2.5" + self.digest_size = 28 + elif truncate == "256": + self.oid = "2.16.840.1.101.3.4.2.6" + self.digest_size = 32 + else: + raise ValueError("Incorrect truncation length. It must be '224' or '256'.") + + state = VoidPointer() + result = _raw_sha512_lib.SHA512_init(state.address_of(), + c_size_t(self.digest_size)) + if result: + raise ValueError("Error %d while instantiating SHA-512" + % result) + self._state = SmartPointer(state.get(), + _raw_sha512_lib.SHA512_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + result = _raw_sha512_lib.SHA512_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while hashing data with SHA512" + % result) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(self.digest_size) + result = _raw_sha512_lib.SHA512_digest(self._state.get(), + bfr, + c_size_t(self.digest_size)) + if result: + raise ValueError("Error %d while making SHA512 digest" + % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. 
+ + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :return: A hash object of the same type + """ + + clone = SHA512Hash(None, self._truncate) + result = _raw_sha512_lib.SHA512_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying SHA512" % result) + return clone + + def new(self, data=None): + """Create a fresh SHA-512 hash object.""" + + return SHA512Hash(data, self._truncate) + + +def new(data=None, truncate=None): + """Create a new hash object. + + Args: + data (bytes/bytearray/memoryview): + Optional. The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`SHA512Hash.update`. + truncate (string): + Optional. The desired length of the digest. It can be either "224" or + "256". If not present, the digest is 512 bits long. + Passing this parameter is **not** equivalent to simply truncating + the output digest. + + :Return: A :class:`SHA512Hash` hash object + """ + + return SHA512Hash(data, truncate) + + +# The size of the full SHA-512 hash in bytes. +digest_size = 64 + +# The internal block size of the hash algorithm in bytes. 
+block_size = 128 + + +def _pbkdf2_hmac_assist(inner, outer, first_digest, iterations): + """Compute the expensive inner loop in PBKDF-HMAC.""" + + assert iterations > 0 + + bfr = create_string_buffer(len(first_digest)); + result = _raw_sha512_lib.SHA512_pbkdf2_hmac_assist( + inner._state.get(), + outer._state.get(), + first_digest, + bfr, + c_size_t(iterations), + c_size_t(len(first_digest))) + + if result: + raise ValueError("Error %d with PBKDF2-HMAC assist for SHA512" % result) + + return get_raw_buffer(bfr) diff --git a/env/Lib/site-packages/Crypto/Hash/SHA512.pyi b/env/Lib/site-packages/Crypto/Hash/SHA512.pyi new file mode 100644 index 0000000..f219ee9 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHA512.pyi @@ -0,0 +1,22 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHA512Hash(object): + digest_size: int + block_size: int + oid: str + + def __init__(self, + data: Optional[Buffer], + truncate: Optional[str]) -> None: ... + def update(self, data: Buffer) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> SHA512Hash: ... + def new(self, data: Optional[Buffer] = ...) -> SHA512Hash: ... + +def new(data: Optional[Buffer] = ..., + truncate: Optional[str] = ...) -> SHA512Hash: ... +digest_size: int +block_size: int diff --git a/env/Lib/site-packages/Crypto/Hash/SHAKE128.py b/env/Lib/site-packages/Crypto/Hash/SHAKE128.py new file mode 100644 index 0000000..011268b --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHAKE128.py @@ -0,0 +1,127 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +from Crypto.Hash.keccak import _raw_keccak_lib + +class SHAKE128_XOF(object): + """A SHAKE128 hash object. + Do not instantiate directly. + Use the :func:`new` function. 
+ + :ivar oid: ASN.1 Object ID + :vartype oid: string + """ + + # ASN.1 Object ID + oid = "2.16.840.1.101.3.4.2.11" + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_keccak_lib.keccak_init(state.address_of(), + c_size_t(32), + 0x1F) + if result: + raise ValueError("Error %d while instantiating SHAKE128" + % result) + self._state = SmartPointer(state.get(), + _raw_keccak_lib.keccak_destroy) + self._is_squeezing = False + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + if self._is_squeezing: + raise TypeError("You cannot call 'update' after the first 'read'") + + result = _raw_keccak_lib.keccak_absorb(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while updating SHAKE128 state" + % result) + return self + + def read(self, length): + """ + Compute the next piece of XOF output. + + .. note:: + You cannot use :meth:`update` anymore after the first call to + :meth:`read`. + + Args: + length (integer): the amount of bytes this method must return + + :return: the next piece of XOF output (of the given length) + :rtype: byte string + """ + + self._is_squeezing = True + bfr = create_string_buffer(length) + result = _raw_keccak_lib.keccak_squeeze(self._state.get(), + bfr, + c_size_t(length)) + if result: + raise ValueError("Error %d while extracting from SHAKE128" + % result) + + return get_raw_buffer(bfr) + + def new(self, data=None): + return type(self)(data=data) + + +def new(data=None): + """Return a fresh instance of a SHAKE128 object. + + Args: + data (bytes/bytearray/memoryview): + The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`update`. + Optional. 
+ + :Return: A :class:`SHAKE128_XOF` object + """ + + return SHAKE128_XOF(data=data) diff --git a/env/Lib/site-packages/Crypto/Hash/SHAKE128.pyi b/env/Lib/site-packages/Crypto/Hash/SHAKE128.pyi new file mode 100644 index 0000000..f618881 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHAKE128.pyi @@ -0,0 +1,13 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHAKE128_XOF(object): + oid: str + def __init__(self, + data: Optional[Buffer] = ...) -> None: ... + def update(self, data: Buffer) -> SHAKE128_XOF: ... + def read(self, length: int) -> bytes: ... + def new(self, data: Optional[Buffer] = ...) -> SHAKE128_XOF: ... + +def new(data: Optional[Buffer] = ...) -> SHAKE128_XOF: ... diff --git a/env/Lib/site-packages/Crypto/Hash/SHAKE256.py b/env/Lib/site-packages/Crypto/Hash/SHAKE256.py new file mode 100644 index 0000000..4b1b141 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHAKE256.py @@ -0,0 +1,127 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +from Crypto.Hash.keccak import _raw_keccak_lib + +class SHAKE256_XOF(object): + """A SHAKE256 hash object. + Do not instantiate directly. + Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + """ + + # ASN.1 Object ID + oid = "2.16.840.1.101.3.4.2.12" + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_keccak_lib.keccak_init(state.address_of(), + c_size_t(64), + 0x1F) + if result: + raise ValueError("Error %d while instantiating SHAKE256" + % result) + self._state = SmartPointer(state.get(), + _raw_keccak_lib.keccak_destroy) + self._is_squeezing = False + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. 
+ """ + + if self._is_squeezing: + raise TypeError("You cannot call 'update' after the first 'read'") + + result = _raw_keccak_lib.keccak_absorb(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while updating SHAKE256 state" + % result) + return self + + def read(self, length): + """ + Compute the next piece of XOF output. + + .. note:: + You cannot use :meth:`update` anymore after the first call to + :meth:`read`. + + Args: + length (integer): the amount of bytes this method must return + + :return: the next piece of XOF output (of the given length) + :rtype: byte string + """ + + self._is_squeezing = True + bfr = create_string_buffer(length) + result = _raw_keccak_lib.keccak_squeeze(self._state.get(), + bfr, + c_size_t(length)) + if result: + raise ValueError("Error %d while extracting from SHAKE256" + % result) + + return get_raw_buffer(bfr) + + def new(self, data=None): + return type(self)(data=data) + + +def new(data=None): + """Return a fresh instance of a SHAKE256 object. + + Args: + data (bytes/bytearray/memoryview): + The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`update`. + Optional. + + :Return: A :class:`SHAKE256_XOF` object + """ + + return SHAKE256_XOF(data=data) diff --git a/env/Lib/site-packages/Crypto/Hash/SHAKE256.pyi b/env/Lib/site-packages/Crypto/Hash/SHAKE256.pyi new file mode 100644 index 0000000..029347a --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/SHAKE256.pyi @@ -0,0 +1,13 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHAKE256_XOF(object): + oid: str + def __init__(self, + data: Optional[Buffer] = ...) -> None: ... + def update(self, data: Buffer) -> SHAKE256_XOF: ... + def read(self, length: int) -> bytes: ... + def new(self, data: Optional[Buffer] = ...) -> SHAKE256_XOF: ... + +def new(data: Optional[Buffer] = ...) -> SHAKE256_XOF: ... 
diff --git a/env/Lib/site-packages/Crypto/Hash/_BLAKE2b.pyd b/env/Lib/site-packages/Crypto/Hash/_BLAKE2b.pyd new file mode 100644 index 0000000000000000000000000000000000000000..cf7518c53172003c9d1b86df1c3e6c2801aad849 GIT binary patch literal 14336 zcmeHN3v^V~x!&_iCXdM(Fu{Ni1{@@cq(g>l!>i1Y1oz+>9U&l9F$u|pjN~z$hXzYE z+)0$-I4$1N+iJb7NPFw@+Ft5)(SX`c2$F!HKy4c;E>rPwLXbxEB0lQe??2~EGC|vW z*SfdsuGM;0&is$P_rL%B|Ns8?e@>3~maWXd7&GEdr5Ni5q|2rJ{^g(ZXfBw#r+_`3 zb9h>}PCPuVx+xI01w(D?LUk>+`nuNEwuo)5-xi9t+5)Y%${SbMTG|@?r4~!BT@$@@ z-+kZI|5$!CedoRU^;fe%Kl}2~2?jXn#i0{ArG4ntnF{}%6FCYzeIi$Z!%F+ZzZ*PZ z0R8aqWDQ>Ys!E?y=;eX>CYmF?B3~6_4fkcS(38_@)4UNj-Ik{>xRhNA&gCk%`XWH9 zf_LZ$5Uyt|OJR*c)&T|?5tp9TqN_S64L(b&oSo&29m1H6^Pmq6u4Rnu%6#wv<{7<$ zjFHVAEn|v$qx=hyB-+arMYRS8W9Cfyg8roue|rSH%~SA0D%0AvbpTT?8?-77HPl7w z7<<|R7G$yl+-S(WTu`}G?Z!A62n6WFypa( zCB5=J@cyt#z7as!l~i~h=?K9g0iC>EN#Q#^3R#RnJbs(q%;OQemB%;QZSGZ=z{aJs zqV%rwfP4l*I+H?YKL*NC6JuKjHdoqRu3zY4i!S>IFomw?@!MyzfFKp*Bsz5_BR9$~ z<$(GKktI(9izkX!*$7y!1CXuxjCCcWSIKMC?l!bvskUEC?WgicXg6YwLdjK@+VX;Y zEE`y7D!N!MHbas$z+>esV&_u7JQYotv{;n(@1X%6=|wIXM9K2hRL0z?qbbu-Yy-tc z!+K|uOSjn_T#DGsxwO&FVGEo4_TJqP!sJ8v-AZ;QyW9+8I9yvkluQp|d{^>bi#pha zAfRDaymjt2QwEK{+JEw(6&l#YH`Zo|stTw1>lmWa(bme@fARUt7DS)g6iex%*FtC+hB=SXuy^(VWC;9X#L|E=>1L6}@>`T8H5*=PT4I z^^z;w;SHDj_L|#!(|%LpnVe_yNav;BbLm}dlBh}kK8zqrebTQv1;}U+!2dNv--7BG z?^>a5faA^ssiLnVA#>@4RM8zkQbiGk?BA1PWVEkrxAN4CCgR0K?4j7{^4%B1W^xhy zjm2m0rcgY56=L212gy%>;!-7oE^ayn22T(kOJet#?j~9?{hC1BRC$fs(*>M^)n~dK z9GbPyR1Az<*=L%rVcpmbjIU49=9)qy`;0o8d5kYv9-ak$Tr}F!OdXdj-$*kta0ttg zQG}$hu5z-YYsD%upo>e#Q$;H-!Eot6=p1mSs;~zV)%IH0FjeHH7A$rik2ToKp@L{{ z;u2a#sh`Z^fcuCUB%;K5fVPz_RWuR9Qhl+Me&f%{c{R7+n)aP-C6`XCE-<}uHH1G? z_SkE)A0ky3^hP5*<}b!9-r`izBt@yJVji!urivISDaq#$ej4W~@PyZ<(k~NTWdp;? 
zdj@UX3EWv_<<1_y8%xFo=~sYoj!ngd${v`%j#NOTaQ?v&kjsdK=hyT?#S#iCD~~PO zfw@^UD^(<6qOnYT2zEV=e2yr&8x6=Q3CjV{Lk!!SCrpQd_l`oyli!9aJZ84bZ>fkL zbCvWeIbZ$>QZWoY&Cey{3_7g2#LdGdzMwx+=uD1q;pdiZ5EwmErsn*ey5SFKKakw@ z>ey^~7xK~YWO)T>q|JX;*~YTuaJKv{5Ic3sgfs@pe-Yvg$^Sd5ytJ%)(XTTZr9l(r zkV<6B0U*PtGsep|gCj4;kH)0&lv8=Una-Msy;$3zA5t>I9vzbRAqgV0Op1N1i%w8d z2bT_WsV`M@YBH_2+P+#6?LKYiHdFeJ*j?n0njspito2?+NW=-?!k5B&&;y8ne;rW+~aA9#lLno!xh*|eAv{hdgI@;A^;vp}I7REZqe z%MWz;)E008f^5y%M!U%QM*F^2Hd7rIXg@ zh|X5%NGYuBY!uo|b!328f9gHqUej~vfLPoQKqNtsF9GZ~J%nGKVsWUj8$xgf<41`D z9FINS-dh-JoM+mEP>Z(nddt&5aQJ?WfZ^b#c~qN6Yr6B0K^o_|i=)-zti8f7+=V=G zQ!zIj0h+}V`5j^+{~(VS+C2-7MvA3n`Qn0}P_DGZZ0HdeBtuAsdrO92a~|V3peRxb zV|UxxHweo`TxJ34l`t*tRqegyxZDTRI*-9Fd7ZCBv-$l+`zeQ`Zj;on^J;q{2 zqx8bzt9Aot^7GhuP&?rrad#h~!}}JniA)vUFbSCSFEYCE#CHbJh@%0?FjcgWS|Dd( zIteBQl~$$GSR%w;Psr3jsgPt6Vxx1$pos43r$!`8ACGs@G^B{aba#J`W>wCLuFa%p zFP*uPIs#{TElzk}5uz8cnnE^JBtRw#0Xk(M4@DbKRE-GRV30~Xr%8W+{xq8lc?&3_ zFCUgSC%8fWm2!+fVm|^~@lE^4WNQ%6qcxF@Coa36(r;o>2f*+|R9j>Kq(B!fCDv=^ z3tkHisXLYSr|Jh~$7AfR>e%WF#v1?3*u4l~ntLp5Se;4d0pYPqsi(^MN~QFId(%j3 z#NX`R^zL>3)@Y@4Od4?>a3?BsReAlwWA0671Fe2f+-R@LI|w;O=|foF9Iv+LM}0hI z`L-Pfn=!IDB`iv2=~$#>QEir+k6^xVRplR4qss=OYIy$VD6 zOsOl>!@V+Y`2?u2=j}ov_8`ptc&?LZ79!1P04?sL7iNO`Q^)@Rxb5-vu=eZacW>qzAl z1CZvT@06nth%-6QQ~AA5`Q5Di9#ekbP=5ba`F&jZeM0$tQu*Db{C4Aar>+-s;9P_?i};y7Ho4hWqasj=&r&>03ISal+HhGS0$dI(mJ)h$c_ zblU*A6$}Rumkk^jaU9_Ih~omMmN*JkD;oBK5k%&s#u34q!zH&5ERtu&Ad=^TJU7YX zkmn(JE`@S=;x3<5)v;i2c+#vRNC=&k(q3-pcOH}9zy?H%V6EyD;me?4^==b!_O*+K zq=*e16ed(bYSoW~30?^GJ`LIddLL-0=zSTq5A++LYeByc8elkC&ijD5|10S!nMLoh>OQ7tb}kNN{ArCRM?2K8b}Et zwLtKURf%#iUQzB$4x7}%NJ=O^aUPNj-i6PkN9;+OX45|M9VMBJOUXB%sTT&gbP5$c zd>U!uU9UuH;MB zYuxb1P51bycXTpInT$-X1Ov)i*$FoCVF4PVPh{w>JTZ)`JH{z2N+(v(34_sy!Yge6 z6osAko!}q_s@p&64WKoHhQ$C6h%FJg)D z2keTICAZll9j8X+`1;khCBHOQU%7FRj-X=xOY?ze-&^-q&K=k?R{q?ApB(J$`q{c? 
zsQaUkB}y{yo^^iPr=j;+7gDJ~qipC4Wh&_?=(ImhN_4+_?PWFd|MSKl6C0uDwowyV zoK9ZJT|y*s8kD3^mfQ&EPJ2>gpoDan^W8`P@vWcUyS(5gYU)>pj_~dc)9$%w^5MZ- z#drgit%!Tk`60(HwDRt$&0m@IK$GeF+&RFpYj{2e#hz7mkB)b*f6o2!za5WFBlpXAvU%OXokuADTbCt_Q%K=jh@fTzW;nGN zt&m`ZA*=B`CKcL6$+%xMz$)}p?13Im<$>BP;^m5>(lAsS@FwzSd88ihq@K!TQ0R3i zMB*y#I#IGiq1U~JL9g=!>D@C-&sh!+q(O<*9u$S7Sf{-MR`N(^dC!@}*C&hGC9uxulkje1q7cwGUzK*eF;#bwThQgbfe^M%Z(oC?WPm!}CgTVB0B%CzXca zk$wd0JjEr?e@MGY^4*!EkP&9r2+A2lfQwn=Eq@mKj)FJsv|po$Nl30iJ`ip_5`8 z4%=|&`DKY~rg92oL>;*a21k_i^H{T;>x8bS=vl^z#4ILIrDIod=Oapz^EgMi^Jz61 zJ740?C)L#J#K|S1zl}RL!k%D1ib0|?o&TRz5~Ap&$7uwCl}-jVg;T1u-`^f(x;!h~g3oQpPL!V(*32WUDcU7&GHdVHX9M7nE1*Yen@4&Xr^ z8|nZ(g7Qn4kP8l^EP)gcq%4CJH>50q6c3~lJTzGBO&9QSKpXpK~Yn}tsC~x`_Rr$$xgA9OSu1iV4j2R>vnVuCdR(5U&|mnTBja5)?;bracbgA9cUCz@7p2 zCe%kk(E)&0(AS|7?}i20&=SJvfn}&1*mz5-6qiuQXIxy;^LPg9LRzW$;Uh1d+0sFe zbX2xLe#RR&dT&`JA;13K9Xokmln%JhF{JaOk&E1t%j~W989x}Hj9%X_UUAe@-@8mI zM66d>@hT0#e6CVw5F&Xv)pUl~;E0Gl>clh$ZXK$lH*zyhVmTT5s_5AHo*Mj2BN!R& zGp6II(6y4@3ZaPI;&svcjWt8(&%izzY@7TRI9rv*-mwO?^L6i}J0n%25Y|c9YX?fV<rqdgYrmSppPHtYlr9ddiHJWn- zOO`I5>GQHphCFjVGwB6OZh;{yTbE_>nVCHb>lAgjm zNA+8=2Gjge_*c?rRQ~@Ev1n#{o-qCO5sed9WG0gbAHl~ydrzfLOL z)Tewr5C08-U&rx0N4`t9gy#CHA)B7eM8|BV5v)tubK^kjB-(REOS=^SZ zm7%O3cjCP$-$BawxU+pPbY|31LO$kjJEDT;$OY%wYmFTcHifk;i7$JRF2t@oF$ zt*LJdZWtvR{NYHbErSlM181~3012Z+l)ilP(~Put)7K3Pfnj~M*t_ZL2jF#pw>g8? zg!<)i@E*~E-QulEP3`_e#j#2>96xgDx#r~zcu0uwblEPa(`f|_J>*mtq6rQi#=fQgf*fqwA5cmy81%? 
zFov<^#+#!4&<3AB)YukkscWsrDxz3Mh)w&fbWa1Dk-@EQ3i<0Az+~?ccg2Qq#NSdK zXz{zlSb(1}HbYk#2-mFzrF0epYeRLR4Hb3G&0*GNpe+!nYYyD)Um*s<5e{*z3))ut z*GAW^^M}$}USGBRhAPonR@%_qjD5&zYAR~NL4SRqF;HKl8V}Z*0e(2rP!rh@^w$Jh z8{2BM{ZrEn;nBOu~HBkFmO7V0KG*HbQB3 zeJC=!$zKe0D?owz>%PViyWQw_i$0kokOdK@^xn@|fGzswkY z8~-5gxxlvp_Tyd+oZv%+_+kL~BY+ls+93kB0zQko7C6DzaMN7*lW_$qGFa}aQSv5GeX8YeT>3_igU z+|-BQO&Z<+__Han1Nb`u=^MXp-~{pBK*6s8JdFBoANW*vT!fnA4d7IJ?8E&D@FRc^ z*UPGz52Z2-VO*QhP=ueR9XTk|m?Mpa8s#C8)e}bEFKLGrT z>u<>dI-HPnva*ixtD@RiDNjGc#}~njqm7;o9|XMcTDZ8qrP+3;KNLo}v}op>QpZf2 zzqP)tA<(*R(ae?AOJ|qQw1p$6tLmC@bS|2?!5^Nv_$!v&g>~Vuzh!On1{(ylh8N9@ zhFa%`>zn*7b>Z19f%;HexUDfVyS}YueqFew^v*dmZ77QZji|d=@i3?-<$f_im!U<@Ns;+plF> literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Hash/_BLAKE2s.pyd b/env/Lib/site-packages/Crypto/Hash/_BLAKE2s.pyd new file mode 100644 index 0000000000000000000000000000000000000000..c884350139498788832518342d3650a85b3c565e GIT binary patch literal 13824 zcmeHO4Rlo1wLUXPV^8IIHHt3H3qYem{uUu!M>c^JU=Izxa2gc_n&s8~~}dPArU_#T3_&inSgGa9M-$<97H@^Hv2I?=oGH{Xsj{Et*NrTot@WwQa{@asT8a#6{TZ4mI`;)Kr zpA=Ak@-;ODuY5zVpVsP28*0~)jP#5uyo}Y|pTPppPOeVVhS(Hmjxl#4n*ho+I<@i& zK)Z&w83+(=WGq7?U4pC)1TrEnBdbPNeNY;FvaVC&#f%-nm2fo+;8N{Vj4W_OyXx!3N$r|J zlzFzZpJD7=c&w60*y}$?B$MaD#&cat;*fpdF+>XVS0ZN$I{HF$BBu?Z0=kWhd}s(| zchWLbBKr%v)O+FmL9==bfVeBJ(aLx)!2uDS$~)rXqY16*z#t`hhs&ZwLoT}#y~pL0 zS3rPM;pcq(taM0y4@}zQVtWq;s$nx@TaRz?xZLjN4Uv+oKMSJRaRB#jvketeL5-tR zdpvZD`cf8ZA7W*xlR;8q`74|RY>NO?yOptyczC*cr{3L(_UrWaE2;f-4l(V-8lt7- zo|WACqS}`UtUVdNNp)Dji62)Y#Vq7fc#k>>O%Uqv@q>G4KpFqJ!UZ3<{df{%a`ITx zd<@H5v(cbYiYxpMSCPU)u40AX<5FM?r@XprH<&Q>2<{J&oyjh@!Wc#FtrxlZVT|vH z-)GYYyD_Awsw>$OHE%$Jx(d_OEaQxnWRTL~8`T!fOwFeG8(@jZ)b}K?$24*vKF-Zl8NuEZiz;-Mo7->*bhI(<>MBU%CfD3%qsY7TOj@ge>p zT-4DQ7U$&u_YB7F%aq~+??FUtK{9f|aJN_CW-Df|z|7g*9#;{0XD~0B|1AU#emn{a z7DD@vL50=$YcQGm)E|<`OHGfBY_ehX=}nQOA^bToorPgi9}PPTlPewPv8vRAA19L# z{B_8H;81S5Pcpv%q>g0B97zgc_prpm5fVewG4eKjWH>9f(f|(>4d$i$CiCCEI<3QC 
zhV(kM@-A}a26)3QudZ6%mG+yqp2>O6GJb*oQsHN@Ny29J8!&>8ck`DOS|G!V0RFEU z_!?Blc=s}W1Duc!CG)?GXs_@Wllfl+lFScj)t)_BCPw?pxk^b+T}QO24|^zbrg-<2 zu$h_4HZd967wo1JGz&xAOpG;ej=H_>?#cBKcRC#N_w#eVwJ9H*f5zdQwt_LM~T$A zilKtfwNBw^_3<7uOA*{h#8#>lNQY=!Ig|NgF)Y~~NgD6@5jn4F^{TY*oXZt{Mt6bv zH~$3YduKiU=8Pvu)p=dvkP`7bAf?=q%pa#I<#i}guRWR1P~&lBCgEprjslOBJN5c2 zRQEbTuq%mvry>EDymm!OD7!IbT;P5!D$*rUis(te{57NkR*Is0(2MF~s>1Vkc0t7g zT2yu=QnCZGSvVt^&mrhiBt8PWoAij?@OdNWlzybU@vYAc)8^ zF7kUrc#IZ06n<3U-O2pZ<7vK?u9e*9sz~kJMhf2{Y!Yqz(pYV-_i0Q*b{P8+#Ace% zz|TNa59{2`>hoGx7&bM}AEfF7-v*-Ze{q-j7K-?X_R?MxJzZPOH&LL!3t>?G3c95% zkf*lll`PoH4|MSKR^rnvbGB`Exa z9;w%3EF_xvi$|yL2F}z2*mzJoRz4(m_R`^fJIGW`=HEOH82?ul-IUm)$I*zR0l_et zKc8B_XMQ>e#`?8Zt<$AIh`kDfhRF4srkEU(51Uy+a13s^}@HkmJiClUcVWx$UPlbCl%d;|vZxD*olP4p*e zZp1C1SbbJl-V##;^=H~KzRlGOTPd6OlgaMHf*#I^oJ#EK2Ppi;O40w&p)!iHUnONYd#Jv`x+Uia`9 z<;_FQA%CO1`RpQpbJ)ZC_>go+j+Gj`IX&Xj^5%05&Hl2e$>q&C3_d1)5tg?^D_z!b zg%YuS-35b99l9wgmc%m*EL2cZond65$tA4>CYX0#{0KRf^KO^2+knHD?5l`762UwV zH@D(MkRv??Pt@!OI+zLC9Y8t}{C?ihJ=c-0knZ7>wV2Yf#XYC8g4y1j1jbIs zkZyDGTK#bML~S1d75BWGCq^D7Mp4_lAnZjrWznnq^|q^m8_{E8FbMXBUM2CCQ_1O7 zFe@h|56&rFbGyQ~a`TVTJJMt6Hg5+a5CikKfXU~(oT1`KkF(pn1y#5h4fg}kA^y)hS$g>5M42O_ONA}yRDBfvD9}p=%$6SF))}lhcE-=P(PdkAR%$~E>du;bQ}`fjqn&Ek=T9_T|~Qq%j-mYnLX-FiuNT=ak}?0 z(JmK>)62gLvRj`yfvP zexouk5ga2&5?D;-d&8NX+2A}_uEdsSVp}`4Wfv=dO#KgF-1}Hm;NEXUOHJJSt!Sx* zdv`=jt=#+FXsMlhzaK5l8{@C?3+is>R^e7}1OvpPT+qYMjnG@m z)<>QBpU@ZQ#O5{M}j5D9$WSdW#A>N$I!#fIZte*MV!=z-U!E(mJ7Y*v85(%y&{^;t@MCllB0`1DvJyn z!oTvsZ3D@} zUvRYn@YPnZ`z_93iT#9i)>Wp2G+7J59wAsJgu>he+8;k}7|d*ZZR>RlMin=A8u zvJ{6`5~@{B^yhh`xGe4}O?vt4`@OC5nCVo9$>cLO}jc#(P#luaTJ%Y#O@Ce=5wi2E;Pk1!xGQ@kNUaxQ%dP(R3s%lNV zl_&L*aD#`>$H))8!UeDN8flm~ZAtVyWN9Cc0a-{ut^$27kl1&}OBZO65vpR)QLk`> zm>$GC8=9>L7PF@(>=F9FO*){!N>J~zISU@{+Uynjy~3|$zW=iMdKShejv;1w+T3K` zMH3$)4c-|q4M71DJlMfJg3mRPZbe>cK<2N?iMPqvvh-?7xChN6C1a!+Uh809LG=n4 zasZNH4ol)Kht#_uHgT*jN+nTmkC#+BQwreS4BfunUg02Fmj(^{)jEM}&<$&AE?4+P z&Ao5Iy_$tH;Tl=kCkt_ya`t6YEtX@ZEObPZT{nJaqJKOYAD$vpTX^0BbVLfjkcHQz 
zeIEF~-!3Nx;L{#&%rsu%Z<0m_%lP}`Z(3NxlC${yh%9_4eCUy0^70cHWswu75W!BA z#!L>{67zM@dE%~x*mWFev7cg;#}2m-~;4MT&U2U>mtr zbC)cf7EU9Uz(Pmy09PdA0Dv_MX!?nFpaeFW&ox_)biyo=Y1$O5p9OBYOlSjJEE`7*lD}s_|<{aq&wpE^IW{qX$Z2(_# zL}e?NCxS?&I4Vz!VodZlyMh?=6ha6^;Jv4lNndQ6i=t^6UsYVj*W*=LP$}c5GYD36o_MtI1cRMn8kLS4>%AOI8iS`oz4q4>U3P-j96TOj6kkN z60o2enM8EKI^-hh)Mr%8r{~LL{(m9BAz(-7Nq+Qo>1ZAC?zM~$;H8!R1{Mm2%`w?9 zxQO1bweO{AMv_kPfgjNXV=`VAT$m(2Kyi6bBQ+u+DvB+BG?~8yLd&8V^p+Hd={2!Z zt=-%0wgld9p5N`Z&i$WTzSHft-?ub(Nw+)i!FA?ubh{njKM|SK z?VdRPX#XlaljB8-MLO`=weMKgN0Iv{L*ta#7jD4IHHH5|?Sn2j9o+yp!8XWd@EsZsf&{(NUDk>srnSo0fAKkN$_t_%YQ6mLuM{(0eN{N7njFIdbZP zj~|lHGn0=W3tb^|x23$c!t}v$pKwg6?eSfEtgNM5izBNf*0OYiTK{hw$AVMP1XDX@flIlUD?61BD5P0zP? z4qQOuaAXA8slEfsLt10kr3St8qTi)E7io!@f#b0V50sSm`(n0}c#!vTZ!2Gp_i*La z>X!F(PDM!XAc9gigLG(fG4lPm_(*~$K!*)Kz5X98aAB@4)H+Xx*QMax6kL&lp%mPb zg4@gn8lo|T)~Ra z_h;B)B9C}R!wp?=-)x4zXK`s>vj4bqylnNww*;bItg0d zB^tc}U_+pd@By^Pw-R%aA`oxgFOY=Gt^Kj|_Ze=~El7q6*93#qoc`V<{apsDhJGMx z-(R@Mi>QM@59%qJrW4Ws++{%Hn0yP%4`s3Zx=fZoJA>uV8q`75Eb)>IS_2%$Q}Y7=vh)J=?;vmu9rmSiGiYFQeZ%6dNr#Ts^?E$1-+Zs(uCV zYf#edRG&1W-C!~>(*pdeIU$2hm@TpifpPtLtyfeNf6{w7`p&z8vAN(!P}60p5f2d= zOvGG7%tf4)(`v{vu&gy`x0skEU}_brjYTQFiwrgsv(3(8wpke!StM@?=9=OkbTIY@ zlveO3XuwNCBb;u?v#`86l*&w&2N`*@?5&1;Gi>~A{sdQq63pCF6;%GFSuw$g&YQ&1EmOhwoS#nQQTL94n3a25B)RICZpVp zvKplcWfKZ4WX(ujjd^{S?`|}tg3Bk-8fvS z^9MtLmJ#)awV(_)H-N)%B}`vJ`AH(}<@B|BJ}}I+6?-{-JqKDFXpfJeZ9qQr3}`z? 
z&>jaZ{zvRYd_K+hbZfq9S{iCBt!WK~1OCz${BVP>;7$_F*V3{++`2H_TpMa=Y4$dU z0vj1SMJ+2D0-JVZlm%0jmcT-P4e44D@CPxBEj29( z`vV&*{DJzGKvPX~EoKqMGy-h$C%Ko^v8f}dmFoijnmQ2K`$S!~F&OeURW>yFGGQ^%gVhoXBX)_uVl5*sihXXoYe>X*himZ;L#9*DVo@g zm+&e-yNOi={p+ggH78Y(N)Cp7+g#tU7M87Qt_cKM?yjl{ti22G4D^xuB-2g)=DQjK zEzM2j03-^UoK%mNX8#6+g%FvHu_>&|zrkM{4*9D_HH?Ei>c?13Yr~AD;0!FK8MT4X zjCKB+))|d6XUv?Dn%8CR_<}YBw~XvEpR7`gpz~K~@@iD3p)R25d+92sptfkx$hD-v;PJF7ym=f=iGKneZDKeH;BS z%530|0QR7)1WxdYJp4`rd>fz*KkfT~+W}udsRmB)O%#%49mm)-6v7JtYf^X};NvNr z;15#xPQc?SoZxR$_z+;lmCyzJ3AUjSPlAu6aDs23ke>a3>ydX6-Uw(K4+xxK0SeIx zE=l2afX_{U9YEg+NZ)vM0w;*~1{!_?;8Emv`$4BX<00f2zX49Uh8w@EeFVG$@Fa>I z`NWGd`HiY-W~twCi;7x|NjC4O3+gP literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Hash/_MD2.pyd b/env/Lib/site-packages/Crypto/Hash/_MD2.pyd new file mode 100644 index 0000000000000000000000000000000000000000..8f75dcab187c371efef60a91cd36f34361db2c17 GIT binary patch literal 13824 zcmeHOe|%KMwVzG0`N?t@$U;Pr1s4;F2}_nLzfyO}25)d9DD>nd%vRcl)i`)YmK7cDOa^gRkjhBwG;S=@@XS>o4 zPHfY;4o)mx?G2b4{PnB+m38K-N}sP@GOzTQ{Xw7E>oYqSE;iTIS9|gdhIEUjdj7te zNW#xj-;Sk>w|?|?GV(7UOFP9tN57tSO2^C7-nMYua4L<{VW-kL&EVyazsXJ|BLDcC zsT#fTwwh1p`9tPubi6HfxGfT&t2Muz4!WENvBsM7*kO_eJcyGYc{SPt5`{4_8|wrj~} zY}{pVD03L)GkHg~GAr5{oWG(hh^bg@FATNf_T|!`-{3zLCMzv3eCU9yGvhy40S zV#Kt%6D3< zqAXcvit`>W2~!T>y^ZWlcDWtKu-dm?lJy6)al@jN7RiBMxRJlcXK|I+S$jZv{fFcIicM zG?iWYMH<(J-N3}V+i7zxpqgDtI%;_Umkp0k1s--KnPZ?%lnwXBKvcYhVJHYfq8Qgq zvZH&P0yd1A98`kA&Jsh_mYL{+%d%ROQR&$+7j&wlYUeBT+5d6^bToe4Zw+e8sZZ+?mTn`HVUR`ggwugt@%pd^MoS!kssD{30eJ5`h=hKr0*6GKjJ7pD_{2InXENQqg^AESJc8Gupj=rxNr z@U|kOXzLJnW5{@*{aR#f15Js7vjgU@q%L5kh~kCA$S$BPW`21mx|l;h4rM*?xx9XS62tTQK;N1=Ei~NgBJAi8d5w4buo16^>vg_;dv;i+s_Z}jN`!~;`m^@hCmH`h^Gs}?`EOkxYE4=8)6eG7~iHV27 zCX10_jjqVB{bD%NQatB~lqD}1=9<&tPnYMVCU&^ywEN)<_vZASvKWj6G|6MYdZ?NWHg-&rikA$ohM($O{vN>7W4sOTTe(yO-F;n 
zl=G;7^Yl+3u>sQ>i|9?TL1$?4pGmrtuemZY@~p-FDTcWD3i_IN{wUtX|MkTC!1-0G$W~=#+szI!q!({lddAh|?lcqd$Q>wPuIk0*lo*436N8{1|`2au~J}H}4~pEyses921$v$oLdG_#y@LL#S^gqAfB2T%brp33aB5b58kB zt2>qUr#cUO$D{5^a@~=FV<3a;4*Lsr_hJE4+XHUHXlu9i2v0iYjw0Jhr~H~@bH7jW z)H*hwz1ianI_0Bszpcj+Dby8ZbPG>9HlOqQJjLN8OHsyt=t+_QQTMQY# zZ-K!k_umv13fhx(Ov)*!NKRnV#DWGQWA!bUK0{1pUTYC|>u~szeMMnsM$l@X zaD=*b&anOwuze}O9stt@AJ3c-@zXi|8?U3jqIIWCQA<*+ZDG$lX@T^jj1JWOCaQGl zqt~j3yEAO~46Lx{y-XqWT~ZV_ya(VVxKkD$->a5g9as;Eae)BT>kf+@e=v&~heeFa zOu>VBQs?Z>;O(;h1xSawQ@iv#z$B6a{ZGL;&Uc!nnW1iTmwpSfm|_&%53UVThn?3R zcg{H;%;K6Xy>okmhLB+uO6}qC%Ry*nb0`>=5J`vj8OzDv@uLrnt$mi>4IvyI1Ydrl zljq@=d~|XXf8WpFTlo8Z{(hFff63o1{QUxdALH*<{%*s&Mc0WoIuF>(79$*v)LM39 zU$S_S$au4{IZ4ip)jTBMZs8T4i}}g8Ihk_Xb1ErAiFF4l;vqIQqqO0Ru`&l zSF2nQrj)!VUz7wVk444^t%sFia4&M%1u+sFQ)GMB6)E@ydLTAOg(iOjW4lSYym#t; zQU0ws=a5i%NIn5S@u`^D?UXx}I}uk><;0H?-w>yM8ZNsK`ot(SPeVYV+b;A2x1t53?G6BLlEN-WW*3;JKZqBwMr{P>!6T zU~V5deaDJr6rB7wkG+ilhQ%d7zbL;i%9q6E9--+a_)Em4xcdM@w_!~`B?KokygBZZ zT#>Jj8jgDcT!*=fY|)y5`#Je`J9p7=&~siF3STGhywHiT8Qxixf1|vVh~vB5E++Pg zQ$GrAvcoxFuR7<2f*85k>I`qPtIqjkX< zhy_N$=r9W-=9!VVB5y_Bj=WuVxWn_@h@@hHTf91ccMvswfv|%&kDG}2=ki?{8pe&C zgKiJ-vvW@=c4cJ2b5YZ4RJDg+$>QgM7)}(N`w=K`sd!a9M=>4!CNGg4_-VhtSU#c{ zpei8Wz@YNU z3!uPWM!)*uaJH7%X{gIjXPw2>;vm?pBm>#T3n_}#)AXL0d2eRaEIIh95(#w`3JN>&K$K< zb2Yj|qjzcaevLk*(Kd}9(5Rx(gt@AoERE)ARMcp-MmK0QW{M}Y{4R|i)9Cqvn1o6{ z(eQqa#!AO(ayc5Et0*UD!7B=ntN{w{^{DcmLrD zDd}v{j~l*Oeb0By_b)r-`D576r4IYI=NGKo@`(ISl~Q;5 zwcmU(hmD_g?oTfcuUUHd-G5r^zt|VNC4IWnz3ZV5CX7A#Sj6_z5ur@7OYCp;<91{@y*i zxve2$nh@^f`tFpJg7ehch-m$7yH`vdx>A(T`oVFQr_r5g6Tw#lEYcwM=VT&098X*igL^!(i<&1|NrCs<+H) z@L;S|l{+4XsdBx*%+$_0?L!Ui`;$D6ny2qj+|*aqw(6G_vl+`8z-(Yy7O+8cfZoIY zI1`SOZnZ92!eduI$B2Cw(tvy_e$3~s3;P8t#~Q`&?U5n zsePh?zw+o%Ho}z3Oi~&%Ri`l1jAUlYAJKb$oceeDmoXtzM=-v&NX>)b9~+d{;rcZ> zKfyf$e@0KAk;>8+B{xuAT$8gmAa@Sw1FTDWy1}Q9#J51S0P&0he+^QsobnR}mFtpp zENLG8sya5Ajh!K|vHsD$nGK^VNI&(v46<`@IX4#ii1NNHN^t9hWF}xN0>&cbXEf;2 zbS!Np%2ShAsz0eAu_D2$_1mg5Br(H`G-k+8cBfH$lkf+BO@9EB`8kps`ibfibSwdr 
zuFFhinbk<8DJ&ChWab+ibS6D)w}+W@Z7i8&xIL0tdVX?)+8^{~hMox{Ga6{@)PH)2 zUNu3_M(9Q}eQ%3e+kj6jOl5^1XENJ96EjNVS!(q-W*?6^Myj5Pa>xwM^YP{L z@wq3CPG+M&PG{-wrZGL{rQ%w~9zc3lt2a0wv-4$}&^WD7u03?X5?hTmPyMAEZb~`G z+mV(c8PR9=b?{3_)Tee(YNHNi>yhq5dJyR`q$iO`Qvx=264SHt5@&v1!}@CGUSI6< zO68Pib=CRhRrL+)IaBQkNd9___pU;Y!(K>pCP-gNc}Oemx7hRGTyPl2ql2)wfi(ha zAB62fjPo?Gy?9V%Jk^-ICZ+A`l-udf=`)|dHKSNf`JJ=I0)syq!OKHpm_ zdHnctM!(o2{l#K)!p$D3FzEMte3IK=U*&9(N9zuxWf*VOy#Dt%QLMG(XAvx!5s7gw{%gV55|eotjJK=u)#i`NGv zPhF|E&f^GR03PDlWS!F+s9cF07j}79`YZkG3oC1D1FSxgmc6&K*1N&8*yRmKBGj>F z^w#NF8CZ~W!>Z7y`YJlfYWyDTsfDb3 zZDk^9H@7a3i)EEt<(G0-dny}pYp3T<&(+3wMLE8{l`utvMdp%)s^EOs!R6+oHOHdS z*cUD_Ur2s10co09UH|Hbmjix%H4Cs@Sh|YUvmo;^iJ944r0K9BJptxN*)&$docK&L z5ByipKcWSUM#iQiQ9L*UvV!4o}&L~YUE+_RC0&jGE}_-fEcHJ<2m8s7qX zT;qv;pz-~n?lFij5CafxL?WF;AJ%xHZy`}XdqM9;+(UdVXwp@TodHiY2Z?Z^w`qJe z=r6|tgZ>s!`o?P}c%ry3;QaNV2N7R6fe(&dD5l9nys!~C(OZz706#cRq4;Gg;)*ul zMCTzL15Yu>^GNT4r?}&Dq%+_tj+%}{W1Io{5YorMac99^Kw1hO_ZB?1tpHEIpC}eJ zL!Kzbsl*ed7?XIS6c0WNp6F_%VW|7n^JTDrj{m;P!j_32Eyc1t)Et82i%=$_tfB(G z0QAad?oI3JYRzjr{s6+Ag6!#e)@-xKS5;r_^{pz%E-Rg%J2TrHkPsJD*5bG;$X@RW zWZ(1+L;74glj~O2t~W!0FHn#j^!sK9s#bgIDg(K7-YS26puR@Rt*Wn^T^Xp$TQfb| zjIhRAgV=e=;MPdhw3seruI%8KJ#SV2SHid>YlnG9&JOF2EjzaD$k}P%DehEu_V2v7 OlRfoiH_`v+@&6ajMwL4N literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Hash/_MD4.pyd b/env/Lib/site-packages/Crypto/Hash/_MD4.pyd new file mode 100644 index 0000000000000000000000000000000000000000..6ea2863323402d98c134916686f61eab3d68ce83 GIT binary patch literal 13824 zcmeHO4RjmTm45O_wrp8QPOM-;0t!|dQ;2b*6dV#4O)SAUQNY9wq|jhHww$Qgva6pE z2iiEuW~~gOX&1IY*=`fMr<^YBNhyaqDd$vU1#IVs*iO1Qr6u0NX(w?CCf&v)lxn{_ zGjilm()R3j&z^-j=gs|j_uY5jz3=DFXtesCos46Q=}}TC#(Dwiaw+$}aMLk%+12|l zV=v|&p4Y4M9-ddb$se+}2irCV8(Qs+4S_&g*uKGM4@LrZf57g(qsHFa*5oTSoAVr+ z>Rb0W$Fu&`_-4A8e)X+4vq8UnEcXNhob_7n37t}(`=&$Tn@{8_F#klJ0!>Q&)337= z9O$QCH)?S5O_k15=r#VvP1MK5BdYc=*7Qg=YkhuReY$RfU1K+8T{ef!2Inf3TRRib zs^Fa%wu)ylmaVWZK-LKcITKeFtB0uCC=EVJYn*b%_Mpukz$R*3&)E9}s~IanKT}sb 
zW8}MuMU2gvVty2{E@rG;F;uH7LqD1HMNLY>zAa(!o-4x5ib7-8#sN&Z?69gd*whei z0BJ^nPIeiJ<|AOr<-+tS)xYeyluB}81fT#T)_Kz*{YL(0|MY`MBiafGnU{N4?W zor%m4B+1%5DWy_pB3WmWf;4D7`AzH;kQbt-OS(tGOQWZB;S##dik{bn^LlA|3DNx} zN%^aY`nW;91Hkf9Lg7`3Lj+G+AhfzWVR=GQ$RacnVqbI^g;>~O6=DxL?EGEmz%Gbq zyyAP#LHQjh=}K6-2GCHB7#Q0z+T(V(TtCr8D^`CFOiTAGxc3_>K#&P?0zzGh@E!7z zTu|pRqvUyD331zfb^_)*0Ay=EW8I0!weosZ+>ZKEwf<_VKV>4NGUg6VA=l#6j)U?@ z4zR9N2_fdl?@ijr@yrOyE)r|3};gmrKzbZZ& z&vGUN@r#Z!K@2;}1@R$=0AJYo`;vR1gvp0+-${NZzuXOHl(}}C6AcoUMR#JGS#9h> zld`&GY9MBK4i)mf7^dPGd$i(4GCFdT+=h|Kc{F|-{^t{1?C z*TmQ1GWk0nrcxKGzL{BN#_Ut8qA6YEMkrm3X3i06_8wd=bf3knl3)E(Dus@pMIY!m zd|6s1Wh(=#I~6uWQ(VL~)njQ!kKt?4(yz9R!3kcmUpy+%1euxyh=1M5ufcS* zch#s1;J9-zWqT02ydWM-*}eoMWeY3hz`k5PqjhD!S4hp@M7)?6YbbiUeDCG(nQTM6 zG5DOlG!+kDi#hLH2=@0u38EX5E@qgGhzW7RqY11&!}lS8(R0L(8N%4BNzw&;3u^ie zw}V5y_8SCX6v}?X8V&2kYGA_s30hn$sb#-jM?GKQi{_`V13%`~+tW;)Aez6DW}@La z3`0f|lES#k$&ap@yU2kqK|G$a)m(|@;*V+nccwg81Myl%J$#t5@l=DsE)}9pj&hjb zb!-wu)Oy7Ma!VP)N6dC2N}PkVtn4Y<3^YsiM^jl3{fL5Bci+9~*xA<#;%PMmhPOWp z`Xi`~) zXvL4wH;ZJaY$7_kuoEAGUvqIxW0q`11@;v98T-&lhUG294IctePC@9Ce+yHDsL>(+ zMn$sFS4mRY^X2bhD~6-z`2r!(5icY`CZ!A)aQd)iZlSPPvsVMmd+K+n-;hBbOp-j7WXJIk!- z2f9dsvULdJVL|Lq+0I-+YRmQqaQ4+So+52fZ(Yg0C z@ZiW05WY~tr-Ge`+wPtZAdkk%$<;~az<*z_Y^FX28 zt`fQMmk;RpsU1*@twCKw=)vAuu>?(-cs#N(dQSJim*ig|2#91!_Ztxhth|a8UyimI zI$8Z0qH`2FTnaBcn=NgIFOvhJ1F7Fzwi$j$n?=lU7Jx{CAX5OZ8U}IKDISLjd!Ynp zFmB2`!0{;cuD*w{=B0-1m}-$NLZkUbAUJ%#On^JEeJSPSk-AzTi>d34NRIFiwzIUCDXvsyIxEJ( zVLOh8^<5*8eEBRI;5>a7AU$B3V{yY=_#hdb{|AyzDsyhF5P#m``T#@Rewq>#@wmEE z-;cKFQ7;}meC=M~OnwCm4`#<#Pw>4%ba>wbHjydY?X!T1|13jHh(9rkN*oQ?3{$q{ zR0BQB)0<#?OsQ3bE^LHY>v5SXXe%V0gxKkv;S|-~15}C4vRa6BQ#WLY!sL6Oq+XS? 
zqPvIeOwySvsx5Gq*W-k*wqf=H*3y(s*(}hBM1W2i=%d3V?wPPW4u`lM0(JUr$Ww1F z>{~!F`|{y=V_e|mpDD-qGmatnO4z=iT(%w)dMYQf3-P%*wEe~_Xx|;5fozK$fGtp< zp+u8LVcCh`Np+>t`c&hfta!A21KT0m(!QM8HvePV_F)23-xszGt25yowCr|^lE?Xm zTRg~bp9q9~E&TTPZu12qZgE7Ma1QeEN}b0vVA;)YKjRPhs$zPF$8-RC^x`>q-WaQO zARQer_8WEq;YfkuUx4vvllE|Vbim$k=mCikqv8=@ zy^tDm7mvD^jYf(TO^&hUV-a)Iyb-mo*xdC16xk9=hQr9Dqx-FOwBIR59}s6^si7J| zI68|DxewT88M7j4U_fyLKdFB2?<-SL`zo6WYDfgF@doS*LbV>A)Jmk6? z>v<;(N9xpj4S*B*a+jl1;hYz`2Q6JQ5G-s$mabeN;+^aG-YuxLSat5}`h?gsjy(!h zRc5i?X|-GnuUV|T-Eu9~V6$NP>QRfes+!m?uw8r0A$;8d%eA$RvgrA8;@4yA{0%Ik z7VU1!3W*k!T}~<%BzxxWC_(pb=yvJ|@7(H`8i@SU~I;FS^qB^Wt4k$^397^|9OX zor!USnxlx1Iy%Wv=OOvw_h@h2Prg(lsfNr|vz8tQ>v2E};wfZ;ic8xmdm)!@1QP97 z=L*i|d%r-6;=(qJ%U?5MxjeW|;NBIk zo20y=Ec)O&7v&ZumhPvqR@`EZo);m$$~ZY(yehxHavjt$v9eYS6}iPmyITyEi1(Mf z#rs`u@j==e?Yt=Q#eJp$UhLyc+-s7=6-(mqjQ-un~-M2V{@kxDQ@9KlwV$DL!v{pW;9-Cb$ozBWBZ);(oq(&?IrMa(&_v@pX5+^Qn%9?}_VQ z88|fa7%$0l5e)c`Hx1EH`b-kfy~=ZvJKjTb2HdiwF#|81yplqLm=4GFpXIqecYLQN zkj132TwXeLxnh~>k_1n}=$CxN^>h91_>&ppW?mYf3CAezOY)|JkcCr7{2*63hvyQ1 z*-ozV`b*o1K&^=DXI#=y#Y#k9F_f$H@?7$-w-jdb-2T7DQp~_hc@$>;Z7$_1t6^!p zH`ArkyEB#G7x$Zvz@6gjCP_TPRXP-BD$bt@`Z$6<%A1bz#Rs*ZV=*WuDw9JAI;O^z z2)r^ohD^g;jazkQdRk~vKZ&Sg#Ugsx1=L6mV76gjpqOV)5yboyVvZ@F;GGih?4x;> zjPKkvemnrnjO&kVmQKx8RyZnA{fasbP@gi8kR)HuW2TWsB5e|i)YNR4C%>?%*nYFW(u2A;ws%!h6E@y zDP;qYF%@8i5*NtOgi;5iGePJa-gDuc5k#Ci(wIp)WmBetuEwaSq=SU(Q~QK;3Mwtq z$pX`WZk67xtIVgfuzSwARLYAtka!+f5CZ!-N-K_RB{ zDICY)8*vne7a^f>CM*S6$XBeEf|XV!iz!%vYzC=K!AhhzyvtIs0%;8LnSzzos={f> z8MM;#aLRUB9-cg?XS{iseoeg_L&tbS$J5&aJZA2z$C(;mxhrLR18=^B7(EI^OucwQ z!IJ<9^fO(tXJfxhAN#k%@AkWlfBNOVC;MIbn`;*%k5@&eccf92<7HkV^1jSV%!*Cp zq-h$*YLUn5fdf_Iuxy4QX$LA}a@BZ5WVXNHqi?i|Jd+n)#?_70`pHo* zH!L&`c#DUt8j~xBYhdDAK@+8UI_Kc_ay$GgN;K*`Oj(&ZYwOM_q1;3FFRt_FKGcu0e?2D6r_dWtkyszE`6O&Z*) z!Sqm`(dZX7cua$5m!~CEcwXaAXfRznTazo%;LRGW)nJwezoEhJYw$7+Zq{I0U&)oM z6OU;u4JQ_;w6$b9Ow0dI!A_T|-_ENrtryFA+SQq^_;0}UZ{4&hxv)icI#KYrL{~Wt z{_k)Zweel7+PX*^w+1h+SKf4dg!#YEDAMdJ(O{Vd>u;SNufNr@+bV0l^n3dlezsA5 
zgLkwBNXOU$#%=^m^BiSmY!)7|zcIzbON27@H~msS{f6%N9JPZ#8N^s=Sy^NAM#K{| zP6Jk2xdyMUD|wP}P3PT1JYgEIN|CFc&Qs*pP2*K7a`n@Birl7Yyjn%B9Xt$Gv!zy% z>jaNs4yL~=pt(qMQl%YRw{C6PfMKu%MyHG;7;bb_@ru5IX}o(BeM6bNT2*dz8c&rQ z1J6$VY|%bcz>AeVWSdW=={t;SqE+9jUm7fBtmpy{mVyS4zN^sWlrIfvPaP%QYFi2; z2Bm(Ekp3>B9kdm%2TC`DLR6jp&LjOj1glp*0f8Uz-Gc+O4OcyQPXcN@)h4RA#0Aq> zfz8Nl;ap~G%3-!8+03@MVC?K1^>^);5s>Q&m@>CW&11ly%Aj?4{g}NttGWO`Bj+tK zvb;6f?bH^piP=%eok3Yh6Asq^@Vps}-Kx;6-gnoj$EFH^2&~=4IR@j76o5Kpx zN8w^?yUu2S@Afg9u9sz#jN*@MmbW;&U9|_cEHN<43{yM#g6yY@;1ySLELS&!Wuayr z)^#U%^Nd-{xMUV{;RoeyC`~tk2Yx24jOPV?Jy)GoHg8TIoAaMWcKO@6Y{s!XX5MdN z#_&w`9sF4RKFTv%`%F6R-zi_v_{z}9MHd|~#A+&6?WH%oR7ya*P}ZT?VYllu@D&Q# zxCfB>YDHZK%EKs+p*)4M8-+Aw;Z?7m8CczF_ma}~jwV*!Q5Eoq>xgEpO-t$;+uA!6 zq{$Zw2ir8ps%tGKXl-0v0oC;+tzk}r2W=NpfT)k3wQ$I zU>m z_IcLoYYq8;-Br0}?d`Qyt38XBl&Lx|XN|C_t&vW$=AaL2>JC=-Ktm|x3qt-z27Uf8 z+|tT!x`5aD*e$Fs>5_*+v01Cgnf0>DrTWSl4Go)-M_Fkv=Gy3 zVPi17aFeg0ePPR@g^LzyW4x#yU!R5%qD+zHQZ7b^ju;GM|Dx`5L+(O*Y-5%}YP11O`w2|iWG z*nZ&80Ge%h9R=J9_%g~EaDpdLs4x1>eE|yLC4dbY-URrhh7Eh395_L|FHrF70S_a;bc3In%TbQkfqZET_yoU+@+k1ke2wxrJMyaMz$bVU$|2yC zbL~R;C2-2SOvuIF22QzT1ck=91@I`!Y4GvRf}KHG2ORG$l-#}^I6Xg7E^dcBLCVt! 
zCrCLq;RGokejYf%=TP#|_S5T=-~pZTe2b;fIeoO0%PXn&NBH<6lnJP#XVb3(UVJUT zWlL*|eX}nZLb_M6U{Pt=0=qBJ*w*9^Y^+$Yw)WPAETisI57?u(7T6=7vyf>E=ZX>`3eU z&3JaWE3-FJbwgSgh>H#Uq|;^Ueh88D*}u?}RoM3Fc!s335iLuTMaCOSeuib4{S38d!FIDZtp zR+CO*8BV8#YFo7S59oXCZEY`YeKw$1=MR!UfJ_3_AXai~uMUI?qL+YwI`_BsITI56 z`rh}x@4kNTz4m9X_R|HY4nu?g4h zoxonmJv6mT<2p38YGb3{?De&7@YOb(>uOtCS_9_w9pjlob}{ zSya(W_BBMZ{+s@tOw0fM_ut7z|K+2(Cm7(Bujii7@bTPtESz3=BA3I06L}ow^YN$O zWG4jlpMF!X!jX5Beje{HZ>-x$Ixb(4+sRn{ z3P2;r+puf}&tfc_lP*El1_C(~R~D-Qt1>AAzSvhN?s<$QF~>X~_R!=S#x4+aGqxIf zMlUa8%VEi(}sxn`F zZJ?GhZy`vK$tIwYL1QjE!lz7OV}s-%5a0~P=Aeza?2JXryb2>pmvhy z^ZV+6bDz`zA$joG*vroOJWZ`&tDRjEDb09E3bV?H_rJpN9pJ)PkT@+qY zXkTd}^(c0ISeLpB!0=*}(<VF(ygj`z(4X9IzOr@FNzpxC#o) zl6=l3e`xDXeE=aHQA0-$CZ>Wq#^sDaS8)UDL&&r=* zi#i8_hWVzBGR7Xuu|-dQ02PtN>CgquLrzK7O~L9VxH+%eVX@Ng3>T$MKf$3Y4~9WO zAawj1RCrB(6D~{r`15r7($M$D4izH$l%Y^s6TAgN=U|#`fTn#2mrI@J5ml-Ee@>^N z_*v+H;=qIqPugS!sWTnWh0;RMKB}>7jK;wAnAxbz4CY2wYp?^>;i3$0+VrPuGd2uo z+m6$yoS?1Tgx#=<>#El!GW*8knKsW{AzzT+lH?DOBtc#3`*4Cw?v{^86dvN z`7M}^`Suk`0vxmTrcM8ZQ(lq}q)lG|k~Rf+w`XszmQh}r@0HTiHxe!ELJoya&)ahq ze3mj{zp?n7Jrs(Eu1Cy2F$?6Mp(n`>1YKD7Fm_Ce5FUym`*eBKmvwan!n&h3Dx7xU zgP7W_`z0u(wOjX7V6>Ipx?ihU7qWp#_eUwYenvC9wHnfVi7ppDJrne>OKZ*$HIiKT z)eI36FJc)f6d`G>YaaR0zG4+Q&@Rcx(xw&HV7mMpI{$5HCvqTCWvPJ=(>F!WE>ycm4=2fq|H?w!nSpIK_VECLY`u1sVq*LuH;5Ji>0vBm^Lx=T~Z|(95J}RmP|mTNYY3W-8-m@onM=P ziKP@&Mk%!DH_*+3*=dsuMVC(EgYau3Zqta8hcSRNB~tih^iMLRw-nK>1)dm%ke7NN zrbr>ZCH0j59TOm z{%+0i2;~FGy1(silda#lE2UFR><{>kTnDEV)AQFj8l*xCCM<(k2f*rGd(e(FIS zgQ>5AP4z%g%Bys8;V%!+;Zs{77Dt1UL(pJuTl6HRGWl3=L+GOBp|7NVk4?Z%mUbQw zT43d)B={=K#nQ>@$Elyg`++ie+16la)jdEC2=$~tGCZcM1OsAm6##V!qFW8HUw1uz zHQeJcVGo4h9*iFz2e>`Px+>Q&)-Ye!j!+A3lIjXy0D_zE0|bP=_W4wk2dlgCae_3= z7mI^cu94(<+>N ziYpbKwnfW8VOx&{v>gM%g4B6T!2R?LKxV-dV-Z~tY>CkHmBgLkF}G5RJa4goiY2z6 zrUFGirkvFGV=gpm`3KVu`5Y8?=>J(H*LD}N?`I2QecxJ-x{~WGdySvJX7tG50u*L(HXfdFy~K?YKieAhw@v33!^s_79hOT7nLFKt5~h6(i*uXMT@im)L%;vBgsn)>@qTeUPJ- 
zFT(TsaFwMX=$1l-Pgvlv>1S_G8x}>gH7roNs3tp$1*R_Y5}2&peepBYROW{)(jE4LM5`~%qv?GBl$7Hvki zVeh-S{yb-X40HEjO1CauqTJja;lj^=8uq?lWC(31LE*ypL3jzrDGOgaq>NqT-wci^ zem}%(l2YssW+^`@VO3@-GtAQxw&^}`|op=1! z2gDYguXBS5w+%eht&~4>$$xOkC#B9_Lx-L6t8>86 zaVrpd4(cf52!^K9knHp#ii3H)NIH2HNTR`rHc>_oE@ z%?-?DF%q!`sMZ4I+c*u3&Ng6>&>$KfO``i_-o?Di-G+%(7B5Iq2eRDDC%8dRilGa3 zpIPiW!)-Cy$U~9B&!*Brv1$_H3jyQZw#SZPQI3i;dnEY{X~AJb`C<7Ooe@?^=yAx2 zR3R3_c_rZ^hmeqFeyS*f_+S|^no|!UM(I|)iR#4mIKx53c_=V9N$sBy!FLM^7vhmR z)4?z|&LVc@lRG1(J7E=!yNVmPXQC>NYh4^Ecnu~pN$6L^E$BCV?Hh{G`yEL=N|z|!_E}IjbZ0$hwMz!e)EmzYnw6Z_`PPnwh%zt^n311 zXL9uU|1bH!a+~V@Z%!chL;NN0JNe!QZ@~SeE%hmn;+2DAT@UW?N@R!Pc-yd6lF^0V zw{zR3jr#o!dRmMW{s;hdJ!~UJ(l*ZPn7g3g_ZXsNzeBf8b$Dfx`@1qp?w&g8Zf>MOkq;5}&a#MD;k(-?zbd6Y5w=|Q z^@wk-8x$yoa!}l)od>!BB z=dm20*(b@rlNP*gD1TkPNQ>a7L3gSRP6DSSoDtrXW{!kcxI>@VgHvhU=SX>b`D!|B zMI7br|HM&8*zfjDg^J*Fw5Oy~c?)(Q%0szHc@?%U!M#6xf|hMp<9&22vvz{?{FFAm zlg}fBKdFu#9rG@Q;xm`x6Yje=v9|`+Mef*{HvJq^r7)F_!Tc_mipr4~Y`xt~Q{vnZ zmQcA$tGn&m@XrsO>9*_t{Px}*-S&b9tLC5pRCvc~RBOa0+LVCBe2-Ech*WiA!0mh_12(N(|gPUZKAa$Gnp^vB{kqA*YrCl;}!S8R_R_nIvj7M2w`gqRqM z7C3~sD1$#b(q9r6OZvq2_y@25Ym7uh!B*@Ldc|0;!722JB{5O%1zR5t#zf(uL+BS{ zeR)m*%vF^1&|n-h&5p>^|6+LLKODj_F|zH3{a;@y#u7PB;VrQQDRY1u1YY5FGP6Jw zj>vImr2UE|)}MmG8QH4&&XUg+k$uj{6N5Y28uM}I*IERrRE~4wdkkcJNfh>zv;(AY z#36Ijh1V6sVca;w6$w_@&yA16pz+2F2gJzJ(jP!Gx4>%)R)*5em1L`7QTT$f5B9BL&Xn$|# zAYUPZg)HX56N~8(tx(5=yp%gLwk0iu+`Jr>P{XUJFtGTaf_{@$T~n@T3Ru;moU zvG@d9h-xf!zman*JP&?EZ{+E(7yjiR&L2}^{jayHO;;`g^;&h0I1*0ay zfGUSXtcmnf)mh>XDO5*uFE*>&}Dq7?3?l{e{{3^|HBr77 z2U8N6>^Swxb5NH{>LT0z`S3f-L>sody=K`<=V2mO3hzjAfdz+!_vKe#LAGisTWtwJ zG$SqJ`EZAfvk$7_qWzD4zm&%IE0!6_GFuPHNkH8vGj`(cSY#1}*I*lkJ~9Hz$v#1H zC{ExWh(if^K^o3N5j>zSjXuuoOv6cVbMxUjfXjBM9kyG1J`Ve|at z(PLO3i58N!K1k$2g;xXezdv>SJ@`jUX9oGcTR?;qSz<9PiMz`wuW@0-8QFH>kzLn` zLV^Maj>B?daV(7tW8-U%B z6UM;0yB_^%EBO$5N0Q_)ZcIWork6a%tq_(XCXRvLAa~fp~20e7s{aszQ(&48_r<|9^voOM?%|=JU&cSkR%EymzwC+bHK7X){`$R z)e14qJq6*VT9hG1v05qr`CE`n6^R^|G$JZmarHIP&`{pEX9MD*cNk+YNzRhOc(;+c 
zhxb*;{V7z{Oexz!^w$Hb!<;hS0akwF~SQeBU$q`#srafe)Fl;mUE8+w|yQS>Z67yV{#01`0uZB?YK=*^Slc8tMDxqo>yVf0)?+sg$q@+FiVv; zS%r_P^!HSlkym;RYeO`#3>+#``s}95U}nO95gf8B^4mmJk17}8J?4F)Hg^vo2zlxz8q?R{&rjKN zvfLB#8ByL`Jf9dSd6N*gYX1u0UqZ`_Q~$a#;~K4oX&2+G zlF8X@@>~O(?7MQP$a_T%$tSy4f_K3cjNJiw1i3BCv)md(HZx!?2CT&}C*P~d)v(<4 z7}sl=-lz2nHCa~GZmXtH%L?b_vcfsp?p)G04IezI^8MJ%&(Yc-pP(j7!?LjHnj$?b zszWx|bF#gPJ+Ni2ju|H9d&!SvKV1Z`o(373 z=}bSAozzCN+!Av8Ea+3sdS?E3Dk~05W5)Utrk`tJ_G=k?8Lj?SjDu%v|LUu=SdQij zM)TV+rdglO)(2q2+#Ir_hTBn<3p@1Wmzt?$;~#UF4m?r(EsQ?8J;>!^_dsV~pd-)5 z@MZRA)R$8V*p&KwHl-?$O@U9ROe#K$9ZOD%!aw9YD~!TLH!Om$%v076P4u}5+l2m; zXuHvdu(&Afv0^`l0BN11n_+F;7*C?TjrKm;S+pGF*(5ZQn1w$fYVp^?Y*tpUm8`@b7ilSZ^@) zJMHp7Q+cg75cGM=rM}5i@7z@9@e=!z#-@PBhc6X%D?EX}IBd>Z>IswweLhc1!0l_T z^Wcd7p{~l~Yi?{ogk(tU8$t!%W%Kb?-x5zP+3NOr{Fug;Ywr$ve4E`KUqh>}xwfSa zs|aElJ~s7>(ktrO^fA<`jXqCpJ&5chqOREN4|tlZ8k;?$9}Dmh#-?i=jsDv8=y7IO z<9c7MZ*zHVQhfy8*HhQn&{$WkI1k>M4!S>3Ume)&^;9>uG_+Q$8CKmyxx@aEHG7)<9$>4= zm#@6Ds-n_4d#+WHc@?XJO|5lw=WOtKJdEANsvknud3@l%g+X3p0B&h!w_d`lJ?wT? 
z?e}b~Zr~eMP39FdWOz$M;|6%Sx~10VYkjD?*02&4`$IRJH_pL0no6M@o7ZgZz zx;8RDn}waPV?pL)P0Yhun3*j^Tg+V8<8ro)RkIbWf~{dr@cY44hli+2=3qDDkVio9 z>%U5lD(>&#f71g-BV)7Cs2-jR*n##4DprC|qn`UC=+6L}QHzy=PVjEjVjaNge`21 zRD3t!po$axlZu}ObmQ~qBGfnp+t5fR!6#Lm;P27M&LP12ixs>HPUCf?M5}-?y0bc<^mo?dj>ewKXJ6@ zfhPgGPy@XTd~6*?b=wZKw?QZPBeai!6MO-!0QC{oQvGOCfKxr?n2!H}0qzDoGK;Yr zfm7Y+K_j1R1Wcew#1HrfwAGM_6y?tXHNfeafNFO$_z6;7PdGuU$q6S&_4D(<3GP5E zAUS`({vY&!hCkmHV=KoSPc=6$eZN4*e*`cIV>LB63ji;_7T&(8xyk&X$LB}+wP?od zGV2Vpr=_m7zOiM)q8Tfzmdu(r!|V^BPOELg?RU|P%^v@Z+yAjJZz0tP&Fh;sn<1dZ zzi39#*K(V`ZlkBU)<3JcvCh}(Z*2(7s%ve&t=8XM_TcOpW|Uct4XE~4jnzh?Zpz3) z=W+x8*85iFf8~rji+7rLmhQCf+_H1q&eC1>UDB@9uCu!?>|)RUt-I*|=k@<5VI$FY literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Hash/_RIPEMD160.pyd b/env/Lib/site-packages/Crypto/Hash/_RIPEMD160.pyd new file mode 100644 index 0000000000000000000000000000000000000000..85b228b52c1b3f4e303c3ac3668a4d06f53e0644 GIT binary patch literal 13824 zcmeHO3wTuJnf@m;XD*qUoBrQeVHG+ss6mX*F*9;vCmJCr>n(}N1X6SBToh2& zu#+f<->I?1^|EdY>UQ1MZmD%`ylkBiBtfA?*qzVHA3@BeQ9?K@{qDzEEiM#h*4C6i&S2aqnmdjI3M6KF0t`?&)4v~BnF z9z$sN^y*bjQFnW!ZDpjc#a&<5+S(R#-yC*F;;rtcR(Ig4W$u=?hVVS6GvA|&zI^*# z-^}~h^xUTw zK!ET(#>^^d0Rihh#g$btb|&8MEfrPwFx+7G50#ZN;g$k;&w{?gudSa>J^5k%qch9b=sZ zAVH?edy*l3m^V*jV=@H@1O$Vz^H6dwKVzwR?HZ%%V-*@n=e!&ypnj4UiAL&yt1+ns zLh=@%{74BM)B-UH6Ze0=LcV9>{xuN z+}rDF{HQmRITp`5)+hJwbd5ZKodW!F;%G_NU~Ez1h#^)&w`qw_46*zknr3ohdr4pV zA29l(Li#EI@tKrLtKfSGj)-8Y>`ICE_o~%mbdr@DJXTqWd0et`hsP~l0|joG9}Dph zd^^+cK}csx>>NPHbX;I;&(P2O&41si4Vz%QWo>bd?0-`nxM2e#CJSL9V+BU`VzRvjRJ1s~&|V2HW~$0I3Ij&KJ6f3K%@qCWtgH>AX5X9C%KK<2 zS7S8%(oHot^<~FRozFCQ?h1Z_zajGvut?%U`kxVm5bx*vWSSu3lK|siGjcCXM}Pk^ zZ2=tg?aUP2j!jnPuVjk84kS|)Q>z2d*-VVqmHT=*Giw#mln~ZX;%Mo!w2`NaFy3(9 z@+?io-E%PKjToTx_feC10FzD;j({L12~VW3`h>Mq=fXY$iV(n_O`Lw6fLgPsb{~*K$<7$-1+diperGhJ4-ak+_^4GM8^@hkwy}d zfnTK*NB^>GD1d&MAIuajI|JSMPv{)*WrA1($!bq6VwfqCs0Ge0k`oP{QkW3(tdcoe zLwtb3;>GY0v7IUDKD|sUCErn@jE; 
zcFR8CzMxC?^~%q}Wn7Rx6BXZtDp?HlBK&n^0%nRVf3yeHE2xT*zp)P{me8bf$%(SZ zpqs_bnIaBF6Fc!P#PuNdbIg*pXuzJ5bnZlbgkgEhNntnezHtb7>33m@oUnS*zt)gE z=ql+`_x$v~VJk+Ur-x;3nng)~%w_ASATJ(>75Y+RviM`?CJ2nbw?NzTw;D!2qV+&> zZ_)Z%($8Q&8a*w&40UXq|D=&k3sR$&^u4HTHK+=y4>tcY%rk8M_iOU9vhD<5XDUve zrluoW#gc9UGI}(pfBITb(o1pEiPWF=R9R`JvnJ*#))(l0Kd0J~?pl^e4DQ)z(VjE1a1DhU!!gRY< zu_0bzprc1NLM*lhZ4E(#yQ9p6u8bdyuS|Swxb5rd!x#jNWJ%YXaSyC~4=X%cv)5oJr#@h#)|nBj?m)XNIt*JYU~XyPDQX8ZYvpm%Qqj=xoyfg#msnfK$PmA})gXw;R);EZ_*gM~hTjaZocFrNm&eQh+vJTT6OA5aNTVG<2#72mtdf;3QbUHQM5*UN(yE>n zT^q>GK00%`)&pmGEl&8VBFtXEYMQc{A`vo?2+%15d32a0gJa?*1SH^*N$GdMPul$0 zw}4{yIS_ekQZ}YvRFCmTJbMr;dHr?@*^QXc<2jLAPM&o)9el~M4uH|A$hIf|*aH3J zN}|syFMcaBqODX~pV~O6D;|BX*ZNkU)YtR}`fkGnCfyU;hRc`o?Gzsk@ZO;B%>aKz zT0hnr3pY#aKUflOjR*K3AM@>$lI4b=eL#FvT7RsmHC&;XJVE=*kYnN>Bl1?I+T(~< z$qDDTJqXyWu}d>zS;}l+v68Y{a~_LLFKZ`oy0G=*&yZ8OZ}Z5{8gTehd}Z;m-e_Tf z3+*@&q{M(Bpa@~0qZZI^0Mdg!o&}Qfr;Eq#IEQS7_5i1>#gx@8KKHgQnjf_HqVF7Z z=@&9HwZlE2I6ng_KKE{+n7EGwDb9C6cplp+Q_dRJ+Rlis0mqrqD8w7~$i2UG%l17o zymC|W;GWTUQD6L8E<6S9#DKM5*aE~z0)+1ZlaBYfW2K1!cfYU!RSYp2?gG{Wu04U7 zLxIIZ@nThyXZYgbxHI8giB`XI){Ou(vL%!Z$B;=Uw!3O*zf+GsAimThp$bemI*2@e zWoBK5C;H5+PW675df%Yl2i5yK>iv7_{R#E{ef9pNdVfZ}_u#(O&<7oTyF&cXGOXy$ zFdfO$Gcy2u$e24l-74h`xofA``845OgJS0ofQT-G=&Epua}XcVCAq~pNKruX62%XS zpD0zJR1u~27)Z55YCk54Q_6=VUOu!VVDfhml1U{_l-@d(){tB3FCX$lL7HNqWqCxV z#kRV?)6;<_KT7%`G}GWHM%+D0xe1^|DYpWgr<6MYyh=G{_o$%d5&Ry_8Q-q-FFv6C zYx`!1QBRlgu#O|}H|B@0vBmJymlCJssm@Nh!ln8;gO>~YWuQ-I4*Spp*4tfE0%ps#K9fn$B-eaf!+ke)If7WaLRk>iLw;Wo3R=^;s)S& zd99rMn)^jRF&GEVJyF3A@b~4#A4W<7{H1g|mW<){+N9%iv@_Z8wc?QRL+r8X2CAO> zVf@9>X~+uFGVw*h_hFV#=Fmp75&CFm`e|NfpX*eD9?c2rgjW;T#2%wQ3onW62uO1DWn7saj8lP4s)qm z3BJRn6-qG8r5lvsQEpIz9X#Hl1UGQW6yU>3@Np%$iA&Z1A5(%)1UML7{Dczhl$FW{ z%f)9p7kA*Ljl>6m-?MlOcxCc>gXDWn^6d}sK-boQaa+K+gMTFLO9hPm68|K~Ukmc1 zQt!6>ARm?!qlS1-fUoO(-h~h-@eb@`JiZA>UqET;4)B%@0cH8-P_pQ}w=x;h=h{4< z^z9{mE|;p$Z}@0XvL7IAz5~W>Qu6*^^GBp}i;kpDcuk4wG1`TSS~ z>3llCmv5Gn=fCy~1XyX=5aQVCTe|sM0cBk$Y3J)Yl;xWy-k(U&=Vdo!3*6PxIrP%JL*h;=Z3ho12 
zMh~`P6Nhu#jQdVh zy>Q08|I7FVc>Iel-aW3AsXqC~$8QHepbg(%efVA-AHI=WTh{*Z;rj_Z#9W<2(kDhy zS+@aeQyV}(>Fe~S)bSf6-Cl3b_`Rl$-+p!cuKax2zyB}ehsh}=Z+8WFICpwt48Qkg zCZjCx7SC4ooiVJfMbdXhmFg?4{R6%O*`*&eZV&QzvG#)}O-zZ8$%*%|_GP~ALGjuY z4?e*wAIJ2SV54!Tvcx1Ack?975x62j$ZK+1ONxD(G zfzr<}T=e{wDZ0syOgTl*c_;IXzste-gf~hOJ>BB5=($zUlf2@gOwqaMDl103LPYjT z9Q42q4$&o~9LN@?FfQ~1Q6wD6T2rnOl!N46GS8D0`qSurCV+6n`qFOZyU=pRGIr zxl4z?*5M~QoOZF+zEFp<4sX!mY8`${hfnD6XF43x;qP>qUp6@`=jru;4jXj1_L9kb z8+Cez4zsOK>h+)La7c%r=bRAu4jbF_NuB$(y@b77TV zlRu`ztUN4-NtaMyVYWQG$!Qj?hJ2&LY&Tk%Q^+q6tqzmTkS7WTqou%Xvs)c``OM_B znoS0yU1WkI&uO#d7dXUxlR>ar%z4fNX0&0mVT{raI-TkGqFp|#o1oQZvpYpmuvo12 zf&z!x<`gXstIck)I4ow-X}4KKyVYzia5#kmr_&;c0`Zw)fy3qyM5|RGSYWm~%>vl% zhMB>Y$`5wG|NBGFJ$P&N0@v7o5BBBiV}?AVNibWiwtTz8Ss+?WPD8#~G&-0~$Sbg0 zjrj#88(hz`FtgJxI1Dy>K7;E{qeUzb%y}jz*er0pz+kl6VIZY+l;}`CKquxTU8vX# zL`(A;4I> zRiKCA@!ybX)AZw|2shVkxODs6 zIxNwl_j1ktKT*t|&9t#DY`_?#ZwfZ|a z{kA@@iGIuW(5N%SpOp}g*IVDX5-g}&sb0Pmua3(l;wha>!`o2In@O|^l~0~bQ~9bU z(W+Fw6_aTyU+pAXwaT|@GEL=c2d&!(`}E)UY3|Y-*6N0 zY4%N`U8mWXORLuSR!pL4e6^stNY85RJAmwW5c9%XoxX>tf?hqgNE#V?5VYb68WLB$ zhXE~T4DiDz5*M6ZyGXV+E;NjAK>cIc?-Tr}Td{iQ-5iZlbM||S>~{&QR{azMalrQn z9vVf|L8F(TI!(2S=zr`opm9o(l@-Notf;}liWZt#(Sj+%$IsM$SAGF=@7yVPZH~D` z*EfMblv6k0^`Ch`Uey%*jGMpE%JP?*+o`V!y!*g+4CS!icK~?)RQ#Z;*G~cd6_ji{ z)z8jpH<%2}bQyltJKfAqUnsKEBh!Wp+fS(_`DFKU@Gd?DGG}X0wPk5ul|eK!5x$7< zMO_9gVV&E3vJA~z+7b`eKTO6EKnt z{d5sL?JOg+8w#1pi!n!Rx{2;GGgpI+xe!y=0>|)lk{7cuq1w#0;s@Y2QBIU<+jwsM zG_Qlrh@Hk}G!(NL3r}S;junp~YI~-pTva5SlxO2Q9s}|Vy0aG@@PzNjw?AZ`SuYQw zz7=HvB{d6E5;ph&{bynB0g@eWqwPZ!6XyLil-Vd3ppc|IyjsP1$ZD2WR0S&o^Dpqu zYhTmAs@7DrHpOZts4UXd9&Twsdrf^?``C#7omb!JLf z-ny!;wV^rO5L{g!ZYTE3o0?5bYn|RSI5JVHC5qAV_T%9uC*Rs#NkGSP5+|w ziUu|-hg!WV60U0ik^Ppa%hp6=;g;&Amar6s17X70EJL6vT6Z&QDs!mm=15&+O?h2& zbCk6iX+bpAH8-sdFAFtAV=~0CE?64~-yC1LG91ZT`L*EED}y25f_V+i&2;|N)Rfmm z+r#xujZO76TJR99S)fN_4K=Yf?ctiH*2cCPeI3;_LpWoXu$FL3Gz{#T@}=!QCvx&~7Je04xb>mKGFng}*)Gxtbc?4W8#^$3? 
zKE4pJ6Xi4D1Rq8|*#P_zKsR#1Cx8>Y8abf}xerI!^G1I=&TfNXH5OM#skht4@V2 zkWa7!g=7-kq~in+qmZ4$fVUKDcr&2sG{%}iCs=|)JOr=S@dm*EI2~~SeJdb+73yo$|KD$l2Z@e!y%_Nw5*$MOqhW@DAW^;N5_8k>`1VQ{HzqN;PoG z{hmg-0XXHDvryDn0VYryK*#$k_9)5_bm5(q`aCfVoSrKv|KEI}2I<*=aDtTA6HbtF z@bk!Kz!fO>0RQs(TZ@1}eZDTnl9}u|JzJR2OZTckBGwZlsY-jd)tQ zCRZDY`f64dDkmHGw_f*Z|1)9T;@VQY#l59u%jzv1TY4Vb^Vra1s~_)pe8c12kN>TQ J=>N0-{{vhyn9KkG literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Hash/_SHA1.pyd b/env/Lib/site-packages/Crypto/Hash/_SHA1.pyd new file mode 100644 index 0000000000000000000000000000000000000000..58f6ccdaa5ee6e847d3b804c3a9dac69bf491dcb GIT binary patch literal 17920 zcmeHv4Rll2mF|(RWQ2br0e0#TrwA;HDc~5cCousUU3?W^gPb^E2$Mf!Y#H46ulh+K zlO}isc~;J7%Cu=GO*8Wf?Rzt8UfRWE@+i=(R}zayG&Oc~bOs!oybfQm!_nH|P;Xo7Xzy(H zmgVQ?I1SRb9BPSV{Wt4tnKSp*AH0^0`g6}uJ;ea8J~s7~NwiOW%_;Etr=|*+cPdA~ zT+#mIi|mwy`janO4S4A_y`CfLYg(JO&>WwYsLst;^P}0U{pr~oGi~SD97k@}RmJS9 zzTS_6J)a2I2j z32tO;1LnDMY-fygw^w1T_zLp#g<-T;3Pz1K7v`B*|ICoGfOl&ExF_c0LM)T)h8!q~ z!vSu}e9eu4M#i@01LeYb70xo86Al+z%Jgn*loV8llVn(qbHd?bELzs3cN8*iL?g}l zHJlR;7t!*k?vgX#5i^9uY@N zi*42pA-T3J|J|DzyAYhN#bWlBOR;qNLNMz>T#F6ZPyan41^R2Dv!y#n0!u@0nF6JB zxjOWbDUh?D%u5R$DvhTe#;TvOq;3PS?TZRr4Id_W+J;WGJEOMm#zeIUgS7D7POBCU zIPF^aA*VxGj|m(azu@8T$^)r)Kx9YMwj+Uosi1|i$3}Zpr_1#-Q)tCc;JwVY^DwUc zmVZEnSV%?DX-710Tk7~!)IKufG8-r@Qh0AUf&AM5QuaK?c1DBqQ}^iI9cVAp+hX+RA>rg6!` z^ABFnn37JWEsfBt(9xMJIjZryo#h%2I4d>&kW+&$9Ll}%10cduM{wOmdM3Tx0cDiC z9{Z46W@G%$=p*_1U>AmzH^kG4u*Hgo)Ljr$XvPs*@jV!PR`AADCnQVdko+cSA~g45 z;2Ffo#RT$*>#^ghxhP#6z4qnM>!#E<(3{84?9rL1CMI?r(fA=PyuslKyNbefSRc*0 z{tmG~t{Q%xe}pC4ITExjE!+X0ePoIpJ^c=*h^$P9E}I^3Yur){={2Z1r(boJ)9Rcl zNEiMS0tO!q0|P_Y@eHug8h;TgOa1VZbb7q$? 
z$z{NYJ!VG+Y0`N9H#0~K{19TKkc6ZmS0(AuwRSxz(53N{>B6-OFr5Dz#eX^Nh7UyQ zog1OUbfH2m5O%2+YIatF1&?!!#?k8G2~tZrmXDBKR4J7Q$gLdd!s!^6?hmE29{N{W zyoP)4%B-DZoyO1VOJMo!O(6dBC3{XS`aVswEFKJKA#V|8sVzzuUM-k%7inR)JzdC9 z<56ufmCs`TL^)FH(Cg1r-R%IvuEoY2nvAmSwrg@sI{=Y!fO-ik^7vGmO^reQjl=>h zMbj<~qq>T!SowS6V6l{p%C3c0{2S(G!R&M)$3)|i_z3jc>cW8Z+y~Ksm=ekVOVm#@ zxVILu+=Fub3Ix5>n_xu?S)Hjj^g>7Rv!1bN%MyVC zIeK2R{cFAx1cUD^(PREz)0s=;4@CDqee9IfKE$Il*QC~>jFaG)RM{r+j*4N~#rwGiN7^Pu&4bY7H(1lEza^)xvGG z*94qJhJ$`c!3;inI&}y^5Rv8T(8s3WG!Z&9epKWA>B0-hjL57uaF4Uj@Z2^E-vKy@ za9ym`>VAa5)Hl)1&_F?|ORr3YzPu=&dFwHdMQG4{2s1dguXr3o89y1^9Qx4o zz}Hi+ViB;Cr8{2^I>F_`MEFY>3(<+|N2xwV)B|PEvfN_pw0wgU5K5%qvpr&|MF-g8 zdH|{tM705+&oUoZlh8O=H~=EpgK-gdfbB81zxGbXT9#V6VQRsxT2ubBC}8vb1_5cX zdnx7Q!G`_0h#)OXm7-w1XF;#+XG(zzKAoQS>e2Om7K_- zMXsAd;Wx5^;_jL0$hJrU2!R@j5{g^3Wv6_nbx$S#)Yn0HJjT9^aEP%KmnV+R`#ocO zVZb!^c-XMZQF*|&N98fM{Ibf2lB2^}LZo;-l>Auj~ z;jIaqo$lO0&@uB5p?Pb#-kBGy(?a>*ser=fp1(P5TM^ARu|VmHjoDc&FndK8fti-Q zAN~$GmE!@YcEE(qm-MUIeiZW;sNBM^i7KIlNex>*z~*pf3UFsp*pC>`)QI-+vhxpp zg?Pp6DyOW)oSEBp@b#(w9CvOEV-H|Rza=f}o4Xp${~e09gKrktLVrhu!uf9k@$+ny z!`F@JZP)s@p+~Xb5AvpAE%qCSmOHFLDhDMGjydt0;=wz(mc-%BRzLz32AzVxIQYbJ>vS7xIQheKNZ)#;`)rZ zJ}<8O#C1Qedrfi75kKNur_?L!kP%1PoTG59)8xT-&8&4q#J%u_&KMQU}Rg`HjC)F*)I#AX)O{QL#@`1 z%ep$-G%Oc2I&9OBGEqZ0;ar4s5v~p;kJC=bjVPL(wrRDy1P&eD+ff1q+q8OTSBuU6 zlDWn9@Cfif6nGabB7pvTfr*MSb*K%8K63fKtnAMxwj+hJNoSG#w-!_AQ0E{-V$cB> zSmnJ71~vYIw(PjA>Nr11@u@L^7ISmIX1 zF$;s5sGY*kR&51UeHLwJI{2g(SvRH+@BqozJx|IJS|Sz~Tw(EH@CFvE1&h09i>|g= zE;fGu{Kfp%K5npEe~nH)!|l4~iCJvHwcGW3RPG+V!YbGZyGJK7x?5)yoqr6SA(f4| z-}5Xx|3E(BYa+MI(3bW3i{4Xgo!JNMP~3aA&f2jS+qnc#k6h)6xW^9C($_BNTNN=c z))pkOfcx@TxmAm}kaxXh3ZAP8oio{X{t7om9ZfSD#j785hdF^FQ zK9Y)_quuQgxqx!7a*uL11?s&@&5>mRHrUjN~20G)yzC-k>dAy}@tapF>r| zJlwF+Zl>V&0x4|yFI>R)sYM0bd0)D!p0BQ75DQ(l`W-5d$|F3cMOGKU`cegyd+?zY zJ)MvT!szy{-l*dCNJ_{fsS6)bkF^^#KG?H%trk1&;IW?7x5ZLLJypotVkvu1RV_g) z==47&S)%mIofAUMs569*^6(Hm5^8g~EM-#^1u4iQxSU zS^)~){}YAKwtAOu=9SRUgeL=7#f1Rr!bh(|1<$g|Jy@YiJeUYH@wN4C`Qok#@!?n6 
zvSYTYWBfy7g;d_3sz3sTo|<${dQn?&DZI9x;=^oOw=A-j?y3zGwiE=z?!OX6HSDkV z&BheLACpbeocdFxyOD13HvQ4ky>VeZXHJm^B({YEs`zqvNb+&!D zN8m}@zHNo7ZC}kbxP0R}T(-@&?Q5MI8?)wKM(yj1(C=!>dtI?NfkN#|nL-gdsYuuM zDKnMWuZ`lhlJTOGI7y-TE3yBj z7v_Q6`9@`BDK&$Im0tx1qOc%SSSTo`FnbZ&%WV5rEmkm0C2A|LA1{_?NG!>Wlp*-0 zzp(A|t;k?1K&2QImI|6fu@y79DrJ$vTo1LX-xG6JyS*EZYbP7O6p?=WB z%CA8uqHuGj;1VRTTno{>GBdC$Qy^_RK*24#q023qmg-D_R?CboHJL7}GA*k!1y4B7 zi5|6?9=B#1ZW9F%$m26X99FXGc9pL*tNhM9m9Md5fYROj{cV4Jr%?8q%(%6pfcr_M zB#O~?WcYme(nu_3QA$oIu|$?UtaQg|T*PdXhQ-4&RT|+ZE{!OB5Zy}ZtccMiarBFr zr{WGl8dhQhS*rAs!h4mHn8JsM7$3oa*kG1Y0(w!RCk9-l;W>i~LwH;%>E(ZE)#swY zG(&6%1Co459wr)s10Gi;(8b1(pe};Z5qUUbo-Ykk_wE=GSEWJ0l@Nx+AZhr3po2uI z$U>c@7)>05I1papNeCnelHdanJkm|Q6zMQ^>m`Xu9LPOn%nL~e(GLWRClHKN$6iuo zhQA~UGEVQ1G@OxwgR4YF3egF~3MLSXLJ|^o0Fppf=%nOChLY3=#vmI6P~|sO+3AE3 zv@S7{2O^NngBh6}Zut#CSVv#LmitxgCPlEV94gDI)1gMbBg7q6 zl3;N}4XdQsLH?q_WRmxn^u>{q1$K8QMbs+@qPG1x-=q<-9qNN|w>!jpTz!Ftgu^(p*)%v*}&3b9X> zPAc*x7z(Mi0BTdgg+ms=s>lNvp~`7N5*!gb1xj~6&5-N^Da=drP}D$Y;r}#|t^u)B zQ1L^~g`^9}OXXs?CNcmskOm3X7Q2 zK@%GGMB=~$buJOBVGgkQK@6;v2a{3AiGd&$M~;SQPin_#DNaidloKkDUx5nbS78Hi zts;y_f^B-71g(=LQ0Jkn$dke@6Nn2A=_CSyS$+}M%49Emzm&&`Ck|qRBK*rkx*7MP ziydbH%(WiA1bYWNn8{4KAZ0>4fFhyOJW>k;%$p=2CMP6Vg<_Ju@N~>rq{6v|1|-w6$+P9cmFHT zUHdIz7qFWlioqlnZH}}rV2WOgbXf(-2n zQE&rt$$p&@IkrPW{zw57VMCXI^uc2kZJ>mb1d;}Xm=Q%f4k9#DDebY~os8xKG8#lw zTK>M0A#lDNTJ7#5K1nxXFFv7*ehd+lR3yRel*&`k-v3%W%-8v%VXKqv(YD?vzoLzSDI z1+)&$PNzW!IV0`<&>5VTpAb1L1s9B!2TF!`1?Hjs0X|1YX@=rNNTyvP&ya^!S2}`l zSwTid+Jz0v7ZISL8R#7bK)QhjORyRUjF8Z-Z=57_ugGr_So2Dqpdz26Pz-zG*xjSV zO9|2rPL+KJvM(#U3~T^|K&S*RkIJmg%CyoN8tUitwULM2uNwkD_1 zLUuzdDxakV&)94))JDm~OSGGvB$I@O25HSJjL-&;23fJ11qP2og;-Fij~rEoAyRHa zy0p9y+XQzUmdNF#6WAxr$f4f@FAK>@Wf&lKZW)6v0#_uuONWzvoFd*r${wLFgj+r! 
z%1LZ4@~g<2MCOK3(PUhVp_c|yAM9V1hP&f5DK`uA=*KwtD%lM{sc?6w5F^Q6M@s+nuo~O znw@44!kUrq=TY{9&j2W`jMD0lYan?@@PaH3%E}0iJ25DScB3e8q5~OoIW~FNg)Xnk z&maRKKf+34*F}s+`eg7Xyn+lGWDg6n*k;m@7KtV8UAY_a z&&uz_;ua7@Qa(!brBR|bVU^)0Vg@Pc=7&{m%hIsO%BgR!Fo?KY?$hT$Iw3_V zKhNVAR4%eL#6t3NT&%G^WXd^45O0ln~8senL8|P|%TH zpg03lKs)xAa``w&n?*9SusbF^hqf2mO~g(`ag`6cVZBH=a7zHQgiIKjC&s`mDI3y9 zQtlUnn)v|P=0KKsvW5;ihqR*(lSk$v;+d+}Z zK}{0hGSqSfpF|AS@MJ0<5C8N>jUoDigq|I`Io{^Iq=a6-?19T&WM&Uf24>)$mbJF2 z&U|UqBPF$_gr_7~(-dFD3-G+Q%8pM=0IU~PlVl6v(Jx?V?7T-xcw|g7aM$UY7xBV; z?hsyf8ay63GX6d$J$3~-p)jF+?o2D*D6XTgB=Ds7v04{Bue|4U>J&c3nt*nsUI1p7 zXr#A(Mt{BYjjv@omy4&xCXUy{c)^b3*WroeA9;aZrntNKI((N@yD_!(Rh?2f-j9G1 zJzqA-1KpK)R~5BA9^*0Kp}RlI@xxX6G|w9Fm;rxdz)uZWyjt&9Za~$58x8oN0e2bj zzZ)=Wz*h`-!GIaz3Ossgp#ePx?5NH3(4o)3Z!ut|_1i}MhX#DcfJY4YpaF9XdUFi; zsDXdSfEjwF*Rt(+(9SY&tWvMDrJsSB0sl^LtWKxD{dQv>gDy;a(n09Vx9a3zE<9r2 zUWF5{B4zYYU_wq1Ow@HylkA5moRS8Bj=14h?R*7qNqv+AmP z4}EVxjF*by8+>Pb2k|gm$=D5m8Jx4coMkU%?AupxTe0~1H~nR;^bNi9FVMuW9*o{k zmX((`wQPnxppF2?%Bt4j)3qwpFl;jJPQq!Ea5bV|-DI5Tw_y^Fdf-O^lX0TomPxpJ z(XR_vdMmB-QZM>#rxw_7=63~T%dRVR=YtPE*t`j1u&CRR!xw0B)+mBrViN97L2q~> zu3qmqIti!u8w0M3=Gm(M{vh-Fi!!fXr{7`J5wE&#@o55n^8j4YI1XQz;1g-!=;gT4 z4`1;xMlp4iXzOEX83nOnfHIvt8wPPJKBXz!;(E>qU zS!_#kyG&C}Z0aVoTg}YsGj~ZFv&s#=%T4)amR~WI|t{>tIkuY|Bhw0!^#}*&t^q|In3T%!mJfe=E9rf0i4a% zjLpW_iS=u)$zoGXGZ>BEPP8{=vrWz5p<>E7AN5nghn4iQaW?Vz+bPTv$YyqYvveoU zBZ4kg4|w)DJZyPFzs&kv(PeQSD{h{_itDGd;)-dk_}fM2v0}s1qtFlOt{hC^pbd-Q zFEm|#%tW6Xv#qE0=#B6{2j+943|J0GveNtswEqg{DV%37MY-w+7YH0NPTQfa^^z$SyW+gw*9NCmJxWKEC8xejzaNB{aoPb-1T;rdCtDAuH z02c-B_Z@%E$0lp$SLQW=wyMUiK+xx{>cj_@_?6l#g!6QE-WTk;CD_pvXzlE9cLaRf z7<-*s*0=fs!N#_A9S^j2G(+2#bsbw8JDS_P&F-yD-Y)8XOKV%e>%%WrENi`i-`niS zs`dt|fuYcAfK_Br>~EwRzftM*-QsN|Uh8~bKZdb2=G%i_ z-?lohucg!1-q_IuDS{Bg$7X+?dQCH%I{{n2#pi8o29mu;*tOgI0dISKYr9wRLjW(8 z*j$s^>TleHn&|9l-Q;WZZL4Z*YxA>CiF~59v90w%?^+LRS_3(@6WpraO~K8Zy}k^W ztKDmEb$jF`WzB7Ew9hs)R5kd!yiKhwtxXNO@}RA`!21Ku4S{W4-iFqWmd*ylvl`mS 
zVeqrPc5l1ii_-e4HS2Dzuc>t}t|-@Oeu*`Kr_LtYL0f!YxYTW|;Q_ds*N6T$Fwkoa zKrQX;#_@84m)*=7{N61MEn>kMh`k~P*Y0R(-3%=^bTs;WoewlL`ZnK>FIM!Cy1fYis>9M<67>TL=JybY5YuEu=SkFmzC)NS8Rn%HGIX{ZTt=|9YI^d84Z%k$}U<`*uR*iHLZ z7S8J(2CJ8KNO(H7Fh5&_6>nle=3{Nl%Q~0?Kk#(mrVeKKEPTp8|9s7b^vx z;O)r8cA!kZiT*pB&!YS|U;^hT$^^e(z}O*_p90LsM{P$?wgWzga|~sIr*P6-c~>(w z4=0sN0UM2SGvE_Oncz>1@?OAEqfGE^qkJB)4nKb@K#oIjJ5HiW@NuI|@Kv0|=NRC9 z$or_=257zp`3A}aOK}pO;O$1a8Stkwfdl=$fM;=5qD&Ca3q*M#;8En4D)19?H_GXj zIlvF_1RHQZg>n>d&1}Y=MtLLPr*ohilqcq4l((gDz5+bKb2#5enV=bYQy%go%A;oE zEJm4fsUP5UqP!RIyEihn5M|06TXB+3+5r1;YQW<J4Y{hcj=MNOUUH#Pd( z%kE!1&w=!+wFSxj`iZj=o|_5c6? literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Hash/_SHA224.pyd b/env/Lib/site-packages/Crypto/Hash/_SHA224.pyd new file mode 100644 index 0000000000000000000000000000000000000000..b176a1efd3c9d79b7158173d602974107a9b723c GIT binary patch literal 21504 zcmeHvdwf*Y_3xgXOfux*44P=PSO;e)co7U7v_U}UkaOe&CmJ9q$|H%%K%yaulNklI zKZ!$>aXT&b+FPx*KOgk*Yi%!WEk$eX3=@*@k`P|;1+@CH6G08o642`0@7m`~CP8ic z>FvFLw7=nVve#aF@3r<`d%gCaGnC)Do=J?cOgzaXW1CUZ$0Pp!jXxH~&bj2JbJz=` zc8%R^@$MR1S>Mp=Z1Fd*@Yk$#*48vNH3yux`JDb>le3}8souE2xw5&=H#t8)$8D0n zu48#5uB&A#!ge&!dM6R41Lx! 
zMzZUf&)AqDMmIJg8|Z30w1&BOhJ8GYMJKly9mN`3&`3PJ!#V-<5IujZ zzZS5N$r=DekN6M!ctk&6V>3FEjM#cS)DN;4{_!w2dwBid8Qv*A%a_|PSu4b#&&WT1 zBV#9m=jySTWBFh#nLH89I1$%l+Z_GhhNnP(J#=hBdv9QR=sind0{uBZ^pPcyvzgRO z4|Pn4C;kIwanzc)5rutIRN%^Z50(9-H|6b7`?q4ET7*G*c!}GlhXZbh9=^}*RBi$T zr_N7!`Ehw$;sX#_8?~?P#=u0-%Gkqw>r}VL^J`0J)+Y;rw6|}?-!AL>sL&J=QFK}x z4cwU6JqoptpizmjKZ$y!B8o#@TuN1rt&F^49xey7em^zb65H|!}2S73j1+f6r%4f2%n z0saxTsJ%C6pI*4{2FC8smZSY2fJJ0>GIZK5<_4ZTHszm3f z$s`y*2OeM?I49L7Sy&8Idoo}RC8eNeh~wm890QkOWP>p>I4ZKpf*mM6T9E3SEPQ`f zY7R%ULQrp0E`V;Ky_I^g;yr7UDnfpXe+y{8R$jOW>^E$?}(CdeS}uME7L-p(|Df$o$G-Qdo!IW zq($fXUrQk|@C=rbKoXL~x=Kino&`6N06jW?Em^o=28Q!rlK+>J8f+j^>8^$hlZ6Vk zV6oHnP@TI3Q}DX$b&ggq?fHZm(`vhZ9COLm2l8Tb91Hm_>g;?&+b z7wY_&u?5z55VQ2(Onvt7q$efA@~?# zBH&25)2Q#Gy55cF7|&NIgq)0T7HcGRlR=nfRf(E;85^ZWn^ z20xf;`23BQql07*ME4G3Y<6N3{L#@16AMs>+x%Y!I&*6DXm;WosBE+dhBOAwe-^Y0 z&i`8my%eqc(Qj=uNQ2Hz_>D?-q5;6sW5dQLZU!bXAAd|FjVGU~hZ`x@1l&cYfu12} z1{>{9bifJ1vz#CL*b*EqT!+qg>AWjhcw#!Ox6-|cd)*bLeuEy6{FCj;N z7u;atYv^W5pdis=R7OEwK7dExdl+Ql8Vnl(2j`ku@4$u`e=WEobjEV`?TNRr3E0UA z?QaC#nB}WzOu+h zicafl{Iv*)V+t>V2x2h)2t7b}jBPHzm9gd1t!+@X;3~Z~{{;XDzIRd~ZEu@SIeD;Z zb1po{^65%Zu+lqmi~ZM1fgYJxq)R;jv-C*b8gC?TyB;oZm(AD{DB^STyfb3{96sA7 z#k@13emKJ|6OJC1dvydT(v*VGJ#O|jf^wjjStv7vniji&(d!TRb%G`_wxwbZ(&@ zdEV{$7)xwBMhOak&G6KBU@SOh@*TS_dl4{8Y=z-r+L7`BWpfV&?^}VUYO?SKWaT>l zE*%u~$hZ2?h|mCMm@J%0Eub?q=r06@lLx9 zha8o+*`HN;Oq1VG`3|LRpef*MRN9Wu@ihfi-pdE%ZAzrnqUCnmpHPSdS2<-Z znJM1(m);)Lnxo~$Fm@D%bXk+7Mo3q~`F{auf9a0}_Ru$pP&ogOK>P;ol!Y%oVzhm! 
zbv1g7X>A30OOGD=gHzA#(XlEgB@fO^<5$OnH*@PR&^y#^>#{xrKq3Ox{{*C*iaP@( zp>AiFbsehMVl+GmXft~CsIESBMqjWw;jnv|_k1mfAx)_)q!gzo= zLYN9*22K#F1t=;220>t zf!u3l@PudzYX!3iY|RR3MV5e7Z%+pQQzV}Yi^YgbAYQ(3JxLM*9XZvvF%eAlc@)XQ z>%>&o(^P|YoJP~<^Ut3z`DBWiNurRJYHZI37QoP_9{U5&0*+jl-R2DiCm7{P(&gJV*&bBvamV~x=PTHHj*5c*%54(Z7Kd(fIC<8|0XIgmd z3mPJdLkYz^pWn*2lpziH2H&F3=xn_xb-!WX1g^;~vy1aeM2r6eJ=ExSTI}rwI0t&U zq4+q*e(Nt(#leC0j-+rKWYbT_lg&6=vHl?1g|q2z1vKPeVAug2Mja@%J1lb4K03qh zD1)xgb=Vz>(>{8(6Lo0$++x%{sC!VaK)nL>YSgPw0MP=Kp4$>nIkO?^h<|Oz;2g=p z%%x*0IheKXrwP-(E%>E~fM|ifH?Ds>jgMXYdsFzVud1exU~HHho5(^0s6Nd{L#ZNyD5+1yTYZ_6T9VP=!~uP z%LwO`@?7tPJtcmB1~cV565oaa@UHN^)e{rcr1Gt3m3tF0wDarhc`TMFT35O#*6&z% z{Vlw-K9;bpE4|geskDAw>0*9;o!McD=&%$Nd1;;KvkZOe)|FNzt^^028?zGzpVC*A z=SDJri)k`-oj22^M;2%Do@AFoByH=|f=dPDP-6W>sEtLZ0%s^`^LKeUl4QhTx*a)Z zEKxU>OMBr(CIlg~Pc>=kG!4B4E8tVxxfdO8}wO z)gJ#vL$1~(aT0Q-`h9Mb5io}i2cEq+XLjKDZpjl zROXKT-X`jbeUs`Qm)lJ#9dM%hmE3Mojt9a7F9RhvkT_MOk^dtNPE3Q7&~<|7dU+Z- zISpQs2B)Cw<)UkG8aXu$%4zUQbVW9S2~0~PuS$boO@q_Xb(-i}l15&g24@Hmok~Qf znWzF#-Lu46?{hCoA+E{oj{VV=s$FY}Lcz$HVx&h95@Tnl!O}FSp)2wpOj=DNuSQMLrjn8x|yiSEq7eCs*!Eo4Nx26kV%=F(ZdF{Ja<>x69L?cqr0%vYk6-cdYzZsqibZpn1+yiir zAiIUX3X+P{CwJ3SGUV<#5w}B?elNru%?2rqY}>l~XExOL0AYcOO<-T$rT66)deQtP^|G?*2PLwS^YQPnbKoMHS+tg>}=ywrlcnvD2FC&$KYwh-p}y z>gwX1U<@?`Z;~NNFD6!nUDBi@sHx#Pw@s{Tr@5{ziYundm0RH+LBSHqXP!BOL9NC~lD@~7bF`Uo|J?*c!QSqzG) z%0jlp>@+b$r>zvX)<~pR2_k{E^G%^Jh#B8ec8Z>y=P!T|Hdw!l#g_6qyUm=n3&q4G>Ot)|~~ z5cFPYp8^+Rs1G(kSgP;_-JnGQ3=RQw-+~X2--H)aTyF>+-;Z50R39$E+$7R_>I{i= z$bC?4ShV!IAgaPO<*AW};$O>KXy`n4NH`3Nd@i^N$mkQuT>cmoTpvq&Xw~pITOfPB zS1>aa4=&+kiVxOImJJRtaY!Z77Fha^sP#(ngxc-b)RKPos2w+}Njsvilu9TeM z_Mv_dH|9+)i!2|DAcH5}Ceg#%$U{HOdh>;TG3tgD6B8!{M8!ZgTWgydlzFb&f#k&cDly&qwyvGe zpqhh*gtOqsAeO!0(`ZQ7pikQedq9{Gp7RK8AENFja*zBm4948No$_fx+cXP}cR=hU zCd`#=Liz}GFcS_LELuRZt+V;^{zw5FQHZFmX_yO7mGPq9KE`;X<)Q@t!zVrzHee&f2O@`Jzl&ACE-1%%7mWE(jsf%7EzAS9 zA@oa&gyzq?=5g2w3;}N<9E9u)*Ro$KUn4RO{yGK2L((1u24Ux>QD85zNwg;vWrPY- 
zoFLWJBJC?Xr~C+ZK{CjOWk8wW|49Z}F63x%edTKnr~Y>Y3&K-p`G~xw+Rfrb3 z<6ER=@|&0dDK-%Y=}xJ9of$B;rW`D6L~OY5Amj=lMVc|Tc_>a`q9XaD{1V!hl7#Dk zRQ?EvQ&{MMvx_{`b^k5DBU3i=AVlX+WCUh9T>jXwNHHwm zJs@NP#SGye|Hf(@ono^d1Qu*W1>=l}?(d2Ym4*L5Fp0cIf`A*~mVRcVX2 z=lps+85#LJ3K}%weFzREW4|<+984BWg`(0G5qQY1 z&JGQvq=t#%Z6|W72nNG7|3@OsM&TCP;(u9rc{!p$Ti1E{B?~^2FAa|f(A8lH6loy* z6@`VgK_EDgMo~hI*dUcZ-_}{#n&Sg+z89u}JtfcjkAw&EbsF+z3Oy7Z5GG*72c`0j zWQVQ}lQcOwEQ=yfCv9dQCXEQugLv_qoZIh`2PsNmpU6f?ZN>3~oT+qz=#wuM;spIJ zNUp7G^0qB)QuzzQad!;!+d^3p8{qY%1n-1VfD>W^dDD}a5#NPqO9!@;Sqz;zh93kd zER=5+#LtcjnnY_MjismnV*&k?u|S+VD4G*}!~*OZVgVLGGwx`=-2doja;EZ^KhG+_ z;O~wILkSat!83{9Jxc~onUmOYGeO4pdanJL58q8^P$a=fh!7E6hhWuoV2}o|BXE7R zh0z>-0P0I&#DyylBQ%gy5!a0PKwcUuyNfK=Xrh;jY$i+)5Tak&DLdxYCTG7P6j zT7}3;2lC4ugS`j_E&Tc|BKd{r$V?34$ej=xz9=XlNQhI!KM)jfYZQ6txp|GGIQ*bU z;xTqQrqX>~AC?Fz$wh@C7dZ4sq1f=Qm zAY7^I&kzsUCiWLWfwH1hQvU^q1_T9n))NOl6BPCtK>_*=`YHWJ%rSj6DL(}W==Rpr~V<8OrQAD~rj0T~flzvJP=FCg<;ZY_>!&=cEq;D%!-e92fn;IuQ0?x=kD?Mj8VpNFrYn2~x@=r1E$ESwTVU1*A&nhh0W!;9A1!y;~nY z;?f9V0%C->5t7&BotkS05Ks)dYm>X-k8pou#01>okV8G|tU%X?q%33z8sCp#K<6Rt z7?x=L)yo^F(P-lyV(9Qd2nsx@yBzq*GYVbjKn0BJ911p4uOgdgX~p$PI#iHVAXJF3 z+{?8A>o4=hOve<65oKnnSRwop-ARe$2|RIALI-x-U!8_)73l!N15BAtC~)s|d?2qU zmIJZiTBJihOnh9gaqUmmxj*i^L4xN>@j$2`TM_3{(^lw039~?;fls8=X(*1K6(Ycm z!sk}-0t1zQ$hFg|eAIg5_NNwWuJ^!ZD54^2U<+t=Sjjds4ODR#o=I1N2_ao5vT=Ke znnuz!0ydxK)^M)}WyDF5PCkOy$6Wi1CVyf***v+P(lp3z=^Y*3wMa@ygU=Ww7K^C zt0<1(lt*zyL|GUa=_a;}QsVF!_h4^nWEyUvD&00jy@FNUYsLMB@H|53+2BW4F^Ey5 z5EQDUavR-maa?&QvbgeaeVX!;g7(?2QUnFa$Rm?9e@M#5i`F}8J` zSam0z(n(gxCxlJHm+hdF^LA2W*AA`~SqGmx^U4Urrz;jZ43psxT@8YrxQ;elzHuE* z@)^o(;ZLOUF+#kbxrT=Qd2v_jpk?9Ay`3yWh8-%d9h&9FOt8`{H``!enxzWYstV6m zEcX{kTWDHy9PqCIq^J_ME0(*9l<*vfX1RNe&hbt-zI}gmCtfg#JmscuXv_HGk}}@v zDdYF*W&CZt$F+>!*h*u0(doGb_SIW zdM{wMWnE6H$MC*1244eP?3*mHBevWvfHUyjuOuq;CzcwOqE7v1TU7g|3l6#mpqVLPf|?}WeJC{lCG<94o7<+GVwqmPCvcv^J+|`Nip;@2ue3+6?e@C%l-0)P@&bp>OC5M69fj?b zYLV=LTzm&F+Dh;?SJeJkjK_e7?Eaxo^%7%Pz%0LR 
zmd~2yD`xqoSq_+G=2Byv(=4wx%L=nxZkB7z@+q_Yg;}O1yU(m2Gt0bXM$6=?R1c$^ zYT_rDWvX?)S+6!rzga$FmRFnQcC-A0Szc$BKQqe|y$N4pYhaKpRpQ-8Q7=BLO!fa~ zmBlp%pB}SJ(S@px_|Vs)^t~#5BS~Kc(l>SVRUCc2MPD`1S3vYt4Sl^r-+9pY67&rL zUGdW`KHZ_yT{vAa(_J#%z|!3--ILNKCEX0tJsw@X(WMt%T+#g!-2%~74_%?qbqQTG z&>aI^2+&!aPS$i*rNbnh>gcparzbk)&{2fW3Un%9rFyl(=5+!7jJTZ2KoOVQDn|{=urDMhCuj`*R!#Smu zUhx}Zy#6nKgX~_}Bxq~KPmohMcX2Vh3b;qnXyEY16@F_(FZfMvpx+R?K>_u7uwCj? zTwJ?+1%9WDI=yc)xpY2$x>Kr9AJ0e}evN{cqekG$M8ArWIMHv>2wa8eS3MFZ`qhuX zRf>Ksz&XInm`kPTw+1+RBR=)JYSLBGS)=a0=bn4&Zo@KI^d@s1{y?p}OcC_DN8oN1 z^m>NlDvf@9BXCB)BfvR{?<(_$+8CXnx%rGb{SF=CP04mDKvuGtv7$5%Z^5GvaKrXs z8sR9W^wAoOeYuKotQ!0*z?tlvHCPW`pq_kNYpZB)F@EQs`aL*XBZfc@_}#a=cnbBY z2JUf`CQi(W@PF+CmTYvPjTHt)vBJ7+RyZw-6;2&}+{3YIfq>&c3W`BmJ9ceyIiIM$ZIY||z)&&7;AiKlK6a2PYP z4rMyK4~=_g5<7@7gho4ao>my`UISR6}(cV>$B{{y6&8&ee=5RB1 zE1niXD-(0a4|qOpSc59Fn8T#=AJf*xh-E|MFY^Qo^zuP>>I7Y z3Au~CF2-W;p#+1Dyt_X^T~FX6*1$WNs{ol6OU|3hOK?tJ8d zYk)gF40jB;9|7lBk{Zca9`e#Ca9{NK?|gcsWcFvy4>Xq6v;=~FUuiRbAcy~DA=G@&TIutzY-oa3q)_Y;sjStk zH2bgf)zDlk{JvHUWAiiT1%3Y26+Zv+X8+2Xrdq5bh-LWM*w0cgt7GGaVJqwXzM489 z*-64KSlt@%t*mTV=~G&<03ShYoJDPDt+@>~(b?N@o4>}ty0oUTv6VGTWFZYTjScts z7I>l6I>@nh%vSZ?7F@By=TFV@dTssAE28`=P$gWvaDR2GOgI4c>$}%oSJJX z16c0&!K7|vRd>VKe17!5ih*830Ag9mzM6)seC%3Q)#|ISS}r!Mil$e@VB}598&*Kd zRZTU1fAif{HU1TM;oW#+q_N;sldtKn27hzYO4aWXkCJ*0na3i^|3oq-Uez7=u-#yc6KF31fW;N(8_7( z-HLv-DCeG`c<#H|D8GYz_8s7# zLg_>f@-Lo}&;%!H|Y z3lGin2+BK(47d?x=7o$M1D?tWc&HDR^Gvu7<!54*+4 z$?iv<=>R-D-=y61X&3eucq&ig@dBoD6mqv}z{B%N%0J)5vj%u7-^cSfU@9}lfe-aZ zITlY8Fy(=p@pJ=@qNJa7_W-6m^9ej8|EEw6;yF!YHO9`xvk3FYotijzRRgB;7oC%w z=uah`p$Mju&OZcGNoSSk0aMwHCl6!)`}5C|fJL0|i?ID8DN+uP{rVg{{zo8qP~&X= z&5?sdZRWMBRyI2C^7&hlw#^zpWpeR&r?08DxvrsU#jNoQE3cbWGTzx5Kz>)#h_F3t z{Ayq8_-n7p&zVWN#md_nS35zVsdd))pug$r*4ldC%9_?mD;sM4&8^ML1Cwf-S6*Gy zx^nVeQ^q@yo;57T5#grc+=$c_DO#v}&K&=(Q5DAjmFIY-=o#lT6P_u4X5BOEpV6PI Yey;vG+Xlymq7BXs|JL&P7hnAU0gv)RyZ`_I literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Hash/_SHA256.pyd 
b/env/Lib/site-packages/Crypto/Hash/_SHA256.pyd new file mode 100644 index 0000000000000000000000000000000000000000..cbd086100477265349058ae29d9b5b214fc4479b GIT binary patch literal 21504 zcmeHvdwf*Y_3xgXOfux*44T+zu@25q;zclU&<24zhnyoPIMDz>Q65Q51`-WPoXjYw z{h4%#GH$1(UVE$6{(ipt__el|ww9u`Hj@dN@RAVTK7dv~b|R<&e&wOnx!<+VnM{J( z_S4&Y|7d^1=VY(F_TFplz4m(TJ!h!6bv=_9W0`o8Nyavzq>o4Z{VRVgjGcGsOXsl{ z#_Sls$>QBHzN(?I&DrX2S?RA`<*ci1Zf*%UZ}U0*!DeS;vs1lsp>tJBy>Ci>evaED zeO=FrNXGYVhf+`On@=3dLjC1EV-7Qv7rr{?utl_wIph}j{KI2JnRhrxl)0k)yZhN; z3H9&px0&VXLql05|k2@{_TR)em_pj7ecx{0}!u>{5>P}bAncE-+7*~(ZC_zZv6 zGe)xOU%=S7VdSTQA+(o@iJEPd;4`v*&X6eq-|7HxeOKWRO_|nit^<(xI2o&%;;*j_ z)PlAhD9{vruMy3|e>_m1DMmL|oDFmh9$LdZJR?3H#=54o8Xd(NThT~7y(2mS^bkFN zo4*dQkV!iLqDTBkd_1C`uc-wcNk(iv9_j~KjQn^Qn=`WhuMBS&pA{?Ym#!6J&}Zf! zzmc&M!SnTK)Uo1pG?_dR%s3I#qgxz<-+`w=L+g-cRKq=}kpvm;KvOQ7ytCJ-pOy)58I`Ll58Qb}Bc4 zfm7!vy!^PlCGkFptnIR|?Zd!C(8}0D2iB=>kLTBx(Cm*F0cr2tjK6W~d#KP95?$!D zwkvRBV&@ptK7>Xk#sj5C3YUAR$iERq!jZ>VXIJp@#4@A16YW!s_DiY#-CQF27>JSz zd8Q>F+LjoAHhb14gV!dCY@oys=%G>;aO=D;aVeU>w8+bQU!no!{8gPxUY`HbwTvmr zT}kWDAy*-zqZx9S&X>ANbRKY*>ij;p4p}&r<*^sZS`ypwcRk6O%EVtf? 
z@ts}w=Np4P7*bLlOZJ7Wi_wr+f@KQHI774Fslud#UrDrJWr-YGzXg&AO?VY}hGpb} zfjr=OXlG&qpi>9F_=Pc)E_-i^p4kHO#6Hh=0ULNPWbyARF zCBXhy4}KF<$9T^|!vGG-TatzM<zImMpv-K(a6(s(mkw$z)_#&Lw(sLIdH#Uf59R zSm}#jfXotw*l#R8`$ba49hXDr->Lxm$EfLCh0=wsTd`w$gy2vYtj~HY)w%VTRD`W= zbf=ykz{Nntt)IXv6W6%)3?SOdxHSX42-*Z|VEP?hWLzJjRgY&{h;tgx^S^sN@L_MJ zGljJ1JpXGcBnFmbK1P>3fw!;>GM1ZZX~7|=t$B5QT;_M86Qx0p(3YQ>2@^=@z>H6peee3x*yef zRK?COi(!hzq*M+)H2as}&4O9ULJmf0PrMy+eGy?2T5=B>;8P;`|B3n_gL&%_>rVm4 zhD*>({1H>pLpFEf9RtV!ui}{S`H3IG6+_VDeLByaP(oYIb=y&^KC>@SAa@Pv_Mhhm zKrs0JG{fg_upB*2_CR#+FveylHo_kry(qB|b-2y{WuP;sbsf!4d=r%o7Qv9l!1>RH zcES07+n|@CbwB#8?F!PM^AmoflAUM-aP-)S@rj#(Ni4u06G`LAr|RJ*iZuauk!hf3 z$(g}M2NOMTg77RChCZ?c#|qb>^Bp>mCks!^p!HU{7jv(>(zM(ra^C?Mi7;OpE7p3y zAQEsu*a7%vTF~I{fJx0h$FznyH(Rw9${4KvB3$v@Sb=FJC z(cc9(nD`pHnGz^Sv>KH$ke3hO(RUvLS-1wnhQPtuKKpIhFyjY<7>lLTtoKnpThs$nAZ2-ly~TPb2_V#$JZZn*`aN`jF8%-oRjEYv zf1~KNuEAf6kT|CBB8VUc+)X!fZ%&471Gv@8I+R; zt2gDsgRGdL6a}lile+D{RtogU{32cI2biTt^4h(Tysdh;z+FCbSD=W`%k$2R`g8ak zn-ulV?DE4Ib{8K#EDz`iP^2jZp?loyYXs#$FSAf)2sJH=8S;MUU?L9D$^(!~?%LOb z+4_TpKOiqCL}~PCpZx?d?4g5!%(Vl-yu>LCKs@~bMQXvMV-f3Sbc=;1e4l#9gw8F} zBhR}%A7P0d$0$MJ2Mtes2gZV9Cf~N>@)rTK#AX;CrX8smQa1Hd@V*sjswN9>Kvu5v z@6ka)k9_+88W9@c43mYks0DOprJP{oh-ej^(vA?e9!XFGxk92zfRo~kBuIDmQ6rpX zr5^4iHZ&1|R5m?ETt%$tTu1YaQOxDW2*mOlMEJ@=XfMhtQrTpo9W;>$P?P~Z3MLV4 z$o?nwAF|RrdPro;pwPoPkyDRcl1+gx zGMk||dOor(5&&GFPD=^JZ2HW@{z1c1$v%yJ5EhTIuaoPJ%-(}y7-|mqnv{;?bA8Q0l@IVCd5aP$vuL?}_Ggri6OGNj@^GeG z%iRh(nfxpyZwpts^MaLnDF1(|5ZHvFYm@fbU0D_uD4t!DmB9kzXSY&uv30}QKO?7d z-tE?3v>@b?e0BS?(Y6AWTPqPnl~A8W4O=yUN3(&O1z;0=JX0h3M>B`+`!dZH+*M9l zOJ<6<{iV0YwB=~IQH&jfA#rQ6%n0dfIRDQ8?JxbYz#jS*5en!35s2TwowD#HM~t>F zwyi;racylNZ|T>ge{kx#{W@0Vq~yVQS?rov@MdoP1$u}2Y;o(;03;${{SQFOshBfR z8tQY#t?N+57Ng+-K%3C3Uv(W&XC4R^2}Iq-63Y975&8hgT{Ene=!DQg=;2Rt!W@|-y;l5vK>S@N{=O^zJ}>_MO#Iy-{{BMz-6Q^P z6n{72?*>Z@JYw5Di;xdjDT|bw^+=O@9gKT5_6B*>Pv)SIkvwY%7QZ@xrB!f*nKNGg z2QMGgJGa=^{#HVl&H?+{7XZ+iXzfn~fTm}tDg30nRS^4LKZfh@)y0uAo6b|{ib`GH 
zo|t!nd`b_oRjQR`%F^oPNDXG**=p}xKvZV>0qKA!4WcA_XX2AD8?{IUjv)IJAE2ru z;MpI@qyfoo-tY_V2I34=aVmC)h6R1%jRZ>oS2*xzQH_19wt-rl_OTURgf0Q5pD-R^ zju562n4uGdY6Xe`$a9|njwq^bhkYzkGl1|v_Ob8{0t+heJpv0la4G^@xx)T$1Ju$g zT53R~od~OLP+sI9qECUv`tTx0=tGbH5@k~@%|2524TXyQn^7bSk57fyUF5(qA#nTf zdG|#RM9aG;Z7t{f^qD*DWjpyn_{Dx*>Qi|vv1fu9q(Lj=;*1wg^6IQ&!b2d zUMHryo~9bK<20H+pMUmz$zK~2c8LkEPfvI~*aRn$OD{?6goSbK(cu}d4{MJaGtLQV zk6G+%b4hHN_2a{{reO_O6UEU}_O(BQv;c-a@z}rq4B*IhN#1uuB(oHry+o(Td*<$Z zf)Rr4p?35DrSi~`F1ZVi$hl(u$vO6x%+gS|=cK*GYb{w}|DX@3`}0bbh%#g}eyW8> zKc^w0IFwM-^VzMuyBul2H+i={v$yTy)cuBiBe*8J=al4?iWdL-dZ@|mwAecfa1Qix zL-BEr{q~=!ih~2~9ZBIf$Yzj^C!27#VuL}n3uiOf258v7z_0^4j5<(ecUa^u``8S- zqa3dYf!H_0YocQdR}WR<;;euBmQ+9gL5PY zGmnm`mf#m73Jz5d8+EvbE8iEcfd<69GnLlBC1UO)yX1}Nf*Fg+UJlPK zW_vl-Ev5?$=85?NgNb6oz|@Et1B2OO$|NPH{qnW$cCf?zA9k+?keGx&&mJVSAm1hjoArX)nCU#GoFIV_06Or&3vNoPB8r$mQcW3zHDHa5xv~{9Q<01gw`yY!tA52_Uql z#^b+u*wwm3E|z}``Bbv-fh$P9_u=S`Lsludv06<&MGNB#YlI7%v{j$^s=e%0e%287 zR+YzfzBf_l#75p*qe~y?lTL^3t?>jeqa7&3K6aGVlXva7R}^Zvtp<)3dmR`L2EiZD z!ib}Br?wPJmWWvyM>{dG zbWSSnP8W$>4&qRJ2#lpB{=*W*@)A=Q5o;$zqfCk@1*445&eTLw;#yMQh>|8 zvD_W~y-m~=`$pA0A-9iGI^ab0%ej4`oCt&oUJgobAaSZlBmYMloRkJ9qieC~dPN#J zB@JGg2B)Iy6{2fN8aXWu%4zT_bVW9S2~1BTuTFzsNrN-cb-L(Unnqre24@Nook~Tg zS*QX~-Lu77?{hCsA-v zronP_RYg}ljhvSTuTO&&=&FmZ-Zb)tGowZ|D(E5pq+(SBZ9E?opF>RN-4p-h<%1OX1s8 zsax>&ysb!QG#Y=OdG)eup}o!+J4$XBGeK=wX&rqa>%E%hsRDsZ_rmG9S3 z=4tZ&Xct6&BDyu#byDsJiD+LIm75h;uiU3ZGRJZ_8mX_N`>`t@nj>Ka@n}~Lwosux z!-nN+uAS&8AD2#0TVDotM(#U3ARSj+dsWvyEIGO*SEIGZzzbbbf(Oc8iF}kRCier} zCCGO3S3y#d4#<5pl?=IWZp7_SrQZwjc4dPUMs{r8`BB%e1Rcm7h|~QbRjj#sL0sW` zT(RiZd{yq!_#Rcl9`t~=CT+&Lq%){#{1`}U@(0i)*ZV;5or)_C(yE-GO2lUJm!jPt{8|bd@sa%h4uTd|7sBIBrIsNSVL6q*Q8#JSZx;a*eUme zIQXbAs!YYTStSV@jQZt1O%N8s%TlEQaB{_fa>c;}97;64!@8vVrWu;NRqE4R+r^T) z#gbx*G^FqWukGWq_P}AUx2; z8UuX(zC9h#U-$5W_#9rU4-ssxH-SBYlR=088vNc@NlmtdIRT`v56M z4;Tw6VlCUD;AN3HP<1HyeFiWp^+)5AZAmggV6Y{ zY++9@X3LV$`6SrgJO=B+R6r(%dO+A;#@xsi4wYxRX*Gkc 
z{h;?s=Tx{5Lw&FT!cv7d=mRYZU~mYa`zCyV{06+3;(A@^_+IRqq55zM<|dKeQ)ft| zM?L`6hDA%S38E@oQ=S@mF!r^)MTX8}hlInR$mfEafQ&wd%;k?j!S#`}i&hPf(+%14 z-GZ5+cyI|PQ+%*yvTSgGi9;%pj=-{4{G=G9g7Rr>kLyoZMJ2ogi2&vW<=6*%iKpZQ zw-5D$xG`^XS!DTG1Q|T(Hi;hACijtQ!^!Lh-P15u1;Rre>m(mkr9~e2-WualHduff3MVk%xn=z!*&5iExpt zxi&*+9h)DR`^pDA4kG|>*a4K6Sl0-zH*7GPglUk$2+6}bU>c@f$Okc9=>V(({tG?^ zh5*fnCGb~C2u7r!g{SPIp4JYp9W!hZ@nDo?Kn5b(z$~Oep@d`#ASCy7#CpFMwF9RK zdcvW>Rbsjg9r5i?=ebE`_d>Q1xL`3H2UICGA`ZL>(aCSXZVba9b2&gRhqjAkCB23v zK{Zbs63&7jgIIQhPm>{CgFbB^>;Yj$c+MlVeTcdr%l-04Fc@?5w##P(ZPP3?-UG3d zm@rqe3F$-B!Av-0uxJIvj^36l1}CdhuhP-`tx_bYfMFv_NBoWd+^`fv;`?YD$l!WuyAYleSP!L*r%1>dFd0&`9fH%j7Gjvj z2#>G1dcb70V1jMk2L>c7ie3isIEay$4MTtdy|(V`U9cYPoU0cG1oJ^^CPan}!|X|O zAtBfXgeF!4+aQVU2c!FhjE&tOhfjPcY`{i{4@3^deg~_7T~Lnk4jA*{90TUDQBaIXjxWCUh9T=D3LNHHSc z-6v!M%U$+$^Hf(@ono^cMQpID??3I^ajw1lP z`X>kwB8!4@Q{qmRK)NHM1Gt*&5PL>}?r(|?6;BG?|IFaonxc6nZE+AWXoD_e&KU z$PQgSCTVhVSQbT|UfRq9m^30pKjOu2a&Es%K21>q`$RTEYAcQ>X#M4x=A5GUxz zA-RtDlr7yIQpF3xarcby+d^3p8{qY%1n-4WfD>W^dDD}a5#NDmO9!@;Sqz^#Mjiwx zEL3a~#LtZinnY_MjismnV*&k?u|S;LD4G*}!~*OZVgVLGGw$iU!vDypa;A!xKg%k> z;BSry!wD0D!PAM~JwpagnUmOYGeO4pdak{g58p{=P$a=fh!7E6hhWuoV33BeBXE7R zg8W0rRSgR8ZJs1O@0f=%@4>F~{`Pr2G^hpzoi3 zW*{d;TT12?d9d@U^Ztv%1VtU=%usQFjtm_=txw(VFzn#392wx5X(_auB(c9SD7^81 zr~V|$Y}zO@6bPjk2L%{`RDldvxPHpBI${?_pSwsLFp%u;AqUg3+1B)xFVW$EPSl5B z8RGOz84|XKP6xssOt*;x#b{%o1WDveB0)--gjDhNKPo7Qy?|8d{IJ6a4O~lDy?5&q zM_n2rOhAnAHbU~6yj^o`0|JUccWrVX{1NVNjF^Bs9CE1VoE7N$kd%cCLF0Q74Cp+h z9m5i>zj}GYbQ*2kLku4t2tk1-b(aG_c~+t89H@YCokPJ!8c<~OEUmacPKOGz3WN#~ zmIt^tWc_8{xEYuNF`~>Y6)S{aqB|*(Jb@=}O6b6j`>Qi>ts)&jcz`L>2?g$*jt}JZ z#Bv}OT#NL`hl!8tAlLrOI`78^Zjj))QalhU$X3L;)U*}4P{J$_Xy6kmbsCPN=Y$Aw zqwu*Iyud)^A8_rADj&7pxb?{;n(JM#8IGuk8rTAw9agf%OaoQig=f;0U_wY2ifr5- zqNb5_jeyOkyEWYFK^bvUq?3=}^%2+ptjQl+Pqs{Hpfn9~8$LWDcHnx75X5b(;HHKP z+!TH2-W1n}5TS7^CGI>z?|=Q90TuVUnc_;Un{EaTFGl$@GsF%Zq@%=6czI|DdHMa~ z#6nlOLbr10-jJ^UZ6X0Bk>a`#doB&B(s2@bHr@N?8bJvv=%T9-^1?kLJFm!uFKwy& 
z{%VRNIOS0s5m6RKM!JbDqm(#2&V72IEHWLpP*rXlqF%x3zO~|hLwFt`^lb2>s~E&6 zQV0rFQiYA~w>Yjm6j@w(xIRgFNkRKuS1E!5y)`2lJL$B88*4bQmVXAHEs{J8>OtxP0R} zn&dN_*}|Vl72||>KXnZa`}5+i)IrO_nR_c)h73DYT-!9ujhSGjS#Gw$zBEfUu2mJD ztyu0akh*DFa~<%n0HiJ@Y*#FI7b)Sn4$X4+IGy92aD4mz$acJ75_!^1-_VxxC8g!O z%~Q_r)yw%?c#ms2y~#x{Jf8Eh%&(s@zrHOmq~IloEb+Z|jNU^_eb&vB-~{k$z?=*! z9rRwn9Lu_#RF9E;X$-yww%9jXqDO4G-GDRj-LE7n^e37cm7-4l=U7zx#ta4RYHk;* zBS6W-s$skxXME5>S_ckuGS(>;o->*s@N#RD8Bibj>?}9vw z=&fqJ8SzkshrZumHn?dwUIH3{b|!uR%zDvCZwZ?Hjn3Hvsm^#03~yIhI9@BoH%jy+ zyEl^mWznuNZ~pi}~V03|U(-qKNux4F9Pk4AYEc*ySW`qV5nh6T*> z8)o^8S-xVHZ>vy+vO&(N{q9RSkW;Lf?7N_Y(9C z0bTLaEk50$(_J`SG1FZ#-N4e_E8UaQB_-Vq(mftsz0su?U0l)q65RsPRS#XE&~*u2 zG|(LbT?o)woKDtsR;9xvo$BbcMyDq_<xy{Ooo8qh>g_ ztja5XLyXt|#cz<^tC|IE&G-p&3g<2-nhbVjpzlxDUI|SVmBzDJ|DJA zeM(B|R;%6od%GV!n%smIJ^aqKERFG zgXx5$m~w#DVC>7)gkv?}X93P+=d8tg@B;Oe+uGVhd#mv~_tfvfS-ThlIpBBS?vkn0 zrv|vkP?|U~C&K@w4_LCXg*H|g7{dzdvsvNvELJ#e?2%LBjNkRXj18GI7PJObg4LgI`r%oW(AlZf6(!FFaDvdO;1*r+F_z- zj!OP?ShnR0jOOJfc}tg6O9RyNwZn(-vZx3ifwki{Hs z#%{&aDrjY5?)U-Er|_`15&cp+l-lo$rsXqt;7aDMpTgYJCo{L@%dBV!dewh^mjnAo zYj8sDVy{cE7TGyoDdS6?> z-;%C2t^^|0*oeU?xb?O>>Q_vwZdg@YS6$oI)|l=Ur2k&Rhgp!HN_~2l5d!~*#t7Vn z$OGGfJ2L`z47eWw=UAE=$ygrp(k|dW@AKdJ^k~WK&0G*@DywY`1pU6U7W_aC|I5f5 zg!8tv+!1WOF4$ZbXl!ZLngjkdjJ-uIH#PbL!P=%p&3898*F)OYMa>Ph&Gk*bdTn)` zua&xA*Vq*B`SCvvSQq*NpWE!rnClCa1^s?sbD+}SQs+ac`FCrT&%dg%8CsD-v4^Gd zHnY;=zs^@nbFK9I+AxeQ$ebVa`PWqX{3}}gt7@C;u!w#n^3A=DjTfn!fs&SQ1X~P111hEMgwXv=CHq=CCZ{uzLTK}4|+NP#9)*_LGG}bmX z-s4;7g;whz$2u`v)puKPPA+5H;m2aNB^rC=rsl)mR0O4X}H?Qu4UD2zJ}@*V#BIwdPNLI-n^o5 zC8S*4T-@iap6>>VG&$#R7^0h-U0cr@7QKMoIXQp9a>(CPQ=TSdjTy6Z5fV z=EM(jo%kYcp?@1-KSnm=AJuE|e;=)4ji@bU z3jXV(R;B_Mz_S$8TEL|iJeIO5^j?M5rFa_gOvYFryA$PYpw@^!^?+|@S7Af|dQ}3g zoQ~dY=vRlb0d(5%mu9Ae(=Y5BN!x`GvS@1MEQgGM*!V zsXUB_c;#Kl*hD-87o)5-;d+#hnJ|?W^|fo-V+Y2X4aC2e=C*{j9qmFy)z#<01J!iSjg_Gc;CX?0h_nF@M~tiE~#CU^;)% zImwCsRMHuWU@GbSLok(eR(T#Um3?^fF!sMc|0oGq#QDAm+drBjiU>5`|{J-z;E b{n?sl8=kd2=XkE@Ip=f#((?HyU;O_8Z~a4J literal 0 HcmV?d00001 diff --git 
a/env/Lib/site-packages/Crypto/Hash/_SHA384.pyd b/env/Lib/site-packages/Crypto/Hash/_SHA384.pyd new file mode 100644 index 0000000000000000000000000000000000000000..e54bf3882e1d2987ffaf812963f0a5d57b5c5d50 GIT binary patch literal 26624 zcmeHweSB2Kx&PU0Hdz8;7fd85>f$aEi(p`}4I;W55;=#Sl?Xvmkt8G=h~~}Bu7K5R zaEZ-&+*`c0Ev>dcY_-yt>%}*^prsoEB;XrRMbs*(dfSbl2I!SHb?^6i&YA26=*#U- z`^UXqJ}2|cGjrydndfz8&N=vQTrHU;Ny@~POiEG=Dg8YB_pkghNz(8OHVv1a8S>KD zm`QzUY{k<0R!2*qc}bwU(NR;~)YKewEcQDBp(aOtlcRLbJV#@5t$$K(ZjQ@n`s%H9 z(TvBe?}%&Y+Yh~yh5Ylc4%sguJ^SS$`%PRv&4_=q{o00#) z>sBK@`i`E@;r!hCnx#bJ)De}JNmA{-SyIa{$5x55e(6HT(2U_@BpH|sb!^31NbL-_ zVc0sHAxT+`N~Ka92%1d%GNdY0)o&8X>3JQ~cB>>MaL>{P^oMRvDL$!1ugGJ>Sea&3S|xxUYA?_l zr~%A#vK9cfhcTxAJY3J;(2R;MD!^QEy*kRIejZ6GN#*`;Lof2LuFiJBDxL^=dhVfS zNje-FDR*|->yCCNlZQhYhr8s?9rnKO!BU{U96ngsz9%>>{DCQ0NPo@_pD+b;VkBO2 zcxz!-;$NT^2P}y>NNgM8j8n8-l=j(B$=4pYeW#PN1-MC$EOc4rNYG`MBX_zS-s?cX zA!~QJfs&*7i^?gCxt=#sUb#04k<(E?T5zKL#7w3lVgtZKQN=17t^ zxjSij67$M)bRfeWm$ikiB3TQ%ie>FimyEe^co%hTByCB&h`+08o@rjL$7B?F?mMno zZo~cU@w;>Nn?1OtsIn{B6R|8nLEep7950az^NDiDrx}kwfD*VJ5=kUM3C4 zFouGFdynV7R}$j@eX{S|x5Dq461SnY)^*^=dM9R4Csw~GYg^^W0*4y$6hz9wAK7}{ z^~@lTqV;Phz@qj&A=|Y4SDPg1?re9w?<0_imL$W+Ov}n-%`yg~mod#b-KDM~;?9A* zWd7sVfYQDQFc5@QPXUWr(_Y7vCDwkHOs0yymtK?$>C=nCNmJ;nXmkp0bMK+s4q?jW z_D>*HiS3^!lOX&P&;a4!a8W0jUj$TpGH3}W%^}YK#Yt%tgBRn@di~DOkmv#vI8byT zPt;B3f9Ml^IFRLji%PXFV&!sh!{c34xu{F$u-DPgj69+&hx_-+*zIwHK24w*W}y2RYlbX-K9-dgZuDPL5kj zxQGfJ3Lh-qI0|!?$OpeM`0R}&iZ5LZnSZYl=$|7eYo!pnh-GIJ;3&c2IJD1lBjq*A z)09LkE>x$Q9>9e_bz44fkR+noZ8-*rSlMmKKrMn|&<07qHBQQP0;O_yrim!0@LKK< z;Nj5AOou?4WG(kDfy9l+F^mL)kR--cO!MfOcO6ZjN7mj*=Fj^&Zr7eB`|nPcK?kB0 zt}4u7GT%!j80<7TT(P3mSiJ%8&O?{Qd+$)4M`JIqWY06<93|P zKvHD+=q_YuQx=@RxeHw^B%!j);TcbZwiL=r=4&97vcxZ9UN^SjhUBA<1odT=&^oT%N$okEZ1b~0gF7n zCz$7s_sg~?bAxCw^wAXE=C3y$I7<3J?cS>2o1NGIdvsuQVjl7^o8Q&ZnN#8ivJ>Az zX1$3C(mgQ#Gay|s{@>Bt6K&m%daL3gx@lx0pl7lZ^#BeWOuIjEJur#6_+udHezK`@ zq=9@*&{be4=y5V;(9yocRv1B8mb1erO`#ENI%Mr7S?f;bA696*6|M!E>MA!hw}H%e 
z5K6-8OZW0v@8(8=c1(5d`dj{?y3-?qVWDOvm- z&yj}NO1nBN7m%U99cD0b7pfUEke6uDGea<6et-u)xDU<3H0U}63XZlJ??Z`r${JEDYE~8WV>Yr{+f8k(S?m@1U?vlSPtMGJ7c~ZC8=(jWhF!{v|O&qeFgxW z?`@Qrcdnd9F?pymHWU`5Zko3sRH07TV*8ahPmW$&Ae(mq%#x$S+SKTS+Zay+?+FB1y>$-{F$(A}9xPnT0fi#k8PHpYQwL zNOWUr-Fq-ELsz{O%9g*G|5LJpJe7K#wiyorBi;8#Fmu(O(6GcOxB>q3VRWLyC}wIeymawKz*1rx6c62w`ue@G zUF5uP1e&tR{A;cTr2Q~~YI5{D`%nni0ArZUzk*88&K1H4MtixGE2Rt}bUm7&0y2fv zCIJreGiGkOy@v{6EX(CcJCUJ|Fwz@aLsZ#Uw0BTHyU6Ei`W^7)Rq*iT`H)_u6(q9B zd>h(CAV6LQ?U6Hymi5~n#DtW(WFq<_aGI!jV7CB5`i5cVtx?&W_zk=9hh4icSMtiO zG-WqKpa)_iha5fcn?AaIMjMg?BN1)U1i%E!G?Z|cRi3^-(5GuE>8H*I)_B}|lYVc- zDfbThGxu(R02A$$*|59g?j5!tmuj75?zc*{7rZO`n}YrZ@5)27{7s=!ZI9OP-r%S^#n-R}4Nx{My zRaqHQaO{j0O3tyYKmI9VD#tRHywL=gOYWCM2v zfEaANR2r2}PVc|-BI+xsmueKXWD450P45k9%_$q&iF=3OmTpV3MtA9@k=#!K+BW?$ z&lY}w8j9rp5s2rq07uU2)yvLpU4a^7T3gY)X_wsju0tNWOU9@i6g)UC?7F-wbiHPI z3bn&M)^5uq0L;{Y<^KWle$wR#7KeKr-Ifky!D1BL3n+$KyGot=N~iA&6>v+g-Ya@T zx#8R;DD_0nyBP^_wvdA1AR_7TR(mDcJ9d2l-0^9aa#VurAoRd5Ij&?T&Gah%doTa( z;J+X6-(T|IC;0Dr{`(aFeU<-i;J-2aU2p0Fjjk6x^AQiv_g?3{p7c9LLc8*y%$i|Y zw-}vtBYtjofM2rq>^dYq?KtZkqC!>s0P(ZjzQeXkBeiSaV_S6(09qccTEPH{TBs=U zu*<`Z{%|cd1+zUNI@2m^0v%-v?iUkR9Ht&@r5<=Iy^FjzS1v-JF#XAWm|I)>In>tl zr}RuNG87t7;QdNsWU-!$M*C3}$bmZB=m?FZI1LGk~6)@$5X;Guk5^WSz$xZC)x@@F{%2K`0D2VRv#&`HD?xww|I`&n(w#JeRZ~v zdw{)rSg|+i?bplCRH$|S`Ld|j?hSW({%i$pi-M3QtZk8}Z*LuIyTBuf{TkZ_y%)(X zB}K!ExkBK83X|wi!Us*Z_JyA&ld7hxE>`C|5rCXJM>{Zieec3@#jt3U`W{CWwxWHH z0U9uz1^{anyc3oxxXflZ*+yj8?9j&%v!Rn-=v9dWd8pNF=p<=Ci3fQo)$DTQp;IMQ za`=Nbz%6pPugx6?rr%~CVjGbI9NH;CI||yFjdr|frv&XNXlFLs@uHoQ@-AWEMSoTv zEFk74q+K`byG;;CVdi^jgOcrfOl3 z6uqE3G)xJ-K<{9Tc$Bb4JW5z29wn?1k5WcYJuqQyZ6E*?4;H9?8Y+*|iW|_72My6M zWi&*?^q?VcISD>dAw+p%0@@%amm0+lY5hBuJ+x-#?U@$LNZ~?bMi89IQFZ~Ub{MN^ zb$S-I>zLaUp&;4=aS(|Pdkudj*`)PQGL4hgJUJXDD{2bm#Bz}KeM)m1`K4%?YBq(3>>mKNRB618fti6y(l8Nb# zJV$p3u+Pc3$Y$y6oY1!rVUpXijoRkfkf%+F9Rm3F9^zG7s@qRBi*IB5-hU)A8% z5yjEo+ObQiBRW?}5>-eOlxa^Nm>AyL;$5WgoamxRF*3G88e9(E#l^Z>);@s9ag_5% 
zbep4`cL1#17{}i>T!C{2T*})wphh(Jr%00dp39NJj&PBRrFt7AfKD2Mg zf!f%HKNEJ!(_gmDd>H{2%h}FSty|V!OBAF3CYCYT{INXY=t_p^j?iS1!WAUZD|zb4 zFUl*QBQ6kF8hJVnh&-jP1i`WBMM$Grpl<2gw+peZ(HitU+(liiB?tCXaOz=Ol~#6| zw(^>|Fqwb;G*)u#6ATJgQGZ}BknGtIvS+*;-PDVIVI@G*)K^T?M2nx$&=`aUK^rap z-8b=JVudXCYUNN#lMEo`lqziueBKdn;1Wv`3GogNME>i{I1QeZ|3G^fDK6lh6- z*=W>Gjb^2AX{`=PVRKU8&=fc<1?Hwe8(J-(R)?o>*aj6dJR$|+)Jb5^N`ZMPFh2#3 z!a9>%Jv)UfNP(kM;5jMq+!Qz_1)i4zFF>os)YtP?IIMjk+CA{&ru z-l3Ym)q>Vl^PHF(nbWUom2I+ieZr@8DV;r7eNsbbQM0>yRCk=)Phxrytep3MzHFAN^y#?J{P5q({pcNhx`hfB5L?zAkt6jP$vYY#HSNE1ulSs=;4YL5M_!>&=DK3Vxs|8&F!kDbL~$RIV;4SA z`1+bl?<>J(CYIi^xxxi9-MDWI27`0g^JVj4zUjB}Q5sH@JGxi_y9b)16i zQJgqELm#@8*z<2iRzCLDvwhqLj2nH}W(?LxeRz%fK$9#8qLc=ViQl3)w~+=2x+MOp z8M2CKir8z6S|(!M=p!|O4wNgg;%DU}H*L9Y8p!z|PG$_^ZN=R0bHCwpz6t5pX+qw6 z#?Xul(w#o-_XE_oDb7ySy^lFSqb81k`#$%3I%>Gj`JX)My+m{YalrXHiqH`yHhPz3 z!=Vv}Za~wJXC5_mg9w7F3NejFy>oyxWWEA%nHYK}WJ7iL>ih!nUMBu7%?WW$3|#?5 z5U*mgTa?&Cb4LHiqxW3(Z%nouWI+?!0+y+W_~0jq-A8=xKj~U9#^*eQstOKJnAgw@ zW9qx06{@>m=N0JpGyTKJGsgsd*~x=ns`E;T6|8K0^R-1|et|}PW}>fX+XsdXM()*z zy~7x`itxaWDn!<&eM)z^KleFLqIZgOyXuB`5RX9rm})+OE+Eg`6Z9459vXiO^~(wF zk(&F!P$){QB*XlZH9ybZCv+IJJI`kZ!VGk)kueClS*4kY*f{oS=q{_W4|)XkBi=!~ zNo_|$_n^(pxXw{R+g?MV&j};jGu#IaMgL;=KlT1Ko7*S5s9VdS1BQ;rHaEAFnQjV? 
zcom7A%0{rY!&_57W+;353(ng5XD>O|+ zk$S*ZMz?@6I^u>nu$57qpD{g%yl*udnOD&vR=4KJHx37H-W))aWOoMW!#b+wOcy(| zE$4IZz%9@rrV3Gqsgz07NIOTuF4B0-Cr(o}GDnbh-x|ZxXh(>S`=|n+5qt8xXHWn9 z$#>dF*mbG$nYa32DalZgroTj8W6CUHMn}nJ2_=JlV#=o}87sMOt1JVIs?&clgXougR2S+>*E8|)e6PoQ4Db*x=* zCl6j!|4Q)CL!I3IfOXkRtO8Y4`yAS!{)6({!0L`_{v)lGMcZ%s%v^&iaSm2e*8fWYTkR1A_iK3A*+!7^gb_;B)_;d23%j;YipSBK?ZNoq;T= z-GF@QY6*8utYd{_-G%)ipU>KY7We83J12UHU5P#W+Wp%0A9mWH+i*?rKBTaq2#rM> zKIiZB+jP2^WM7>4r7LS1U3QzyIP{2O2Ds{!=S*ocjcLGlk-Cr#7Uq*2mf44q5=)er z|KVRQd-0KBIQdICEmS5&X5BTM*4xw@_gjkdcWfJ>%jw>m4IXJRiv=c>R2Xxkt1#Fh zIJ*nZgE|xoWlXrC`I5egWP#u`Xs9qLP_2_>L7bnF_tPDwE+1oP)`d2mL60fEOV{JA z@PMf?>q?9c!o@%q!tejI>F2A0vw6&{O}hH>n1%Yv&~*@GU1zbNF{d<|0-e82kC`e2 z4snXF7h_anub%Y&`gcEDzmQZn%~$S&>XO?jx~c+eKX=RqAIx5T6I! z^r?D`xX_mndi?CX5#cvGj^Beat*m?Gr9;H(9K?$H5dUjT(B-VkUcU+!OE6c40_(mQ3eFtR zy)oiU!$3K4E(ra3fB46nyYIf?H!R~cN0?ln70f|nBpGQ|7{$FZqNDE_%r%r!w2&s6B_+4GsF1@=mi|K;hBlN za0hU{yktZKKz?GR61!-}MGK3^o;!(MqwvuvO8MjBGy7xm#S~f(_+t^wvp;6Kc*0X0-!4FGk~5F`@<6- z&YHAi%SzTJTDZaslL>Zz!of3ZlWyV>g9vS6#NXqOmkK?ee%U>F?Uw{-)&7P0V`k+E zV%O>ZxP?3eEqr##v0C#X^^wGHeoV7H;ECO@z)S1?n0K=DpqjXXB{9hi42mH}P+dmR zB*6W!2A|O%Kk&U{yIQob8%v~=DlZrQ*i=;V{MYNw`Lg_Rw2jVK{zopELaJSG%*-o& zh0I=GcxE!K>5F7?$z-OKSkdN+te;%lfXEPY0!fZ`fcQVqD@RuoJ;N~<%>8)p=)}Jv zsAk7Z+~dumzt1t#A{oO*z=px;vC?qbm%>O9otUF*c%l<=ex4LzZYJbiT?w9 zb960_9D%_$CH57^ZP&M7HvCUaoDB*F&sOZqaL>Z%!XZc3p%<8CB^KNL+?1BJx7eZF z;0Mu07hsabirI+qznB$sln&s9lkVBICw6w{qocnhCq3^M-fOK5U&BER)~{!B(os5) zPxsSs7Z|_r(U^t-?@X&_^3E_KyqAq#U0N{f<`&^So)djpiGBaTzwTF;$>TYK=4CpT z&$M*bwT_n0x(PBz<4l^uJ}`w$o6h&tUKp3KGE)WNwoY3*(`Fb+`5BhZQ9h+Ly!ATy zq2E9I{Q9YA_Ec{TSB-5i+VavPV14O~HFY{w*y4S z^sq70$jtD98J&Z7j`GPdcFQQ1MgXC!+u$Hv4^pt1j5*;AMfT-?hr^E2xwY`wFWl#S zZ}!jkUBe^)(tI}fYL502Q|WvGb{5_n`rZ0o&9XT=AHE61Dcq)jfx}&h8SQ%*3utVF zfh~p~M>#FvQx5!pvW$)6;rAKcHb45A z=D9z8>f@U~y60O#8a8`w-uym+RW_E%^ z@}5wn0qI!bQ? 
z>zb^@3eT@`o=9$94dNsi1J>++aVNzlf<^|q6y!#K(5W?uBdobN;&+oiUVK zKTO0SsuRa9=urky|!_dlv?bPJ5VV~;L`)28Ea@dr`A>M46E%;&7%T8U(%0FBYm|UEhnzC z+g7x?+r-eT3 zQBpUt*@~BE4POe)*Ehp|x~AvH+wQ8#LCa$C%}agurC59$8xCnZ-)iI%*FX@{2{Bd4 zI&R|wH!`jIrtK9tIix)`Ogud!L5{~9t)da@emwR>?(xl;Ypxmrk4{b;n0tEgJq(8b0DF^5f^u8gtfV zLvK45l*J01`m66cV7m0v77fcCJPtF=<#Qv%;*e2wo&-+OcIcd-CC8v+YvwP`5>Gj{ zrnNVA9-=Kg2YjFgv8MsUapU=uRd;1N|7+P>g>dRV=V9!P`?PJU=5m3TSd#NARzBMH z=XJPh#*ra*&cGe)d!5OtLwpbMjNmy9ggvC~-~xHL=Qr=j&)NK=D>x41%{5Y8b96aP zj&DSFt#mo2fbNq>L9>VrJ+q@zbMOd^&L}L6`M`Bxb%;k) z^paEn(Z-WNM|z+De)GmD>>VDxf}WvWoJ4>ZiklWlit}@F!gd|9bMab14M=0BO8Kp94AwBgz4g?6Du8Qy+3xA;PQF4n zxHNvI#ET~8#c(?ABS9avVAkyWN0&CB59`nea^-kJ*g;+7*p;lPPTOaiV%xAj693?n1@ALoqGG1~@dxZ~%BZh5N(e1T_Z7vt>xI8Ne)uZ0=G3nd3^t<})oT zRwBS@(e*S3cs_f|^LVx``ml>0pH;M*iWRNZqiEld741F!%`opmdZF@^pKN-7SDn*1 zX|>DYm9+3Pg|hg;<}P~YE&bOqJDZ=+PgpxjvUJGaQDV`df{v0*9ddM(n02VIqa;Iz zitto_aYu=%BPXrNQ`@&~pjS9{m)bU%l+IqOIy4SUMhY~iz)af)#X?zSXdLg@Pr*Mb&6Dd>Gb_dV$Y96bvrabc~p19d9*Yv2)-AYZ@Y%R|& zYcuV5bsvfKlTwq}7948ZfHd6OubS~pCCKc!sqYYdm|$Eh-ug%l$AD7L-zR%dD+D5~ zeB359;F(Wvq2o<}`+OdH3jF52jxBh%C=KmM+z!lYE~K|Ajrw}!*W;pc5x-4f((oFt z>MEr7Ox0*^13Ic|Wi8tLrLyL$N)(_vwNwQ8MoaYmfyupNWw8a{8?rsvsdWO6>HFso zGtk?pPs5*#bU0q#r{9G}dZm#rG}0C$U1g+ejdYWd?lRIp7^#F$7SL~ukxnsE9VEvXT0{*!eagVEGg48y$H?~@>0u)sX57EY$j>(Ni;Z-r4GE&hlRB+HwJUT3%R~FCUisxX(qp9KvQSr#8cyv-cwkRGL z6b}@NNAAR@xI7>VyB_47T&l`z{d&JW=;=vd30E&1hL_Cur9*q#sP>81t z#1X$Za2Lnl;xJmA3yT9>alR@JGsQWhIOr2+ZsHJ397c(wAaQ0Sj#I=*hB#Lc`}<;( zU2J)ay=k#yEVgIG?y1OZBoX@)VxK@P*~QvgtboPZ zQmp*MT1zZ_#1ce={vzrY5xR&}MZ_qgHxWOH07twsB;FPh?+J-Fgv7f+;;kU@K9G15 zh~!#)YmXe6hhA8!!!;Kde#NAGU&7DYrt83e@g`0fRN1JKpTmb>UWNy5A412|VvkyHoVzYXA)^Y{Pk zn4d}Y^bJ4PWz;C7_cJ!8m*k2C>W_oWk|W2jjTj zfcqc0U)uMN01yy_>uYe=$4GO6cXcieGD?P3f=@+>vR z5eU|}6fd_Y55nEZ?UkqFD)f2_2I2I2Rlqs$`s<|S`u7jT_a7(u^*ntCvYh&=&+Qa| zQVh6)6wXb!UBIRBU@GCrr|+XN==TyEF-Xue0jD=FIaXpk_>{+_#jUMezFPmzr1&1A zw3gY6dBArUT}6|rP8D!#kQzAd6XE~T4AGK!3T7nyQ1rQE4Qq}(Z4 z83oZlvj(ZB3sG>jl3!L7E`_jbGJ##H^rnZ 
zs>Ab+bj;+~7o$M-gG(eE3a$9;db&R|-jM`uXc zCZ|OGqt1^m$drccvGmfM60Qp9v93(X70j2c6?xJwrzBnGlBBgvAM}79jDwD()#16S z7=pR767QrKOsgkkNbq}fajrBTA4nZvTP2O3 zS}Bb;EszTOAy>Ob#_ix6jbS~y$-FMYVDO^=kJ69u0UqKPsPeTA`K`EmajmX^jlK|S zJRXY}q-&89kEkyvP(B29ARm_t*A!e=;#z=@wr?T}+fDncCxLLq0i*K#1n^L*7vAU+Ry0x`FRVzfF zclDzeWT(XMYKpz9(060aLHO?i*9P43G+YBDc^z=8({T3#7YFXo{rzkX(nu389lJp*xTvs0mhN>IpH!Z7g zs>N(u<~J>^ZmMnY*Oo1>@wZU*tLqzr{s6xCX_@B_{@G$j#w>quW+)KwHwDWB%{6|w znjcvz{DH>$CP;-qN%xzTRwL6KxY}P$eJv08TXCB-H}l$%Kd_?QAE;{%G*&m&U=$$? zBOr}Ey>+El8kdHxSQ_wG*8(XWA?&;rtwDcdMSY{++lm4B36jQ{O6yyz7bC}&)%wMO z>cEPb)eQ};QnQ&fq`tbL{to{<6;dsuIjJ4JE%h%BEm`6Zh+clZZ0s(Z?$+6KtE zRjRCh-qn*zLtV3{nAxZ zWvhQ_WgRoDlDbzQLCKry>X%@aE1RkVf#zkE)qy3q;|m1(o%(=9k-zEo`apA2BQZmo zI;ci-lYcoZNRXzFK9pVQU+%971^tzS3eE;SswYX+E%g%{TPH%SCe{Rk6PNm{TP8M4 zo;Z1;F~(EM@zwAkSd?Dn3YwuBSg3#gzMvU+>1M~Cx>u7)@r6_2i^vbbO-YwJ3_{2B zl$=Ohvj1*QbCf{Q=_XTEGt@bs!dAM{5?O*CH0qM4U+9#D4(SRKO1+?ZLGG zFs0wm!{<`~Ka4atAK&W$Y)AS$t}4Kk?#D&6hMg@*<8cvOh_u>(Ymu%oU`qepfY&44 zXTX$xXu$nQ%klZ+2hl#IZMdjSN*^>}O5ess{p>}0E8=c~8<1v>MhckHLR^HW^jZV1 zMf$`!mB=XMm@4GUCM-0aH2) zS1;i7c$VVdYY|&Yh$$&ufU5v7rH#0X08uOg159ylCoY=* ztw=Ap4zV5Z*iV&8aP7nVU{96TeZ7Eb{YUGxwWv=it>Fl!l-6GaQ%Y;4OHiNEW4P`I z{LkMPH3KHzUw|sK4Q`Rv5d0yb)A0BrIR94>Rcj#=gKy&86;~~9Y;fG}540j|oH2g# zq@wW-e^X6!ZGF>{8RO?yTs^UPyrVUU__DeIVZ)5^EBvkFuevff=L(8#8W%ULaG-&v z)*0hNfu_q_YnJ*Ot6L{F*4G4@Tbt{G6Kk3qFRyNGoOJu-@eYKC^>tXHT$fIbnz~f9 zh0GcH_8e^|5u2t$!@`Sp2d7)ARGceDMDb D$^s7Z literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Hash/_SHA512.pyd b/env/Lib/site-packages/Crypto/Hash/_SHA512.pyd new file mode 100644 index 0000000000000000000000000000000000000000..ba6919adc7748187fdac08321e3245668e093fd2 GIT binary patch literal 26624 zcmeHw3w%>mw)alclu#%Mk3vz^un?Rn0-BUo&S6fZLJ>xhwzMf!`yx#R936_G z-o|6@;EXdDN1Z_(l{-3Kd|`YHwiIYVc@82z>4?_}54FG@0YUTq*E;7k;W5uo=l6a0 z-t>2}*Is*{z4qSgx%WOf_1(Ban1v9jxZ-gkqDblE;qSllWfG###jo@cFZA3xENW7= z4l7zvS?{O|)Xoo-Ry)c{YieqPj(L7ZprOW5S>wo`Hp5X}Tkap7k&*7wo4$5KMI_~M 
z>xahG>w^bBOhf*)-97gSqyxA0+-KtQo*%jxpRuner@i;3bJ~l`fAO~1XGZ=PZ(H^B z_=j3Po%7Qx%N7ugvqw}|AVm4yX=35i!%B>@E^&#YS4y8DLIUOz4O`S7sh#0Q3|oU! zgh*plA{8w_&}8D1B1%wIyU9q-&1)EYl@Kx9a{_5O-Mk7gr7MKk06N{DHA2wbb)nLb zZsfUu-7iEgcT_Jc0iERhS+|T1`WFX*TRa6{)MXmGJ`OO#`gkBdqqS-xy9WvyanTs&;!65>glHLEr&Z)JVysNFB&iZW549Jl50nAsIavjO z+QS?qe>_~zUsa2WE-JuWalH!4Bt9M?@)EiK+0dK(si?4ByqqUOnvij%R*0hw{iN1b zd&TkAc>HKX%F#Bdb+i4zzrj+Vz7#r~-Lxw>K6KC&%%-n_p)XCr^eBm!6xxv87W)y@ z;*cda4T)`S3*%&U2c-vWsN`#EvHh%-vst)F3eRy_rEt(?m%?|r9Nrs1z#*we74?XF zbL=xTvb@E%yaPAJ8Z1KGv$r|l;Fc*8ZZEGt@4_e!B$5p+pvN9EsT-v5Y=;u|WQ7aCAIW;dEM|~LR=d>-99#0hgTXInbq)#gf#Z3+0MWbVJn|l}Cb_7!{HGKi8if#Hj z9tYvaKm&w>eT+Ks%p9Pa;z3I&Zf@{&QyiT{F?bp7tkmvo=oy)90ta#q4KV7)Ge7ki zeK?fneveAkHe%&saKq!BTRgYT;2X;`G0!2ZC)9T(^$3)t!4i8Alc1>W>hC2IkZvIW z|BDa&7+uHxo*9|~yyM;+&-@{btE9dW&%6adJTu7Ij#ql73eqdbO;UXL0>XtA=uqfz z?#qKPXR%E18-wrhGKu2W%OLantAYLva*~=4p$l8K)BuhU9BP5~S#G4fYI&ZLu*HSy zRMP`E8>n{6*HuCg)piT{C}L&1B?Ywzib5NNbZZMK*Ow@j+EYzLIe}L*esL@CVI|dJ zAWf2*ai@XAjVCdT7=n;E#+6I+=$UZ?O`u0o--&0=_&#n|pC|k8ju$`&B1Nte%was! 
zOC=cWcqvrw%0(9x*8)jJsiJn!wB&$&gsh`XwtF+Fl_Q?n54XkJL-CY5o+aiL&%Mdu zonxk?9@bc3`S=zz{_5CA_lO$<#&qa7=>xTUt9EaX*jm`5LxW>8kcZj)xrR<1+j6K!?8nHgF)=~9 z2gZLAqzlIXXIgtkTT4-Ic}oM`)Grp$GCg9I01h2ax<57xnAmiD=}5YtY^oHlB3~19 zW$6lfl8hO2^gwI_j36w_z|bjELti!>lDbt=+vAx>Wg2ghYqqMm3U$q`BJ&-DlCb*H zy*$>X+(^)l$qvFc(|``V4qR-D*19G35?95<>f`*ao^J0i@*6*(lA?o zTeIb2GW55>494z6HGKvK#Okz6Pt2De;Gu)}pjns(O@~0i(KzX2=&(@VX_y~6X;o_XoXl=|ufc_0ZbNBo5bnj$sn>fb--GkP(U@g-g{{`IkR~A15kF>IYIztHAd62R zp)94y{s)pxmL>Q$@r;sY8M)r}l6QaBcr^Q`7pWkO2;#&CW1p?&UM5*!ps%7D=AE^#M8>5$7bq$w<>S#8>U-}6qa9aHPx zg?Z_<{Jn-A(!XRrK~|8bQmfNA=|Nz`J?{ilm+xxm9s2?|z@L7O#27HiSj4gs)!IVC zAE(-FEORrZ$kQ&*DGYJhVG2;xcQi|VEA9owRP~Lmm%R*F#5O|l(Cvt?%NyN6&ih87 zDI3qc?pi?V!!cBoB0t-ULbwJP!+7RIDnUCF4I>!o&gelK5r!@I1jCg`v7E;};CU(=AU?ozwlo4t|Arh;A_ zwnx3oj#k$A<#4L2pw||(ld7J?%v-}nuHFrWQYho6<(RPHUDw2IlUmYDBA7j?BrQb* zhfS)ZI!Gq(HwyWA2W~r9vP&?FN zZMQrEz)THTeg(+;MVlj-8|rYhTbhvti&1blpeSnX$an6|pRl(fi(7JaPV8*R2xZJi zsV6-2W+cSfYzl^hh@?Xs?8Rj7*!2N$w~V(Gq7qyOp}Rlv;dzs3rkC^g-Td9m-v{~o zY5snOzt`~hbNs!Vzt{426yIx1ZJ^Qire`MN;hEkWyt7EZSCh~#J3N~;!?JoFI_XCI z+(ePDq`pX<^Qk9U=MWVt>OsWMQqyMJa+TDsX_syJT>xl#w0sEzD5|5P@I$UbZuH?b z)D+D2$jD@?q#Ec5Q*gf-n|PFZuz`BuE%wg!-dsEvfx?7m_hN2sO&3sG6Q0vD8OTs* zM1lAA*iUk`TqM$kszAmXfsx<`ZFjesQ5OESOG2wkq%2;MSTA~Qb{Jy>HR{&@ud~aM zyBmpAUlX#|I&6JqJK=L|eZ3CCc!0?vOd&8H!ju40NSHb$@*>ejL5UOzb2E12&P~_fT`l7s`<)jV?D@K(`gDKih%k$i5IoA2orrfZHGhH$oV0g)oJB zrwU5we!|rKL%pG9sD));Ql|nAZ?u%y7Rmgogp!O+NaC3#Ubq3Rk65}azXMfQyxfRy zb^XXKvbsl_u-!I!yZR31w@@;7nO!s?d z1P&=Mi4Hk**ko&(b2=VZR84iUI^Ti-at ztX1$1SgPP6o84sVn_{y=ANx*)PI{qNc@E^ER#Tyqqyc#z*V1?Eew*P7j%b) zDbNe_4#tQ_32Vfogf-$(!W!`?CG^w-6V})W0zmO#f$E~6@;I%y0S$T35Dim8Lo`ee z8uAvB;1d->6vjrP4RUgcQT!mOe`m9Y*37&;Q->MJo}!Vqq$>1XE%pv4@puDgrhk`8s44H zY7V`SoVPcJUQEtgn?q}nx2@#{nnN!kBdOXvBomJU->kmOrEh>7PZZOCg>eP~^j`&{ zy8`IHmYfIu-zDcke;x9mPc1fw$W+DV!Tw6BHqi-xtEjyqsb8U7ixB?e+yOa-pfEtmECy>N5Jy#)v9pM}WOZ8kf;&-8Xf%}WZe5gNd0qW&O ze2v^9P1t6eybS>s%h{HEwOvx*ibb!0__B;i=Fg>($CoiocQlM4DLhFMy^N=x{Gzn% 
zRpJ7H`QhhV0FkHElpr_^y=YKr7N}d=_U#<3Yt$-j4|i@GYssN~6r6h4Rwb34qpiFq z&W>kZG@g|l`vhkMtEiu}-k}YOlVsNVuvw$6LtfUr(uqD|RX|hrK`u>M4=jWl>`|yT z7}s6Tjh(NRWb?w!EmRV2X6*_$vlxd?4GG?kaL*CE3j1=I6Y18>fk+h9GKiCmEKV{9 zb29V-B$7JxLQWeY7uG!Maca%sF3N!Mgg8TwyPr$prXi0z8!f zPba{y5?~yyTB+5qyK&ZTNHQhBlmuu_fT;=4k^p<4Q9Cu7mcS*o+B1PoPk_A=VDAK& zkpOLIHH%v9lfYpc)|lbG2@t1V2DX0!9FPDr6W}1MbGg-l30zhJ9Gn0zNPrh6z#$26 zXac+#t>#i+FG}DX32;~fyd(iS6QC;r4%b19K8ISo6w(Bb;qfG@#1i0v1b8q39!h|R z6X2&hI5m8KZVk4pKb@WwwPsg z3l0;la_i>)in~K@J(T8iqinsjvqM52Kx?cAAjP~{G5@#@tt;kfQ6)UBOHqp(C3RNJ zr?$zh9a#NRL;b1QogIq1h1-wy03=Z(EoL+#wH{29+*nMtDrR`d7PDfWf{HtRW^7h$ z=`Xo=DB)QrB(=ENhZwuHqdy!1^-khOsXwg;(q#8e%4^-C4w`pz^X6GZRx!UTgNhQq z*ymo4?yaDHQ3ucp4+(w1c($MeAihH}w<+N%Ryp(;X6!>)7)7mc$kDC-&z<+>e{Z3p zUM;J;6}2W?RyQc>Z8@@vGq@jFrQ_(sT)XVtrl@nVi~6?I+R@MFMzVg8&%B8SqHgy& zx8p99^g&*xQ0j(U*|~wI^(gh)c}#X6k)oG8ll}I}EAGj|W-6BclDWtQGVQo;2nK_5 z--{&kQNHQ-(s3NGp`>N7?0kz+ZIT&gsH30a+#}OmwGQ%OdWdnDAtDTDgJgzvl$~wp zhJ+mknlEY_)3c45m(6=ok9dGWw2cfVSmA2e$KzHtoP={SYW4UoYS_7(8ukz=XPX?o zVuJ7R_ERk{f{M?K!R}M?*ZhRZwu3Ba zLR-Kx1raCw1hM;=&;4JT77Xz@kD#iI103cxG((^IHfV+7?$US#`dv)_DDuoPgTCbC z!7tEwB}cQCRlobz+#ye)QJDs1nTgoA_NnMDtFjMz1ob1{LAyz9`$6}h&CIygL58-yg+iYbMz*7m z4;qU8Melv?P~8<~MH zF_3%B02&O({6WX}f*Fu>%o0(faTCvUinK-ot2+9~(T`q;ztwhU@0r}=Qy|`~v5Xrf zaVH8ado1Xr0wfXOXPb=vfvD_$k1CTkLZr6SG!aGW0b3c(0!rwJ8{)uLMs|M1^dR!S z6>Ma7qeHB2wc#Hg4c@#ifF{ZAbkm1*RLrR^c4q4@;@*K;D5B2gpl><7C@ z<28SIj;fJ4g0%Zq>Xt@5Ms(cAW%!Kfvp*j=;p=BVY$RdVq{?UB;De*oZ&k5ke>rJl#scC>Z!+Rn2P%g zpcLmZGLvu~PSk20#OlqmouuAi&l&y%>g8L_+68xV|D~1NgGU~0<@UR+%N}ACs4D8$ z&|XbMhA7TB_m4dl<4D8V(h|!^x0wg5+ZZ;~8@^dX^&y|8$(-4@25yz4Cc10tkzck} zKKL=X<-;VCo`V|@^tVaS)sMh9#rcWP{Ri?T&@I?Zk{fmuLkxw5(!eU|6_Rxq_Je#rYYST3 zqbcmP$mMo9`r=#ns+%5ewL!Pxn&5p%VL=fZi#B}D_qE$Jx|n2N3-L=+)+D;@Hkong z5ycE})d|m;&}JG_x9=i#AscL%PjXmhA4W3WDB6%R@&~n-9~eaz2u_2B z3X=lWIz<-5`4xFT&0%WtF@$E_(5Ca~F~xVudb|N1FfnFLiP1s07|25C<0ooNOhBZ zRsV#8}ss+`hY0=_75-YY{)D=>tF6IOAx!X;jt;dK9eKA9iU!2i5^ltOX+tB9O 
zUK&i(y)@LB%H3Z>m30NCz|)xbP0;4E^;q-K#H2Hg$Xg)JeT3?|DbOQF4F*S_+E?+} z^y{x)NiN!qF2V-1vlnI!MgR>LtVs@JkOEv%=cIs}%>?s{{4;rCO?wSze9f>^D?3J* z*PN}&Cp1--qV};bjH=!=W;L+_!Ixg=Abc>j3le^f9;J)ui2z_$KK=;?bHv_ru9EwJ?U7lPNGa=&`p{BFf4%X zo2ZbKoW=MQb<_D38F{e4hy(tTKgJl%zATOaU_oFaKzaC+=|7!v-;-E^p>gs}y5Z6t zumbfWX@^F`Cmho#A7{$&ilDreM1tp)W?!=)f6*U141ZjEm$~!$vMp&$ndMqDnUITb z#~)`KdR$l2Cw=~9mkl7ww9tY1To1K{4jXE#$J->7Sm+qq3vQ7wHjD^Z_qY4w9K#=< zzGBz4P3bRet%59m_qAre{2Nv?2p;|^-K20pMGb7VZz9Qz4^YIe-TJ>DGp#~d>)k}+%qY#5vt zEA^p$DU1}+i8->8N6NlAYX5itOjp_id*G#+_&>onM^^F35g2Tgqu+5XoYiz?pZ{Xw zY)~+GwqoCgdp3+N9CBnedVyJ%qtTtOj;&jDiyg`heh_VR0Vdg4G3zn@H?v}n&;tO5 zlkV8DD>|_C*Mq+$Cq3gS@AcNI@8KW@>(}!+=?Fa#knE@7E--$>M`Iegy)&(z$veY{ z@Lo1{b!ow@nOlVScusYj9DU%>zwT2ONh3Id=4CpT&$M*bw2qd~nh7#TT9`D2eP9Zi zHjVFzy)Z6eWu^*-+d60IOq*dO<>y&CNBCe^_tvYW2mkQmYiq`#*|WVhTs5}6Xv<5B zfVHJF*3`*VVT<=_ewy~fG%HH6c2c}g-dc{9-MD$<^Sy_RCo{tfW^@kTIl_n4*e#=2 z8Uci+Zf6GJT9AUxWXuU~D6%jA2OM^Uj>!$5{l-1s{ZoIl_c|W=x8}3KS97G3m`djh zu(R;q(C_;FWs9b5dFUn(r*NAB1`c;2X0-32FQBmz2Da#a9ObltPdMak6Zfc|*?r=9TpM!|(IDZGQAL$#b86?(>^JyX(h68a8`w-u(U& zt86TzHUDj_qPNou8i6rM7DZ9;0IX+!v-1u-)%D1L#U0Bv2jA_yBPWO@nzThrt{61A zw(Rl6JQLsidOE_-t)16@Ym%*PGj~Csc~^}E8`h!Qe`6(0M>=e-u+c%hf@#t8n|A3m zR3F+GgUUw_k1xN#n$mtR1kXy(z_6x+*Z-7Nb;J&mnkLK9?2F2rU&hz10C5tGZfo{; z`ETCjCf^OeO-o&lTy(FEZ85`p z>AB;Nom|Hw|CaqXZM#tXlFj2KVUnXGX9V^iUA&Me|5H}j{GhtwzaJ{S^x(FuN6@y& zxtrePCdh@bPX9xT>>le|hP6lUhHLzD{yP^lp)0p`n21AECyrgnd0d1&Zo_LE)4#A27+fkIIN zpC0(kSVPk}wWea{S#7^kJ1Fp*`CZsF(pKA%LJ~Q4+p_u|37-2MqU`*D!X-Whr3E(* zKlKGS>^o@$7{{fC3&$xaulV&fH=S8y;jg%j(?TEiD5;y+Y{idvbzch2*EYkSSlRLD z#yiW>(Xz4l=A}OSQY^mp4Tq$iZzX)Wt2>Bkg_tU29XIlU8<|#Z({>_G4rxyf6Hm`b zkmE5&N@&EIACEqhadKVi%4_<BUA zxn6O{$UMWg`HTk@kU#5Ki-x~|hL3oP{P=}ahV;L(*TM@y*;s*7f3;l)OqX`rqGGv& z$6wHcf;(LT= z1kY(8>>+Ij7s$iCwr+D~`nq3E2gc~-6xTPW)T}Yrbfo4;}IB*QCJ%Dfou2b5Ra(neg;+9l|>qP66i<|6u@uZIEB5# zqgT)~w2PAn@IrRe0!em$O>P*or3LIX81~Bucd;c#TuVOtIs0*i-(zgeM2{aXk)uk6 z{hhfXHq>W82&@xi(5{1E52`fd|xJZ*bJwILBIj_J-h$=$>@5;Cf3ytdm$lXB*6 
zx#nF~FnwBQ$kDZVSHH1t_eKS6Dp(#+^lI1|KDpO*PTN>yCb6h1&JVdu+lWV)0qx|T zSq$3zoC+Be3Vw~VKDZQ&eI?W0#I(QjBme1#E?Y{^sNmr<=59jUVRSTY=M4}Ac8_cb z%5j`b(GjMLmrm#tn?B^(u#@H!{arpcPGi{GDo#Y&;5AMPBC9~3;w(P8z)lQetS|Rm z^;GQ}#b}ZQqdT(E^_`cMI2{6SAsAqRrs{0>G<7Fmp&48nKU3mG6Z4`E9rux-51Kt? z>b-;WtI&tl=mWWOJfZBME^_QjR#cE;b&?{;|HtT=r?!C zpXRAO`1b)ZYja+j2HBhQEE<&6oR_LWj^;eG24y$rrD#wNepQg$oM&oIPipe)_HAqF zCnP)bZEH<(Yo}G|)dEaP0yHPURNGqFLRq<23x4B|$9G$8Yp13qz#cl7MD9)7+B_2) zPqdhq1ST5KOYNqU$27Pjd1(o34;@s(_+?_^gZ6tgoA901Oan>bpG$Z#0|Ysmy=Dfo zn!TwE*qgl;2CU8AG<2)k+e7>9zfbk%;=x9UOe_QZh=R!iPgV2t>Yf<|YL=wF>kaM4 zZyGmyzYwX4x-;0{t9q=yvclBkc;aTKRMw$5cgkgLQ`G@>Nu6xRs{@c&zsNV4ZNXl) zwMavqU5XjcRD#Urn+`0b7b6TmhM%bh-nCvs#SGIs9t|lZu}KE3Rz%ovRYbI2@&? zI1B33nF}PP}I7|$>mr~JkdyK&%c z96uX}!N$3)ae!)^ZyJY*#yOvH&}N)@8HZ5DVUTeYW1QI-#~H>+g0Zh}?5`V}+{Tu( zu{Uh&*c#iT#_pxDX=v=<8Cz+_mX@)VWNbSbdojjNh_Ojw><<|0bYsbEtZj`IsIj&) zR$j(h$5?t8O8_JEHlki5LN+2rBVse6CnJ6_0u1Zs*N)Bqq~|RYp4_s$>6!Vjjy!Sy z*MVa-6OVj%Xz^8ln)dk4yFU5t(&>G!zqsn^k@qgJJb3Hxyr-UeCv@?WVL$3Wcx&fP zkNS!RcYJTsy?b`QAFg=d5A}CF6d(J{OJ5G%{;Mly4|wb0`Kx`qe^@!P+LjmIkXm26 ze^JMGhu*Tf>EUZ0`N^L9e)~zkzLy^#dHVigV=h@=u5>(BJo8uAKRcuL*62^2zdPvp z!Qe%?Ki?n!*VP|go&EEljK6!v@QdcI`%Sdz^3xBPMm~D^#%1r1dZ`cTmGQ1Wa%3Jv zCyf=jrfV9POUnN(e5{R{exKd6y;Q?H^z`g<@h$b|96Emmk)`)9TTgTJ)c!B$AJ<=L zuDf{3aoGT|sd;;80x^av7{eyPQ&*(~ehoI|9 zY83M+vZ+o^PFcl#yaN$=E9ECo$E$lLd#R4+OdQ@ugIMMaoXqtK&%|-P*=OJixn9Yc zIIg$g3|tY{s{;;0)%#M!^%{Yr7hf3fRU}y?nbh*G+i$)1&S{CTz2n5SqvX|TI zI0JVhx3?o1SESY3dj?Lc*9lw>eqlO#vG)E&ChVkH4BBSTk*K_#_?LC*q`yj-QQMGP18= z*`jv-74QQ|c@tj0k~TJ_urFStmp;xa(x<1@(Y^R}sQ5kV9mVw(sca z?+^GQTt+$NFHR~qrJ6))9^R^UL7KQ=oK0L17}zWx)b^f@g1~BDfeZ1 zN}q1>7(tbx=zb4f~$-tQb!Mykjd*HdJSO)Kn4^e({* z|Md3j!OX94m7sk}O(`ak0;Zb=SjB*HTtz*^0MHmP)?R1Iv|#RD5t*i_NTWJDA88_e zY+9YxAM|CMMcDfFs-w|U|LH^N;O|()VAJh}slqc< zh$nEB-vS)&IWv!nXV;XanEHJ3Gr+FzBBW<@2}Q5lh)wDDWZqTDX4$c`N3JK zqUSD4C(SA0N`M~bN)@hPrmz+b5IdYgTx`ufU5tp;GJhRazqYa}=nvoxq?Q@};GZpaq)hP#CpQEF{+eK6ptj5p*YitDkv~vf 
zSp%ssP~u**T(4(p1K0XXsjr0re?4v!(^Ic+@CTL@`U4fUf$GwlGK``D!w86B=e91F zi{VMwq6GneX*rPM7-45DsSobD{~o&G*l1m_OeiFnam@g6Y>4DDJVN%d4PsR#99$xwyX0 zUshRBSyrq~9%gGe@b$s+;^2}xe{p3^MQyRJVZ~LXGU94c?XRx)1G-`I^qJQc$-aUy z<8rh%2Z=KDskV$Ff{K72Dm6_MFM_i91E~L9f%Yncn3ihsy#!qB7uSg5djEpr3T9X_ zbuUXm$!jVq=VO+OYf1xw+C{~sf%&)LjR@ME+JKEBf6Z-`f!dmCVul!ZMvdAU|6*8> zAWa{=xVzZD*k9HV^cSB|Fc9>po)D#Vm7}WbM?tJcl?8&M7Whl+Mpca&HD;7P#emAf;LaNSQN9$f z^sW|_$juR6QHQ^L;DWg3ptV|1DFux=q6oFCQ91`#C9YAp*Dn?#orl&cQKua6E#gYt z5riD&Knlm9c0KBqAzgrW>hVoIlR#-KprGDn6;t$!(Wv>~|6{icF$Nc{2gV^?j_Zy} zE&T=J*^hyL2&n_HXg2VaUXNIGIbeDx{(ZP!0DM2v4qSTyQ~JOFgkFFjLYk3@EnL8M zq_5%X1Wf5ZTtut)Kp{rpA~+jqsScMTU8%#A{#J+AAl<9OlzytiT}TTDVQ&(#Af=7C zs7*@m*I`OOz(xJ+M0zXYUV^KTrVhqFCtymmaS@)<>vgys=`$AqhxXSXJ&Y?CFs0Zp zWcYHVTM=L91D_njQf#{sS0nJ0Qk;4};N*Cf;@HE(u(?icB0YxdO~91)MvV17U`hw# z>H24vTi_`jgZNAVOz9L{C4iITNs2$OM=aL}Jf*X7 ztprSIHLf*)DdxKmR|{Z@`wnM==YYj!@W4ev>;_CRrWY5@e?HP*;W`aG_EW`rT(i-C z?5Xm)s{}Bu?P;ClKz&MS4Mi}ewEiKOQd+A#4Vco!xO(H>|NVSZGhpKNJyfCb%oZtT zN4$R?Jl+V-|0<$n6=dSfn>aV|n#I*sj@$f!dW30{MvNJqGs5AoDXT57teHP)#LS{= zN9B%i)CUpIl~%z`Pa3hrUq9lSt25FkQY=wDuWE?{4b;?68qpA_xvIWwfxo)6epGd3 zS)jJQwjwyHthV~9()#Mrw~ZO$KsZ)efhEBW$<(N+D~z^~IZq$|N25x#|CM2XB!?VoY4*ASdIjAr6=%O_RvN9$_`Mq)Hg(PtTt2KTYTOz5DU*yYIgH?tAZQ#;fjrgyl2F3Q%;Nu^vEryuAL?mCeG~jBEGL zV9yjEo!4V&I6AMXEfg1{vB-v4phIj4gu@Xj%bZ_2@COc^0~a}*#oL3gT`{18 z4!$%Skb3_5vx6F z)oue2ev;TkS??FS@w>eR`(b zx(N;1=P*s~88KN?NJb}a&?1C0h+D{rM~+D9L8<#Lv7y^r*4>Epk!*L|$rs35ua2v4V~M(sCxm6rKP+Nw zYmp~C@&<-T-J~ZkSstj9RO@uiUV@uT`h6}ptCd zSY5heP3rjx@5G3eIU_0y(etxL&qQ(RE(=z`JzAQT)t#?iowZ@K&~u7f)jnFvO;`nqc z26X2?;B=SN=XK}jf#^y615?W0gU zx)3p+j|I}c4@y#f2)b_T88D<2;mI_1pLGk-s`W(z-B#Zx4M{I>39NqWHQ>;w{nj#I zw3PkU`6kwb-N2;#)3mwfQqTSZ3ynOX{d5FPWX zB0qZ9-a!uZO6p17x%NtQSO0^~eUDy;J&)(Zd6^F)LrM$oeY$kmKGC!7Iz`=)GE=SPY_)vg5i*QhegQ) z+*9X}JiXFBOc@WfUkAc7Ig}*$dg1;6nSe- zinI`r(X%=IwL8Jl)}S(()Su2&sk@!-8pTy+ZqT>rFvA`l(GKDe#K|%z`Hm$qlOG+D zdQ?*Tb?2|Ip!qhr?ou0Ejpoj6r{i03d1*cTBh;7Abt_k*INaEJ8WpT 
zYCq+oaI87cADHyWcYqlCU-*gjHahVKo~6Ae`1*EOSI|Mf5yznR5X4Lmlxk6fD2Bge zprdDYLM@I4V-H~jaa&CRx-#`-VngyR%LAX+Ud19{B`dm4C0wxb4eW63W9W;ilhqfA zF5?p>!vhtguX~P5xS%Lf zN|Re$>>XJgN1=wVvbf%e2l@i6ZGAormZwNB zDd;?&n66z!2i&J)0NDvsjHRrlLm7zu)mW@((7Y##(aAs}KV>BwiE4p@& zoqcrYszwjo<;}R^8}X~b3)n;r|jX&WUgs-IS2gzmkBA_R7B2h|RT|~!ks)o+p(OJm0$N@M4t7t08 zKAW`s_1K89Q)z!1>%ezB`kumZh`w|#=k|Sa(#D!t8Ut1=qXU&g6D1B zO|I#QMk#6knhOpqA77yhHR(bNQz~kj3-g#VuO>=hu66HQ?;@uXA8<+gEVz8hzmo7& zZ@ko}TBEoTYLgj@uiGjE9W4Uy9w0q94oiTiAcPI9^gW ztrvY4qD#M3zs|VaeckqVfeQP7Un(RYB}Lu#--Gcij#Jis^{~-)WqdOvu8PN@-ZCil z{#ul#4N91mNNI*RzpuJ4ai?niF{G0jTfcQTkbF{L{a0YM7yCq|Dwz@ctvf(siP7*d zupUSa`mP)DEgwphaZRq_8;2A2q&e3qUJdLCLU!Ogeec;ivPCUwuG4>1EbN z2;u4=_Qa!?--KWAdU+?WALjKAULWT5cX<6>UVo3*-{mfLarFtuA~^v#Y{sd_KB>tL`J*1d~5GEUVX?vSyd zQ0*IW@b(j6R{MU%K#aYJDsq^l4v?zP?&WRIS-9XUsH=TP9hm!AhGb4t)koDSE_aCJ zs(r7`yt!@mFl9F4<_TSI{;@ zZM_DoR*0XpaV19?nlk8d$Y>G9P6BxiUB@uXv6s>E0?Kn>zG~{Dxt5sS#!g-kzW8PC z!JrWBTMf};TyOPE^nQ;|=B1ecW5=k1x@zBx)ECO52OwVUJ8Y<;Y4^`0V-Jz!5Mi%V z1$!b`r{su5==v;9QlC(PWkij&MU6#A%>$r<2SbGs8$yKoBG_CFY z?6Sl9rzh55h!i0IOv9~_moSMcM(aM-2O}aw;Bf~&PbMFPud97WV0g9f6^O%HT16p# z%5)KTX|$Km@0dB=$Wdd;KI+6LdMao7G*L3~OB*RyRgDegNy8P`7MLJ*4Z>;bl|X4j znLEwMj!k0XU@#HN$FP&>8tE2r{iSiTApM4c z;PGQn{|n@6bXQ`05hQ|~;y$}P6AN>Kcqd>}weL5G%F7n&v=NL(9P*Hi{1PmTQSGCp z9mnrNGds>~b|R!t6U3^y@&?g?O;@%~MRhI83D(ovFQ%l}}|sN0j~ z`z&X%Ihy@dhPO5o;dBIsVg87K*)wnnO!BiB+KUkwID)drkPshSVZ=oIoCQIF*yz11 zFdR~DU^F3Vp}=Tx!TAUiIHDbqFm~VrhC4zVU8IT@-3*S%5t`%yzD_UGI%Rk6I~1lv zs1OK{ulD^KexWEBI}Jw^qs+g23@lX1>Phn`Ty3xlKguxPi=<-~>41IJzL&|%bl%}O z5yo)ge!xR_aPC8Uw+O#>kKpay)D}Z3yE`GW8zN@c$@ic7aIUs2p3`3bGP-%!(XT2= zJ+pI_mwq4IJJNFjuWoYKqV`>I9^s9BlMRM&(fO<}PLVT`YEh9^A|a=|sUc;5;*730 zsC7|w9o}QCYSyH4h9)->U<|=~cm}ptAzw-hyL#1L@No$IpB}N==(f~^t4+AxgkLn_ z6DHhi!h87*~B+0J8&;MBL7eJWYzz< z&}Q0KWTwaqQH#CK3$PI(%rg1hI-yO5xT`OgSrthzfWUJxZB|w>N8DnLW zJXp$@2p;_!H|6l*1-xp;j+1VqFD)Y`#s66Ld#D$*4L_wT*T>`3oc+F={SL~S@sSm5 z{=L&hD+U^j$0m8)CgT682d1%^P8)M7#mw1S#GFeDnRCg^;fq%p-?e^*1^L8G#2R9Y 
zp05CZJcqX6^+Vy3yvCXMj8wAJ#!A){MyW4e6R{T|cOGSx**62cWEK)xlb#Ly8kB51 z(bwj*TM8_!;3j-km|Mu^E*03^*qq_g=e^R=ghVwgP606-4u!^W3K0Zi~Hu*_ReG`;x-OVj6EgK6sh><5>A<1xm6ta>fg;B#E*s|2hgjv&~ zH$?;CR+Gi|9Swu$Vag5Mj6F$v z(R_Xh-Zt>kIlR3$LI%8{9Nr*!J>Y%V^HI-_UMg6lw66?Al|)Ql8Nmxq{CCJJ#A}E| z?oUK-PJ~;OP$XOzR$`kOJ54Qjgknk}(7rDGKq%ab6|k-gw_)Mh<<`1QEpn8^Zw|F9 zatvRLt!riFLz~6CTV!QrA{LXwN@Fb2BIBt1wzWx)b%eqJMb5I=m-Fl6CJ~9V^fV@& zZIZ(qL$OG>gBFA>ogxtl%bRcP{J%YaJsxm47+Z`&`S4P}PLv0cu@d|y^10uD{{*0jT&x0og0~|V>jX~!3;8I@ zGr)HNW>AKJ6MVdsv4g;$0JJ;tq6N4E@MkE)zzM#NLSs#z!`K28!Ycp+Cf*A8n28ho zfr;-095Qi&|83&qfQ|U~mr~?41h=7(PJ+8koZzb{Walv8{bdH;4p?vnV`sr9Sb;(^ z1aCL-R=^+51@8vrjDYk%9#y~z;{5@~KM8mg`K1s1+-R G^Z#FsT6~cJ literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Hash/_ghash_portable.pyd b/env/Lib/site-packages/Crypto/Hash/_ghash_portable.pyd new file mode 100644 index 0000000000000000000000000000000000000000..098b8136022d8196ce266ec958ff4af881e82c7e GIT binary patch literal 13312 zcmeHNeRLDom4C86EPsd*U=fFWFeD;4O>m7x6BA&Ouj+!w6niE zGqP;b{W_=pW1Gy;%>8)x-S^&o_uY5jj2iFQ#mtN`3vN|qtPhYbulD>O9wx@7%{efQ z{kZ7p+&)wA=-lRw;kX!$^=yc>b&Kt7kw}jut`CW^L_`cnME|X;#qOStP^H6B>^4-_ zAL{*b!QYRZ%H1WW8cx|jKlAd5lMHay&rX~)Y2_zQU8C_&oh;Je$&!?vy1vr_$s7j?2+9ZKDGz$;vGL79B2Qcjt(W}Z>N1N2f z*i)rok=>37C(oFq3VO^+zHgF>`^dd`@=(Qq z@*rk?%&Oc9AoON6UO+xZa8!U$kMEgBHHd@d-z z<2kIn4JF&N!uBClR1#Lkc8;X{Zm;(zrsT3KJ_Dw(<5@fhtqu^>1tkli?OEwo<;5aU z@57^%xnS{(Yo$oQu^d2g!h?5Y6W1zt>*6BHuhh#0DnC;~O8XY+F66CNcRsHS7sAE1 ztBK`GxgDD15uU7JlAFsz${ZA-(ej}D+yU~rfc!Hqn}f3Bdvh4`smE38@k%Ul>fl&` zC(Gr#+*Mqb+%;T&z|CO`(Ra_lekfte3wZ7#JCj{*hcT+WJ1@xAM^Jx9_92H}*^4Sw zEd%OM+PVn^%AFXdW*ITL%toC~+^F%?H%Tb^#h{BZl)+qDVu2IO)1eN54g;e@c* z_1guEJyht)j=qg1GAq>Nd#3yAxNI%O=sC<>Jm_~g3%ey^%%_-kkb4NKE< zGO9}iYlkXXld3u4ooKOgQj5~HsQFpFW}+yw&V(7L8Y|1ms;*O4C99CU_iQvlR=T+C|fT%7>BZm$YSs1M=@lLMJ+eo~05NjxTwr2k&u$kh*d}Hv1 z`^goLUJIW$V}g|Lf#R|sPM5Zx0fT1Y8}=%iJm0+-od9uS@gDwQ!8@Y_q zpgcroslxOTGfG5-=P)fRQFYBgHFYql7Ci7Hn!J{K?##`dxQ5GT^(nBvg*}TuUA_19 
z1&>izmkcB%o(z?vmBw<_HB0MMT{%zJIaL<}C1?3U!q3ju@Jyqq(=QWUC!(5@_n#KI z2e_xs$vypiKZcA8+OG!T$v4FXe?QFMMqPlXaQ^Nwkhc(tnZJ7gU92Fda`NP|Z=r3L zu&FK?4do;83$W`6#B+Gb78D?+WE_VTa}+^ zqA;v6&L0@`=(mCB>tE=#-bxYw@YA%`1ph$Fx|{<2eF%ff7a(R>piGJCL=o&20y=hP zC)6S|=xYcqh+CIgP?gCi5*v~iO!t3IIfY5UOjhi8E#XEl-^L18EkbM(@s;t+ZROm2 z45*D~O1B0xrAK(W%pF*ATq>7uDGe^^j}^-+?B@R9l57mY@Lz-kb4T?OaEV;$a zzCc(p{L%(kpt))JfNt-dCzL^$)-w#dlx%-3QOF;5Jx8%jGo>!Gb=lM4u$?C)%l6?! zsd63_u%G@KAU9z0v5fWKAvTbl_bf>dXg;@wXTI>rvraX&c)lLFo@sHsnKsjo?7!FZUKe&mBRA& z3^yx3)wb~`+{a)mzU>g1>~1*pL{22~%oPtKr18wMtpH;)kZq9x5CUsxD9HgkU-Eiv zR9~sIKJ|IfRy^w7sn>13sP2czbt-2=o-3aozz%y)i3lxmVg;-EDJ5>t$VF9Pd>)G`0nBmN~L ziE>So`}ES&2}jbg0j1vb6?X&B%vMk`EFqIl9&)x&ywkQmAfD`EYZHX9br5^vQs=F} zNnV||U3-2}d#1GK5$*YH?fD(;`CaY#J?(i&d;U;+?!$ARX#j2L`=aVv3l4gs?UWNu zX-GZ|#DRv(7x<3D!uA-Z4`E5z9tLv2?F5BMslv?Cs+kUDJZ+=X1s252Ol^U&+t6V_ zb7cAXJ`tkHU(E3gU)S&JqXT<7DUzP+PVh{FsLV$U5qfVm1v3p*LT_O(Q`>~J!|prq z5bE)g{=n0Kbo~NS1ylqkP;(x&d6u>Fu{Hp=Es1XU?Wm)h8R}UbQ z3Zz*enH2*#k|qWN{YWzDNb^-tzVlfDhhj5~Jo+%vg40CPypFd&s!f%7 zM3`=(qww}SP%QKod9p(9T9aSstuMf1r5TU4g_Ntb3cV34osBmNy$@P~;8giD8w-NS zU@9PVpDjq|W?&E6R|vfp5do>XABSYSFH@&#oa%!hi+u94ph`usO&Z%*J>-LA&|Qq; z5^N+WDbPy1)q`M_f;JoUv!ETIEk@1fKu-g0HRywNo?);Auxtj4R5(EwYIJpd5$Nfl z^!yT7a8Rxpl5GlX9M zQoC=z9kZbMlRRY3_Ky}Oe_^^MQ|1e(S#BQYa_LJ!&s{Is1D+#6`FJ2XYD&Dq)j^(A zoe%u0FZnD|_R*SvJdlE?Dpdh_I8_hdJIMRbfWWH}rJ_+jlBzEb$nOV0=scYJ&ZO!~ z8s&3={!tJlSKcUpkg6}$31_O_p=0pyUzhX#UpWK)Z`%X?zphEulVyLOf;Xh#45^i7 zZ|XsdHx;q^Q}@~YskMdv)Pr_^>K=hFvABi5vSG6f;yr>GL@=xf%5V^Rt$S|b{dMA94E$bl!<}HFb3Xa~I{AoCeidi_ z{QII>_?yZ1^dGP<&y;^lbg2GssQ@&fEj-LoUZrsWAkD>c3p zV&5QH+Lx-wA(7Aupa_G6ia^aeS`O5rqfVe!9km06hJIaFJ#-UF%8GQ9lojhJDJ#)Y zQdX*?q|5=-mwK>}?1;HhNb59TYNbFX7AJy-W-mRbkl)8amCG-BhV_W7?7g6>Sd_Jh z!nZnw-qi(x18_sS5Oo{OLa!EW10`8spB9$2C=9PMf^kDW0ypF%a6>)xFH{b8zx6!o*aaOe>4E|^sRaT4$3d)0`NgP%BBlZ1p!#e z1z>%@niOei&zyV-y*^CwahS;zz`%ngh=IO>JZjFP7NIv_B@z~!Fw!k~bKH_M$K)1- z-W3#4^BPx7Li5U3SSH$8p|zoPWQ8ryEi}+z8asdfg8sr3_P-Uj6b%GhsZx>h!Bq(p 
zQ$DkXl&5?a4RNM?Rt>SId^U6|M=KG|z;Y;Ur>I}$V>=j}tmtB(iT2X2weoLsJT;%E+FKkql01*rsa z&}2@Y9uJy_f*v$;_|DPkXSBCXG?x5&R!`^0md7TQ&wI5P8Q2;+RdV^v&PFdy@!g|+ z=b&s7TU5RS&MvKRAYY&h7rv7dMwmLoWs{7wI_R#TyfK(@JTXeqtS&0A*~sO_R)vr2 znyPRBKoeRG&*5z~$QQH1?tZx+d~I2M^7ubnV0^J&#d4DluQXt_0aqHZ%Ya)AxZ8mH z40yzVrww@CfX*el-s=sx!hpGsb{O;n27Kb8@`DE5cM<>R2K|-+&l}LWRM&jOpjR4n zrva-Cm}|e{GPae{?i_SpqtjIv!JPab3pRQ6cE$~u(~G=)%0>L#w@C^NUZjf5gB$PF z>8hd5{|PSpMg5v+Vxit|rvWGUIpj~&pPc&tEwmf`D>tBMz^Zz^|2aK3*ER>W@11yO zpnU^%cSp3cYZ$u$NRH>Os$w^Q_tgpBCjI*(##jwSr`-s@gj@00&LU zQDyBaygID)kxbQ8-W|lNnZgTba^9&tO^#3DHED89Q+b-)x+%P7O|BI@oDq$_G;4BE z@OGKNGrsGRZ;_Abw0p~zEgkDI4Cck?v~k3wc6Y$1>Ely)cWC;WCi9wgxph-`x?C%G zQEF$C{{1)i{kt-x)ASv@iTbMBwi+lK0k1sIqgBAh!K1e!h8#Y8frprOkaX*HX&NyA z?ZGX@&GM*$6<)+gfscNesKQ@Ur;q{NLx}a$~KI0WHva{k< zwkXxbYdZE4ddyW*^2}=J2vZd zS#)MA>8F0Lf$Wl*jBSBFf?8iz6*QRy8xt@V0b>!WOQNPC6DwMea=V4uW0t77wV=xA zca_OuVU9&b%u#J?Dx&tT!Usn~e;kwfJKX!ApP;G0#0oI!rZPJ#>%iSy$jZ=0S+z52 za#>-!1I%UWV>Xh}>|tZY)wZbKAM|CBl?gLSqGU(vKV1ZKbw z9>#qZUNV;zF!m81=6#aPrI(nQeZ3`H(25uE^K^gIb(JtDVsZp`75>XH_fy5}Q@^z^ z@hvO68UnVzgev%^~!CP-#h+(G@ti@(X^CVN#u$we1! zAjA@D^zoCg(ET3Tw1ZcUJL<-Iz_@5^)aD}a8gQ@2y$LtfEMR8VvSDLed}B+rCnmM6 z?+R5$H+Qh6&4EZ*YPqP0>5?6xxD@NzJW<17hc-pqA{`fVb83xsOwHn2OM5rQ6*Y1b z;(Z6at>EpN#EULv>`CyxJBhaoyuLqR6VgG~$35TqH_IxitF|pFC1Ro49=x!|zkW^< zFWA#_Zz5Wsh_p-Lo=9Cpifv}>O)6O%j!B8Ot~HVS!;uc8+}1UbjhM@>P)FUS_E40> z>%(1AD29IkSXYOn|FT#txH%-%CStKrL~4rlw1*ISziDj_#k#|hHYt>2u`iheaf9fI z)rZ=suT8N~9M#w=%Wa8JY;#j6*4Y#5Zi}>I6bTF?#^!!hd!U2Oo5XG27z?#^fXRMC z+|`@oQmDH*+#T}8F@O+ZY@W#DVH^8zjk+xW@=l+(q*oONkwbZoe zgUl6$BKL)3J&|sj0Jdm~L{B8N39@vbyJT0)yb?FlK=GNoV^+9Nr}+M?kF-SGu* zr3LLVX~D)&TXaF!!UYQ#7~{IA9RD1YFfCI=^o9J%^N(VIqj!tLoiD3u?jH`hy@U2m z?1t>qqS4HIIwbxl*Po9CoKD6T;--9j5#V;*9|9-%IP%F3;7tmCj;pK5oJ*0NV__ z1MpD;C-?&c-v>Bi-~@kV;NyTz`1{lD8+5o8IO!y~+rSB)!cF}=4R~+4j&}iCE@i9> ze1a9YNrvEU2HpYq!`ZL{`1=6qKR$iH3F7^MhF=eO6#4BT@F~B#4fk8XC+CoqZ~imx z_rNDOhCiH@-l)S5fQ!H<=Yf-RM9LN4MB=y(G6c`!egHT@8*=L}QC+~At6_WKUceA? 
zy?wwbU)_V7Z1QbD756hFkNkTE?h*6{?>)4$$Z6noCZV&#laMD!=L*6J(wTs8f|S2s z4|#$^xW5Galk5MZ1x(uc8cSj8)ZWusKugI#!pA=X8AlnNjUNHL_*%MrQ+JnmUnmww znz(HK!pf@oVkpwy(-DqrST=u6bNzyv`C?o`KH1iVlwsNY&7t`G<$vKQUfLFqhq~8y zZ5E**5??ky5sTauZ{HZ|Zi_GI4!6g8;ystmG>>2FCrZbcjEZ5c5-W^ z>eD%0ATI9U$DOV+{wu%7zQ?(ze2=(i)1Iw+%J)|7t=T)W_w-(6@A%%2yNUjs$N!%$ CS?ZYp literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Hash/_keccak.pyd b/env/Lib/site-packages/Crypto/Hash/_keccak.pyd new file mode 100644 index 0000000000000000000000000000000000000000..2786dfe99b2cf1738bf668349a06460beebbe8a0 GIT binary patch literal 15360 zcmeHOe|%KcmA{i@!Xyx9(2PdZWiW%pQexn-Ef}IRGSU0&i$(~_`fC!B37I9om>&qP zt;tT(GF)d%TiRM{yQ1A)Kl`D(*jfzKes+c+N%)z>Ur<*`t8E{Kwt;pDXuGrDbKjdV zVEydw=hHuSxBK9|bMCq4o_o%@=iYnno0ppVx3XNuSRU?lnz24Wx;*mvpFAv#T{e5q zW$eX*!*lv9+Tl5MEy1v>Gt{v*)X?T?Y-n%qh`81ST%l;YE7kHm-OM20X0tsiFD3oxztbgiF zb|M$$r{1)gaPl3aTqw&cf{iWI$E6n4Rx{T0`Fys1$DI01-2}VdH9hCDYncm_KQpLx zR{%OB-UYJ_JcqG-NxA@87YO7`Tsf>BtVW{@_-NUnj4feo5N)1c3VoX%4G&AJNolU=@~NqHo&F#?)xHXfw13%6+pFnPJ4Re7kX zA<_Wdia>%)b{TF`DC_bde98?rc5WIfK!7tCgHG9(hp}XNr@<&~tVJ#Lxj36kHusc= z!l6cxrBCXCkUaP-`|^ObJQQf{0Hai?6Sc&5b2gWhciY93Ki$$Hc{MlNXZJ`a@XDgo zH!*f5dO1&}9L6UKCnaz`zXA4 zY+qTw{y9vGajSkefPGg|(o``_@T46~HN8puS5vZBf<`?4pxegd5x0ZKA9K6B_o4$A z7iTnaS{c%R1tC32dk@VHJ!)la%dyQqx5x96C02F#Qy|)VU%_+0`Xdxb1w9F-o@C^1 z{b&J7A0VRiIUwdTG#*;N1jbQ0PEp%g-%Md>XE z^-)B#rzah~RWGqYk~qd=OIXCs#h^YLRp_)t6Z`j2163U1B3BbddoZKC>5;Ux8`DMl zXgo(ra`B+Mf{TcI2^WvKIeg*rKHR^XW{Z9Z&#mNV^2>d2Mulg~d13XUeQ)ydBBQYf zO)C8T>A|?Q3>EqVFjIQQ6|36y=TJKOX}trM>4jv!1)hk_{Wj)8IrM>! 
zBbQ}((#{HydeaeWES($mO!Ziv)nnvFva2`6Y`K)lK+K&FBKyWTLpfj_HL*U-2FBMVX>uI^jSu8msOJlGQS{V&(BoR3D?_w!QS{ji5e?77 z3>`^G8g?xqKYCW)OAhpKaXjr@c@3J2@6-CPq^mIp5_Rr+_%QACQVon<%41FLB~U?g zw{U@4O$?G-DlmLR?4&}OGDOqLm3Cf^X6b=gI_I$$Xz=_GKad$a*D5Yf8AD)w?-LOI z!$r@YSnv%}wX#1N;jusodZ{T%JFk>VRhRI1wIl6hD2XIrNcgE)5>M2)jPh$#u6CiB zgQvz^Tmi0BJGheKyJ0deNH0Y}xzH82`%-X!1F3*W;e2u!#XG5pk$XLs5c2e2KouUdx%HnL zNDlfc>z8Z3{tsA+;pjO5F7oDL(ScgrHg4sWgOOq-Il=8O6!{=9`m068n!nRBK1uU| z1 z8eOi3jKVZM2xR)bFUVfyVjh!t+aec+#VVa|4C{qZGBt{i8$bFAKXL+o%|Qj z^aF-;tNxN?g=0-SziXCHeg}v#|Lwc1Y0Smmp&!y*v-|otTYrtRz5&ai{#me@9w^p3 zjY0wZ6#zQ^{uYSE(qPOX^x*2MdJ#>TI38UaJ8ya9FZH)E2pGw--nXJ|X!>d==Wl zbkh3osXR@VBjxb2(roXrt|te?2GhT>KW=RU17dLl02K+M_&C6RtBR*ZdK@b3h7jz* zc*r=w_L%Ccxu3D-rPgkQT680CEP4?LHsAFGau0MbrJOwK@0*SlqQTSzg&s))H{0q(vSosWB8a!Q9>p)>! zjz{u(Mx!(IvuJ?*^w$6xgDJ)m)~|xCKQ=c?-2F1OIMZLj#Dm(2nh9^;Fm2xVgG|M=^R6p_iErs(wN6AYU>${Q+G&SOBm%U{Kpv7do~WL%Z-Ya8Zcd$k8~oIp2kRD4 zMBfZ}-j?9G`oG9+{Au?ve8s!>k;@)JKu_gFE}ppV3$*+us=5HiFGsdT4!{z~$&^^X zjaQxsoit`D%}-+-UuEfT`~b%Z5WqDns^XeIiw@yyX)Iz1hdG6}z2OuX;oQLOa@jCa6Xf2Nw zeZ>uj&7HV4ZLdn^TUey5sy;u5Mdnm>5}0M(dH#3Esa%h^`ECm~U-B=vKbHy@`-Ii? zNo1I@L5nYLbpjoq2HFfDeOTj}FTp>ooOtX8(iOe?1Z6FGnZE6N-Yp0hR!>i%?Qv{y z1J-n@vAO%=MZW`T-}8%Nd+bXjC|>jn5Ppc|l*O+bGwQAhZvw});V{HohI#6zEHCcWlr$VBHQRmjqb<7MS;JzbmpNHs~6#ZblJ9jEc_#>jT%Y zuk@I&@>sM)N^*}a8;cgjiq@jm6Tj{u02J3)(1pMF16Nz30nt| zr>^we+i;Rk&+U=V&&%g#`FvkKcgW{I$>&b_{GNQiCZD_Hvk%XmmVWeseUUu5aut%| zI`1m)z25sUy=mTPV!cZfySspLVbO#aE1$YV6G4wA?%`bAvzm+b^_ut#nM2D z7n^V(2-a%x7v0oM6E8eZ07s%)NTMmHVao#l3p;`Hc#oSS47l*N5YKAxY&Cft;DHNe zTlBG8&RjYFgtwD~K$#bkCPNDto&hc=C8TY{`u{ap$m37AsZ%bVkR4IGPEDA~p?42@ z*I<5`7T@7Ey58nCx~jv7fE{TjfDdr`=0Pkob<9%$0+! 
zDdz~s93}K~IfxxJDV&L{c3|YRdi^FL^a8 zZ;%Gk_sTXom>hYs(qjyzc!y7JleMaNlY@Z{$27<0a6*$>$jI z_(F3`<1ZUQf(W8u;R{VD4)T-{KbP^z5gMEpwRBKbYT#0MQdN#n@EOrP2o9b)V&kPo zV^taCsl7Oo83Krr;zp%hpY)g}&o7Zw>P)+q;t%_IFRLUlEXS4g#S5z^{6(f>m zY~cj1PQIa){)Pl_kLKQ<~ zcj*CDIY>(k(#!*r0Cx|EYVpE?x{fy?uuF%y0?utlV4-$(rt>3kAq_HU_0(-?k_@BpQk9g3L8VDReYH?W zhzAm?mL4T}@fmJa9F=9rm*+5>Ay~k$ThNkTJ7|>Mn+s+f2TwCl<6ax#a^Z=4 zp-+;>y(Ox^vC9jW!*p*w+2#QvjpRVkh0JHVyU2Xtt9iV8vx(OO-)iD5z-ds4#|B(> z0~|R-H-!h$adhmKs{n8;&G+ycOt*M<64hFKy^F`OF~m1jXz?I)2-foWJ*%~N!$vJ` ztl8cBbZ~n8-iN&pc^~vY{E$rgr*=D>p;uvJx{q>C2c74-PfgH7M1DZ;46x9|d#S4$ zaZVFI<-J4poYCoPz2|RGxz&qFkyxdxl>=Up7W-9k$ZNmt-@Wtq<6!2)YpoOd+sHb7 z^Cz|3ceVK+`V3X6ch&I2!Lw6-NWu@Wf8NFRYZ?3DSh&)(e}Cv_Q-+f^y^W!x$gLM{?XGEF(8UvrYC!9JP>m^Kg6nIii#bNp6!iF^x<_W~Rn zxi~K8Ms;RxECXWS6@MB+dWP)1ry*O!oyfU;9e_{5%n#AwDSu#(vz5QqH|3}UA@6`qvUCv;xSaq3O^mMFcF>G z0n*M&@X?#y9*Rw$A(eh6s4(pHV#!W?g*4@r`@Le&P}aWuhkF;^MG=2M!mR z*15m-%^%?NCw+z?)ll*-@=L{U(`oR~IFe(AP+?5Ytb7&zIY>@LCYJG!*~AME{lpYW z3ime9f{XpEg2MAb+QM6nH$@N2SH?@s$ebc(hv5`{;k?_mn|#j@i!{%_L1@BRWVrMV z=_&_io}vC92*a-!PSqsm&vA&$=KMop_}-bRFjUfpKumC-IQlVxxb%nQxD9i?BIQjZ zo{Rf2HWYifn?{C{hqHUnY#Qa*Kqel~ zh2AyBFYAOq<+dq=UiD#*jksOrt3G2spO^FbJUNl}x(Ohre=yX_X_SlizB^5k?Xx;2=3Gpc}K;dbmq3DBvD=HsFYoW z-r?e?9zC5-d&L{R_)AhMgC)3W)<;g`h zMsI>IzGQroe;v}lUi&UM7`YZXIC@3Gi(Cu6%S?`5r1|^zJOgX-4uj_QO%TiD)9~6R z>gqGBeYCL36`(d-a8|{&lW?Fg0s5iY= zTbMV3HKIgZiXH>`Swo`Rdf9qt1KN49l9`I(_lZ#8awNj z8V?Q5Lnk-BkDtD>$S%DZl&!L||3ZbqJfLTo5n0%sEdnVu4h;0(NlO$x2qiU9-6>Wf zXRoQ(ZD1!!71-?2g>0b=buYn|khE`0i4^GY-=E~=sx-`AY{I)txWu zeqi!%G0V@IFyDmjCd|kyyM}dPEnpc~TW*v)FM%2UKN`Hv=;s*|X5?bJU38Idbp3Ug z1Z;}|yBPh$CMJwB1E0ijNR{*ggWk1;8P{eLf-Y;2UXCX8W#8e_bYDMh}}IS5huUXln3b>F87~PJNQ!$WqCzKWAj>!C(1TzvwQ{K+AjAJkLP0A{Y2vz z(NxJ-dofM&t-gp>EBWd#rb)h*i)eL{uM;$wYU)xa`MN;sLJVfUYol27OqJb_KKf|W z8koU6aE7!a6lrv;UMY`XM7v+gtIej>8GNfRq8WVkpvitW8sEQVzW*x^7-jknu$FW+ zd@H}4I|mx_{R~Y3${aW2woK9FmvgkHA0r!#w&)3$hp`FbGsLqASYX2`pnOd@EbGx( z6SXnF$7A*K6GZq0-`%+pZMf<|dlJy3UGg;_aeVYbmu#lf#+;D?=4_hAoQw0BbJ5JP 
zv)3BmRlSS>nLiWB7h;R9zXAS@tg;0^f8{UAshx?>moANEMEcq&v0kzsXRNY-jZiwdAH%KmRb31)?z!G6}ob)xbup7l23ZC0&gXL z3E2aA1f?!3a%wI1d}fC&cGzNHG`-VOU||JoP;bj)woqPYZhcOLsdt5?D328_E?`BA z@@osIzw7bAl_@`r!Tde$3CJgC$+55;jDw}v#)_M8*G*%^=%aX%qtoKF!gqU^)6&QC ziAVY)pA|03?=9Mh%NG032xQaR7yP4UVmNELh@6&F^e)nnALuPha_Frd1 z|M1D8>GgB0Im{Z#XO4M{{Q&E_Z;8<+yZll83z(f`z@Kx-w)dtnD{LS81p37flRc)4 z>~d!OJ~AHU#|pCExM%|<%!m0y{y^Bv+lI0W_Y&M)u$^SnZ>eOP2axowMV%ja8}7$& zZ^8XF+$1RnKV|0OS4;o8Kx1RWy7JCVO{{j4+8&JfO%gL|{0(cu9icUuQd1xt33Y6m zDhAhrkSPVxJcEbVM+1RJ1JoPM^UU@6Wx!zH)-2i^pml-Pmqpuw+~#@EhO=mgKuc!% zCL=Qg|34qhc*20M=dFmeE^p|JL_>k)9r%GCUrGNf(X@_^bR?1@}8Y!*5~w+9+X*V<4ZjAm>_-aXMk zXj5$<)Z7tjYiMtT6;YTGVskz!U2S4>v#50~p+G|ui0n5+UAZY73AEJ(+XCJ&3s<;yU5g%g>oxufd;ROh* z1&yJ|f|fu-=YrOS3l=UgZM>u&e{~VT5M{G0BM&upB2qKg;fGwK&99}?nJ*kOYbC9d zNC((WE|c?|0g0Y{g;{{LvSzjb-rv9)fCusS3x2kSZDJABg_(;jV^w%&bB4f4wG7k{ zYh*XEbtpCBUjwjmz|581>f_|!0_mA0?Y|>0%+|9g`VFv0p>;c33prI%*M*tx|0wz` z`&jG$d-i|4z8UNPb9qySnTW%|*h1Wtk1hu6!Tmen1fN1a)dc)$Ko@eM=YbQv2f0ul z@&Njqm@na84162lAnw(`34Ws(`z`RN0gIgYF%!50@MYZfzzLqfO?}ZfSMzWaUIy4; z;!S{0nmEDln)pt@V|SbG(8fqa5pxJf3#Z6;3eZQP{i7~neOU4*v+=3Na4 zoM0Jlq7%Hw#G3&BX%_qd`c6Rl#1K#mim#7;aQY73egjO}cH zcV<>v;iUPe?dfSUI&bdByYIgD?tAyXJF}zu+xIauW6Xk*$uQOrNS9B$|DKzPu`3oG zyn;PB`-R2*roan}8@GpJLL}O=E!x^8w6%73_r!&*At9RR7Q)?vbW4NK)zcm-cQ^_> zhUztkJ5qVyw!M)pg+Ke7H}XM0^YZLd3~>JO*{4ie{p>e98sBkhwg&B|3N%=#)jx8Q zoic-dH4WKrFvys=@>mN*^+s9naa!jbsM2k}eI>?2jawMINHECQ{pe@v ziZDjDJFuLwg;UIrBO|D<(gteORiK|-`m!eF@zBmVc#n}%8Zyn>my^?9te_+SOkUSVB6fw$Jpvz`VSd@Ub{LvoQrxjGsxABbJsI= zJ~3Aw7;tr57|3MKC-Tk@$^*w-r~ev}0`hY5Y+2uEd}Z>CDPBgm`N>PBctJmHFFAR* zY*76?R(-;%-U8r!Dy{MA_z1z%P6*ZarJY|M(8yvml9f9>Hd%>#T(WY%M-Vrm13~8J z1N@x#nEDQs^roG?!)T}`tc>j)drHbE(EA>WG3-sjZO zS-^TTiJR488#M7TIa$Tx9+?lTi%^A5iv#@VL26LPkIUQ~;Escf7!xxmGS<6bSItHf zdET_l@AOp2JnpHI`TZUlwh+WEgHJ;VQ=i9uAK97gavO|M;oJK@w=PHfzVrhQy|E8X zDw+o~!-};S73v+Bre+x-S@RE-7f#A$&03Y{4%#L#O!65xnM~0RM6^76lL7sL2%(?3OLvZz265H zR^umOGW8oDWir!Mf1O+9!0ywlk{MIt1}I&DX5LY1b`B<&``*K@Qjh*7lR?MdL?7rl 
zennO$kx29^*^VGOV{u%uY3#umTkmMOoR5``@n4j$tC-`#RO~ zL0Zb4SPh@JrFqL>cHOl7Op7Pf@r(Q?GCv0=Nm$jtfe`|Hh`%J$2ASFfSpVkJe}SQ+ zy{|!cfY-doGVc2jkY)Z{#{F3!8FySGhY!xSF!C$m4mnf0op?$BK9oFL_4FrTGu4gt z#^h%`OL3lXw3=)8aL6xg)!F!bX{&)tA8f)}KQ8 z4_E9zwd`v&)at=RTuz3H(Mx@C#ywvfl)qS3{H~0ff#PX-IpJp)Xn3k#(CL?n_6um{ zk_W~G*$dq3cgfxX`DsiU7qpjv@J@FnJEZ}bzmXc=23s8 zBYEhnY*35&>UR)|Vd%vnnOjQH62p>h6IQu;I9}vUPs+}Jc2t6$cxQzk^ADINE|5Qv z-W__|S?W`WM-x}74WJP=zoN4(E7B9Q)V~06z@&9ZZ4mrxu+I?uzpU%a>Uscjz3Bus znyW^2VwM^PGI2Jiy?Prs>IU2lCbg%SDl46I*2F!JKzg7}l8QcMbaVw}9yW?|jPoE_|`?*bm9qoYLTf)>EL>yAcM}&q2(vK#>~J ziP^AM2SsX)n8cJ{m z-C8W0M}X$bDf{j~%KogZ6nW~ZPsEG)I(wjcAX>mz+sp%j>U0#r@KD*rDetI^ z1B!M^QSx37`y62f*q8Z$dD=ED9@Oo<_ce70ruB}(E``0XCuYfi?(RV3(@m+%?5?>3 z9JcqhxTSYAVOQTn1DvPt0%Rvldn{$`gV=7dRxO#L<9Ym~pS7 z8t7S-4T7n0tyUA74utUalu8v83P~p+0-ZBvO?BTeRU%jhWu=e0p@C>jvHz>ot9DlO zJxJpmq%)W6EpV2%;Dislv3mgPT;Sg;pF6Xwj|BkkYuz{)X>9FDQroG3U`z1c$_r5Oi z=fpje-SJSTxaZv3P7M-v1Cem2i(oe%ge?m+UJ$zc~;q zlDO4{lSxbtna3A0nJGNGy!Spx!hh5Bx%q z3rA$kN}x1DxMuLi!NhIc`h7?zhiyaFM}e40f%Wf!iSG>x@v7voFl2oYB$gNzUjWt* zsS&AUOsXDB6l>N>}4aK2zGO-M^sSAJpz=wEN@Q{d?N|fOh}Bc7IvBKc(IKaX(-hL?45ctL-^^ z00T$r)PD_t7x{9pXP?ITiQIS0*&D`UAqY8pTY%IhFS_ukg!GJ^-U!<1GI6mxdW$#ZA0=j& zAwVXZuGJAt{jg z8DyW@G9IOHN6YvlAoK*=djJT(wW&_IPpDU7BYy9D^~!z2bw7qCZEp2S+jzaQW6Zh8 z$Fk4Y{Vr7U-&}hLxoPz|IkjGP)?Va?U=xWC$@~@dCOhgIVAOZbCn-;fJU}b zk-X$`_PNok)3sjikife2sN`v29nQ}WcgW7qzeo#%qk5pj85^^iYAaXPR`OaT_$NtZ z01JhR*KE9YmCR4d180QfFHBGn#LA2)_f^T}6QpHW9#93DKWnU-(y)pauGXjBr2s#s ztoPMnMMW%WKwHuvfTXOK>ib@dFPU0Qlb|i;D4qcE+=6-YhE;sMPvYw(eqWWuujM-y z`uRt^wo**KZRRnH#2ai9zoS^^!^-FD-D%oLv0X8(CRl+0~riBpZE`rI&6&gmoyafvy?vy zP%)~kuc|YjAPdXplXd1->&!31`tXi2vV|Vf;CEGIeDs07l?ESS0rMs7GfBBml9X7J z$X}h_ZuI=y_jNqY2Jl7!4?l6w_5gH50=ylr4`(5#^QNbKBmmcigA}}@Imm8QZrS`X z7oq$V+aG7Ls3+zb_gz-$HbVk#I&t^0PyU5MUf1tuBB>7JOE67!7Qn_iu0$2!k1MIdo1CafYB zQ4w8yjivNMVu_Y4uZXX)89ZB-SIpPg4W2#ATgYcAqQKW$6w$-i+7z*jueB@Ub>8%l zSb=dNM$C9-AMB^|@N%X`8=+`woR>wO9DC_G2|a+kuij@Sb$C1QqB=qmhq@bDOHl(k 
z4P`Mi)%Xy-ZWA|RnGk@jH_)2^J$m600HRq%9aptvFJOeUjI(zQItVD= zGEN!VFL?jfA}2GhyBCQ4P1I$|{RrtyIg$Vw_jeJUoVTUvyr`nMJS6j1pv`RACQZ?^b($i_F@CGJNs+Gz9hE<3-(Pm2gW0}{?LgaJ7JI{}=86kraE z`~vXgugv@qND3Y2sO|`eK_GSfh!#uDI7hGo+@*)F6Ql)$AP9r>#9zh{JRBZ&JsVH;^ia#JzQaGX>-UXqnqH+Hklo5o=_$aDJMMbtu z$06DL3PKU$kQP8NDO3x5>XIWYU`|W$R6M!WAwG-Aahxh?F4}ayjjzVC=kV=1-_BRN zX7I5c($m`k8nF}7Ik`oY9$!W6W#m(Ka&%XUBiAa$3u`WzA} zv04@375?gTB2H-U>;CEw_@MJkX(<)_&~JW4$8M{J2;$D3*LAdM`=di`B>d{G>E*P^vy6&3n#Y zeO#LNlE3<-^GkzL>W&X3^C^+Pr1esQehx`=ALWNGjFe24s8Y$KRC3XKT=E|Adyk6> zX5>BThr3%!F8bkaHa|VSNF|6{cY1=N_X8O-Z=%DiMMNNRQM?kH#Vz7y@y=}ABKryy zhinwL4C6}t8oWs6>ue%F%kQ-$e`ywZ1YY=ASWAXeHo>^(S{08kg zl6*(Te(%)o-TU<$PkGjt`S(l2g!Wehebyp`v!)59Ru*(c&m=a^1^; zX$_G;fkh5W$$+dmcdldl$T2Ap9LD*7lAMjw1L|@RLwEy>dh;Q@$D`=~Lofp7L39Ff z5jA?3Z((*&DPzzmoN;D3^$`-^{fes5nG2`x0YApa|O05mxI;GYQP@&Xn(WQ!_%a&hi(Ixw` z7V{H1n|b=@iQISj>AxC#$9MVhzZyHqclr6h+J2c0=ov5f;&|@My~v)qoGeXwQ#}`} zkVn_?6RHETBIm6vz1#NlVwwL$Oul&$DYf_>vjq5w_&geDeOu6SVJv|Bux&U{a-yzn za2+p#5o%p{-3nlPPcoUE@j~ZQfXVSmoPz-`x;b{o>6*8Y3YGpCFL81Q`TX?ndztYW z6$X)0Us{0YK_4X)0v>+$*89kdH=iCmi=QTP*n)Z#oPAm)o`SRTy6}>TtZ;?)0A=EM zZGz*O-U|d$j%VJa9MvD;8@J26zD3=BM%PqSS4SL*w=M88#+8;FNbq2lFfJY5}`DOi-UBz}IFtIG0yjG`O z*Ug04{{Oq+q)*qsPt;*nFTEqly@;RvR)vC!4=Ma^FG{K2qKXFpKXBQM@m;RkwOk*s zV8F}kwKtU?WB%_mijDD=8L+~DBWq^b`+s|6Yik<=^u5jaRg7uh;CQ;aX$<3dk$VGR zmglLcVDs_I>X)W?JGJj==$oGMFnvSg`4qLodayE*sil&EV;B~`oQsEuVqUeM&sXHLw;iMcpe6h=(5O>Fj7)Y~k~7PUmoEqN8jxGPK!3v*P? 
zW{ws4!P(T`HTdAg&>zEMeuT0Q`U#ryOe_zJZYr{|qIQ(VS*!?s6s>SYOl~V|caXVF z{VbnkG<)Q;f))7@eLNUTrIk777DmVxG=91WUU`+7%{I+vc@>N`!LRp$x46{CO5ZAC zSI6C0m#dhqaRKvP&Da-E+CPnY$mG&jF2p>}=eJmcmI~6*L^>81u*EN5!LB~+WX@mP zne8nHv&8ZJ2i~~WqC95k$)&S4|CkMP=5r<1qi}TAh&5tqF$eRs{xD^>cy2zMi+&vF z$9CAw()fMFgx{@-s`T;Z(x!!Wwy+(gv5+mSEMN;)6i;GxN9LxT3eKK zHn67YFd9x%3Hrb>q{Rv9jjlDTaKoH}h!ffCE8y)0Z)Xm#6S>J_;O)!dJp^9*GJ9lo z5d334mun5iUa@S5ch;)!Uewg*4Z;w#sm5--rxvm+5%ljv@XhkLsH-SOxy#@?iw zP2p%f(b~DO`<`%jJFIQp*uA~AyS+2i?%&xKijer4aA!Oe#TReZhEV)>RttG+L-E=~ zG#cuT2ctc0A%yzBvo?mJUEyx*i7bmfWUh-DL{D^0sFlVVjD})p#x_`PO@yMmf}vZ@j_~Y8^+F5B1w{d$k)Y=Xv`*-3t?25%hU5(+ckQl=RLWHqWlN64% zZUvr2g@P|ny*tSi(N z3jy0yyJ6#J8tdx)%PT8%ou6QB7*kIhoxmN@P>8WxSo1xtu~;Yy`5PGYh2t( zS|iIkmoHns%$Vb4_4sN&jwQ+!Sw$9Vi(sezcU>=R7DhW>&SbJ*xMgE2#nn8N>jk5y zi#jAeU1coJy6}OufK|H*cP~ELUJ6))KY@aqam84awXy3V*~2>7F18$!73@>299+7# zvpZnb&3H_tz>Q~{-y`42LaZCoHPA$f)$fznl>YJdJ1yXHF}56q^5IIrUX=S`Sb~or zpL+}ZuLBCm#mc}Zcq?+TUf}dU5r2vDB=Cm;hf&6W6Z~2cV~2r%9nj&%>nPwZz-Lg# zffGE1LVeLUH%m|mF9U2f@OHqj8aTo48u$UgF#{*~Zw5XI7)1J0guI8~ZWPi<@L>Zd z_%jq5=Q!YwVjb@Uv|Ne&0epgGC?rGhRs(Mb{N4iaK8^1`0O=dgD&Pe1zCgpT1AGDb zr38L%{ziG-2tF*{2|mGhP`(O0H#ei4ZE`Ve13tky$XmvNQyy~@$|P{gWp*I%x(IwX zAbnY5gX_%5y^xcw{S(Hn1D{|siVWPfgt5=!;jjsK1n~8xjKzTq`0(c-%0s{@?@FMM z-F5=Lgz^pW@g9VoM`=R8co(8QFSG!sXHv@V1;`Vm+@ElQl(!R3kaFzDffIZT#g4Xr zy#B9Qz@$Cj7GrD892w=vTFUw{KE4QNGU{4d5DNe=zgFG6v#V3MI~0u}ZK_$ayu4zG z5bAF0X%BaAt68$Kam})-B|v2h-OHB +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_keccak_lib = load_pycryptodome_raw_lib("Crypto.Hash._keccak", + """ + int keccak_init(void **state, + size_t capacity_bytes, + uint8_t padding_byte); + int keccak_destroy(void *state); + int keccak_absorb(void *state, + const uint8_t *in, + size_t len); + int keccak_squeeze(const void *state, + uint8_t *out, + size_t len); + int keccak_digest(void *state, uint8_t *digest, size_t len); + """) + +class Keccak_Hash(object): + """A Keccak hash object. + Do not instantiate directly. + Use the :func:`new` function. + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + def __init__(self, data, digest_bytes, update_after_digest): + # The size of the resulting hash in bytes. 
+ self.digest_size = digest_bytes + + self._update_after_digest = update_after_digest + self._digest_done = False + + state = VoidPointer() + result = _raw_keccak_lib.keccak_init(state.address_of(), + c_size_t(self.digest_size * 2), + 0x01) + if result: + raise ValueError("Error %d while instantiating keccak" % result) + self._state = SmartPointer(state.get(), + _raw_keccak_lib.keccak_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + if self._digest_done and not self._update_after_digest: + raise TypeError("You can only call 'digest' or 'hexdigest' on this object") + + result = _raw_keccak_lib.keccak_absorb(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while updating keccak" % result) + return self + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + self._digest_done = True + bfr = create_string_buffer(self.digest_size) + result = _raw_keccak_lib.keccak_digest(self._state.get(), + bfr, + c_size_t(self.digest_size)) + if result: + raise ValueError("Error %d while squeezing keccak" % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. 
+ :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def new(self, **kwargs): + """Create a fresh Keccak hash object.""" + + if "digest_bytes" not in kwargs and "digest_bits" not in kwargs: + kwargs["digest_bytes"] = self.digest_size + + return new(**kwargs) + + +def new(**kwargs): + """Create a new hash object. + + Args: + data (bytes/bytearray/memoryview): + The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`Keccak_Hash.update`. + digest_bytes (integer): + The size of the digest, in bytes (28, 32, 48, 64). + digest_bits (integer): + The size of the digest, in bits (224, 256, 384, 512). + update_after_digest (boolean): + Whether :meth:`Keccak.digest` can be followed by another + :meth:`Keccak.update` (default: ``False``). + + :Return: A :class:`Keccak_Hash` hash object + """ + + data = kwargs.pop("data", None) + update_after_digest = kwargs.pop("update_after_digest", False) + + digest_bytes = kwargs.pop("digest_bytes", None) + digest_bits = kwargs.pop("digest_bits", None) + if None not in (digest_bytes, digest_bits): + raise TypeError("Only one digest parameter must be provided") + if (None, None) == (digest_bytes, digest_bits): + raise TypeError("Digest size (bits, bytes) not provided") + if digest_bytes is not None: + if digest_bytes not in (28, 32, 48, 64): + raise ValueError("'digest_bytes' must be: 28, 32, 48 or 64") + else: + if digest_bits not in (224, 256, 384, 512): + raise ValueError("'digest_bytes' must be: 224, 256, 384 or 512") + digest_bytes = digest_bits // 8 + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + return Keccak_Hash(data, digest_bytes, update_after_digest) diff --git a/env/Lib/site-packages/Crypto/Hash/keccak.pyi b/env/Lib/site-packages/Crypto/Hash/keccak.pyi new file mode 100644 index 0000000..844d256 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Hash/keccak.pyi @@ -0,0 +1,23 @@ +from typing import Union, Any + +Buffer = Union[bytes, 
bytearray, memoryview] + +class Keccak_Hash(object): + digest_size: int + def __init__(self, + data: Buffer, + digest_bytes: int, + update_after_digest: bool) -> None: ... + def update(self, data: Buffer) -> Keccak_Hash: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def new(self, + data: Buffer = ..., + digest_bytes: int = ..., + digest_bits: int = ..., + update_after_digest: bool = ...) -> Keccak_Hash: ... + +def new(data: Buffer = ..., + digest_bytes: int = ..., + digest_bits: int = ..., + update_after_digest: bool = ...) -> Keccak_Hash: ... diff --git a/env/Lib/site-packages/Crypto/IO/PEM.py b/env/Lib/site-packages/Crypto/IO/PEM.py new file mode 100644 index 0000000..4c07b25 --- /dev/null +++ b/env/Lib/site-packages/Crypto/IO/PEM.py @@ -0,0 +1,189 @@ +# +# Util/PEM.py : Privacy Enhanced Mail utilities +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +__all__ = ['encode', 'decode'] + +import re +from binascii import a2b_base64, b2a_base64, hexlify, unhexlify + +from Crypto.Hash import MD5 +from Crypto.Util.Padding import pad, unpad +from Crypto.Cipher import DES, DES3, AES +from Crypto.Protocol.KDF import PBKDF1 +from Crypto.Random import get_random_bytes +from Crypto.Util.py3compat import tobytes, tostr + + +def encode(data, marker, passphrase=None, randfunc=None): + """Encode a piece of binary data into PEM format. + + Args: + data (byte string): + The piece of binary data to encode. + marker (string): + The marker for the PEM block (e.g. "PUBLIC KEY"). + Note that there is no official master list for all allowed markers. + Still, you can refer to the OpenSSL_ source code. + passphrase (byte string): + If given, the PEM block will be encrypted. The key is derived from + the passphrase. + randfunc (callable): + Random number generation function; it accepts an integer N and returns + a byte string of random data, N bytes long. If not given, a new one is + instantiated. + + Returns: + The PEM block, as a string. + + .. 
_OpenSSL: https://github.com/openssl/openssl/blob/master/include/openssl/pem.h + """ + + if randfunc is None: + randfunc = get_random_bytes + + out = "-----BEGIN %s-----\n" % marker + if passphrase: + # We only support 3DES for encryption + salt = randfunc(8) + key = PBKDF1(passphrase, salt, 16, 1, MD5) + key += PBKDF1(key + passphrase, salt, 8, 1, MD5) + objenc = DES3.new(key, DES3.MODE_CBC, salt) + out += "Proc-Type: 4,ENCRYPTED\nDEK-Info: DES-EDE3-CBC,%s\n\n" %\ + tostr(hexlify(salt).upper()) + # Encrypt with PKCS#7 padding + data = objenc.encrypt(pad(data, objenc.block_size)) + elif passphrase is not None: + raise ValueError("Empty password") + + # Each BASE64 line can take up to 64 characters (=48 bytes of data) + # b2a_base64 adds a new line character! + chunks = [tostr(b2a_base64(data[i:i + 48])) + for i in range(0, len(data), 48)] + out += "".join(chunks) + out += "-----END %s-----" % marker + return out + + +def _EVP_BytesToKey(data, salt, key_len): + d = [ b'' ] + m = (key_len + 15 ) // 16 + for _ in range(m): + nd = MD5.new(d[-1] + data + salt).digest() + d.append(nd) + return b"".join(d)[:key_len] + + +def decode(pem_data, passphrase=None): + """Decode a PEM block into binary. + + Args: + pem_data (string): + The PEM block. + passphrase (byte string): + If given and the PEM block is encrypted, + the key will be derived from the passphrase. + + Returns: + A tuple with the binary data, the marker string, and a boolean to + indicate if decryption was performed. + + Raises: + ValueError: if decoding fails, if the PEM file is encrypted and no passphrase has + been provided or if the passphrase is incorrect. 
+ """ + + # Verify Pre-Encapsulation Boundary + r = re.compile(r"\s*-----BEGIN (.*)-----\s+") + m = r.match(pem_data) + if not m: + raise ValueError("Not a valid PEM pre boundary") + marker = m.group(1) + + # Verify Post-Encapsulation Boundary + r = re.compile(r"-----END (.*)-----\s*$") + m = r.search(pem_data) + if not m or m.group(1) != marker: + raise ValueError("Not a valid PEM post boundary") + + # Removes spaces and slit on lines + lines = pem_data.replace(" ", '').split() + + # Decrypts, if necessary + if lines[1].startswith('Proc-Type:4,ENCRYPTED'): + if not passphrase: + raise ValueError("PEM is encrypted, but no passphrase available") + DEK = lines[2].split(':') + if len(DEK) != 2 or DEK[0] != 'DEK-Info': + raise ValueError("PEM encryption format not supported.") + algo, salt = DEK[1].split(',') + salt = unhexlify(tobytes(salt)) + + padding = True + + if algo == "DES-CBC": + key = _EVP_BytesToKey(passphrase, salt, 8) + objdec = DES.new(key, DES.MODE_CBC, salt) + elif algo == "DES-EDE3-CBC": + key = _EVP_BytesToKey(passphrase, salt, 24) + objdec = DES3.new(key, DES3.MODE_CBC, salt) + elif algo == "AES-128-CBC": + key = _EVP_BytesToKey(passphrase, salt[:8], 16) + objdec = AES.new(key, AES.MODE_CBC, salt) + elif algo == "AES-192-CBC": + key = _EVP_BytesToKey(passphrase, salt[:8], 24) + objdec = AES.new(key, AES.MODE_CBC, salt) + elif algo == "AES-256-CBC": + key = _EVP_BytesToKey(passphrase, salt[:8], 32) + objdec = AES.new(key, AES.MODE_CBC, salt) + elif algo.lower() == "id-aes256-gcm": + key = _EVP_BytesToKey(passphrase, salt[:8], 32) + objdec = AES.new(key, AES.MODE_GCM, nonce=salt) + padding = False + else: + raise ValueError("Unsupport PEM encryption algorithm (%s)." 
% algo) + lines = lines[2:] + else: + objdec = None + + # Decode body + data = a2b_base64(''.join(lines[1:-1])) + enc_flag = False + if objdec: + if padding: + data = unpad(objdec.decrypt(data), objdec.block_size) + else: + # There is no tag, so we don't use decrypt_and_verify + data = objdec.decrypt(data) + enc_flag = True + + return (data, marker, enc_flag) diff --git a/env/Lib/site-packages/Crypto/IO/PEM.pyi b/env/Lib/site-packages/Crypto/IO/PEM.pyi new file mode 100644 index 0000000..2e324c4 --- /dev/null +++ b/env/Lib/site-packages/Crypto/IO/PEM.pyi @@ -0,0 +1,10 @@ +from typing import Tuple, Optional, Callable + +def encode(data: bytes, + marke: str, + passphrase: Optional[bytes] = ..., + randfunc: Optional[Callable[[int],bytes]] = ...) -> str: ... + + +def decode(pem_data: str, + passphrase: Optional[bytes] = ...) -> Tuple[bytes, str, bool]: ... diff --git a/env/Lib/site-packages/Crypto/IO/PKCS8.py b/env/Lib/site-packages/Crypto/IO/PKCS8.py new file mode 100644 index 0000000..3bda834 --- /dev/null +++ b/env/Lib/site-packages/Crypto/IO/PKCS8.py @@ -0,0 +1,231 @@ +# +# PublicKey/PKCS8.py : PKCS#8 functions +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + + +from Crypto.Util.py3compat import * + +from Crypto.Util.asn1 import ( + DerNull, + DerSequence, + DerObjectId, + DerOctetString, + ) + +from Crypto.IO._PBES import PBES1, PBES2, PbesError + + +__all__ = ['wrap', 'unwrap'] + + +def wrap(private_key, key_oid, passphrase=None, protection=None, + prot_params=None, key_params=None, randfunc=None): + """Wrap a private key into a PKCS#8 blob (clear or encrypted). + + Args: + + private_key (byte string): + The private key encoded in binary form. The actual encoding is + algorithm specific. In most cases, it is DER. + + key_oid (string): + The object identifier (OID) of the private key to wrap. + It is a dotted string, like ``1.2.840.113549.1.1.1`` (for RSA keys). + + passphrase (bytes string or string): + The secret passphrase from which the wrapping key is derived. + Set it only if encryption is required. + + protection (string): + The identifier of the algorithm to use for securely wrapping the key. + The default value is ``PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC``. + + prot_params (dictionary): + Parameters for the protection algorithm. 
+ + +------------------+-----------------------------------------------+ + | Key | Description | + +==================+===============================================+ + | iteration_count | The KDF algorithm is repeated several times to| + | | slow down brute force attacks on passwords | + | | (called *N* or CPU/memory cost in scrypt). | + | | The default value for PBKDF2 is 1000. | + | | The default value for scrypt is 16384. | + +------------------+-----------------------------------------------+ + | salt_size | Salt is used to thwart dictionary and rainbow | + | | attacks on passwords. The default value is 8 | + | | bytes. | + +------------------+-----------------------------------------------+ + | block_size | *(scrypt only)* Memory-cost (r). The default | + | | value is 8. | + +------------------+-----------------------------------------------+ + | parallelization | *(scrypt only)* CPU-cost (p). The default | + | | value is 1. | + +------------------+-----------------------------------------------+ + + key_params (DER object): + The algorithm parameters associated to the private key. + It is required for algorithms like DSA, but not for others like RSA. + + randfunc (callable): + Random number generation function; it should accept a single integer + N and return a string of random data, N bytes long. + If not specified, a new RNG will be instantiated + from :mod:`Crypto.Random`. + + Return: + The PKCS#8-wrapped private key (possibly encrypted), as a byte string. 
+ """ + + if key_params is None: + key_params = DerNull() + + # + # PrivateKeyInfo ::= SEQUENCE { + # version Version, + # privateKeyAlgorithm PrivateKeyAlgorithmIdentifier, + # privateKey PrivateKey, + # attributes [0] IMPLICIT Attributes OPTIONAL + # } + # + pk_info = DerSequence([ + 0, + DerSequence([ + DerObjectId(key_oid), + key_params + ]), + DerOctetString(private_key) + ]) + pk_info_der = pk_info.encode() + + if passphrase is None: + return pk_info_der + + if not passphrase: + raise ValueError("Empty passphrase") + + # Encryption with PBES2 + passphrase = tobytes(passphrase) + if protection is None: + protection = 'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC' + return PBES2.encrypt(pk_info_der, passphrase, + protection, prot_params, randfunc) + + +def unwrap(p8_private_key, passphrase=None): + """Unwrap a private key from a PKCS#8 blob (clear or encrypted). + + Args: + p8_private_key (byte string): + The private key wrapped into a PKCS#8 blob, DER encoded. + passphrase (byte string or string): + The passphrase to use to decrypt the blob (if it is encrypted). + + Return: + A tuple containing + + #. the algorithm identifier of the wrapped key (OID, dotted string) + #. the private key (byte string, DER encoded) + #. 
the associated parameters (byte string, DER encoded) or ``None`` + + Raises: + ValueError : if decoding fails + """ + + if passphrase: + passphrase = tobytes(passphrase) + + found = False + try: + p8_private_key = PBES1.decrypt(p8_private_key, passphrase) + found = True + except PbesError as e: + error_str = "PBES1[%s]" % str(e) + except ValueError: + error_str = "PBES1[Invalid]" + + if not found: + try: + p8_private_key = PBES2.decrypt(p8_private_key, passphrase) + found = True + except PbesError as e: + error_str += ",PBES2[%s]" % str(e) + except ValueError: + error_str += ",PBES2[Invalid]" + + if not found: + raise ValueError("Error decoding PKCS#8 (%s)" % error_str) + + pk_info = DerSequence().decode(p8_private_key, nr_elements=(2, 3, 4)) + if len(pk_info) == 2 and not passphrase: + raise ValueError("Not a valid clear PKCS#8 structure " + "(maybe it is encrypted?)") + + # + # PrivateKeyInfo ::= SEQUENCE { + # version Version, + # privateKeyAlgorithm PrivateKeyAlgorithmIdentifier, + # privateKey PrivateKey, + # attributes [0] IMPLICIT Attributes OPTIONAL + # } + # Version ::= INTEGER + if pk_info[0] != 0: + raise ValueError("Not a valid PrivateKeyInfo SEQUENCE") + + # PrivateKeyAlgorithmIdentifier ::= AlgorithmIdentifier + # + # EncryptedPrivateKeyInfo ::= SEQUENCE { + # encryptionAlgorithm EncryptionAlgorithmIdentifier, + # encryptedData EncryptedData + # } + # EncryptionAlgorithmIdentifier ::= AlgorithmIdentifier + + # AlgorithmIdentifier ::= SEQUENCE { + # algorithm OBJECT IDENTIFIER, + # parameters ANY DEFINED BY algorithm OPTIONAL + # } + + algo = DerSequence().decode(pk_info[1], nr_elements=(1, 2)) + algo_oid = DerObjectId().decode(algo[0]).value + if len(algo) == 1: + algo_params = None + else: + try: + DerNull().decode(algo[1]) + algo_params = None + except: + algo_params = algo[1] + + # EncryptedData ::= OCTET STRING + private_key = DerOctetString().decode(pk_info[2]).payload + + return (algo_oid, private_key, algo_params) diff --git 
a/env/Lib/site-packages/Crypto/IO/PKCS8.pyi b/env/Lib/site-packages/Crypto/IO/PKCS8.pyi new file mode 100644 index 0000000..ad233d4 --- /dev/null +++ b/env/Lib/site-packages/Crypto/IO/PKCS8.pyi @@ -0,0 +1,14 @@ +from typing import Dict, Tuple, Optional, Union, Callable + +from Crypto.Util.asn1 import DerObject + +def wrap(private_key: bytes, + key_oid: str, + passphrase: Union[bytes, str] = ..., + protection: str = ..., + prot_params: Dict = ..., + key_params: DerObject = ..., + randfunc: Optional[Callable[[int],str]] = ...) -> bytes: ... + + +def unwrap(p8_private_key: bytes, passphrase: Optional[Union[bytes, str]] = ...) -> Tuple[str, bytes, Optional[bytes]]: ... diff --git a/env/Lib/site-packages/Crypto/IO/_PBES.py b/env/Lib/site-packages/Crypto/IO/_PBES.py new file mode 100644 index 0000000..a47c775 --- /dev/null +++ b/env/Lib/site-packages/Crypto/IO/_PBES.py @@ -0,0 +1,435 @@ +# +# PublicKey/_PBES.py : Password-Based Encryption functions +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto import Random +from Crypto.Util.asn1 import ( + DerSequence, DerOctetString, + DerObjectId, DerInteger, + ) + +from Crypto.Util.Padding import pad, unpad +from Crypto.Hash import MD5, SHA1, SHA224, SHA256, SHA384, SHA512 +from Crypto.Cipher import DES, ARC2, DES3, AES +from Crypto.Protocol.KDF import PBKDF1, PBKDF2, scrypt + +_OID_PBE_WITH_MD5_AND_DES_CBC = "1.2.840.113549.1.5.3" +_OID_PBE_WITH_MD5_AND_RC2_CBC = "1.2.840.113549.1.5.6" +_OID_PBE_WITH_SHA1_AND_DES_CBC = "1.2.840.113549.1.5.10" +_OID_PBE_WITH_SHA1_AND_RC2_CBC = "1.2.840.113549.1.5.11" + +_OID_PBES2 = "1.2.840.113549.1.5.13" + +_OID_PBKDF2 = "1.2.840.113549.1.5.12" +_OID_SCRYPT = "1.3.6.1.4.1.11591.4.11" + +_OID_HMAC_SHA1 = "1.2.840.113549.2.7" +_OID_HMAC_SHA224 = "1.2.840.113549.2.8" +_OID_HMAC_SHA256 = "1.2.840.113549.2.9" +_OID_HMAC_SHA384 = "1.2.840.113549.2.10" +_OID_HMAC_SHA512 = "1.2.840.113549.2.11" + +_OID_DES_EDE3_CBC = "1.2.840.113549.3.7" +_OID_AES128_CBC = "2.16.840.1.101.3.4.1.2" +_OID_AES192_CBC = "2.16.840.1.101.3.4.1.22" +_OID_AES256_CBC = "2.16.840.1.101.3.4.1.42" + + +class PbesError(ValueError): + pass + +# These are the ASN.1 definitions used by the PBES1/2 logic: +# +# EncryptedPrivateKeyInfo ::= SEQUENCE { +# encryptionAlgorithm EncryptionAlgorithmIdentifier, +# encryptedData EncryptedData +# } +# +# EncryptionAlgorithmIdentifier ::= AlgorithmIdentifier +# +# 
EncryptedData ::= OCTET STRING +# +# AlgorithmIdentifier ::= SEQUENCE { +# algorithm OBJECT IDENTIFIER, +# parameters ANY DEFINED BY algorithm OPTIONAL +# } +# +# PBEParameter ::= SEQUENCE { +# salt OCTET STRING (SIZE(8)), +# iterationCount INTEGER +# } +# +# PBES2-params ::= SEQUENCE { +# keyDerivationFunc AlgorithmIdentifier {{PBES2-KDFs}}, +# encryptionScheme AlgorithmIdentifier {{PBES2-Encs}} +# } +# +# PBKDF2-params ::= SEQUENCE { +# salt CHOICE { +# specified OCTET STRING, +# otherSource AlgorithmIdentifier {{PBKDF2-SaltSources}} +# }, +# iterationCount INTEGER (1..MAX), +# keyLength INTEGER (1..MAX) OPTIONAL, +# prf AlgorithmIdentifier {{PBKDF2-PRFs}} DEFAULT algid-hmacWithSHA1 +# } +# +# scrypt-params ::= SEQUENCE { +# salt OCTET STRING, +# costParameter INTEGER (1..MAX), +# blockSize INTEGER (1..MAX), +# parallelizationParameter INTEGER (1..MAX), +# keyLength INTEGER (1..MAX) OPTIONAL +# } + +class PBES1(object): + """Deprecated encryption scheme with password-based key derivation + (originally defined in PKCS#5 v1.5, but still present in `v2.0`__). + + .. __: http://www.ietf.org/rfc/rfc2898.txt + """ + + @staticmethod + def decrypt(data, passphrase): + """Decrypt a piece of data using a passphrase and *PBES1*. + + The algorithm to use is automatically detected. + + :Parameters: + data : byte string + The piece of data to decrypt. + passphrase : byte string + The passphrase to use for decrypting the data. + :Returns: + The decrypted data, as a binary string. 
+ """ + + enc_private_key_info = DerSequence().decode(data) + encrypted_algorithm = DerSequence().decode(enc_private_key_info[0]) + encrypted_data = DerOctetString().decode(enc_private_key_info[1]).payload + + pbe_oid = DerObjectId().decode(encrypted_algorithm[0]).value + cipher_params = {} + if pbe_oid == _OID_PBE_WITH_MD5_AND_DES_CBC: + # PBE_MD5_DES_CBC + hashmod = MD5 + ciphermod = DES + elif pbe_oid == _OID_PBE_WITH_MD5_AND_RC2_CBC: + # PBE_MD5_RC2_CBC + hashmod = MD5 + ciphermod = ARC2 + cipher_params['effective_keylen'] = 64 + elif pbe_oid == _OID_PBE_WITH_SHA1_AND_DES_CBC: + # PBE_SHA1_DES_CBC + hashmod = SHA1 + ciphermod = DES + elif pbe_oid == _OID_PBE_WITH_SHA1_AND_RC2_CBC: + # PBE_SHA1_RC2_CBC + hashmod = SHA1 + ciphermod = ARC2 + cipher_params['effective_keylen'] = 64 + else: + raise PbesError("Unknown OID for PBES1") + + pbe_params = DerSequence().decode(encrypted_algorithm[1], nr_elements=2) + salt = DerOctetString().decode(pbe_params[0]).payload + iterations = pbe_params[1] + + key_iv = PBKDF1(passphrase, salt, 16, iterations, hashmod) + key, iv = key_iv[:8], key_iv[8:] + + cipher = ciphermod.new(key, ciphermod.MODE_CBC, iv, **cipher_params) + pt = cipher.decrypt(encrypted_data) + return unpad(pt, cipher.block_size) + + +class PBES2(object): + """Encryption scheme with password-based key derivation + (defined in `PKCS#5 v2.0`__). + + .. __: http://www.ietf.org/rfc/rfc2898.txt.""" + + @staticmethod + def encrypt(data, passphrase, protection, prot_params=None, randfunc=None): + """Encrypt a piece of data using a passphrase and *PBES2*. + + :Parameters: + data : byte string + The piece of data to encrypt. + passphrase : byte string + The passphrase to use for encrypting the data. + protection : string + The identifier of the encryption algorithm to use. + The default value is '``PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC``'. + prot_params : dictionary + Parameters of the protection algorithm. 
+ + +------------------+-----------------------------------------------+ + | Key | Description | + +==================+===============================================+ + | iteration_count | The KDF algorithm is repeated several times to| + | | slow down brute force attacks on passwords | + | | (called *N* or CPU/memory cost in scrypt). | + | | | + | | The default value for PBKDF2 is 1 000. | + | | The default value for scrypt is 16 384. | + +------------------+-----------------------------------------------+ + | salt_size | Salt is used to thwart dictionary and rainbow | + | | attacks on passwords. The default value is 8 | + | | bytes. | + +------------------+-----------------------------------------------+ + | block_size | *(scrypt only)* Memory-cost (r). The default | + | | value is 8. | + +------------------+-----------------------------------------------+ + | parallelization | *(scrypt only)* CPU-cost (p). The default | + | | value is 1. | + +------------------+-----------------------------------------------+ + + + randfunc : callable + Random number generation function; it should accept + a single integer N and return a string of random data, + N bytes long. If not specified, a new RNG will be + instantiated from ``Crypto.Random``. + + :Returns: + The encrypted data, as a binary string. 
+ """ + + if prot_params is None: + prot_params = {} + + if randfunc is None: + randfunc = Random.new().read + + if protection == 'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC': + key_size = 24 + module = DES3 + cipher_mode = DES3.MODE_CBC + enc_oid = _OID_DES_EDE3_CBC + elif protection in ('PBKDF2WithHMAC-SHA1AndAES128-CBC', + 'scryptAndAES128-CBC'): + key_size = 16 + module = AES + cipher_mode = AES.MODE_CBC + enc_oid = _OID_AES128_CBC + elif protection in ('PBKDF2WithHMAC-SHA1AndAES192-CBC', + 'scryptAndAES192-CBC'): + key_size = 24 + module = AES + cipher_mode = AES.MODE_CBC + enc_oid = _OID_AES192_CBC + elif protection in ('PBKDF2WithHMAC-SHA1AndAES256-CBC', + 'scryptAndAES256-CBC'): + key_size = 32 + module = AES + cipher_mode = AES.MODE_CBC + enc_oid = _OID_AES256_CBC + else: + raise ValueError("Unknown PBES2 mode") + + # Get random data + iv = randfunc(module.block_size) + salt = randfunc(prot_params.get("salt_size", 8)) + + # Derive key from password + if protection.startswith('PBKDF2'): + count = prot_params.get("iteration_count", 1000) + key = PBKDF2(passphrase, salt, key_size, count) + kdf_info = DerSequence([ + DerObjectId(_OID_PBKDF2), # PBKDF2 + DerSequence([ + DerOctetString(salt), + DerInteger(count) + ]) + ]) + else: + # It must be scrypt + count = prot_params.get("iteration_count", 16384) + scrypt_r = prot_params.get('block_size', 8) + scrypt_p = prot_params.get('parallelization', 1) + key = scrypt(passphrase, salt, key_size, + count, scrypt_r, scrypt_p) + kdf_info = DerSequence([ + DerObjectId(_OID_SCRYPT), # scrypt + DerSequence([ + DerOctetString(salt), + DerInteger(count), + DerInteger(scrypt_r), + DerInteger(scrypt_p) + ]) + ]) + + # Create cipher and use it + cipher = module.new(key, cipher_mode, iv) + encrypted_data = cipher.encrypt(pad(data, cipher.block_size)) + enc_info = DerSequence([ + DerObjectId(enc_oid), + DerOctetString(iv) + ]) + + # Result + enc_private_key_info = DerSequence([ + # encryptionAlgorithm + DerSequence([ + 
DerObjectId(_OID_PBES2), + DerSequence([ + kdf_info, + enc_info + ]), + ]), + DerOctetString(encrypted_data) + ]) + return enc_private_key_info.encode() + + @staticmethod + def decrypt(data, passphrase): + """Decrypt a piece of data using a passphrase and *PBES2*. + + The algorithm to use is automatically detected. + + :Parameters: + data : byte string + The piece of data to decrypt. + passphrase : byte string + The passphrase to use for decrypting the data. + :Returns: + The decrypted data, as a binary string. + """ + + enc_private_key_info = DerSequence().decode(data, nr_elements=2) + enc_algo = DerSequence().decode(enc_private_key_info[0]) + encrypted_data = DerOctetString().decode(enc_private_key_info[1]).payload + + pbe_oid = DerObjectId().decode(enc_algo[0]).value + if pbe_oid != _OID_PBES2: + raise PbesError("Not a PBES2 object") + + pbes2_params = DerSequence().decode(enc_algo[1], nr_elements=2) + + ### Key Derivation Function selection + kdf_info = DerSequence().decode(pbes2_params[0], nr_elements=2) + kdf_oid = DerObjectId().decode(kdf_info[0]).value + + kdf_key_length = None + + # We only support PBKDF2 or scrypt + if kdf_oid == _OID_PBKDF2: + + pbkdf2_params = DerSequence().decode(kdf_info[1], nr_elements=(2, 3, 4)) + salt = DerOctetString().decode(pbkdf2_params[0]).payload + iteration_count = pbkdf2_params[1] + + left = len(pbkdf2_params) - 2 + idx = 2 + + if left > 0: + try: + kdf_key_length = pbkdf2_params[idx] - 0 + left -= 1 + idx += 1 + except TypeError: + pass + + # Default is HMAC-SHA1 + pbkdf2_prf_oid = "1.2.840.113549.2.7" + if left > 0: + pbkdf2_prf_algo_id = DerSequence().decode(pbkdf2_params[idx]) + pbkdf2_prf_oid = DerObjectId().decode(pbkdf2_prf_algo_id[0]).value + + elif kdf_oid == _OID_SCRYPT: + + scrypt_params = DerSequence().decode(kdf_info[1], nr_elements=(4, 5)) + salt = DerOctetString().decode(scrypt_params[0]).payload + iteration_count, scrypt_r, scrypt_p = [scrypt_params[x] + for x in (1, 2, 3)] + if len(scrypt_params) > 4: + 
kdf_key_length = scrypt_params[4] + else: + kdf_key_length = None + else: + raise PbesError("Unsupported PBES2 KDF") + + ### Cipher selection + enc_info = DerSequence().decode(pbes2_params[1]) + enc_oid = DerObjectId().decode(enc_info[0]).value + + if enc_oid == _OID_DES_EDE3_CBC: + # DES_EDE3_CBC + ciphermod = DES3 + key_size = 24 + elif enc_oid == _OID_AES128_CBC: + # AES128_CBC + ciphermod = AES + key_size = 16 + elif enc_oid == _OID_AES192_CBC: + # AES192_CBC + ciphermod = AES + key_size = 24 + elif enc_oid == _OID_AES256_CBC: + # AES256_CBC + ciphermod = AES + key_size = 32 + else: + raise PbesError("Unsupported PBES2 cipher") + + if kdf_key_length and kdf_key_length != key_size: + raise PbesError("Mismatch between PBES2 KDF parameters" + " and selected cipher") + + IV = DerOctetString().decode(enc_info[1]).payload + + # Create cipher + if kdf_oid == _OID_PBKDF2: + if pbkdf2_prf_oid == _OID_HMAC_SHA1: + hmac_hash_module = SHA1 + elif pbkdf2_prf_oid == _OID_HMAC_SHA224: + hmac_hash_module = SHA224 + elif pbkdf2_prf_oid == _OID_HMAC_SHA256: + hmac_hash_module = SHA256 + elif pbkdf2_prf_oid == _OID_HMAC_SHA384: + hmac_hash_module = SHA384 + elif pbkdf2_prf_oid == _OID_HMAC_SHA512: + hmac_hash_module = SHA512 + else: + raise PbesError("Unsupported HMAC %s" % pbkdf2_prf_oid) + + key = PBKDF2(passphrase, salt, key_size, iteration_count, + hmac_hash_module=hmac_hash_module) + else: + key = scrypt(passphrase, salt, key_size, iteration_count, + scrypt_r, scrypt_p) + cipher = ciphermod.new(key, ciphermod.MODE_CBC, IV) + + # Decrypt data + pt = cipher.decrypt(encrypted_data) + return unpad(pt, cipher.block_size) diff --git a/env/Lib/site-packages/Crypto/IO/_PBES.pyi b/env/Lib/site-packages/Crypto/IO/_PBES.pyi new file mode 100644 index 0000000..a8a34ce --- /dev/null +++ b/env/Lib/site-packages/Crypto/IO/_PBES.pyi @@ -0,0 +1,19 @@ +from typing import Dict, Optional, Callable + +class PbesError(ValueError): + ... 
+ +class PBES1(object): + @staticmethod + def decrypt(data: bytes, passphrase: bytes) -> bytes: ... + +class PBES2(object): + @staticmethod + def encrypt(data: bytes, + passphrase: bytes, + protection: str, + prot_params: Optional[Dict] = ..., + randfunc: Optional[Callable[[int],bytes]] = ...) -> bytes: ... + + @staticmethod + def decrypt(data:bytes, passphrase: bytes) -> bytes: ... diff --git a/env/Lib/site-packages/Crypto/IO/__init__.py b/env/Lib/site-packages/Crypto/IO/__init__.py new file mode 100644 index 0000000..85a0d0b --- /dev/null +++ b/env/Lib/site-packages/Crypto/IO/__init__.py @@ -0,0 +1,31 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +__all__ = ['PEM', 'PKCS8'] diff --git a/env/Lib/site-packages/Crypto/Math/Numbers.py b/env/Lib/site-packages/Crypto/Math/Numbers.py new file mode 100644 index 0000000..c2c4483 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Math/Numbers.py @@ -0,0 +1,42 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +__all__ = ["Integer"] + +try: + from Crypto.Math._IntegerGMP import IntegerGMP as Integer + from Crypto.Math._IntegerGMP import implementation as _implementation +except (ImportError, OSError, AttributeError): + try: + from Crypto.Math._IntegerCustom import IntegerCustom as Integer + from Crypto.Math._IntegerCustom import implementation as _implementation + except (ImportError, OSError): + from Crypto.Math._IntegerNative import IntegerNative as Integer + _implementation = {} diff --git a/env/Lib/site-packages/Crypto/Math/Numbers.pyi b/env/Lib/site-packages/Crypto/Math/Numbers.pyi new file mode 100644 index 0000000..126268c --- /dev/null +++ b/env/Lib/site-packages/Crypto/Math/Numbers.pyi @@ -0,0 +1,4 @@ +from Crypto.Math._IntegerBase import IntegerBase + +class Integer(IntegerBase): + pass diff --git a/env/Lib/site-packages/Crypto/Math/Primality.py b/env/Lib/site-packages/Crypto/Math/Primality.py new file mode 100644 index 0000000..884c418 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Math/Primality.py @@ -0,0 +1,369 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. 
Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +"""Functions to create and test prime numbers. + +:undocumented: __package__ +""" + +from Crypto import Random +from Crypto.Math.Numbers import Integer + +from Crypto.Util.py3compat import iter_range + +COMPOSITE = 0 +PROBABLY_PRIME = 1 + + +def miller_rabin_test(candidate, iterations, randfunc=None): + """Perform a Miller-Rabin primality test on an integer. + + The test is specified in Section C.3.1 of `FIPS PUB 186-4`__. + + :Parameters: + candidate : integer + The number to test for primality. + iterations : integer + The maximum number of iterations to perform before + declaring a candidate a probable prime. + randfunc : callable + An RNG function where bases are taken from. + + :Returns: + ``Primality.COMPOSITE`` or ``Primality.PROBABLY_PRIME``. + + .. 
__: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf + """ + + if not isinstance(candidate, Integer): + candidate = Integer(candidate) + + if candidate in (1, 2, 3, 5): + return PROBABLY_PRIME + + if candidate.is_even(): + return COMPOSITE + + one = Integer(1) + minus_one = Integer(candidate - 1) + + if randfunc is None: + randfunc = Random.new().read + + # Step 1 and 2 + m = Integer(minus_one) + a = 0 + while m.is_even(): + m >>= 1 + a += 1 + + # Skip step 3 + + # Step 4 + for i in iter_range(iterations): + + # Step 4.1-2 + base = 1 + while base in (one, minus_one): + base = Integer.random_range(min_inclusive=2, + max_inclusive=candidate - 2, + randfunc=randfunc) + assert(2 <= base <= candidate - 2) + + # Step 4.3-4.4 + z = pow(base, m, candidate) + if z in (one, minus_one): + continue + + # Step 4.5 + for j in iter_range(1, a): + z = pow(z, 2, candidate) + if z == minus_one: + break + if z == one: + return COMPOSITE + else: + return COMPOSITE + + # Step 5 + return PROBABLY_PRIME + + +def lucas_test(candidate): + """Perform a Lucas primality test on an integer. + + The test is specified in Section C.3.3 of `FIPS PUB 186-4`__. + + :Parameters: + candidate : integer + The number to test for primality. + + :Returns: + ``Primality.COMPOSITE`` or ``Primality.PROBABLY_PRIME``. + + .. __: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf + """ + + if not isinstance(candidate, Integer): + candidate = Integer(candidate) + + # Step 1 + if candidate in (1, 2, 3, 5): + return PROBABLY_PRIME + if candidate.is_even() or candidate.is_perfect_square(): + return COMPOSITE + + # Step 2 + def alternate(): + value = 5 + while True: + yield value + if value > 0: + value += 2 + else: + value -= 2 + value = -value + + for D in alternate(): + if candidate in (D, -D): + continue + js = Integer.jacobi_symbol(D, candidate) + if js == 0: + return COMPOSITE + if js == -1: + break + # Found D. 
P=1 and Q=(1-D)/4 (note that Q is guaranteed to be an integer) + + # Step 3 + # This is \delta(n) = n - jacobi(D/n) + K = candidate + 1 + # Step 4 + r = K.size_in_bits() - 1 + # Step 5 + # U_1=1 and V_1=P + U_i = Integer(1) + V_i = Integer(1) + U_temp = Integer(0) + V_temp = Integer(0) + # Step 6 + for i in iter_range(r - 1, -1, -1): + # Square + # U_temp = U_i * V_i % candidate + U_temp.set(U_i) + U_temp *= V_i + U_temp %= candidate + # V_temp = (((V_i ** 2 + (U_i ** 2 * D)) * K) >> 1) % candidate + V_temp.set(U_i) + V_temp *= U_i + V_temp *= D + V_temp.multiply_accumulate(V_i, V_i) + if V_temp.is_odd(): + V_temp += candidate + V_temp >>= 1 + V_temp %= candidate + # Multiply + if K.get_bit(i): + # U_i = (((U_temp + V_temp) * K) >> 1) % candidate + U_i.set(U_temp) + U_i += V_temp + if U_i.is_odd(): + U_i += candidate + U_i >>= 1 + U_i %= candidate + # V_i = (((V_temp + U_temp * D) * K) >> 1) % candidate + V_i.set(V_temp) + V_i.multiply_accumulate(U_temp, D) + if V_i.is_odd(): + V_i += candidate + V_i >>= 1 + V_i %= candidate + else: + U_i.set(U_temp) + V_i.set(V_temp) + # Step 7 + if U_i == 0: + return PROBABLY_PRIME + return COMPOSITE + + +from Crypto.Util.number import sieve_base as _sieve_base_large +## The optimal number of small primes to use for the sieve +## is probably dependent on the platform and the candidate size +_sieve_base = set(_sieve_base_large[:100]) + + +def test_probable_prime(candidate, randfunc=None): + """Test if a number is prime. + + A number is qualified as prime if it passes a certain + number of Miller-Rabin tests (dependent on the size + of the number, but such that probability of a false + positive is less than 10^-30) and a single Lucas test. + + For instance, a 1024-bit candidate will need to pass + 4 Miller-Rabin tests. + + :Parameters: + candidate : integer + The number to test for primality. + randfunc : callable + The routine to draw random bytes from to select Miller-Rabin bases. 
+ :Returns: + ``PROBABLE_PRIME`` if the number if prime with very high probability. + ``COMPOSITE`` if the number is a composite. + For efficiency reasons, ``COMPOSITE`` is also returned for small primes. + """ + + if randfunc is None: + randfunc = Random.new().read + + if not isinstance(candidate, Integer): + candidate = Integer(candidate) + + # First, check trial division by the smallest primes + if int(candidate) in _sieve_base: + return PROBABLY_PRIME + try: + map(candidate.fail_if_divisible_by, _sieve_base) + except ValueError: + return COMPOSITE + + # These are the number of Miller-Rabin iterations s.t. p(k, t) < 1E-30, + # with p(k, t) being the probability that a randomly chosen k-bit number + # is composite but still survives t MR iterations. + mr_ranges = ((220, 30), (280, 20), (390, 15), (512, 10), + (620, 7), (740, 6), (890, 5), (1200, 4), + (1700, 3), (3700, 2)) + + bit_size = candidate.size_in_bits() + try: + mr_iterations = list(filter(lambda x: bit_size < x[0], + mr_ranges))[0][1] + except IndexError: + mr_iterations = 1 + + if miller_rabin_test(candidate, mr_iterations, + randfunc=randfunc) == COMPOSITE: + return COMPOSITE + if lucas_test(candidate) == COMPOSITE: + return COMPOSITE + return PROBABLY_PRIME + + +def generate_probable_prime(**kwargs): + """Generate a random probable prime. + + The prime will not have any specific properties + (e.g. it will not be a *strong* prime). + + Random numbers are evaluated for primality until one + passes all tests, consisting of a certain number of + Miller-Rabin tests with random bases followed by + a single Lucas test. + + The number of Miller-Rabin iterations is chosen such that + the probability that the output number is a non-prime is + less than 1E-30 (roughly 2^{-100}). + + This approach is compliant to `FIPS PUB 186-4`__. + + :Keywords: + exact_bits : integer + The desired size in bits of the probable prime. + It must be at least 160. 
+ randfunc : callable + An RNG function where candidate primes are taken from. + prime_filter : callable + A function that takes an Integer as parameter and returns + True if the number can be passed to further primality tests, + False if it should be immediately discarded. + + :Return: + A probable prime in the range 2^exact_bits > p > 2^(exact_bits-1). + + .. __: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf + """ + + exact_bits = kwargs.pop("exact_bits", None) + randfunc = kwargs.pop("randfunc", None) + prime_filter = kwargs.pop("prime_filter", lambda x: True) + if kwargs: + raise ValueError("Unknown parameters: " + kwargs.keys()) + + if exact_bits is None: + raise ValueError("Missing exact_bits parameter") + if exact_bits < 160: + raise ValueError("Prime number is not big enough.") + + if randfunc is None: + randfunc = Random.new().read + + result = COMPOSITE + while result == COMPOSITE: + candidate = Integer.random(exact_bits=exact_bits, + randfunc=randfunc) | 1 + if not prime_filter(candidate): + continue + result = test_probable_prime(candidate, randfunc) + return candidate + + +def generate_probable_safe_prime(**kwargs): + """Generate a random, probable safe prime. + + Note this operation is much slower than generating a simple prime. + + :Keywords: + exact_bits : integer + The desired size in bits of the probable safe prime. + randfunc : callable + An RNG function where candidate primes are taken from. + + :Return: + A probable safe prime in the range + 2^exact_bits > p > 2^(exact_bits-1). 
+ """ + + exact_bits = kwargs.pop("exact_bits", None) + randfunc = kwargs.pop("randfunc", None) + if kwargs: + raise ValueError("Unknown parameters: " + kwargs.keys()) + + if randfunc is None: + randfunc = Random.new().read + + result = COMPOSITE + while result == COMPOSITE: + q = generate_probable_prime(exact_bits=exact_bits - 1, randfunc=randfunc) + candidate = q * 2 + 1 + if candidate.size_in_bits() != exact_bits: + continue + result = test_probable_prime(candidate, randfunc=randfunc) + return candidate diff --git a/env/Lib/site-packages/Crypto/Math/Primality.pyi b/env/Lib/site-packages/Crypto/Math/Primality.pyi new file mode 100644 index 0000000..7813483 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Math/Primality.pyi @@ -0,0 +1,18 @@ +from typing import Callable, Optional, Union, Set + +PrimeResult = int + +COMPOSITE: PrimeResult +PROBABLY_PRIME: PrimeResult + +def miller_rabin_test(candidate: int, iterations: int, randfunc: Optional[Callable[[int],bytes]]=None) -> PrimeResult: ... +def lucas_test(candidate: int) -> PrimeResult: ... +_sieve_base: Set[int] +def test_probable_prime(candidate: int, randfunc: Optional[Callable[[int],bytes]]=None) -> PrimeResult: ... +def generate_probable_prime(*, + exact_bits: int = ..., + randfunc: Callable[[int],bytes] = ..., + prime_filter: Callable[[int],bool] = ...) -> int: ... +def generate_probable_safe_prime(*, + exact_bits: int = ..., + randfunc: Callable[[int],bytes] = ...) -> int: ... diff --git a/env/Lib/site-packages/Crypto/Math/_IntegerBase.py b/env/Lib/site-packages/Crypto/Math/_IntegerBase.py new file mode 100644 index 0000000..48e8f48 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Math/_IntegerBase.py @@ -0,0 +1,392 @@ +# =================================================================== +# +# Copyright (c) 2018, Helder Eijs +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import abc + +from Crypto.Util.py3compat import iter_range, bord, bchr, ABC + +from Crypto import Random + + +class IntegerBase(ABC): + + # Conversions + @abc.abstractmethod + def __int__(self): + pass + + @abc.abstractmethod + def __str__(self): + pass + + @abc.abstractmethod + def __repr__(self): + pass + + @abc.abstractmethod + def to_bytes(self, block_size=0): + pass + + @staticmethod + @abc.abstractmethod + def from_bytes(byte_string): + pass + + # Relations + @abc.abstractmethod + def __eq__(self, term): + pass + + @abc.abstractmethod + def __ne__(self, term): + pass + + @abc.abstractmethod + def __lt__(self, term): + pass + + @abc.abstractmethod + def __le__(self, term): + pass + + @abc.abstractmethod + def __gt__(self, term): + pass + + @abc.abstractmethod + def __ge__(self, term): + pass + + @abc.abstractmethod + def __nonzero__(self): + pass + __bool__ = __nonzero__ + + @abc.abstractmethod + def is_negative(self): + pass + + # Arithmetic operations + @abc.abstractmethod + def __add__(self, term): + pass + + @abc.abstractmethod + def __sub__(self, term): + pass + + @abc.abstractmethod + def __mul__(self, factor): + pass + + @abc.abstractmethod + def __floordiv__(self, divisor): + pass + + @abc.abstractmethod + def __mod__(self, divisor): + pass + + @abc.abstractmethod + def inplace_pow(self, exponent, modulus=None): + pass + + @abc.abstractmethod + def __pow__(self, exponent, modulus=None): + pass + + @abc.abstractmethod + def __abs__(self): + pass + + @abc.abstractmethod + def sqrt(self, modulus=None): + pass + + @abc.abstractmethod + def __iadd__(self, term): + pass + + @abc.abstractmethod + def __isub__(self, term): + pass + + @abc.abstractmethod + def __imul__(self, term): + pass + + @abc.abstractmethod + def __imod__(self, term): + pass + + # Boolean/bit operations + @abc.abstractmethod + def __and__(self, term): + pass + + @abc.abstractmethod + def __or__(self, term): + pass + 
+ @abc.abstractmethod + def __rshift__(self, pos): + pass + + @abc.abstractmethod + def __irshift__(self, pos): + pass + + @abc.abstractmethod + def __lshift__(self, pos): + pass + + @abc.abstractmethod + def __ilshift__(self, pos): + pass + + @abc.abstractmethod + def get_bit(self, n): + pass + + # Extra + @abc.abstractmethod + def is_odd(self): + pass + + @abc.abstractmethod + def is_even(self): + pass + + @abc.abstractmethod + def size_in_bits(self): + pass + + @abc.abstractmethod + def size_in_bytes(self): + pass + + @abc.abstractmethod + def is_perfect_square(self): + pass + + @abc.abstractmethod + def fail_if_divisible_by(self, small_prime): + pass + + @abc.abstractmethod + def multiply_accumulate(self, a, b): + pass + + @abc.abstractmethod + def set(self, source): + pass + + @abc.abstractmethod + def inplace_inverse(self, modulus): + pass + + @abc.abstractmethod + def inverse(self, modulus): + pass + + @abc.abstractmethod + def gcd(self, term): + pass + + @abc.abstractmethod + def lcm(self, term): + pass + + @staticmethod + @abc.abstractmethod + def jacobi_symbol(a, n): + pass + + @staticmethod + def _tonelli_shanks(n, p): + """Tonelli-shanks algorithm for computing the square root + of n modulo a prime p. + + n must be in the range [0..p-1]. + p must be at least even. + + The return value r is the square root of modulo p. If non-zero, + another solution will also exist (p-r). + + Note we cannot assume that p is really a prime: if it's not, + we can either raise an exception or return the correct value. 
+ """ + + # See https://rosettacode.org/wiki/Tonelli-Shanks_algorithm + + if n in (0, 1): + return n + + if p % 4 == 3: + root = pow(n, (p + 1) // 4, p) + if pow(root, 2, p) != n: + raise ValueError("Cannot compute square root") + return root + + s = 1 + q = (p - 1) // 2 + while not (q & 1): + s += 1 + q >>= 1 + + z = n.__class__(2) + while True: + euler = pow(z, (p - 1) // 2, p) + if euler == 1: + z += 1 + continue + if euler == p - 1: + break + # Most probably p is not a prime + raise ValueError("Cannot compute square root") + + m = s + c = pow(z, q, p) + t = pow(n, q, p) + r = pow(n, (q + 1) // 2, p) + + while t != 1: + for i in iter_range(0, m): + if pow(t, 2**i, p) == 1: + break + if i == m: + raise ValueError("Cannot compute square root of %d mod %d" % (n, p)) + b = pow(c, 2**(m - i - 1), p) + m = i + c = b**2 % p + t = (t * b**2) % p + r = (r * b) % p + + if pow(r, 2, p) != n: + raise ValueError("Cannot compute square root") + + return r + + @classmethod + def random(cls, **kwargs): + """Generate a random natural integer of a certain size. + + :Keywords: + exact_bits : positive integer + The length in bits of the resulting random Integer number. + The number is guaranteed to fulfil the relation: + + 2^bits > result >= 2^(bits - 1) + + max_bits : positive integer + The maximum length in bits of the resulting random Integer number. + The number is guaranteed to fulfil the relation: + + 2^bits > result >=0 + + randfunc : callable + A function that returns a random byte string. The length of the + byte string is passed as parameter. Optional. + If not provided (or ``None``), randomness is read from the system RNG. 
+ + :Return: a Integer object + """ + + exact_bits = kwargs.pop("exact_bits", None) + max_bits = kwargs.pop("max_bits", None) + randfunc = kwargs.pop("randfunc", None) + + if randfunc is None: + randfunc = Random.new().read + + if exact_bits is None and max_bits is None: + raise ValueError("Either 'exact_bits' or 'max_bits' must be specified") + + if exact_bits is not None and max_bits is not None: + raise ValueError("'exact_bits' and 'max_bits' are mutually exclusive") + + bits = exact_bits or max_bits + bytes_needed = ((bits - 1) // 8) + 1 + significant_bits_msb = 8 - (bytes_needed * 8 - bits) + msb = bord(randfunc(1)[0]) + if exact_bits is not None: + msb |= 1 << (significant_bits_msb - 1) + msb &= (1 << significant_bits_msb) - 1 + + return cls.from_bytes(bchr(msb) + randfunc(bytes_needed - 1)) + + @classmethod + def random_range(cls, **kwargs): + """Generate a random integer within a given internal. + + :Keywords: + min_inclusive : integer + The lower end of the interval (inclusive). + max_inclusive : integer + The higher end of the interval (inclusive). + max_exclusive : integer + The higher end of the interval (exclusive). + randfunc : callable + A function that returns a random byte string. The length of the + byte string is passed as parameter. Optional. + If not provided (or ``None``), randomness is read from the system RNG. + :Returns: + An Integer randomly taken in the given interval. 
+ """ + + min_inclusive = kwargs.pop("min_inclusive", None) + max_inclusive = kwargs.pop("max_inclusive", None) + max_exclusive = kwargs.pop("max_exclusive", None) + randfunc = kwargs.pop("randfunc", None) + + if kwargs: + raise ValueError("Unknown keywords: " + str(kwargs.keys)) + if None not in (max_inclusive, max_exclusive): + raise ValueError("max_inclusive and max_exclusive cannot be both" + " specified") + if max_exclusive is not None: + max_inclusive = max_exclusive - 1 + if None in (min_inclusive, max_inclusive): + raise ValueError("Missing keyword to identify the interval") + + if randfunc is None: + randfunc = Random.new().read + + norm_maximum = max_inclusive - min_inclusive + bits_needed = cls(norm_maximum).size_in_bits() + + norm_candidate = -1 + while not 0 <= norm_candidate <= norm_maximum: + norm_candidate = cls.random( + max_bits=bits_needed, + randfunc=randfunc + ) + return norm_candidate + min_inclusive + diff --git a/env/Lib/site-packages/Crypto/Math/_IntegerBase.pyi b/env/Lib/site-packages/Crypto/Math/_IntegerBase.pyi new file mode 100644 index 0000000..3f534db --- /dev/null +++ b/env/Lib/site-packages/Crypto/Math/_IntegerBase.pyi @@ -0,0 +1,61 @@ +from typing import Optional, Union, Callable + +RandFunc = Callable[[int],int] + +class IntegerBase: + + def __int__(self) -> int: ... + def __str__(self) -> str: ... + def __repr__(self) -> str: ... + def to_bytes(self, block_size: Optional[int]=0) -> bytes: ... + @staticmethod + def from_bytes(byte_string: bytes) -> IntegerBase: ... + def __eq__(self, term: object) -> bool: ... + def __ne__(self, term: object) -> bool: ... + def __lt__(self, term: Union[IntegerBase, int]) -> bool: ... + def __le__(self, term: Union[IntegerBase, int]) -> bool: ... + def __gt__(self, term: Union[IntegerBase, int]) -> bool: ... + def __ge__(self, term: Union[IntegerBase, int]) -> bool: ... + def __nonzero__(self) -> bool: ... + def is_negative(self) -> bool: ... 
+ def __add__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + def __sub__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + def __mul__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + def __floordiv__(self, divisor: Union[IntegerBase, int]) -> IntegerBase: ... + def __mod__(self, divisor: Union[IntegerBase, int]) -> IntegerBase: ... + def inplace_pow(self, exponent: int, modulus: Optional[Union[IntegerBase, int]]=None) -> IntegerBase: ... + def __pow__(self, exponent: int, modulus: Optional[int]) -> IntegerBase: ... + def __abs__(self) -> IntegerBase: ... + def sqrt(self, modulus: Optional[int]) -> IntegerBase: ... + def __iadd__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + def __isub__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + def __imul__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + def __imod__(self, divisor: Union[IntegerBase, int]) -> IntegerBase: ... + def __and__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + def __or__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + def __rshift__(self, pos: Union[IntegerBase, int]) -> IntegerBase: ... + def __irshift__(self, pos: Union[IntegerBase, int]) -> IntegerBase: ... + def __lshift__(self, pos: Union[IntegerBase, int]) -> IntegerBase: ... + def __ilshift__(self, pos: Union[IntegerBase, int]) -> IntegerBase: ... + def get_bit(self, n: int) -> bool: ... + def is_odd(self) -> bool: ... + def is_even(self) -> bool: ... + def size_in_bits(self) -> int: ... + def size_in_bytes(self) -> int: ... + def is_perfect_square(self) -> bool: ... + def fail_if_divisible_by(self, small_prime: Union[IntegerBase, int]) -> None: ... + def multiply_accumulate(self, a: Union[IntegerBase, int], b: Union[IntegerBase, int]) -> IntegerBase: ... + def set(self, source: Union[IntegerBase, int]) -> IntegerBase: ... + def inplace_inverse(self, modulus: Union[IntegerBase, int]) -> IntegerBase: ... 
+ def inverse(self, modulus: Union[IntegerBase, int]) -> IntegerBase: ... + def gcd(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + def lcm(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + @staticmethod + def jacobi_symbol(a: Union[IntegerBase, int], n: Union[IntegerBase, int]) -> IntegerBase: ... + @staticmethod + def _tonelli_shanks(n: Union[IntegerBase, int], p: Union[IntegerBase, int]) -> IntegerBase : ... + @classmethod + def random(cls, **kwargs: Union[int,RandFunc]) -> IntegerBase : ... + @classmethod + def random_range(cls, **kwargs: Union[int,RandFunc]) -> IntegerBase : ... + diff --git a/env/Lib/site-packages/Crypto/Math/_IntegerCustom.py b/env/Lib/site-packages/Crypto/Math/_IntegerCustom.py new file mode 100644 index 0000000..3ed1dda --- /dev/null +++ b/env/Lib/site-packages/Crypto/Math/_IntegerCustom.py @@ -0,0 +1,111 @@ +# =================================================================== +# +# Copyright (c) 2018, Helder Eijs +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from ._IntegerNative import IntegerNative + +from Crypto.Util.number import long_to_bytes, bytes_to_long + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + create_string_buffer, + get_raw_buffer, backend, + c_size_t, c_ulonglong) + + +from Crypto.Random.random import getrandbits + +c_defs = """ +int monty_pow(const uint8_t *base, + const uint8_t *exp, + const uint8_t *modulus, + uint8_t *out, + size_t len, + uint64_t seed); +""" + + +_raw_montgomery = load_pycryptodome_raw_lib("Crypto.Math._modexp", c_defs) +implementation = {"library": "custom", "api": backend} + + +class IntegerCustom(IntegerNative): + + @staticmethod + def from_bytes(byte_string): + return IntegerCustom(bytes_to_long(byte_string)) + + def inplace_pow(self, exponent, modulus=None): + exp_value = int(exponent) + if exp_value < 0: + raise ValueError("Exponent must not be negative") + + # No modular reduction + if modulus is None: + self._value = pow(self._value, exp_value) + return self + + # With modular reduction + mod_value = int(modulus) + if mod_value < 0: + raise ValueError("Modulus must be positive") + if mod_value == 0: + raise ZeroDivisionError("Modulus cannot be zero") + + # C extension only works with odd moduli + if (mod_value & 1) == 0: + self._value = pow(self._value, exp_value, mod_value) + return self + + # C extension only works with bases smaller than modulus + if 
self._value >= mod_value: + self._value %= mod_value + + max_len = len(long_to_bytes(max(self._value, exp_value, mod_value))) + + base_b = long_to_bytes(self._value, max_len) + exp_b = long_to_bytes(exp_value, max_len) + modulus_b = long_to_bytes(mod_value, max_len) + + out = create_string_buffer(max_len) + + error = _raw_montgomery.monty_pow( + out, + base_b, + exp_b, + modulus_b, + c_size_t(max_len), + c_ulonglong(getrandbits(64)) + ) + + if error: + raise ValueError("monty_pow failed with error: %d" % error) + + result = bytes_to_long(get_raw_buffer(out)) + self._value = result + return self diff --git a/env/Lib/site-packages/Crypto/Math/_IntegerCustom.pyi b/env/Lib/site-packages/Crypto/Math/_IntegerCustom.pyi new file mode 100644 index 0000000..2dd75c7 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Math/_IntegerCustom.pyi @@ -0,0 +1,8 @@ +from typing import Any + +from ._IntegerNative import IntegerNative + +_raw_montgomery = Any + +class IntegerCustom(IntegerNative): + pass diff --git a/env/Lib/site-packages/Crypto/Math/_IntegerGMP.py b/env/Lib/site-packages/Crypto/Math/_IntegerGMP.py new file mode 100644 index 0000000..a231e9d --- /dev/null +++ b/env/Lib/site-packages/Crypto/Math/_IntegerGMP.py @@ -0,0 +1,708 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import sys + +from Crypto.Util.py3compat import tobytes, is_native_int + +from Crypto.Util._raw_api import (backend, load_lib, + get_raw_buffer, get_c_string, + null_pointer, create_string_buffer, + c_ulong, c_size_t) + +from ._IntegerBase import IntegerBase + +gmp_defs = """typedef unsigned long UNIX_ULONG; + typedef struct { int a; int b; void *c; } MPZ; + typedef MPZ mpz_t[1]; + typedef UNIX_ULONG mp_bitcnt_t; + void __gmpz_init (mpz_t x); + void __gmpz_init_set (mpz_t rop, const mpz_t op); + void __gmpz_init_set_ui (mpz_t rop, UNIX_ULONG op); + int __gmp_sscanf (const char *s, const char *fmt, ...); + void __gmpz_set (mpz_t rop, const mpz_t op); + int __gmp_snprintf (uint8_t *buf, size_t size, const char *fmt, ...); + void __gmpz_add (mpz_t rop, const mpz_t op1, const mpz_t op2); + void __gmpz_add_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2); + void __gmpz_sub_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2); + void __gmpz_addmul (mpz_t rop, const mpz_t op1, const mpz_t op2); + void __gmpz_addmul_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2); + void __gmpz_submul_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG 
op2); + void __gmpz_import (mpz_t rop, size_t count, int order, size_t size, + int endian, size_t nails, const void *op); + void * __gmpz_export (void *rop, size_t *countp, int order, + size_t size, + int endian, size_t nails, const mpz_t op); + size_t __gmpz_sizeinbase (const mpz_t op, int base); + void __gmpz_sub (mpz_t rop, const mpz_t op1, const mpz_t op2); + void __gmpz_mul (mpz_t rop, const mpz_t op1, const mpz_t op2); + void __gmpz_mul_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2); + int __gmpz_cmp (const mpz_t op1, const mpz_t op2); + void __gmpz_powm (mpz_t rop, const mpz_t base, const mpz_t exp, const + mpz_t mod); + void __gmpz_powm_ui (mpz_t rop, const mpz_t base, UNIX_ULONG exp, + const mpz_t mod); + void __gmpz_pow_ui (mpz_t rop, const mpz_t base, UNIX_ULONG exp); + void __gmpz_sqrt(mpz_t rop, const mpz_t op); + void __gmpz_mod (mpz_t r, const mpz_t n, const mpz_t d); + void __gmpz_neg (mpz_t rop, const mpz_t op); + void __gmpz_abs (mpz_t rop, const mpz_t op); + void __gmpz_and (mpz_t rop, const mpz_t op1, const mpz_t op2); + void __gmpz_ior (mpz_t rop, const mpz_t op1, const mpz_t op2); + void __gmpz_clear (mpz_t x); + void __gmpz_tdiv_q_2exp (mpz_t q, const mpz_t n, mp_bitcnt_t b); + void __gmpz_fdiv_q (mpz_t q, const mpz_t n, const mpz_t d); + void __gmpz_mul_2exp (mpz_t rop, const mpz_t op1, mp_bitcnt_t op2); + int __gmpz_tstbit (const mpz_t op, mp_bitcnt_t bit_index); + int __gmpz_perfect_square_p (const mpz_t op); + int __gmpz_jacobi (const mpz_t a, const mpz_t b); + void __gmpz_gcd (mpz_t rop, const mpz_t op1, const mpz_t op2); + UNIX_ULONG __gmpz_gcd_ui (mpz_t rop, const mpz_t op1, + UNIX_ULONG op2); + void __gmpz_lcm (mpz_t rop, const mpz_t op1, const mpz_t op2); + int __gmpz_invert (mpz_t rop, const mpz_t op1, const mpz_t op2); + int __gmpz_divisible_p (const mpz_t n, const mpz_t d); + int __gmpz_divisible_ui_p (const mpz_t n, UNIX_ULONG d); + """ + +if sys.platform == "win32": + raise ImportError("Not using GMP on Windows") + +lib = 
load_lib("gmp", gmp_defs) +implementation = {"library": "gmp", "api": backend} + +if hasattr(lib, "__mpir_version"): + raise ImportError("MPIR library detected") + +# In order to create a function that returns a pointer to +# a new MPZ structure, we need to break the abstraction +# and know exactly what ffi backend we have +if implementation["api"] == "ctypes": + from ctypes import Structure, c_int, c_void_p, byref + + class _MPZ(Structure): + _fields_ = [('_mp_alloc', c_int), + ('_mp_size', c_int), + ('_mp_d', c_void_p)] + + def new_mpz(): + return byref(_MPZ()) + +else: + # We are using CFFI + from Crypto.Util._raw_api import ffi + + def new_mpz(): + return ffi.new("MPZ*") + + +# Lazy creation of GMP methods +class _GMP(object): + + def __getattr__(self, name): + if name.startswith("mpz_"): + func_name = "__gmpz_" + name[4:] + elif name.startswith("gmp_"): + func_name = "__gmp_" + name[4:] + else: + raise AttributeError("Attribute %s is invalid" % name) + func = getattr(lib, func_name) + setattr(self, name, func) + return func + + +_gmp = _GMP() + + +class IntegerGMP(IntegerBase): + """A fast, arbitrary precision integer""" + + _zero_mpz_p = new_mpz() + _gmp.mpz_init_set_ui(_zero_mpz_p, c_ulong(0)) + + def __init__(self, value): + """Initialize the integer to the given value.""" + + self._mpz_p = new_mpz() + self._initialized = False + + if isinstance(value, float): + raise ValueError("A floating point type is not a natural number") + + self._initialized = True + + if is_native_int(value): + _gmp.mpz_init(self._mpz_p) + result = _gmp.gmp_sscanf(tobytes(str(value)), b"%Zd", self._mpz_p) + if result != 1: + raise ValueError("Error converting '%d'" % value) + elif isinstance(value, IntegerGMP): + _gmp.mpz_init_set(self._mpz_p, value._mpz_p) + else: + raise NotImplementedError + + # Conversions + def __int__(self): + # buf will contain the integer encoded in decimal plus the trailing + # zero, and possibly the negative sign. 
+ # dig10(x) < log10(x) + 1 = log2(x)/log2(10) + 1 < log2(x)/3 + 1 + buf_len = _gmp.mpz_sizeinbase(self._mpz_p, 2) // 3 + 3 + buf = create_string_buffer(buf_len) + + _gmp.gmp_snprintf(buf, c_size_t(buf_len), b"%Zd", self._mpz_p) + return int(get_c_string(buf)) + + def __str__(self): + return str(int(self)) + + def __repr__(self): + return "Integer(%s)" % str(self) + + # Only Python 2.x + def __hex__(self): + return hex(int(self)) + + # Only Python 3.x + def __index__(self): + return int(self) + + def to_bytes(self, block_size=0): + """Convert the number into a byte string. + + This method encodes the number in network order and prepends + as many zero bytes as required. It only works for non-negative + values. + + :Parameters: + block_size : integer + The exact size the output byte string must have. + If zero, the string has the minimal length. + :Returns: + A byte string. + :Raise ValueError: + If the value is negative or if ``block_size`` is + provided and the length of the byte string would exceed it. + """ + + if self < 0: + raise ValueError("Conversion only valid for non-negative numbers") + + buf_len = (_gmp.mpz_sizeinbase(self._mpz_p, 2) + 7) // 8 + if buf_len > block_size > 0: + raise ValueError("Number is too big to convert to byte string" + "of prescribed length") + buf = create_string_buffer(buf_len) + + _gmp.mpz_export( + buf, + null_pointer, # Ignore countp + 1, # Big endian + c_size_t(1), # Each word is 1 byte long + 0, # Endianess within a word - not relevant + c_size_t(0), # No nails + self._mpz_p) + + return b'\x00' * max(0, block_size - buf_len) + get_raw_buffer(buf) + + @staticmethod + def from_bytes(byte_string): + """Convert a byte string into a number. + + :Parameters: + byte_string : byte string + The input number, encoded in network order. + It can only be non-negative. + :Return: + The ``Integer`` object carrying the same value as the input. 
+ """ + result = IntegerGMP(0) + _gmp.mpz_import( + result._mpz_p, + c_size_t(len(byte_string)), # Amount of words to read + 1, # Big endian + c_size_t(1), # Each word is 1 byte long + 0, # Endianess within a word - not relevant + c_size_t(0), # No nails + byte_string) + return result + + # Relations + def _apply_and_return(self, func, term): + if not isinstance(term, IntegerGMP): + term = IntegerGMP(term) + return func(self._mpz_p, term._mpz_p) + + def __eq__(self, term): + if not (isinstance(term, IntegerGMP) or is_native_int(term)): + return False + return self._apply_and_return(_gmp.mpz_cmp, term) == 0 + + def __ne__(self, term): + if not (isinstance(term, IntegerGMP) or is_native_int(term)): + return True + return self._apply_and_return(_gmp.mpz_cmp, term) != 0 + + def __lt__(self, term): + return self._apply_and_return(_gmp.mpz_cmp, term) < 0 + + def __le__(self, term): + return self._apply_and_return(_gmp.mpz_cmp, term) <= 0 + + def __gt__(self, term): + return self._apply_and_return(_gmp.mpz_cmp, term) > 0 + + def __ge__(self, term): + return self._apply_and_return(_gmp.mpz_cmp, term) >= 0 + + def __nonzero__(self): + return _gmp.mpz_cmp(self._mpz_p, self._zero_mpz_p) != 0 + __bool__ = __nonzero__ + + def is_negative(self): + return _gmp.mpz_cmp(self._mpz_p, self._zero_mpz_p) < 0 + + # Arithmetic operations + def __add__(self, term): + result = IntegerGMP(0) + if not isinstance(term, IntegerGMP): + try: + term = IntegerGMP(term) + except NotImplementedError: + return NotImplemented + _gmp.mpz_add(result._mpz_p, + self._mpz_p, + term._mpz_p) + return result + + def __sub__(self, term): + result = IntegerGMP(0) + if not isinstance(term, IntegerGMP): + try: + term = IntegerGMP(term) + except NotImplementedError: + return NotImplemented + _gmp.mpz_sub(result._mpz_p, + self._mpz_p, + term._mpz_p) + return result + + def __mul__(self, term): + result = IntegerGMP(0) + if not isinstance(term, IntegerGMP): + try: + term = IntegerGMP(term) + except 
NotImplementedError: + return NotImplemented + _gmp.mpz_mul(result._mpz_p, + self._mpz_p, + term._mpz_p) + return result + + def __floordiv__(self, divisor): + if not isinstance(divisor, IntegerGMP): + divisor = IntegerGMP(divisor) + if _gmp.mpz_cmp(divisor._mpz_p, + self._zero_mpz_p) == 0: + raise ZeroDivisionError("Division by zero") + result = IntegerGMP(0) + _gmp.mpz_fdiv_q(result._mpz_p, + self._mpz_p, + divisor._mpz_p) + return result + + def __mod__(self, divisor): + if not isinstance(divisor, IntegerGMP): + divisor = IntegerGMP(divisor) + comp = _gmp.mpz_cmp(divisor._mpz_p, + self._zero_mpz_p) + if comp == 0: + raise ZeroDivisionError("Division by zero") + if comp < 0: + raise ValueError("Modulus must be positive") + result = IntegerGMP(0) + _gmp.mpz_mod(result._mpz_p, + self._mpz_p, + divisor._mpz_p) + return result + + def inplace_pow(self, exponent, modulus=None): + + if modulus is None: + if exponent < 0: + raise ValueError("Exponent must not be negative") + + # Normal exponentiation + if exponent > 256: + raise ValueError("Exponent is too big") + _gmp.mpz_pow_ui(self._mpz_p, + self._mpz_p, # Base + c_ulong(int(exponent)) + ) + else: + # Modular exponentiation + if not isinstance(modulus, IntegerGMP): + modulus = IntegerGMP(modulus) + if not modulus: + raise ZeroDivisionError("Division by zero") + if modulus.is_negative(): + raise ValueError("Modulus must be positive") + if is_native_int(exponent): + if exponent < 0: + raise ValueError("Exponent must not be negative") + if exponent < 65536: + _gmp.mpz_powm_ui(self._mpz_p, + self._mpz_p, + c_ulong(exponent), + modulus._mpz_p) + return self + exponent = IntegerGMP(exponent) + elif exponent.is_negative(): + raise ValueError("Exponent must not be negative") + _gmp.mpz_powm(self._mpz_p, + self._mpz_p, + exponent._mpz_p, + modulus._mpz_p) + return self + + def __pow__(self, exponent, modulus=None): + result = IntegerGMP(self) + return result.inplace_pow(exponent, modulus) + + def __abs__(self): + result = 
IntegerGMP(0) + _gmp.mpz_abs(result._mpz_p, self._mpz_p) + return result + + def sqrt(self, modulus=None): + """Return the largest Integer that does not + exceed the square root""" + + if modulus is None: + if self < 0: + raise ValueError("Square root of negative value") + result = IntegerGMP(0) + _gmp.mpz_sqrt(result._mpz_p, + self._mpz_p) + else: + if modulus <= 0: + raise ValueError("Modulus must be positive") + modulus = int(modulus) + result = IntegerGMP(self._tonelli_shanks(int(self) % modulus, modulus)) + + return result + + def __iadd__(self, term): + if is_native_int(term): + if 0 <= term < 65536: + _gmp.mpz_add_ui(self._mpz_p, + self._mpz_p, + c_ulong(term)) + return self + if -65535 < term < 0: + _gmp.mpz_sub_ui(self._mpz_p, + self._mpz_p, + c_ulong(-term)) + return self + term = IntegerGMP(term) + _gmp.mpz_add(self._mpz_p, + self._mpz_p, + term._mpz_p) + return self + + def __isub__(self, term): + if is_native_int(term): + if 0 <= term < 65536: + _gmp.mpz_sub_ui(self._mpz_p, + self._mpz_p, + c_ulong(term)) + return self + if -65535 < term < 0: + _gmp.mpz_add_ui(self._mpz_p, + self._mpz_p, + c_ulong(-term)) + return self + term = IntegerGMP(term) + _gmp.mpz_sub(self._mpz_p, + self._mpz_p, + term._mpz_p) + return self + + def __imul__(self, term): + if is_native_int(term): + if 0 <= term < 65536: + _gmp.mpz_mul_ui(self._mpz_p, + self._mpz_p, + c_ulong(term)) + return self + if -65535 < term < 0: + _gmp.mpz_mul_ui(self._mpz_p, + self._mpz_p, + c_ulong(-term)) + _gmp.mpz_neg(self._mpz_p, self._mpz_p) + return self + term = IntegerGMP(term) + _gmp.mpz_mul(self._mpz_p, + self._mpz_p, + term._mpz_p) + return self + + def __imod__(self, divisor): + if not isinstance(divisor, IntegerGMP): + divisor = IntegerGMP(divisor) + comp = _gmp.mpz_cmp(divisor._mpz_p, + divisor._zero_mpz_p) + if comp == 0: + raise ZeroDivisionError("Division by zero") + if comp < 0: + raise ValueError("Modulus must be positive") + _gmp.mpz_mod(self._mpz_p, + self._mpz_p, + divisor._mpz_p) 
+ return self + + # Boolean/bit operations + def __and__(self, term): + result = IntegerGMP(0) + if not isinstance(term, IntegerGMP): + term = IntegerGMP(term) + _gmp.mpz_and(result._mpz_p, + self._mpz_p, + term._mpz_p) + return result + + def __or__(self, term): + result = IntegerGMP(0) + if not isinstance(term, IntegerGMP): + term = IntegerGMP(term) + _gmp.mpz_ior(result._mpz_p, + self._mpz_p, + term._mpz_p) + return result + + def __rshift__(self, pos): + result = IntegerGMP(0) + if pos < 0: + raise ValueError("negative shift count") + if pos > 65536: + if self < 0: + return -1 + else: + return 0 + _gmp.mpz_tdiv_q_2exp(result._mpz_p, + self._mpz_p, + c_ulong(int(pos))) + return result + + def __irshift__(self, pos): + if pos < 0: + raise ValueError("negative shift count") + if pos > 65536: + if self < 0: + return -1 + else: + return 0 + _gmp.mpz_tdiv_q_2exp(self._mpz_p, + self._mpz_p, + c_ulong(int(pos))) + return self + + def __lshift__(self, pos): + result = IntegerGMP(0) + if not 0 <= pos < 65536: + raise ValueError("Incorrect shift count") + _gmp.mpz_mul_2exp(result._mpz_p, + self._mpz_p, + c_ulong(int(pos))) + return result + + def __ilshift__(self, pos): + if not 0 <= pos < 65536: + raise ValueError("Incorrect shift count") + _gmp.mpz_mul_2exp(self._mpz_p, + self._mpz_p, + c_ulong(int(pos))) + return self + + def get_bit(self, n): + """Return True if the n-th bit is set to 1. 
+ Bit 0 is the least significant.""" + + if self < 0: + raise ValueError("no bit representation for negative values") + if n < 0: + raise ValueError("negative bit count") + if n > 65536: + return 0 + return bool(_gmp.mpz_tstbit(self._mpz_p, + c_ulong(int(n)))) + + # Extra + def is_odd(self): + return _gmp.mpz_tstbit(self._mpz_p, 0) == 1 + + def is_even(self): + return _gmp.mpz_tstbit(self._mpz_p, 0) == 0 + + def size_in_bits(self): + """Return the minimum number of bits that can encode the number.""" + + if self < 0: + raise ValueError("Conversion only valid for non-negative numbers") + return _gmp.mpz_sizeinbase(self._mpz_p, 2) + + def size_in_bytes(self): + """Return the minimum number of bytes that can encode the number.""" + return (self.size_in_bits() - 1) // 8 + 1 + + def is_perfect_square(self): + return _gmp.mpz_perfect_square_p(self._mpz_p) != 0 + + def fail_if_divisible_by(self, small_prime): + """Raise an exception if the small prime is a divisor.""" + + if is_native_int(small_prime): + if 0 < small_prime < 65536: + if _gmp.mpz_divisible_ui_p(self._mpz_p, + c_ulong(small_prime)): + raise ValueError("The value is composite") + return + small_prime = IntegerGMP(small_prime) + if _gmp.mpz_divisible_p(self._mpz_p, + small_prime._mpz_p): + raise ValueError("The value is composite") + + def multiply_accumulate(self, a, b): + """Increment the number by the product of a and b.""" + + if not isinstance(a, IntegerGMP): + a = IntegerGMP(a) + if is_native_int(b): + if 0 < b < 65536: + _gmp.mpz_addmul_ui(self._mpz_p, + a._mpz_p, + c_ulong(b)) + return self + if -65535 < b < 0: + _gmp.mpz_submul_ui(self._mpz_p, + a._mpz_p, + c_ulong(-b)) + return self + b = IntegerGMP(b) + _gmp.mpz_addmul(self._mpz_p, + a._mpz_p, + b._mpz_p) + return self + + def set(self, source): + """Set the Integer to have the given value""" + + if not isinstance(source, IntegerGMP): + source = IntegerGMP(source) + _gmp.mpz_set(self._mpz_p, + source._mpz_p) + return self + + def 
inplace_inverse(self, modulus): + """Compute the inverse of this number in the ring of + modulo integers. + + Raise an exception if no inverse exists. + """ + + if not isinstance(modulus, IntegerGMP): + modulus = IntegerGMP(modulus) + + comp = _gmp.mpz_cmp(modulus._mpz_p, + self._zero_mpz_p) + if comp == 0: + raise ZeroDivisionError("Modulus cannot be zero") + if comp < 0: + raise ValueError("Modulus must be positive") + + result = _gmp.mpz_invert(self._mpz_p, + self._mpz_p, + modulus._mpz_p) + if not result: + raise ValueError("No inverse value can be computed") + return self + + def inverse(self, modulus): + result = IntegerGMP(self) + result.inplace_inverse(modulus) + return result + + def gcd(self, term): + """Compute the greatest common denominator between this + number and another term.""" + + result = IntegerGMP(0) + if is_native_int(term): + if 0 < term < 65535: + _gmp.mpz_gcd_ui(result._mpz_p, + self._mpz_p, + c_ulong(term)) + return result + term = IntegerGMP(term) + _gmp.mpz_gcd(result._mpz_p, self._mpz_p, term._mpz_p) + return result + + def lcm(self, term): + """Compute the least common multiplier between this + number and another term.""" + + result = IntegerGMP(0) + if not isinstance(term, IntegerGMP): + term = IntegerGMP(term) + _gmp.mpz_lcm(result._mpz_p, self._mpz_p, term._mpz_p) + return result + + @staticmethod + def jacobi_symbol(a, n): + """Compute the Jacobi symbol""" + + if not isinstance(a, IntegerGMP): + a = IntegerGMP(a) + if not isinstance(n, IntegerGMP): + n = IntegerGMP(n) + if n <= 0 or n.is_even(): + raise ValueError("n must be positive even for the Jacobi symbol") + return _gmp.mpz_jacobi(a._mpz_p, n._mpz_p) + + # Clean-up + def __del__(self): + + try: + if self._mpz_p is not None: + if self._initialized: + _gmp.mpz_clear(self._mpz_p) + + self._mpz_p = None + except AttributeError: + pass diff --git a/env/Lib/site-packages/Crypto/Math/_IntegerGMP.pyi b/env/Lib/site-packages/Crypto/Math/_IntegerGMP.pyi new file mode 100644 index 
0000000..2181b47 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Math/_IntegerGMP.pyi @@ -0,0 +1,3 @@ +from ._IntegerBase import IntegerBase +class IntegerGMP(IntegerBase): + pass diff --git a/env/Lib/site-packages/Crypto/Math/_IntegerNative.py b/env/Lib/site-packages/Crypto/Math/_IntegerNative.py new file mode 100644 index 0000000..07bf1c6 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Math/_IntegerNative.py @@ -0,0 +1,380 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +from ._IntegerBase import IntegerBase + +from Crypto.Util.number import long_to_bytes, bytes_to_long + + +class IntegerNative(IntegerBase): + """A class to model a natural integer (including zero)""" + + def __init__(self, value): + if isinstance(value, float): + raise ValueError("A floating point type is not a natural number") + try: + self._value = value._value + except AttributeError: + self._value = value + + # Conversions + def __int__(self): + return self._value + + def __str__(self): + return str(int(self)) + + def __repr__(self): + return "Integer(%s)" % str(self) + + # Only Python 2.x + def __hex__(self): + return hex(self._value) + + # Only Python 3.x + def __index__(self): + return int(self._value) + + def to_bytes(self, block_size=0): + if self._value < 0: + raise ValueError("Conversion only valid for non-negative numbers") + result = long_to_bytes(self._value, block_size) + if len(result) > block_size > 0: + raise ValueError("Value too large to encode") + return result + + @classmethod + def from_bytes(cls, byte_string): + return cls(bytes_to_long(byte_string)) + + # Relations + def __eq__(self, term): + if term is None: + return False + return self._value == int(term) + + def __ne__(self, term): + return not self.__eq__(term) + + def __lt__(self, term): + return self._value < int(term) + + def __le__(self, term): + return self.__lt__(term) or self.__eq__(term) + + def __gt__(self, term): + return not self.__le__(term) + + def __ge__(self, term): + return not self.__lt__(term) + + def __nonzero__(self): + return self._value != 0 + __bool__ = __nonzero__ + + def is_negative(self): + return self._value < 0 + + # Arithmetic operations + def __add__(self, term): + try: + return self.__class__(self._value + int(term)) + except (ValueError, AttributeError, TypeError): + return NotImplemented + + def __sub__(self, term): + try: + return self.__class__(self._value - int(term)) + except 
(ValueError, AttributeError, TypeError): + return NotImplemented + + def __mul__(self, factor): + try: + return self.__class__(self._value * int(factor)) + except (ValueError, AttributeError, TypeError): + return NotImplemented + + def __floordiv__(self, divisor): + return self.__class__(self._value // int(divisor)) + + def __mod__(self, divisor): + divisor_value = int(divisor) + if divisor_value < 0: + raise ValueError("Modulus must be positive") + return self.__class__(self._value % divisor_value) + + def inplace_pow(self, exponent, modulus=None): + exp_value = int(exponent) + if exp_value < 0: + raise ValueError("Exponent must not be negative") + + if modulus is not None: + mod_value = int(modulus) + if mod_value < 0: + raise ValueError("Modulus must be positive") + if mod_value == 0: + raise ZeroDivisionError("Modulus cannot be zero") + else: + mod_value = None + self._value = pow(self._value, exp_value, mod_value) + return self + + def __pow__(self, exponent, modulus=None): + result = self.__class__(self) + return result.inplace_pow(exponent, modulus) + + def __abs__(self): + return abs(self._value) + + def sqrt(self, modulus=None): + + value = self._value + if modulus is None: + if value < 0: + raise ValueError("Square root of negative value") + # http://stackoverflow.com/questions/15390807/integer-square-root-in-python + + x = value + y = (x + 1) // 2 + while y < x: + x = y + y = (x + value // x) // 2 + result = x + else: + if modulus <= 0: + raise ValueError("Modulus must be positive") + result = self._tonelli_shanks(self % modulus, modulus) + + return self.__class__(result) + + def __iadd__(self, term): + self._value += int(term) + return self + + def __isub__(self, term): + self._value -= int(term) + return self + + def __imul__(self, term): + self._value *= int(term) + return self + + def __imod__(self, term): + modulus = int(term) + if modulus == 0: + raise ZeroDivisionError("Division by zero") + if modulus < 0: + raise ValueError("Modulus must be 
positive") + self._value %= modulus + return self + + # Boolean/bit operations + def __and__(self, term): + return self.__class__(self._value & int(term)) + + def __or__(self, term): + return self.__class__(self._value | int(term)) + + def __rshift__(self, pos): + try: + return self.__class__(self._value >> int(pos)) + except OverflowError: + if self._value >= 0: + return 0 + else: + return -1 + + def __irshift__(self, pos): + try: + self._value >>= int(pos) + except OverflowError: + if self._value >= 0: + return 0 + else: + return -1 + return self + + def __lshift__(self, pos): + try: + return self.__class__(self._value << int(pos)) + except OverflowError: + raise ValueError("Incorrect shift count") + + def __ilshift__(self, pos): + try: + self._value <<= int(pos) + except OverflowError: + raise ValueError("Incorrect shift count") + return self + + def get_bit(self, n): + if self._value < 0: + raise ValueError("no bit representation for negative values") + try: + try: + result = (self._value >> n._value) & 1 + if n._value < 0: + raise ValueError("negative bit count") + except AttributeError: + result = (self._value >> n) & 1 + if n < 0: + raise ValueError("negative bit count") + except OverflowError: + result = 0 + return result + + # Extra + def is_odd(self): + return (self._value & 1) == 1 + + def is_even(self): + return (self._value & 1) == 0 + + def size_in_bits(self): + + if self._value < 0: + raise ValueError("Conversion only valid for non-negative numbers") + + if self._value == 0: + return 1 + + bit_size = 0 + tmp = self._value + while tmp: + tmp >>= 1 + bit_size += 1 + + return bit_size + + def size_in_bytes(self): + return (self.size_in_bits() - 1) // 8 + 1 + + def is_perfect_square(self): + if self._value < 0: + return False + if self._value in (0, 1): + return True + + x = self._value // 2 + square_x = x ** 2 + + while square_x > self._value: + x = (square_x + self._value) // (2 * x) + square_x = x ** 2 + + return self._value == x ** 2 + + def 
fail_if_divisible_by(self, small_prime): + if (self._value % int(small_prime)) == 0: + raise ValueError("Value is composite") + + def multiply_accumulate(self, a, b): + self._value += int(a) * int(b) + return self + + def set(self, source): + self._value = int(source) + + def inplace_inverse(self, modulus): + modulus = int(modulus) + if modulus == 0: + raise ZeroDivisionError("Modulus cannot be zero") + if modulus < 0: + raise ValueError("Modulus cannot be negative") + r_p, r_n = self._value, modulus + s_p, s_n = 1, 0 + while r_n > 0: + q = r_p // r_n + r_p, r_n = r_n, r_p - q * r_n + s_p, s_n = s_n, s_p - q * s_n + if r_p != 1: + raise ValueError("No inverse value can be computed" + str(r_p)) + while s_p < 0: + s_p += modulus + self._value = s_p + return self + + def inverse(self, modulus): + result = self.__class__(self) + result.inplace_inverse(modulus) + return result + + def gcd(self, term): + r_p, r_n = abs(self._value), abs(int(term)) + while r_n > 0: + q = r_p // r_n + r_p, r_n = r_n, r_p - q * r_n + return self.__class__(r_p) + + def lcm(self, term): + term = int(term) + if self._value == 0 or term == 0: + return self.__class__(0) + return self.__class__(abs((self._value * term) // self.gcd(term)._value)) + + @staticmethod + def jacobi_symbol(a, n): + a = int(a) + n = int(n) + + if n <= 0: + raise ValueError("n must be a positive integer") + + if (n & 1) == 0: + raise ValueError("n must be even for the Jacobi symbol") + + # Step 1 + a = a % n + # Step 2 + if a == 1 or n == 1: + return 1 + # Step 3 + if a == 0: + return 0 + # Step 4 + e = 0 + a1 = a + while (a1 & 1) == 0: + a1 >>= 1 + e += 1 + # Step 5 + if (e & 1) == 0: + s = 1 + elif n % 8 in (1, 7): + s = 1 + else: + s = -1 + # Step 6 + if n % 4 == 3 and a1 % 4 == 3: + s = -s + # Step 7 + n1 = n % a1 + # Step 8 + return s * IntegerNative.jacobi_symbol(n1, a1) diff --git a/env/Lib/site-packages/Crypto/Math/_IntegerNative.pyi b/env/Lib/site-packages/Crypto/Math/_IntegerNative.pyi new file mode 100644 index 
0000000..3f65a39 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Math/_IntegerNative.pyi @@ -0,0 +1,3 @@ +from ._IntegerBase import IntegerBase +class IntegerNative(IntegerBase): + pass diff --git a/env/Lib/site-packages/Crypto/Math/__init__.py b/env/Lib/site-packages/Crypto/Math/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/env/Lib/site-packages/Crypto/Math/_modexp.pyd b/env/Lib/site-packages/Crypto/Math/_modexp.pyd new file mode 100644 index 0000000000000000000000000000000000000000..8716ef27afa5e9b244286edb0822f41b2ba5ed3c GIT binary patch literal 32768 zcmeHw4SZD9weOkCOqhIe29lAeSO*zuG=hPlH8@aa$PDZQXCQ^}p@0dHAgPeV$qa&@ z!AaVTyVKHMTkUOcuX^o$zCL@Uy*`Ro+YAXLdM$hcE_g-gztiASHYp=cc!GH61$tX#Z36G{p(k_Jb=N7+z{bvB#qpy5+ zwDfew3lnx3)E6d{ZK$rd*9B^C4XoK@U$>^FrZ#9_TV)S~YV6fDcJHDk_D!{wRa0_u zG98JcFKI4)esRyY&Lp3eKl;s?Wr)A~@a(gt2t+Pu+7(1hfGevxH^|}pIN3utyK1r(lZo0JX$q5z7v_9!7dsf=$ z@zOG+T&JhDjYDV^aD#yYg3}}^T_g>KQYR9Ki1FBT0=Ak;}xL27)&dsBi{TY^X{Sc)HN zGW9*t4?yD2E=d(r0+nlmYb5ExTqL1PDG$%hcvAk{V4EpAn#8h@u^kWfp$1RNpBvqn zQl}$|K0bm>3m$4n%AcFc3)BbJAx+R^2LRAsx*Jc*pIhKnZL9?%(THdz@Ic$tKer^! zNsa#(48kp0y3AANSZ-rYv&)N27PVP` z#G*O>9f4*$P=QF(Ik_=_k?}cCu*ltxQiZpvk&TWD)?X#5e50dIjV{v^!zfSmE(7C! 
zr0w_T9`Ny2e{PEse(SuV?N(i{$lsj`PL;28OY7ImS2m(DNmBS8){o|Nph(5I&(9C| zbMe+b)rdECqY&0#`MJc5F_C;=Fj+B}G`y>)>NtrqI{Z{EifK`e`&Abj(n|RjQ3jPh zn5-DQnne^s$vV}RzJJ@JBxOy3JwV!u8kNSB`;h!k6Zj;L2k8d4KJgdo# zw*ZkvRyo|BKL7$4bTEa#rV1V|d_#@S6twj77yX6j{l*LHw>$SK{6p1svc6sc8%5_1 zFj3}V)!5=MJg|Pd!qevWJB)s6voXe8F>0@3a1SUm+ym%^ph39D5PF$4?F&v>6wMwd z7=Do74H?cBg^%$^6$^76QF%=MaSJhwt21y)(OQ-8Zjh`OdHwSJwVI}>Lqyb)8W7aes7ti9d2n-@>R#@$*iFs2}T{ zts0N|U7aj^OmgmH#unApCI2`!KWZ9-UMLat6gf=Wi<$#JV;FPBnof|Ma0ek8&J+2f zw)gI$AZI7oQM2t>g$Rw$EuK~7VsvPP=x=C#Ggr-%Gx_CYA^gH{&X zXYF|imI<&iiUmzagEPYK7=i`#b4mD9Lojof7_x9%L2LY8XqjGfd=Ub9XA9~qXjM46 z(pJzTqhbE07WpR_2}JV~BW|Iwwvw>2>ukdK2Vf27zW5mw(%2$5cHqT$$Sg?@bTxY& zZujpE;aR_FLZaNX2fyv+e?Wwq5N`oeV@q&R{BQkl#-E??7X=n;8lv73ZF&J#Zj?bQGXQEi^lx5BW*JZiV;pPccMIM?>-qTO)M)eWRQ$J!H{E=V;*p!yP`x&CkIm zVIXs6yVp@fqtlzG*%l#@cSVo_MriyUQb9HT3P=`zn0FT3!1~4G2|YsaY{;!!E5klMxx0X z(G>=aKv8dA5?8bR1V!j==uLN?pj_T6bTdZ7?YX7=7MfOK+yp-hHLmah{wm||W0Hi- z@t=bTDsPAMB?cK}0!+*Do*$y=c;CH5*Z#+y`!rkD3SzVan(fo&G>1(h+VO0LNuqgW zU&*vd1IS?!)d4~DZt*i@AjfSO@1~aEsAq^3U$_Q5{de~u`PYas?gi6D%y(kMSd`#! 
z3ud1=lj7XGj)I8!MMw<7bOY|ftL^50K?>E{Zhis~4Q0FecL``0S&Qtp7MeW2rZ?M7 z2C8`|o#$M82ht;|$(~Gv<&l$Q6b_#|t+ZXJ&L?RvbD;yycGvqD4JnG>+nXJyV9dj?&`A4;Eyh;yhaCVOKb z|N9|ifm0ak??m)fieltf!|HgqfSAh4!n2lu2c%HCX1g2}4N2mIplfvlUeG3GA_Ke` z&AAEj9tqQ%Ma`=Lw+@CVFa9=~!on6u{4Y9?hPnz`h0KrtH;7^odO{WBrb$Jpka1?| zH8WR7Fwfc2$K>DUI8b2d%rsr*?=bZC(|n+EZ`0o$72gSY)O%@s3E~i&Kh={>(^`5* z#eay%4uhzW-huF+1wMoD|A}5+vaI`n*Vq!Gm&V2edSq0*8bI&6DeuRZBPG5VKZ!(o zpJXb7Eeu%}bmS){=qDu1Fh_ghZ4iQxESH2oGla$n(ZTo&jJIpHzsHJ<*lGn=Vdm&F zcO!}KASQ{JzVxo>>wTh-pcTY+KsHl{dj5p8_Kf^rcl%|EYuyr|J z08qXL3XJ==%^+VusC-uzB*^+1p8QamTDV*Oy(f=FzYSZw6JR=vW;dwO?ENf~=TKZn zg86)Yw(5!nGWi?}tYlY90K#y0LGMZDQ3eG?oRSy5(;?kMP$u{?9buYa)BIMQ-VYp) zw}Z6Kqo7Mx>=%xC zQ6>xl(lRKIlu6XrCqD>+cpZ!?okEqOHaFxJKyY6+C~t`}WBgA-jsL>Y30kpjZA7xw zU}!;0F>6`$iiaT5SacT2-QKaVwuk@_fqhhJxD__}$v}@jQ)zzc{qqk*cw{S&2xYq?y$6* zrve}*kC?9qtRXg^_}#?NS)UnBWzpG`GF`2*wAt)D6^1 zCk*H&^1NZL(6~9zsS#g;yu5`){7Eb@DY$4OCVnw4D^#1^_!C;pE1La`bgo;|~OYRd*8rENbP(y9LmKc{SJA6GTLX zl@j$gEBr|O{4|<&a?@%`jy&h+Lkm@YPczEKY=tr)S5iHeN`6|Oe+PhvFETw#_MT=N z2U4;~I!(Lj7$i3f@3m+xG||HY)mLCK zB8Vb(B#OwO)`w>s(Q#9-z|R-KL-n$s&$9!IL}!u0=M|`tN=R_T?D1V!VbWg%NzD(T zk#@E4T*`W$8?@aJJEIvK7#`PhQ{X*?AJlB#WD{HfQphRNBfdve9;f!^kTZ!9NrRcd zyKRwaRATHervf6rI4y#QAub?9zT5&iMLhE10)Yo6O)c%~K%#lg{N$%v1QWfF`ho%4 z7o3Kk5bT+;+_ZU;0Kd>5NjP+pU3y35KKS%rfL|dv!Fg1?c&Wc|5BzH^(SLZYlKlsA zJ}61Z4u8|R;FXCt#`z2PZode=ZcW49N2A=NFYDH)wkN|#9AB#P$~{(p(`mW!I?{_Y z?)%YRI z20xdS==8k{qBGCLm%R(I#Y^0@QsN63?;ztEu%_9VosSEYEmYWKwq`bKP;{RYavAB*1+%&tRA86YU5w z9A?--nq;K$#emr36S2|KTlP1h#xJG$_&1?`{xw*F;2y0-r;c??B#Z!T&P97u1Dd~Szxja9beLW-h zQ3eKg*H{|i!o5@x2C6{euQ1XNfbH==Va*C1l*Xd>VNHC&62O=)gQIG<((iCELy4Ol zzB5WFMfiRDE3p>Dasg2TSb9bY<{faI$QBtaq&)n4_5C8!o zAFg#O*8rxTfkz-v2|7iR71;+Xs-Oy}o*5+-DHRoC*z zx|9-%2TRy8R04L5LlrI-B^Dib_SY-ai#mfAPi`3|mj- zn;zkP(BN|zzvp4+KB&+JhM5O%s?F0-EQMq(#agyptAL`cb>D4^m8O_YF`WTm5}7m@Tr^ zC#`4lz1TO2LvZ+a$zyySLn9)`=6+c6n(Ao1GJlj zT`1TcFTrMr@1KmC!)Fbl(_WIqKJ;>|NBAf`XmZ5YCJK^w`rACNKJaa)hj+?zUiR?4 
z*Y5Wg?)K_+`P$~nI~^8bD|n8*%l9Aa);hcsy9+-EPD42!-u4W2Rg~qv_E3quGjH^< zcle=W-Cq8dH>+DzSh$x8LzSeyH#m=S-!7PxNMpO)c-h05)x+!Z`rmgR^+pwq4|pMd zE248XV|#x;(6+#&3Hsn3J~KB%lEo2c;z|mvF>z_{QEqa@0WKTm7ndg z#Cj|q`Kg1(BOds4JH*hY&)Y{jj{ zX51|+$+tm&w5Ck`6aEeoBvwC%|y z>#8=@)~n7=wkup)S+gckTd6ub7#7s}F%rM=h?>=cx6_nZEXVKMk3ComQk2+kJ5mja zsJt@?v0e35!L4g6tAc*sr8-|#on4CYgc57Ds?HON@wnf33?+?God=L+^E+Q-dDY8@P!#=@maz(zXxJpqZk0YFo!(0WHXG#mjmXUBh2XX^yA~uj%-r zJ!man6AesMbJ6le%UXx^r0cKYC)<ThK?;Ks{rT zQKDUmhK?+S5Q)emsqk-YnZT zV6e&GVo@XI(7|BG1e;>LG>l;xPh*jkq>*uzzqWJ_<0xikp3ZZuXsS_P^ci|4Sr5M-VLnO6W@#hORZV?$J7C0q>F2=v_Akn-qv0P@nGrkm?!oU>Ni@gcS?+|a0hdES8I#si!Q3?tC zPi{mLVa_0i#?WaSY^k=DddkaJw>q`<5-T-7a$6p}YYl*WP4CH#j{_i!mqp=xf+xpK z3*;gwdoO?6dCn6%ogSU}Ja8n{IMK`B3x96%%Jbe4{)`ffuwBM6<12II-)9%$M9Q;q z3+9ac)Lvsp_)R(XW}0F=7|((mOWR9oE_}fnXa{RHpuF&&^Ry=0qO~hnC?%RCnA!!a zD9}Jl;v)DmU>}m=#)kga!REVQm_nN;I+e;t3H_8fyOHnZo@|xx^ZbDZolL28%>#q7 zm2b81SNKg_I!_A(#cz4J$W!@5l=SMSzb}Xc6ujiJ9QR*8pJC409@$EHx;?B zJ?uFa_rS4!*c0(skG2|9jupY(j(kr^l~IIQEW3G46-C^^&oZ?}4QSYfOIX z7>q5&cuD+bSWNH&=*AXVT^kBNG=8S-@wna%z>Y>;GzXIwL*jf@g8moT&lGaA2b*J? zrz?fWao~y>55SKYf3Hw%w3IQ}drc>TtYL-}n#W+j1EX*ccDMuYIa_*fWQCtG{u0*S zqg37+-=)8FC4;nrag<9o|5yfNA1>3vL(Fwpyky=5UcpjAoN5@hH##^7)Ltv?zhcr0 zmk@>{F&1L_gYBMnJObwN^B%~5#@`v?x#(R1PK@TfhyeRxHxOGsC#G+zKkSz}()R?? zGlZ0s%;dx1Z>X?5W2Yde4m0CPoGNQoqmB<1*O^d`zG#p8j%!+Po*({J68_|PzRa{FmZcpiM0?NBTkv=EAQ>#iof?Mg%j8Us!#+jeWhPX_qv%tJtH z4ddGso`(nRItC|?hnzfJbj}f$-|Yiv6IG34tV{@M8WfNK-EQ|c;S}QuI;25VXcbi8 z`_-r^T_?i;9nDN3!%=iR+3Q!}gH8uISY)cedPS!K+)%p}7sUB7bVQ9#2NPFYK@&A{ zyCvB*v^ov~ut>F4CzZ-Q`AKrL(m2o{G&&lnrLaFB$A;6U74)=&Hg-kO2EQyxo4HAO z4*K9d&_^+>YQxH_jDA4i1chK)5QUC2RLwAI{B{~32spjk6jDLx{7uv{j3kzSkdEj% zI0CLk^kaxlMCbyY&`@K7&_p=B0Yr9wL@9)h*$u8>AOo6eClbt#!X5EzF5_7(gC3e;f{!e4j=(^o_{FUrhn*>YeBfEX{^WD>B(x*_IoDo2C6p; zz9~#Ye?IW(J$K@zFUN%ENnjR#B_{k$0<-9%n5Y-E6N<|(p(mHOguBzScB5UC9R4H? 
zJ#6=7k>Kq=$u4hU{asAMdbv?3#Si6s9~S^^9#*WEH=f4Hq!-uACp!_gt(Q;4*2}$} zIFe+gu0v%Z=`~Pdu9d8k4GhM49WW2#jSgTQ2j&VZyZX~rU5Sq zd**jK(p=<1DPAv-dSKWmxgRU|O5x%ZJ9h`*NrfqM9j5xnp-eA79KWX%3J#mNFX+DL z3oy&E$WSBoZVx}C@+?dgHR1$~;UmQ1Sz2S>SSQ5diJ|whK31t6D3v`H* z`!dd=6GraK%&-LW36BeVwhxMq;*dICrArg!;5U>&?k>nzT^;^h9974)kiEehQuHaY zZKrB5;S_B)&WP{THyE_`Qp03pd4PNR+Aom8TToY?Zj0RHL@UoWOMDP(T@HNHTOo3IK&2*fR}u*kSt$puuzs_ zM+CMdkpx{vIhgHeF%JHxw)kk0#IP4ci^n8ej9UfJSm-EG7+u;eTKwuzi(geEEA1m& zT!0#(03foH1TNOKYeK(&Wqb9uQYnHR>hp)U_lqn6@+#46BhG!Y$O33joxU(6_5x0} z78ql(MFflj+Fc@^pkWa^WkuN9D)Q$ZLVxiayv`$zD(UUTNF7lP*+UGmPZ%S3vx9b= z-eBRo+(K74bX}3QP|S^1KTV()uuhrJvrkb|_&8ltC=nlK-*U@BzUq`k;h!mmpF&08 zOsB$M2IUi40$9l0JD&$nLH^fb2oI@+FF-%|_{&0oxBI|xq$nW6NdZBD;vI-Z6oL%g zEjgnL@cG<(DqjfjdHKMA23b(7xX5Z>P;_zey@*OdcEK=-31OYpBINa6U1*{Z$W}rG z4+t5ri|Zs}QnX??WFc8eocSOtAoL()A?6n7Vs3=E-RsSQJm`Oejg+=R$U@}m1|y{H z2}s5Rke|nNiQ9WbNLrK&vmWAA)MwRkPIf_h9w6z7t=KP-nsad>33-dsPJ1uIQEnUVaZ zIPI9L{6WVfz^4;qy(&M&Vt>X~_E{ErQWOpcq@;vTUy#4cfyDS(Se=)1n7v5mqfnk- zehSQtrLEHcCU_gE^B@f|OApaHknZdh9#-V1un&fnwh#Nt(DEuasO6`q3AEbA4vjs2 zc?aDQ(widgScUgk71#N|Pt}Cekd~^{amMwAooh9apJ)Nz!us8W{%2D?yIFKe8r3XH z{RTS7!uLpGV*E^38n-VH6JtIC&GvKwMP{C)CI1qYzl}4C{=&Cae#T#T)=zury}Eo` z!1MB)xYn0Ox6&|K{kkqFY{l)q1rhJH1K@NxvR%hjejMyh#!vV?L#P9mr1*VD{LPtF3+6+M(zE{as}aRn|Mu*e2XSWFxisq4e0Wn@d-si~2QZd}GxBOAK> z@C-%jI-yKyl5Aj2?*+He9AIi>rUUE8okYlG!o@$TO*4myMFagkl51?PJyt+Oz>ZR zf~<-Yigw@#h=-Fkq8SBZ{BKCyFQnF36|@4aNy8EDFSZ5@Fc!D&bOz`=7sL1~ILd^{ z(SJml*vckmit*1Jxs#@kxQaPx7yf2l=qwi}H^c9)plSCAxDiGg?ODS)Rf@eZlk+Hf z`P}eU&_%`0muT{8e*_+c#oHG)hmtEsKW=rxkINMPx{8V034z32D$Dz{K{!6Ka9HK*!{<_wyhvv4Ls6G#!rO8$xi(tpPXGk98E&b zM~CpRX{%yIL=|D{5xzJ~dd!XC{?7eqGZwwLp<0R)6OFBu{zooifNI6{6LFY-LIlo1K z`#=*(!fXUl;1nIY&4~NcxSu9eh<$!^rmi7qNO7C6osLKzk7vC|gZMdXH$qNIO6=`X z;-Kowf(Ym;jpGUo-j(9Yv>M%%>i?ltCf!Q#O*i-F&3i$Q-kpX@zJOep5vt?E>wtM9EB2oRv0@U>+)CSIC8J@>`@Fi zR`nI6g1feb2e4ek?K_Cy-4M^NS5>&39*U`1$65G{A+*nVwD%_pNGvz4ga=mK_$7`3 zSEEBjA(DXQ37>H)3y?+w4`M?PQXN-OaZ8i-+i3G8s3`E~6b{gEqj8O(k1(#FT!nUk 
z8I0XTp&wul&5wA;4Pc*;men{Iugj|}j6JRf@24RLj|^=a4mKfCe{%o8N4lpAm*xIM zo@5Ui1ajS|Mi&pT!VeZj^LE3yyTk*JhAR^aYQRU=aZ@ZIvBaI&KJnA6E>STA4@wSy zmNwV`Y(4xq1|vQ>4@CVs+w)H#OhQJP;lY~cuf_*+j{qB#*5Ae9Jz5juG6K~+Td%pVglhiOPK+*Y z85Rxabs}Q_k?@;_q3#?U9-}*Su&bchim}!D#B7vATdhL+yifa{aIX`Maj5XuU`=t5 z{80dOLAUV-A_ZAZ0ANTtkH$N(oMhMxfeqHK^DA9|4PB1o+R(=s;X^nt9>VFLeIdFF z`WxyFSNkEGX7MMFXh9aX{_JF;$Kpi*C+J`Sz^io74sfCl<^y!-pcUXG z9kc-aI^8a@H>-}Th5bMq6BDs9q0qGW0Oxq96H%KFq{LW$T~PFiD-po~^Qh?@bM4t| z--W}-=pdaxBRxgiuOIIg+poptbdQ}C#_6_U0W-d?6w*A#>LUIm4k9W>tVYh#8TsJq zg#MCu&coEbZ=l&oViB>?%_WeW^JrJQpt%J5ngX;pmwcu}`OPKgb;#OW^0^LKnoBO= zRRmB9=*O=?%T<^U!tvU(oFF5b+~Wr{IFochmw$-?|NSg*>0i@slq+Jy%J^VJPMKtHfWH09UVJ~lwe|roX z1gpyKk1hm@cKD;&??bx6q&|s_z?i%fXTaXHs9DfVIP1|3()L*8otWz#Y8H+Z_T*Dy zYJz=aLcz#{qC|oyAYM!{hEHVRV2nRv0J(x22FJI=fNuc|$UC!y-a|v+I+S-}WCy{| zhQRYf;O9f&g&|N&fJ!8T;aqs=A2HO=Dgmm)&o7|-QOySYrZ-bZ(!@0JKwcAMZ}u1kkl*Yv3BcOyF$=)b z>`6znnmwaZTTADsNMl%OQ*$)!Pik z?7@-dW~4u_!wVjk3vv{~xdwY{fL9AxKYu{DPsj!486r*h-y=Fd5q*TJrfK?+i2xiD z{{aB%?P!{u*o_sFGxUM@lEwz)$veGQYWd(Wl@D1-9~wqI0pt(DBJEC2U&b5tT@0#-7JK-_}NC z#MuX2r*20E;OF3{p~~&Sh4?KBejUGaLgi$hLUh;B_9;vw=bM^_N|>iTofy7%w&$55 zgle?W(Si^9@X8JwCyAx2N+o6&irJdt~|V{xOnu);e$ zk%gwHNf?Bl$Xp!YxCzJf@f{6_?-ppQeLXwSP7hyZ5_U7#r6c~-r(}aHG>H;$E=GSL zpG4nb7AX?Rl&{%x^ej7rPDSo-J>hQ|Q35#^RuDaSjY)3&M?ijlFD}@Qe-90Z%7H)g zNe8JYNZH^%O~X+S7P&!*{TU`HPKi7(y614I(GV7t4fIaJ{@knEpXY@A*$(@&J=Oli z*$&#&r|GA3)!D}P+w$!#{6(}LCTW-*_C2{V(;i8 z_UC(p_9x7IU$iO>ry<}-)4Rwx{EElGrEqAZSXp<0y`h;a5y1bQPm$@D(@M$tVfZhw zu+9M>yFz~IfNo*+QRcrffsQT6Phoxx7=n`r&8#k(>Cv1B0^ENc2d0B3c3D0&pBSF_ zKG+3HWOG5vI8Oo=Ic8{3!8=75Y`L*T92Qz;64$+O2hyn7KEH(U=w?*#CgKGuSK$kb ze0;7ccojYfV^uM^u%m^I2i+W-TcEE5dvO(~HwTV)!KrZ{+K|G6ze2gFw-^P7R7?oX zb{D1ag*YC2yAIhk+pRi;eHjNbM zWRg?`uT20;(sIg8+{HgGukT+9@B7s6T;bP;h)<%Ntu;GPi&dlD7G zpF=A^GI_BUKd#W03%*kFiIALsPab2EoIe5}Y}{s%LM9IY6~b&gIw0)f8YJLM5eeMs z0L3+%^XQ%c+6bWgHZ$q{NF6*Nw->-Wgi+m)@29QmNL`nY-)>TdU%Q#|O`u0}#v#yb z6Ksg!h9epNiF;gs!3hTBA4Y)tg~(URr%RYa16dt z@GH|u6bv^gK2U@6Hipe(8fHs$%k}`% 
z;Jg0JaK~AY0_RFv>vQ$9e#2hza7EXF#55Z;+m*ETiQr&ZAIYI{9(<9Gh`nKWMM=d; zo}~($ix*bCxK(O$?(1DBG%Z~A&^<52eGo1D7Dym4>oVMcZjGM;|3YG9{DF=h0)HZl z=DkkcN?fY_xhRgbZO941^#@Eu%$ZFh6(>9^0l+n{*;a}av;g~m?W3rGo>DBEAd^WH z7pM@$4A&MQh=DE`fpL<>;w~fBAn}{=MU>ua$#wJs+A{&4Z%g%mCGU%h`?aoRrob8K zS?3A(r>qzOI&1o>07L_NUKST1FdE6R)`ui1ED_Q-Z#-p?USL&oKWzu=>o*89eZdaf z^GzYl9!8JF*#A%f&_mw^fMLugg5!!dE;Qssc^}`8vNIP z6|252;&wwTXzhFtG!GW)1b+nthoaGE9ub`CN@&2IKZ6(MntBTa!LOn{g~C^o{wKKl zg}S&Ai);(9TpfXT>1Gn{ri&>6QN9KMOe^{@)WP`ITA;_G^WcFniKE?YMnPyCE&dB} z(^YsY-V@ZMFCF3_ME`)IyDFMh$2>^NVt5$by0d|N6doNUAH^IUBp*eI4w8?;tApgD z@aZ7=DCUxn!Y%ZjySU(D{)u1V8a~5!R}g@{ID$6lpJ?5j`8@e2dVYu>9Izr!1XGN! z4S6E2!my`!BEB=sU-$3hkLWc&Pe!`9`0{BE0bq;w0q{LNe}Ofm+W~!mQdnDnAa;-? zzi)-(=CE6y!Fx~wcFp#b4#95ul@7sf`MD0kZuyabVDdB*5KesHW<&+F@kb^HSsRma zSq2sw9L>1==$qj&ZO-2=?2Q&&QK0+ay2;@&1$w^JZ(yrD#7i_&X2y-PEqq_2)h+1iF}j>=e}*lW_(FjelW&kNC*A?pZwe2pm*!>MEJ)u(fOqyHWLEZg<=jV!w0c0Ii~ z5su6k2dlr3_#dB8lxUwj5wb*B^z)1N>))$|^GeE83HD7znIwsCaXU8EP+b+3l7v;; zV49<-Nct(#emt1AMSO?50OLEQn!cs&z#Q5MJnd#ms~DE12)qr$(*$1Ku(VQvw`F*mz-t(mRwnS8ho=d=?MUl0pnZw&ixck> z59@Kqop;_@xfb1!T0)6_1cK`vibs^!F)Zz7QC??iTA7a5H7rfX>qc4&2s34i{vGn< zci5*?>2dlVdnxKo&=z(QzQ>L<`s{fk%}Hq+kd`tA(wKrN5hCj^a~0l`F(Wv zPtYG6TA8x89zDk2Bl`E=lix*`c3@19EWUs4D4GgTszBPqLusN-l>XQL4B6>Y_B2ah zdW)&TSejNe!JHDxM1N%}S(qJv0#F#^5{D z5X6(^8pfKWvC~F(XSZb5O|Y6IYh{LHo^Fvk$KrFbxstSs%B1#ulqupiq-{vlL;0!k z)OV;Fu#7auW|3^c49QkGO0rE)mu%CC$D>W$CiOCEa;-xfN7CwR;b6{|6jX)AG^81%v{ICl zXOZ$M@sy2{@=!Ga<474vUNWg5ZJa z2I(BgGGPbTUZ9#4~`ufVw!bj@6wp6J1d@+;FN_hL!<7d+W_>-`@dNBLiY zbLj-g62n9Z+NjQ4VJf0I-bd}HIeFSHS=z3W>Xu>-FO#H=i8_YIF)x89jl?54Q&uP) z#CwO9qyO|SrM);o#LptFEm0ojr^bi9LwuG}zaiT!W#2DaTWSKmC*)hC{L>c6`l40J zZj-^=qb2uBN%~hjh-&p|4Wc>E*q=ILyab`#Q1 zQ##fCBJ@7u-^Vi`>V}RV5vOtn-;wZ#_+Z&&aiVMqWlJba8d>)Ud_!g$>6;suuIonhk4eDmPYD`nIgAsw4Ed)f_HEV)Z$t3CfMx{OxsSV7nT0?Cu z4OG?RHEFTw+o7t!*3zoL`r5#zH8tzdix9dIkS2Vwbfr?7l#*JuAyBoZ5{c4>l)7YV zeXwd%S@otWPdz$NMUXVf;H|D-vlcOdtX8iLtO;x_S+j9ty;N%?93tqt9JYgHiG$_2i~3w^3{+LX$T8);otUS3j;FZryiUSGYgTqho=H3{kU 
z!OHUB*1D?l>YDYn<&{|StG3vRKXHmB$eN>roO%^0DMU*M=Niwsj4g&nV74|_$K|eZ%U}KItapTl5QA+ z%d4c>Qh9yVhVu1dNXw}M`BM41+M4y%w_+U1@d=_p?H%Q70=I64qf~!WCrL7^s%CR_ zptfcc4VN^17)EVP)fPyJAQ4!Su9C{Dwp6VP1*^)3Wn6;#2v3sM)KyR3R6iMvH+fwk zIC(?Wn!3pwr%s+aIYEUH`S^Fxf*8b96c^EO-6lwlnF6W|D6Nu~O7&8e6u|E){D$xke$|Nh8vNU4o1|*QR!JU+ z)jG*5t-(L@wFYrjs+HCO+6cG|*qe~M3Qskj$ta~tx*g$KlvWL#O2FTdzJWJ_7}X;1 z#B^ZS18*I|O~BfS9C~-7v{fpH?_|0(1$oK8AkywYjRBNYFWJGnv+%nHsWnKqONDr* z0hwA9z<*m2pCxdLl0}HnFwFb*zZ21m`-(TQZ>Q@#!oCP>&`bGZS;_QGpi#01VA2@ss~3R2>$+ZyaLevTJXGG`Z^vOiMe0#pFR@T09)r8eWbsPzosTiUPFU`zc0h9 z6Z4K{Fi8ZOpVS)oV*am?FIAv_rDz+;wR-$FMcN?x^Pm35I^?fQ_2s4d`N;pr#zK=r zqf+=|Jl6rHum#Udz!Vf2-4wk97-R6T)IVI{;Idaj7Kz0&q6M zd+NWgZ4ECExvIssQ8Y{By` z%A~L_kxrp$yj~84emvC9QiLZG@M{QzSD;Twr|@AsghOF00l$dwYgd8}TO`Sja1|aa zU<$EMCE#BmoHYUKQ>3T*;m9BN0FHaCLOO+?;;92{#iaZ7G|&ccs{bt2w?;m;J$Rl5 z4uwbYbO5H%HXUmqz;y_Jg(nU;)elBKHu965M|!HijQnUn{3ga>s~-LoFau2C<9Jp8 z-hpt*4Op)MPW6eAuk4Cqj62dPybjMJfQt}z;n@MW8{xAzf}Vg|5I*if8sHrW-@*m2 z^MLyhu7%$z|4y_YVbZUa>WfPCQ^kNYeLir3PhkL0BVY=j#Pc9v3fu8K4mj1vl`14u7=EQw$1l^EMVFjKb zm4on2JdN}&!sNPW2m+Kk43YgKkKt@IgFXfGNCUF~%1#g;VjI2b}5? 
zB45#dJdV5d@J+zQfGM<;VSE8oI3CX`z^T3$@&P@CrxEEClAq=mfK&Z7slFWY>BLu{ zZNQ;$*G-srfGK?XX0#13g&laZ??F3P4*6}!e^av>e1mifGjD+$1DxtxA%6||V>Ti^ z)h|Q-nGNNj2hu5g3(pgPDf}4EbAY9aA>T-<-{dBwe;*I|SSb809_zh&_y@oaz!bLN znGTr3SMYcNQ@D36#uhMzop^$PyAe)-*-QUE-gboKWBWc}3O~j}@@)X&609=m-)O=< zwiLiqv<3JGX}_cxFom@LF|b(=Y43wz3TfYhU@Ax%+Ba7P>fwf&b@kLKMOWLaYSz_O zR@dA*>*}Rtb0-&HZLbf)U$bT-7TL3|-da_E_3UruWX>dC!KSqvx7txaP5rE^LxGwb z>(_0l+O(#A@}}x_f!g}o^})&OYB$}urhe0u%~P+o!`)K79v+HiskKp2*C)$D9xZyb_)+(x?9qoGedN*N$4Vbt@z}s)(mz@L$@)+KU(@IR@y`D*NVj%B literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/Protocol/KDF.py b/env/Lib/site-packages/Crypto/Protocol/KDF.py new file mode 100644 index 0000000..1348265 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Protocol/KDF.py @@ -0,0 +1,574 @@ +# coding=utf-8 +# +# KDF.py : a collection of Key Derivation Functions +# +# Part of the Python Cryptography Toolkit +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +import re +import struct +from functools import reduce + +from Crypto.Util.py3compat import (tobytes, bord, _copy_bytes, iter_range, + tostr, bchr, bstr) + +from Crypto.Hash import SHA1, SHA256, HMAC, CMAC, BLAKE2s +from Crypto.Util.strxor import strxor +from Crypto.Random import get_random_bytes +from Crypto.Util.number import size as bit_size, long_to_bytes, bytes_to_long + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + create_string_buffer, + get_raw_buffer, c_size_t) + +_raw_salsa20_lib = load_pycryptodome_raw_lib("Crypto.Cipher._Salsa20", + """ + int Salsa20_8_core(const uint8_t *x, const uint8_t *y, + uint8_t *out); + """) + +_raw_scrypt_lib = load_pycryptodome_raw_lib("Crypto.Protocol._scrypt", + """ + typedef int (core_t)(const uint8_t [64], const uint8_t [64], uint8_t [64]); + int scryptROMix(const uint8_t *data_in, uint8_t *data_out, + size_t data_len, unsigned N, core_t *core); + """) + + +def PBKDF1(password, salt, dkLen, count=1000, hashAlgo=None): + """Derive one key from a password (or passphrase). + + This function performs key derivation according to an old version of + the PKCS#5 standard (v1.5) or `RFC2898 + `_. + + Args: + password (string): + The secret password to generate the key from. + salt (byte string): + An 8 byte string to use for better protection from dictionary attacks. + This value does not need to be kept secret, but it should be randomly + chosen for each derivation. + dkLen (integer): + The length of the desired key. The default is 16 bytes, suitable for + instance for :mod:`Crypto.Cipher.AES`. + count (integer): + The number of iterations to carry out. The recommendation is 1000 or + more. + hashAlgo (module): + The hash algorithm to use, as a module or an object from the :mod:`Crypto.Hash` package. + The digest length must be no shorter than ``dkLen``. + The default algorithm is :mod:`Crypto.Hash.SHA1`. 
+ + Return: + A byte string of length ``dkLen`` that can be used as key. + """ + + if not hashAlgo: + hashAlgo = SHA1 + password = tobytes(password) + pHash = hashAlgo.new(password+salt) + digest = pHash.digest_size + if dkLen > digest: + raise TypeError("Selected hash algorithm has a too short digest (%d bytes)." % digest) + if len(salt) != 8: + raise ValueError("Salt is not 8 bytes long (%d bytes instead)." % len(salt)) + for i in iter_range(count-1): + pHash = pHash.new(pHash.digest()) + return pHash.digest()[:dkLen] + + +def PBKDF2(password, salt, dkLen=16, count=1000, prf=None, hmac_hash_module=None): + """Derive one or more keys from a password (or passphrase). + + This function performs key derivation according to the PKCS#5 standard (v2.0). + + Args: + password (string or byte string): + The secret password to generate the key from. + salt (string or byte string): + A (byte) string to use for better protection from dictionary attacks. + This value does not need to be kept secret, but it should be randomly + chosen for each derivation. It is recommended to use at least 16 bytes. + dkLen (integer): + The cumulative length of the keys to produce. + + Due to a flaw in the PBKDF2 design, you should not request more bytes + than the ``prf`` can output. For instance, ``dkLen`` should not exceed + 20 bytes in combination with ``HMAC-SHA1``. + count (integer): + The number of iterations to carry out. The higher the value, the slower + and the more secure the function becomes. + + You should find the maximum number of iterations that keeps the + key derivation still acceptable on the slowest hardware you must support. + + Although the default value is 1000, **it is recommended to use at least + 1000000 (1 million) iterations**. + prf (callable): + A pseudorandom function. It must be a function that returns a + pseudorandom byte string from two parameters: a secret and a salt. + The slower the algorithm, the more secure the derivation function. 
+ If not specified, **HMAC-SHA1** is used. + hmac_hash_module (module): + A module from ``Crypto.Hash`` implementing a Merkle-Damgard cryptographic + hash, which PBKDF2 must use in combination with HMAC. + This parameter is mutually exclusive with ``prf``. + + Return: + A byte string of length ``dkLen`` that can be used as key material. + If you want multiple keys, just break up this string into segments of the desired length. + """ + + password = tobytes(password) + salt = tobytes(salt) + + if prf and hmac_hash_module: + raise ValueError("'prf' and 'hmac_hash_module' are mutually exlusive") + + if prf is None and hmac_hash_module is None: + hmac_hash_module = SHA1 + + if prf or not hasattr(hmac_hash_module, "_pbkdf2_hmac_assist"): + # Generic (and slow) implementation + + if prf is None: + prf = lambda p,s: HMAC.new(p, s, hmac_hash_module).digest() + + def link(s): + s[0], s[1] = s[1], prf(password, s[1]) + return s[0] + + key = b'' + i = 1 + while len(key) < dkLen: + s = [ prf(password, salt + struct.pack(">I", i)) ] * 2 + key += reduce(strxor, (link(s) for j in range(count)) ) + i += 1 + + else: + # Optimized implementation + key = b'' + i = 1 + while len(key)I", i)).digest() + key += base._pbkdf2_hmac_assist(first_digest, count) + i += 1 + + return key[:dkLen] + + +class _S2V(object): + """String-to-vector PRF as defined in `RFC5297`_. + + This class implements a pseudorandom function family + based on CMAC that takes as input a vector of strings. + + .. _RFC5297: http://tools.ietf.org/html/rfc5297 + """ + + def __init__(self, key, ciphermod, cipher_params=None): + """Initialize the S2V PRF. + + :Parameters: + key : byte string + A secret that can be used as key for CMACs + based on ciphers from ``ciphermod``. + ciphermod : module + A block cipher module from `Crypto.Cipher`. + cipher_params : dictionary + A set of extra parameters to use to create a cipher instance. 
+ """ + + self._key = _copy_bytes(None, None, key) + self._ciphermod = ciphermod + self._last_string = self._cache = b'\x00' * ciphermod.block_size + + # Max number of update() call we can process + self._n_updates = ciphermod.block_size * 8 - 1 + + if cipher_params is None: + self._cipher_params = {} + else: + self._cipher_params = dict(cipher_params) + + @staticmethod + def new(key, ciphermod): + """Create a new S2V PRF. + + :Parameters: + key : byte string + A secret that can be used as key for CMACs + based on ciphers from ``ciphermod``. + ciphermod : module + A block cipher module from `Crypto.Cipher`. + """ + return _S2V(key, ciphermod) + + def _double(self, bs): + doubled = bytes_to_long(bs)<<1 + if bord(bs[0]) & 0x80: + doubled ^= 0x87 + return long_to_bytes(doubled, len(bs))[-len(bs):] + + def update(self, item): + """Pass the next component of the vector. + + The maximum number of components you can pass is equal to the block + length of the cipher (in bits) minus 1. + + :Parameters: + item : byte string + The next component of the vector. + :Raise TypeError: when the limit on the number of components has been reached. + """ + + if self._n_updates == 0: + raise TypeError("Too many components passed to S2V") + self._n_updates -= 1 + + mac = CMAC.new(self._key, + msg=self._last_string, + ciphermod=self._ciphermod, + cipher_params=self._cipher_params) + self._cache = strxor(self._double(self._cache), mac.digest()) + self._last_string = _copy_bytes(None, None, item) + + def derive(self): + """"Derive a secret from the vector of components. + + :Return: a byte string, as long as the block length of the cipher. 
+ """ + + if len(self._last_string) >= 16: + # xorend + final = self._last_string[:-16] + strxor(self._last_string[-16:], self._cache) + else: + # zero-pad & xor + padded = (self._last_string + b'\x80' + b'\x00' * 15)[:16] + final = strxor(padded, self._double(self._cache)) + mac = CMAC.new(self._key, + msg=final, + ciphermod=self._ciphermod, + cipher_params=self._cipher_params) + return mac.digest() + + +def HKDF(master, key_len, salt, hashmod, num_keys=1, context=None): + """Derive one or more keys from a master secret using + the HMAC-based KDF defined in RFC5869_. + + Args: + master (byte string): + The unguessable value used by the KDF to generate the other keys. + It must be a high-entropy secret, though not necessarily uniform. + It must not be a password. + salt (byte string): + A non-secret, reusable value that strengthens the randomness + extraction step. + Ideally, it is as long as the digest size of the chosen hash. + If empty, a string of zeroes in used. + key_len (integer): + The length in bytes of every derived key. + hashmod (module): + A cryptographic hash algorithm from :mod:`Crypto.Hash`. + :mod:`Crypto.Hash.SHA512` is a good choice. + num_keys (integer): + The number of keys to derive. Every key is :data:`key_len` bytes long. + The maximum cumulative length of all keys is + 255 times the digest size. + context (byte string): + Optional identifier describing what the keys are used for. + + Return: + A byte string or a tuple of byte strings. + + .. 
_RFC5869: http://tools.ietf.org/html/rfc5869 + """ + + output_len = key_len * num_keys + if output_len > (255 * hashmod.digest_size): + raise ValueError("Too much secret data to derive") + if not salt: + salt = b'\x00' * hashmod.digest_size + if context is None: + context = b"" + + # Step 1: extract + hmac = HMAC.new(salt, master, digestmod=hashmod) + prk = hmac.digest() + + # Step 2: expand + t = [ b"" ] + n = 1 + tlen = 0 + while tlen < output_len: + hmac = HMAC.new(prk, t[-1] + context + struct.pack('B', n), digestmod=hashmod) + t.append(hmac.digest()) + tlen += hashmod.digest_size + n += 1 + derived_output = b"".join(t) + if num_keys == 1: + return derived_output[:key_len] + kol = [derived_output[idx:idx + key_len] + for idx in iter_range(0, output_len, key_len)] + return list(kol[:num_keys]) + + + +def scrypt(password, salt, key_len, N, r, p, num_keys=1): + """Derive one or more keys from a passphrase. + + Args: + password (string): + The secret pass phrase to generate the keys from. + salt (string): + A string to use for better protection from dictionary attacks. + This value does not need to be kept secret, + but it should be randomly chosen for each derivation. + It is recommended to be at least 16 bytes long. + key_len (integer): + The length in bytes of every derived key. + N (integer): + CPU/Memory cost parameter. It must be a power of 2 and less + than :math:`2^{32}`. + r (integer): + Block size parameter. + p (integer): + Parallelization parameter. + It must be no greater than :math:`(2^{32}-1)/(4r)`. + num_keys (integer): + The number of keys to derive. Every key is :data:`key_len` bytes long. + By default, only 1 key is generated. + The maximum cumulative length of all keys is :math:`(2^{32}-1)*32` + (that is, 128TB). 
+ + A good choice of parameters *(N, r , p)* was suggested + by Colin Percival in his `presentation in 2009`__: + + - *( 2¹⁴, 8, 1 )* for interactive logins (≤100ms) + - *( 2²⁰, 8, 1 )* for file encryption (≤5s) + + Return: + A byte string or a tuple of byte strings. + + .. __: http://www.tarsnap.com/scrypt/scrypt-slides.pdf + """ + + if 2 ** (bit_size(N) - 1) != N: + raise ValueError("N must be a power of 2") + if N >= 2 ** 32: + raise ValueError("N is too big") + if p > ((2 ** 32 - 1) * 32) // (128 * r): + raise ValueError("p or r are too big") + + prf_hmac_sha256 = lambda p, s: HMAC.new(p, s, SHA256).digest() + + stage_1 = PBKDF2(password, salt, p * 128 * r, 1, prf=prf_hmac_sha256) + + scryptROMix = _raw_scrypt_lib.scryptROMix + core = _raw_salsa20_lib.Salsa20_8_core + + # Parallelize into p flows + data_out = [] + for flow in iter_range(p): + idx = flow * 128 * r + buffer_out = create_string_buffer(128 * r) + result = scryptROMix(stage_1[idx : idx + 128 * r], + buffer_out, + c_size_t(128 * r), + N, + core) + if result: + raise ValueError("Error %X while running scrypt" % result) + data_out += [ get_raw_buffer(buffer_out) ] + + dk = PBKDF2(password, + b"".join(data_out), + key_len * num_keys, 1, + prf=prf_hmac_sha256) + + if num_keys == 1: + return dk + + kol = [dk[idx:idx + key_len] + for idx in iter_range(0, key_len * num_keys, key_len)] + return kol + + +def _bcrypt_encode(data): + s = "./ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" + + bits = [] + for c in data: + bits_c = bin(bord(c))[2:].zfill(8) + bits.append(bstr(bits_c)) + bits = b"".join(bits) + + bits6 = [ bits[idx:idx+6] for idx in range(0, len(bits), 6) ] + + result = [] + for g in bits6[:-1]: + idx = int(g, 2) + result.append(s[idx]) + + g = bits6[-1] + idx = int(g, 2) << (6 - len(g)) + result.append(s[idx]) + result = "".join(result) + + return tobytes(result) + + +def _bcrypt_decode(data): + s = "./ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" + + bits = [] + 
for c in tostr(data): + idx = s.find(c) + bits6 = bin(idx)[2:].zfill(6) + bits.append(bits6) + bits = "".join(bits) + + modulo4 = len(data) % 4 + if modulo4 == 1: + raise ValueError("Incorrect length") + elif modulo4 == 2: + bits = bits[:-4] + elif modulo4 == 3: + bits = bits[:-2] + + bits8 = [ bits[idx:idx+8] for idx in range(0, len(bits), 8) ] + + result = [] + for g in bits8: + result.append(bchr(int(g, 2))) + result = b"".join(result) + + return result + + +def _bcrypt_hash(password, cost, salt, constant, invert): + from Crypto.Cipher import _EKSBlowfish + + if len(password) > 72: + raise ValueError("The password is too long. It must be 72 bytes at most.") + + if not (4 <= cost <= 31): + raise ValueError("bcrypt cost factor must be in the range 4..31") + + cipher = _EKSBlowfish.new(password, _EKSBlowfish.MODE_ECB, salt, cost, invert) + ctext = constant + for _ in range(64): + ctext = cipher.encrypt(ctext) + return ctext + + +def bcrypt(password, cost, salt=None): + """Hash a password into a key, using the OpenBSD bcrypt protocol. + + Args: + password (byte string or string): + The secret password or pass phrase. + It must be at most 72 bytes long. + It must not contain the zero byte. + Unicode strings will be encoded as UTF-8. + cost (integer): + The exponential factor that makes it slower to compute the hash. + It must be in the range 4 to 31. + A value of at least 12 is recommended. + salt (byte string): + Optional. Random byte string to thwarts dictionary and rainbow table + attacks. It must be 16 bytes long. + If not passed, a random value is generated. 
+ + Return (byte string): + The bcrypt hash + + Raises: + ValueError: if password is longer than 72 bytes or if it contains the zero byte + + """ + + password = tobytes(password, "utf-8") + + if password.find(bchr(0)[0]) != -1: + raise ValueError("The password contains the zero byte") + + if len(password) < 72: + password += b"\x00" + + if salt is None: + salt = get_random_bytes(16) + if len(salt) != 16: + raise ValueError("bcrypt salt must be 16 bytes long") + + ctext = _bcrypt_hash(password, cost, salt, b"OrpheanBeholderScryDoubt", True) + + cost_enc = b"$" + bstr(str(cost).zfill(2)) + salt_enc = b"$" + _bcrypt_encode(salt) + hash_enc = _bcrypt_encode(ctext[:-1]) # only use 23 bytes, not 24 + return b"$2a" + cost_enc + salt_enc + hash_enc + + +def bcrypt_check(password, bcrypt_hash): + """Verify if the provided password matches the given bcrypt hash. + + Args: + password (byte string or string): + The secret password or pass phrase to test. + It must be at most 72 bytes long. + It must not contain the zero byte. + Unicode strings will be encoded as UTF-8. + bcrypt_hash (byte string, bytearray): + The reference bcrypt hash the password needs to be checked against. 
+ + Raises: + ValueError: if the password does not match + """ + + bcrypt_hash = tobytes(bcrypt_hash) + + if len(bcrypt_hash) != 60: + raise ValueError("Incorrect length of the bcrypt hash: %d bytes instead of 60" % len(bcrypt_hash)) + + if bcrypt_hash[:4] != b'$2a$': + raise ValueError("Unsupported prefix") + + p = re.compile(br'\$2a\$([0-9][0-9])\$([A-Za-z0-9./]{22,22})([A-Za-z0-9./]{31,31})') + r = p.match(bcrypt_hash) + if not r: + raise ValueError("Incorrect bcrypt hash format") + + cost = int(r.group(1)) + if not (4 <= cost <= 31): + raise ValueError("Incorrect cost") + + salt = _bcrypt_decode(r.group(2)) + + bcrypt_hash2 = bcrypt(password, cost, salt) + + secret = get_random_bytes(16) + + mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=bcrypt_hash).digest() + mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=bcrypt_hash2).digest() + if mac1 != mac2: + raise ValueError("Incorrect bcrypt hash") diff --git a/env/Lib/site-packages/Crypto/Protocol/KDF.pyi b/env/Lib/site-packages/Crypto/Protocol/KDF.pyi new file mode 100644 index 0000000..fb004bf --- /dev/null +++ b/env/Lib/site-packages/Crypto/Protocol/KDF.pyi @@ -0,0 +1,24 @@ +from types import ModuleType +from typing import Optional, Callable, Tuple, Union, Dict, Any + +RNG = Callable[[int], bytes] + +def PBKDF1(password: str, salt: bytes, dkLen: int, count: Optional[int]=1000, hashAlgo: Optional[ModuleType]=None) -> bytes: ... +def PBKDF2(password: str, salt: bytes, dkLen: Optional[int]=16, count: Optional[int]=1000, prf: Optional[RNG]=None, hmac_hash_module: Optional[ModuleType]=None) -> bytes: ... + +class _S2V(object): + def __init__(self, key: bytes, ciphermod: ModuleType, cipher_params: Optional[Dict[Any, Any]]=None) -> None: ... + + @staticmethod + def new(key: bytes, ciphermod: ModuleType) -> None: ... + def update(self, item: bytes) -> None: ... + def derive(self) -> bytes: ... 
+ +def HKDF(master: bytes, key_len: int, salt: bytes, hashmod: ModuleType, num_keys: Optional[int]=1, context: Optional[bytes]=None) -> Union[bytes, Tuple[bytes, ...]]: ... + +def scrypt(password: str, salt: str, key_len: int, N: int, r: int, p: int, num_keys: Optional[int]=1) -> Union[bytes, Tuple[bytes, ...]]: ... + +def _bcrypt_decode(data: bytes) -> bytes: ... +def _bcrypt_hash(password:bytes , cost: int, salt: bytes, constant:bytes, invert:bool) -> bytes: ... +def bcrypt(password: Union[bytes, str], cost: int, salt: Optional[bytes]=None) -> bytes: ... +def bcrypt_check(password: Union[bytes, str], bcrypt_hash: Union[bytes, bytearray, str]) -> None: ... diff --git a/env/Lib/site-packages/Crypto/Protocol/SecretSharing.py b/env/Lib/site-packages/Crypto/Protocol/SecretSharing.py new file mode 100644 index 0000000..a757e7c --- /dev/null +++ b/env/Lib/site-packages/Crypto/Protocol/SecretSharing.py @@ -0,0 +1,278 @@ +# +# SecretSharing.py : distribute a secret amongst a group of participants +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util.py3compat import is_native_int +from Crypto.Util import number +from Crypto.Util.number import long_to_bytes, bytes_to_long +from Crypto.Random import get_random_bytes as rng + + +def _mult_gf2(f1, f2): + """Multiply two polynomials in GF(2)""" + + # Ensure f2 is the smallest + if f2 > f1: + f1, f2 = f2, f1 + z = 0 + while f2: + if f2 & 1: + z ^= f1 + f1 <<= 1 + f2 >>= 1 + return z + + +def _div_gf2(a, b): + """ + Compute division of polynomials over GF(2). + Given a and b, it finds two polynomials q and r such that: + + a = b*q + r with deg(r)= d: + s = 1 << (deg(r) - d) + q ^= s + r ^= _mult_gf2(b, s) + return (q, r) + + +class _Element(object): + """Element of GF(2^128) field""" + + # The irreducible polynomial defining this field is 1+x+x^2+x^7+x^128 + irr_poly = 1 + 2 + 4 + 128 + 2 ** 128 + + def __init__(self, encoded_value): + """Initialize the element to a certain value. + + The value passed as parameter is internally encoded as + a 128-bit integer, where each bit represents a polynomial + coefficient. The LSB is the constant coefficient. 
+ """ + + if is_native_int(encoded_value): + self._value = encoded_value + elif len(encoded_value) == 16: + self._value = bytes_to_long(encoded_value) + else: + raise ValueError("The encoded value must be an integer or a 16 byte string") + + def __eq__(self, other): + return self._value == other._value + + def __int__(self): + """Return the field element, encoded as a 128-bit integer.""" + return self._value + + def encode(self): + """Return the field element, encoded as a 16 byte string.""" + return long_to_bytes(self._value, 16) + + def __mul__(self, factor): + + f1 = self._value + f2 = factor._value + + # Make sure that f2 is the smallest, to speed up the loop + if f2 > f1: + f1, f2 = f2, f1 + + if self.irr_poly in (f1, f2): + return _Element(0) + + mask1 = 2 ** 128 + v, z = f1, 0 + while f2: + # if f2 ^ 1: z ^= v + mask2 = int(bin(f2 & 1)[2:] * 128, base=2) + z = (mask2 & (z ^ v)) | ((mask1 - mask2 - 1) & z) + v <<= 1 + # if v & mask1: v ^= self.irr_poly + mask3 = int(bin((v >> 128) & 1)[2:] * 128, base=2) + v = (mask3 & (v ^ self.irr_poly)) | ((mask1 - mask3 - 1) & v) + f2 >>= 1 + return _Element(z) + + def __add__(self, term): + return _Element(self._value ^ term._value) + + def inverse(self): + """Return the inverse of this element in GF(2^128).""" + + # We use the Extended GCD algorithm + # http://en.wikipedia.org/wiki/Polynomial_greatest_common_divisor + + if self._value == 0: + raise ValueError("Inversion of zero") + + r0, r1 = self._value, self.irr_poly + s0, s1 = 1, 0 + while r1 > 0: + q = _div_gf2(r0, r1)[0] + r0, r1 = r1, r0 ^ _mult_gf2(q, r1) + s0, s1 = s1, s0 ^ _mult_gf2(q, s1) + return _Element(s0) + + def __pow__(self, exponent): + result = _Element(self._value) + for _ in range(exponent - 1): + result = result * self + return result + + +class Shamir(object): + """Shamir's secret sharing scheme. + + A secret is split into ``n`` shares, and it is sufficient to collect + ``k`` of them to reconstruct the secret. 
+ """ + + @staticmethod + def split(k, n, secret, ssss=False): + """Split a secret into ``n`` shares. + + The secret can be reconstructed later using just ``k`` shares + out of the original ``n``. + Each share must be kept confidential to the person it was + assigned to. + + Each share is associated to an index (starting from 1). + + Args: + k (integer): + The sufficient number of shares to reconstruct the secret (``k < n``). + n (integer): + The number of shares that this method will create. + secret (byte string): + A byte string of 16 bytes (e.g. the AES 128 key). + ssss (bool): + If ``True``, the shares can be used with the ``ssss`` utility. + Default: ``False``. + + Return (tuples): + ``n`` tuples. A tuple is meant for each participant and it contains two items: + + 1. the unique index (an integer) + 2. the share (a byte string, 16 bytes) + """ + + # + # We create a polynomial with random coefficients in GF(2^128): + # + # p(x) = \sum_{i=0}^{k-1} c_i * x^i + # + # c_0 is the encoded secret + # + + coeffs = [_Element(rng(16)) for i in range(k - 1)] + coeffs.append(_Element(secret)) + + # Each share is y_i = p(x_i) where x_i is the public index + # associated to each of the n users. + + def make_share(user, coeffs, ssss): + idx = _Element(user) + share = _Element(0) + for coeff in coeffs: + share = idx * share + coeff + if ssss: + share += _Element(user) ** len(coeffs) + return share.encode() + + return [(i, make_share(i, coeffs, ssss)) for i in range(1, n + 1)] + + @staticmethod + def combine(shares, ssss=False): + """Recombine a secret, if enough shares are presented. + + Args: + shares (tuples): + The *k* tuples, each containin the index (an integer) and + the share (a byte string, 16 bytes long) that were assigned to + a participant. + ssss (bool): + If ``True``, the shares were produced by the ``ssss`` utility. + Default: ``False``. + + Return: + The original secret, as a byte string (16 bytes long). 
+ """ + + # + # Given k points (x,y), the interpolation polynomial of degree k-1 is: + # + # L(x) = \sum_{j=0}^{k-1} y_i * l_j(x) + # + # where: + # + # l_j(x) = \prod_{ \overset{0 \le m \le k-1}{m \ne j} } + # \frac{x - x_m}{x_j - x_m} + # + # However, in this case we are purely interested in the constant + # coefficient of L(x). + # + + k = len(shares) + + gf_shares = [] + for x in shares: + idx = _Element(x[0]) + value = _Element(x[1]) + if any(y[0] == idx for y in gf_shares): + raise ValueError("Duplicate share") + if ssss: + value += idx ** k + gf_shares.append((idx, value)) + + result = _Element(0) + for j in range(k): + x_j, y_j = gf_shares[j] + + numerator = _Element(1) + denominator = _Element(1) + + for m in range(k): + x_m = gf_shares[m][0] + if m != j: + numerator *= x_m + denominator *= x_j + x_m + result += y_j * numerator * denominator.inverse() + return result.encode() diff --git a/env/Lib/site-packages/Crypto/Protocol/SecretSharing.pyi b/env/Lib/site-packages/Crypto/Protocol/SecretSharing.pyi new file mode 100644 index 0000000..5952c99 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Protocol/SecretSharing.pyi @@ -0,0 +1,22 @@ +from typing import Union, List, Tuple, Optional + +def _mult_gf2(f1: int, f2: int) -> int : ... +def _div_gf2(a: int, b: int) -> int : ... + +class _Element(object): + irr_poly: int + def __init__(self, encoded_value: Union[int, bytes]) -> None: ... + def __eq__(self, other) -> bool: ... + def __int__(self) -> int: ... + def encode(self) -> bytes: ... + def __mul__(self, factor: int) -> _Element: ... + def __add__(self, term: _Element) -> _Element: ... + def inverse(self) -> _Element: ... + def __pow__(self, exponent) -> _Element: ... + +class Shamir(object): + @staticmethod + def split(k: int, n: int, secret: bytes, ssss: Optional[bool]) -> List[Tuple[int, bytes]]: ... + @staticmethod + def combine(shares: List[Tuple[int, bytes]], ssss: Optional[bool]) -> bytes: ... 
+ diff --git a/env/Lib/site-packages/Crypto/Protocol/__init__.py b/env/Lib/site-packages/Crypto/Protocol/__init__.py new file mode 100644 index 0000000..efdf034 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Protocol/__init__.py @@ -0,0 +1,31 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +__all__ = ['KDF', 'SecretSharing'] diff --git a/env/Lib/site-packages/Crypto/Protocol/__init__.pyi b/env/Lib/site-packages/Crypto/Protocol/__init__.pyi new file mode 100644 index 0000000..377ed90 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Protocol/__init__.pyi @@ -0,0 +1 @@ +__all__ = ['KDF.pyi', 'SecretSharing.pyi'] diff --git a/env/Lib/site-packages/Crypto/Protocol/_scrypt.pyd b/env/Lib/site-packages/Crypto/Protocol/_scrypt.pyd new file mode 100644 index 0000000000000000000000000000000000000000..927b9f47aa7c572cbda21d7ca2d1b1612eba7c0b GIT binary patch literal 12288 zcmeHNeRNyJl^@xbE!$D#1S>cJnrDz3Q;1Ql6dW9bl-Oa!%ETm2$N_?^*w2Z;mW1?_ z#3@~z%BFE%)rM}^hL*M@aJGATC`~B~PUzW62(S&wVnVic$^!1fmX|nyN!ujQkGH@3 z-cu~|(Z6=PdkXKIdH2qpd+*%2Gjr!Yjhb)X#qt?r1t_Y@*dQQ1ZteY-UltQ%Gv^b=*_PdLF54%koUJi-pp>nHREhP(G(qTh9Y5 z)9_vlU&r$pE7V9+koAHmwS zlQtlv?{bu!$F1>+U12bik5~kS_-d#!^|%>ZHdXs{Vu!S+v(q-ePjdjTx4w4;W2fS? zc{*Lzc_yu@r{a01GCVy}cKln26yWEHlhyr4q$P@2eRy^@cbw>Gn z%=Uy?SqWf!BBjy1@+iUMHZV2!r)>A7wQ2=~_`t0W3m=diWqjZ+hu~R@4g@Zr^2zVH zMwEA;q%URbqphpN&5Z3F+tKK7yPq^A8oIv-qOJc~yob!|Q6UqQ6qx!_(n{sUV$?ps zN>VC8;z|2$0s-qv0HtgOWBsZ4rOJApT|j${-aen&Pn42U3zX8*ao4IlUr>$|0qaxa z*D4hjXp+ZxqK-)pE)Og7(S%MbeDZS#Nx&=r8<+Eavi0D6#ysjvs<{$=)qFIO=Sp$; zR)>?zlB15xcR4tGA$V@f9Dow09Kw4S`I-E36P)36?|ff2{~F7;KXtcN7j{F$>CdRc z1Lk2gD7RpknrDPW!`j6#I)0TB#>kWs8ovpiNK{<|I>RtZ(ShrTd*_Qv6|mD|7rc^q z!=&5^ZaFitPdDN^GGf;uE-lR>C+bd7XA7G05kHl?D?2WMD z?jl#}_&ew#xlB!*HEn6)viU-cp2N*0LyZn6&CW!*YX1R(sXR6S3Kl}&W1zxo@-N{s z<-tFx>QvLWa+|DJeR@+uHN~%l(preQj*!@Ua5?Wkja8*Q_h(f_#}A0aVk&XEf$u@fkU0bj^*UtU z1dOI~$Q(7WLF@*`??}<+x}7A43QW}V6kWD{`!di6d<8<5XyUT<%UL1>-^VZ%Bq1us zRY!hwuU<*aOK{M;m;o+C9{Q!7ky64o4kK@HsYd8Ld8f znB3yT^bs*am1@@rZ7V^w&xV*flu+~T`Uy>*|F&DQb0@6f@=1LP%x``H%Acy;_xi&7 z$<+ExT;d6_0=+a>sP;LUQB4(mps7r?Gt}f1UqtxHc^aN<7WDd2sy7J`E92>LfxCda zn##B<%@1J6c%c0fR9sVCaa&^=?hlX&SSg&J8AbJas$%BXXJBGAEvhn}X!t4mX7NJR 
zE~BHVNPGx>y%fhYR>?LrAf_a(b5TFeu)TTGd=c=>IS75qTQG$uEDq&2I+BOJsxw;5 zSAKv{3`bXrTrQ}BB+TNL2{W%BmdahJNpAaz^-To1_&c?F%-?UCI79n^^xmP%7Aa34 z9!<n|+@_{ZoYa~a7u|eObV1_+MO)MBBb3tBv(;O^H+nXTM3?iO#*BgQh&MYd0%TYOHXq zG0q1p< zMQG6X5PA@L8(JaC=w$T@suyW>sRmwl zb=tz_JIMiwVfDARyUkWGU@gu8pejLB=L0-vK8|o<(mW0m4nPUcV7#<-fa5Ve*nBf% zolDHyvDD&QdAs#-AUJ&QB#{67_9c{y$Nht)h#;LyJQeX)-(^qRp7fOS6~@y%I0tU$xJnn5el@=jm-I1%>TACKdD@iO*0@ zLjdRLy8zh%(;7>fzX-NWqRLL(8Ewt2;mJoF?mu9N+fP!0A|KNu^&OB!j|K7zhc7(< zoGH&@s zrUD`MdQzbV3WcPT5P{B_d`)%#Ff}4rw(x;|>V^!_h@Qbc)T?$@^zR@$Gj!(4x&+Sh zHk|M+cC22&R$8*E-3FaV1n883J~~X2O_R3Wa7d$rQ>WhpKlSED+yaW#Hv^uxBzeB_ zbL|*^z%dG6@$Ju$%hqE-pUa5^p1i1t!f&#H;_k$3WLx9_gg{P1Nn|X%{`Kf_eW%j? z)aOCl@sNE5;SjPEmvd!je1h!LSiscxRM;qUrCcMneT{Ou$@NO3{DNouWJnUbJlo&9 zP7K8xYs8aWZfYtWw(aw5KNSp#-hl!~Q|a^2Qy{+&&szps9W&xBJYoF@2OL&4 zd97+|NEMoxRNc^4n8&2bh6sUq=Kb&g5jmBx#la7laQKpcx$U8Jth`Y+r*I;862qp( z0rS5AohSnB5g>zz@vJe)|4=`9*XPMr^xh~_)>4q|+jj7c;#f&jX&SOU5E(M7m*|Ik zB}D_)w?KFb;gk(rG_JQ@5bFWQg|Qgan?`y1KLuVo$}uW|k_X}9 z%+;CrO|tnha3_W>L*~6e@=1aD`@lS>GlEo?7#4=iJ5a?Gqv0N4gWwu%ykxAgek@+0 zX>yD&9gkZR){SU&4_ve!fM&Lul3@v%bmEyZKgB!k=mX+PEit!%2}cK!&pm3@GJL_S zRejp~9__tDd!NwWk7(~lwfBDQ{h0PXs=c4k-h+7WH)YTV&P9y1Z1oyXt7i=YZC#Um z+$$?cSa|=4t#1XU3E{`qN5_~?e%R5A5jOX~Y3sY3T5teG=!?gR43VebE#&eUp8idt zw|>&rZvun+^pMHce+Dw1WV6Xf-$wX~QtfB)ZSI2zw#(-{$?Nib_=14HNgFWoMV)88 z=hpSNWPI|oKJ-$}<(Iwki^}eN`Yw5~S^j-f|J$}c+QmG1;ml?kY5vPdN|i^6BER5G zykm<07f-H$b6+RLn;_z~{rFX$exuNv|2ogtws4zAJuH`@50CsTr1&MH-1g&NViADn zmr5;00L@U#ueBasrK`=fx%vUM=U@T9Gn4>;};VKWfY0C z13NdLQ%AY}?bzyB>(RD&vpm@>|JwV~J2={X@&`EEdHwV8d7i;m(v-B{QAxnM7(liE zVip;LEU`I-?5xm*b7&Z!6Vmmx<=yg7&i66C`Q%Z2F7cO6z$32_JJ0`>;TRvj29o8~ z9`&e-QNMtp*FP79bD@OGUC7|zOXLoBIo8ps;_-WK^V~XZc^*2QF0%C>gNdh~1~Ll7 zb2m6bzqN%;-bC60Skh^GcNnL4F>p^AUe}iy6ff000+EC)iE17vk+8s?c4x_` zq}{&y;pjg#*ALqguaYsBz0`bJ+BUxzIJ{a=!WZEP@LIeH#T0)xlN|wl_F)8W+gH8{ z43;AYSf)1Tk9zY{=j;#b_NQ{~FNXbY7?P*k5MBmrwLy3d`@_p%?A_WRyoUYZWt!DQ zrh#~!A8UWCQn$TxYQAkx6qGp0e0UA!Pfnk$Lo{CqR){mZr^lJ>0;L#{`^bnJxsRMT 
zV9KafR-5+AO15RHe|0=d%Y`}ZRX0P_O~FUt@`;_zZdzIEk5`|;r(_OUQ09QLOKZ$b zHR#M+exGH=PZB4%Y?5)zAwi%x?n_!{yrZffa zV=?Ueuwkb|C}N?p_?- zA+li?@{%h7vowd($({r4o9Ad-wR;!3%c%*{-H8LgRO8BKk$!Db<8-!nZp7UW>K2k& zz6w8GEcXzPdphlAqVZ`oug2Fhou=`vn?`HV_}Zq^G`>yKXssGw1T+lQu%%Vw>jjNr z4QKC@Xf4y4*6WUK+qQLVz%W<}qtnI_mD(L%kEU;U8trCH-)JtaRp%RVFsA_7 z`>hY+)s<3L_+o zp9$E@;5&u#sv$cJyks^~SEGI&@Xw)S+o?W3r`=RwVg<|atMa@;HgB~W_p=QgfycUzKkl8R68^&U*EsdCpO{{nWWGn^D5-o`2x8*qvyPYO$ z0kbYHX4cxmmSXA;y0ZFX6^#7}#SI!kQ=WUn z-89GwiAVECAuFjZjOg~jmc?dfn_U_qPm=xg5H#69-%vVQoGNN5bTU>+vQi;4cg$pN zTqt}ArQ<5l&3VlHy8^BLL(n9mL7v*D>R2@3VUTAZC;z6Iy$1eYqwLq@KYU zEn#z|QZ}~(u(g=Yt+h{{H$E$+*@Ssd;ZkHi?s6=3x=p!tsG9cBfklL`ftSXgXOyj| z_oD1TX~V#%|2@EefI^)9SbMZ5BGp8CI#^4OHx!io28rn{t5!A#x9WS2B&GXVAuSXh%Uyf%b9FXL-?dul#$#DyeIEAR@)1;_@(l=)%7Q z{)1?~aQKdRWJNsGE(OD(rjQivVeAcRSsRQ>@j%y_(3W7R1D-ao32h35I=aM;rmgK_ zgxFUEyCgA+n?v(zQToJYA@4d-S{{!^#gNnz4Y!L3ncp+FiqY<1C?JVh68l=dH)d4A z(G_BVY;B2(F^I8M1vkdUXitk6?F>h|1EF?|B9381S>;EydplTF4z+btR19>0$bL)I z)jcsu>~0Nqi=G$;AQHx^OpU=hMcF z5z!wEb%y;NVoZvLd;DEc&e%1qTkMXBz}7BbwdRIaZ*$Y4#ZF!4`K%o_h1==R8l6#5 zWNan#ZwbU=Vif#WGUy9Ra7#D4Y6|y@>{{$jag)DOn=n6_SHb-4;ZSFABfRVn1)|aL z7JneRaWj51)+O};XPd;(=3q1&>ZZwHi>GmfL*iEKb&1@^*u~5*ZWY_(lIWk-FbDk+ zA7g{_&N(L!U4A8Ns5`tF-F?cR!|R13 zokvwQd&8NH-PyQ(gH zy8^HsSP%Y^BjN8jScacH|EC@(D`RXC3gyF#0sBzyLdHt)+sNnM1pNU(0l8Q;=mc*> zPSyvU{tI_6%HzOy0}i8%0VjBWIb+WNe*n;G$B$dU%K(3dG7g;J>nPM0-MK74A-oze zVBj5qdkmc5j|_Z2;Fy6EeA~b$0b9;Tj*h&CU@r>kB)Hqa3BHO#c8&wyQK92qfCY0I zI|(|$Y82ujc%y-L06sbow5yO?0n#0R9dLs9y+Fe+2Rw}YwGs5({3z;p99%HO_=oa+$i1nDm`HIcESl#zD+nm%8efZPH-K{49I@={H=Pxr2T~xTcLOQ2=QG6g7ahO z_(u@qXrph_d#4L&wWZf??d}pbi_sX;y@mygYMcuMG1MOJ2!=K`ELhXJVqx6^AtoU& z40PeRZ&=VH#ui+AjkRPc_ +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__all__ = ['generate', 'construct', 'DsaKey', 'import_key' ] + +import binascii +import struct +import itertools + +from Crypto.Util.py3compat import bchr, bord, tobytes, tostr, iter_range + +from Crypto import Random +from Crypto.IO import PKCS8, PEM +from Crypto.Hash import SHA256 +from Crypto.Util.asn1 import ( + DerObject, DerSequence, + DerInteger, DerObjectId, + DerBitString, + ) + +from Crypto.Math.Numbers import Integer +from Crypto.Math.Primality import (test_probable_prime, COMPOSITE, + PROBABLY_PRIME) + +from Crypto.PublicKey import (_expand_subject_public_key_info, + _create_subject_public_key_info, + _extract_subject_public_key_info) + +# ; The following ASN.1 types are relevant for DSA +# +# SubjectPublicKeyInfo ::= SEQUENCE { +# algorithm AlgorithmIdentifier, +# subjectPublicKey BIT STRING +# } +# +# id-dsa ID ::= { iso(1) member-body(2) us(840) x9-57(10040) x9cm(4) 1 } +# +# ; See RFC3279 +# Dss-Parms ::= SEQUENCE { +# p INTEGER, +# q INTEGER, +# g INTEGER +# } +# +# DSAPublicKey ::= INTEGER +# +# DSSPrivatKey_OpenSSL ::= SEQUENCE +# version INTEGER, +# p INTEGER, +# q INTEGER, +# g INTEGER, +# y INTEGER, +# x INTEGER +# } 
+# + +class DsaKey(object): + r"""Class defining an actual DSA key. + Do not instantiate directly. + Use :func:`generate`, :func:`construct` or :func:`import_key` instead. + + :ivar p: DSA modulus + :vartype p: integer + + :ivar q: Order of the subgroup + :vartype q: integer + + :ivar g: Generator + :vartype g: integer + + :ivar y: Public key + :vartype y: integer + + :ivar x: Private key + :vartype x: integer + + :undocumented: exportKey, publickey + """ + + _keydata = ['y', 'g', 'p', 'q', 'x'] + + def __init__(self, key_dict): + input_set = set(key_dict.keys()) + public_set = set(('y' , 'g', 'p', 'q')) + if not public_set.issubset(input_set): + raise ValueError("Some DSA components are missing = %s" % + str(public_set - input_set)) + extra_set = input_set - public_set + if extra_set and extra_set != set(('x',)): + raise ValueError("Unknown DSA components = %s" % + str(extra_set - set(('x',)))) + self._key = dict(key_dict) + + def _sign(self, m, k): + if not self.has_private(): + raise TypeError("DSA public key cannot be used for signing") + if not (1 < k < self.q): + raise ValueError("k is not between 2 and q-1") + + x, q, p, g = [self._key[comp] for comp in ['x', 'q', 'p', 'g']] + + blind_factor = Integer.random_range(min_inclusive=1, + max_exclusive=q) + inv_blind_k = (blind_factor * k).inverse(q) + blind_x = x * blind_factor + + r = pow(g, k, p) % q # r = (g**k mod p) mod q + s = (inv_blind_k * (blind_factor * m + blind_x * r)) % q + return map(int, (r, s)) + + def _verify(self, m, sig): + r, s = sig + y, q, p, g = [self._key[comp] for comp in ['y', 'q', 'p', 'g']] + if not (0 < r < q) or not (0 < s < q): + return False + w = Integer(s).inverse(q) + u1 = (w * m) % q + u2 = (w * r) % q + v = (pow(g, u1, p) * pow(y, u2, p) % p) % q + return v == r + + def has_private(self): + """Whether this is a DSA private key""" + + return 'x' in self._key + + def can_encrypt(self): # legacy + return False + + def can_sign(self): # legacy + return True + + def 
public_key(self): + """A matching DSA public key. + + Returns: + a new :class:`DsaKey` object + """ + + public_components = dict((k, self._key[k]) for k in ('y', 'g', 'p', 'q')) + return DsaKey(public_components) + + def __eq__(self, other): + if bool(self.has_private()) != bool(other.has_private()): + return False + + result = True + for comp in self._keydata: + result = result and (getattr(self._key, comp, None) == + getattr(other._key, comp, None)) + return result + + def __ne__(self, other): + return not self.__eq__(other) + + def __getstate__(self): + # DSA key is not pickable + from pickle import PicklingError + raise PicklingError + + def domain(self): + """The DSA domain parameters. + + Returns + tuple : (p,q,g) + """ + + return [int(self._key[comp]) for comp in ('p', 'q', 'g')] + + def __repr__(self): + attrs = [] + for k in self._keydata: + if k == 'p': + bits = Integer(self.p).size_in_bits() + attrs.append("p(%d)" % (bits,)) + elif hasattr(self, k): + attrs.append(k) + if self.has_private(): + attrs.append("private") + # PY3K: This is meant to be text, do not change to bytes (data) + return "<%s @0x%x %s>" % (self.__class__.__name__, id(self), ",".join(attrs)) + + def __getattr__(self, item): + try: + return int(self._key[item]) + except KeyError: + raise AttributeError(item) + + def export_key(self, format='PEM', pkcs8=None, passphrase=None, + protection=None, randfunc=None): + """Export this DSA key. + + Args: + format (string): + The encoding for the output: + + - *'PEM'* (default). ASCII as per `RFC1421`_/ `RFC1423`_. + - *'DER'*. Binary ASN.1 encoding. + - *'OpenSSH'*. ASCII one-liner as per `RFC4253`_. + Only suitable for public keys, not for private keys. + + passphrase (string): + *Private keys only*. The pass phrase to protect the output. + + pkcs8 (boolean): + *Private keys only*. If ``True`` (default), the key is encoded + with `PKCS#8`_. If ``False``, it is encoded in the custom + OpenSSL/OpenSSH container. 
+ + protection (string): + *Only in combination with a pass phrase*. + The encryption scheme to use to protect the output. + + If :data:`pkcs8` takes value ``True``, this is the PKCS#8 + algorithm to use for deriving the secret and encrypting + the private DSA key. + For a complete list of algorithms, see :mod:`Crypto.IO.PKCS8`. + The default is *PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC*. + + If :data:`pkcs8` is ``False``, the obsolete PEM encryption scheme is + used. It is based on MD5 for key derivation, and Triple DES for + encryption. Parameter :data:`protection` is then ignored. + + The combination ``format='DER'`` and ``pkcs8=False`` is not allowed + if a passphrase is present. + + randfunc (callable): + A function that returns random bytes. + By default it is :func:`Crypto.Random.get_random_bytes`. + + Returns: + byte string : the encoded key + + Raises: + ValueError : when the format is unknown or when you try to encrypt a private + key with *DER* format and OpenSSL/OpenSSH. + + .. warning:: + If you don't provide a pass phrase, the private key will be + exported in the clear! + + .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt + .. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt + .. _RFC4253: http://www.ietf.org/rfc/rfc4253.txt + .. _`PKCS#8`: http://www.ietf.org/rfc/rfc5208.txt + """ + + if passphrase is not None: + passphrase = tobytes(passphrase) + + if randfunc is None: + randfunc = Random.get_random_bytes + + if format == 'OpenSSH': + tup1 = [self._key[x].to_bytes() for x in ('p', 'q', 'g', 'y')] + + def func(x): + if (bord(x[0]) & 0x80): + return bchr(0) + x + else: + return x + + tup2 = [func(x) for x in tup1] + keyparts = [b'ssh-dss'] + tup2 + keystring = b''.join( + [struct.pack(">I", len(kp)) + kp for kp in keyparts] + ) + return b'ssh-dss ' + binascii.b2a_base64(keystring)[:-1] + + # DER format is always used, even in case of PEM, which simply + # encodes it into BASE64. 
+ params = DerSequence([self.p, self.q, self.g]) + if self.has_private(): + if pkcs8 is None: + pkcs8 = True + if pkcs8: + if not protection: + protection = 'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC' + private_key = DerInteger(self.x).encode() + binary_key = PKCS8.wrap( + private_key, oid, passphrase, + protection, key_params=params, + randfunc=randfunc + ) + if passphrase: + key_type = 'ENCRYPTED PRIVATE' + else: + key_type = 'PRIVATE' + passphrase = None + else: + if format != 'PEM' and passphrase: + raise ValueError("DSA private key cannot be encrypted") + ints = [0, self.p, self.q, self.g, self.y, self.x] + binary_key = DerSequence(ints).encode() + key_type = "DSA PRIVATE" + else: + if pkcs8: + raise ValueError("PKCS#8 is only meaningful for private keys") + + binary_key = _create_subject_public_key_info(oid, + DerInteger(self.y), params) + key_type = "PUBLIC" + + if format == 'DER': + return binary_key + if format == 'PEM': + pem_str = PEM.encode( + binary_key, key_type + " KEY", + passphrase, randfunc + ) + return tobytes(pem_str) + raise ValueError("Unknown key format '%s'. Cannot export the DSA key." 
% format) + + # Backward-compatibility + exportKey = export_key + publickey = public_key + + # Methods defined in PyCrypto that we don't support anymore + + def sign(self, M, K): + raise NotImplementedError("Use module Crypto.Signature.DSS instead") + + def verify(self, M, signature): + raise NotImplementedError("Use module Crypto.Signature.DSS instead") + + def encrypt(self, plaintext, K): + raise NotImplementedError + + def decrypt(self, ciphertext): + raise NotImplementedError + + def blind(self, M, B): + raise NotImplementedError + + def unblind(self, M, B): + raise NotImplementedError + + def size(self): + raise NotImplementedError + + +def _generate_domain(L, randfunc): + """Generate a new set of DSA domain parameters""" + + N = { 1024:160, 2048:224, 3072:256 }.get(L) + if N is None: + raise ValueError("Invalid modulus length (%d)" % L) + + outlen = SHA256.digest_size * 8 + n = (L + outlen - 1) // outlen - 1 # ceil(L/outlen) -1 + b_ = L - 1 - (n * outlen) + + # Generate q (A.1.1.2) + q = Integer(4) + upper_bit = 1 << (N - 1) + while test_probable_prime(q, randfunc) != PROBABLY_PRIME: + seed = randfunc(64) + U = Integer.from_bytes(SHA256.new(seed).digest()) & (upper_bit - 1) + q = U | upper_bit | 1 + + assert(q.size_in_bits() == N) + + # Generate p (A.1.1.2) + offset = 1 + upper_bit = 1 << (L - 1) + while True: + V = [ SHA256.new(seed + Integer(offset + j).to_bytes()).digest() + for j in iter_range(n + 1) ] + V = [ Integer.from_bytes(v) for v in V ] + W = sum([V[i] * (1 << (i * outlen)) for i in iter_range(n)], + (V[n] & ((1 << b_) - 1)) * (1 << (n * outlen))) + + X = Integer(W + upper_bit) # 2^{L-1} < X < 2^{L} + assert(X.size_in_bits() == L) + + c = X % (q * 2) + p = X - (c - 1) # 2q divides (p-1) + if p.size_in_bits() == L and \ + test_probable_prime(p, randfunc) == PROBABLY_PRIME: + break + offset += n + 1 + + # Generate g (A.2.3, index=1) + e = (p - 1) // q + for count in itertools.count(1): + U = seed + b"ggen" + bchr(1) + Integer(count).to_bytes() + W = 
Integer.from_bytes(SHA256.new(U).digest()) + g = pow(W, e, p) + if g != 1: + break + + return (p, q, g, seed) + + +def generate(bits, randfunc=None, domain=None): + """Generate a new DSA key pair. + + The algorithm follows Appendix A.1/A.2 and B.1 of `FIPS 186-4`_, + respectively for domain generation and key pair generation. + + Args: + bits (integer): + Key length, or size (in bits) of the DSA modulus *p*. + It must be 1024, 2048 or 3072. + + randfunc (callable): + Random number generation function; it accepts a single integer N + and return a string of random data N bytes long. + If not specified, :func:`Crypto.Random.get_random_bytes` is used. + + domain (tuple): + The DSA domain parameters *p*, *q* and *g* as a list of 3 + integers. Size of *p* and *q* must comply to `FIPS 186-4`_. + If not specified, the parameters are created anew. + + Returns: + :class:`DsaKey` : a new DSA key object + + Raises: + ValueError : when **bits** is too little, too big, or not a multiple of 64. + + .. 
_FIPS 186-4: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf + """ + + if randfunc is None: + randfunc = Random.get_random_bytes + + if domain: + p, q, g = map(Integer, domain) + + ## Perform consistency check on domain parameters + # P and Q must be prime + fmt_error = test_probable_prime(p) == COMPOSITE + fmt_error = test_probable_prime(q) == COMPOSITE + # Verify Lagrange's theorem for sub-group + fmt_error |= ((p - 1) % q) != 0 + fmt_error |= g <= 1 or g >= p + fmt_error |= pow(g, q, p) != 1 + if fmt_error: + raise ValueError("Invalid DSA domain parameters") + else: + p, q, g, _ = _generate_domain(bits, randfunc) + + L = p.size_in_bits() + N = q.size_in_bits() + + if L != bits: + raise ValueError("Mismatch between size of modulus (%d)" + " and 'bits' parameter (%d)" % (L, bits)) + + if (L, N) not in [(1024, 160), (2048, 224), + (2048, 256), (3072, 256)]: + raise ValueError("Lengths of p and q (%d, %d) are not compatible" + "to FIPS 186-3" % (L, N)) + + if not 1 < g < p: + raise ValueError("Incorrent DSA generator") + + # B.1.1 + c = Integer.random(exact_bits=N + 64, randfunc=randfunc) + x = c % (q - 1) + 1 # 1 <= x <= q-1 + y = pow(g, x, p) + + key_dict = { 'y':y, 'g':g, 'p':p, 'q':q, 'x':x } + return DsaKey(key_dict) + + +def construct(tup, consistency_check=True): + """Construct a DSA key from a tuple of valid DSA components. + + Args: + tup (tuple): + A tuple of long integers, with 4 or 5 items + in the following order: + + 1. Public key (*y*). + 2. Sub-group generator (*g*). + 3. Modulus, finite field order (*p*). + 4. Sub-group order (*q*). + 5. Private key (*x*). Optional. + + consistency_check (boolean): + If ``True``, the library will verify that the provided components + fulfil the main DSA properties. + + Raises: + ValueError: when the key being imported fails the most basic DSA validity checks. 
+ + Returns: + :class:`DsaKey` : a DSA key object + """ + + key_dict = dict(zip(('y', 'g', 'p', 'q', 'x'), map(Integer, tup))) + key = DsaKey(key_dict) + + fmt_error = False + if consistency_check: + # P and Q must be prime + fmt_error = test_probable_prime(key.p) == COMPOSITE + fmt_error = test_probable_prime(key.q) == COMPOSITE + # Verify Lagrange's theorem for sub-group + fmt_error |= ((key.p - 1) % key.q) != 0 + fmt_error |= key.g <= 1 or key.g >= key.p + fmt_error |= pow(key.g, key.q, key.p) != 1 + # Public key + fmt_error |= key.y <= 0 or key.y >= key.p + if hasattr(key, 'x'): + fmt_error |= key.x <= 0 or key.x >= key.q + fmt_error |= pow(key.g, key.x, key.p) != key.y + + if fmt_error: + raise ValueError("Invalid DSA key components") + + return key + + +# Dss-Parms ::= SEQUENCE { +# p OCTET STRING, +# q OCTET STRING, +# g OCTET STRING +# } +# DSAPublicKey ::= INTEGER -- public key, y + +def _import_openssl_private(encoded, passphrase, params): + if params: + raise ValueError("DSA private key already comes with parameters") + der = DerSequence().decode(encoded, nr_elements=6, only_ints_expected=True) + if der[0] != 0: + raise ValueError("No version found") + tup = [der[comp] for comp in (4, 3, 1, 2, 5)] + return construct(tup) + + +def _import_subjectPublicKeyInfo(encoded, passphrase, params): + + algoid, encoded_key, emb_params = _expand_subject_public_key_info(encoded) + if algoid != oid: + raise ValueError("No DSA subjectPublicKeyInfo") + if params and emb_params: + raise ValueError("Too many DSA parameters") + + y = DerInteger().decode(encoded_key).value + p, q, g = list(DerSequence().decode(params or emb_params)) + tup = (y, g, p, q) + return construct(tup) + + +def _import_x509_cert(encoded, passphrase, params): + + sp_info = _extract_subject_public_key_info(encoded) + return _import_subjectPublicKeyInfo(sp_info, None, params) + + +def _import_pkcs8(encoded, passphrase, params): + if params: + raise ValueError("PKCS#8 already includes parameters") + k = 
PKCS8.unwrap(encoded, passphrase) + if k[0] != oid: + raise ValueError("No PKCS#8 encoded DSA key") + x = DerInteger().decode(k[1]).value + p, q, g = list(DerSequence().decode(k[2])) + tup = (pow(g, x, p), g, p, q, x) + return construct(tup) + + +def _import_key_der(key_data, passphrase, params): + """Import a DSA key (public or private half), encoded in DER form.""" + + decodings = (_import_openssl_private, + _import_subjectPublicKeyInfo, + _import_x509_cert, + _import_pkcs8) + + for decoding in decodings: + try: + return decoding(key_data, passphrase, params) + except ValueError: + pass + + raise ValueError("DSA key format is not supported") + + +def import_key(extern_key, passphrase=None): + """Import a DSA key. + + Args: + extern_key (string or byte string): + The DSA key to import. + + The following formats are supported for a DSA **public** key: + + - X.509 certificate (binary DER or PEM) + - X.509 ``subjectPublicKeyInfo`` (binary DER or PEM) + - OpenSSH (ASCII one-liner, see `RFC4253`_) + + The following formats are supported for a DSA **private** key: + + - `PKCS#8`_ ``PrivateKeyInfo`` or ``EncryptedPrivateKeyInfo`` + DER SEQUENCE (binary or PEM) + - OpenSSL/OpenSSH custom format (binary or PEM) + + For details about the PEM encoding, see `RFC1421`_/`RFC1423`_. + + passphrase (string): + In case of an encrypted private key, this is the pass phrase + from which the decryption key is derived. + + Encryption may be applied either at the `PKCS#8`_ or at the PEM level. + + Returns: + :class:`DsaKey` : a DSA key object + + Raises: + ValueError : when the given key cannot be parsed (possibly because + the pass phrase is wrong). + + .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt + .. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt + .. _RFC4253: http://www.ietf.org/rfc/rfc4253.txt + .. 
_PKCS#8: http://www.ietf.org/rfc/rfc5208.txt + """ + + extern_key = tobytes(extern_key) + if passphrase is not None: + passphrase = tobytes(passphrase) + + if extern_key.startswith(b'-----'): + # This is probably a PEM encoded key + (der, marker, enc_flag) = PEM.decode(tostr(extern_key), passphrase) + if enc_flag: + passphrase = None + return _import_key_der(der, passphrase, None) + + if extern_key.startswith(b'ssh-dss '): + # This is probably a public OpenSSH key + keystring = binascii.a2b_base64(extern_key.split(b' ')[1]) + keyparts = [] + while len(keystring) > 4: + length = struct.unpack(">I", keystring[:4])[0] + keyparts.append(keystring[4:4 + length]) + keystring = keystring[4 + length:] + if keyparts[0] == b"ssh-dss": + tup = [Integer.from_bytes(keyparts[x]) for x in (4, 3, 1, 2)] + return construct(tup) + + if len(extern_key) > 0 and bord(extern_key[0]) == 0x30: + # This is probably a DER encoded key + return _import_key_der(extern_key, passphrase, None) + + raise ValueError("DSA key format is not supported") + + +# Backward compatibility +importKey = import_key + +#: `Object ID`_ for a DSA key. +#: +#: id-dsa ID ::= { iso(1) member-body(2) us(840) x9-57(10040) x9cm(4) 1 } +#: +#: .. _`Object ID`: http://www.alvestrand.no/objectid/1.2.840.10040.4.1.html +oid = "1.2.840.10040.4.1" diff --git a/env/Lib/site-packages/Crypto/PublicKey/DSA.pyi b/env/Lib/site-packages/Crypto/PublicKey/DSA.pyi new file mode 100644 index 0000000..354ac1f --- /dev/null +++ b/env/Lib/site-packages/Crypto/PublicKey/DSA.pyi @@ -0,0 +1,31 @@ +from typing import Dict, Tuple, Callable, Union, Optional + +__all__ = ['generate', 'construct', 'DsaKey', 'import_key' ] + +RNG = Callable[[int], bytes] + +class DsaKey(object): + def __init__(self, key_dict: Dict[str, int]) -> None: ... + def has_private(self) -> bool: ... + def can_encrypt(self) -> bool: ... # legacy + def can_sign(self) -> bool: ... # legacy + def public_key(self) -> DsaKey: ... + def __eq__(self, other: object) -> bool: ... 
+ def __ne__(self, other: object) -> bool: ... + def __getstate__(self) -> None: ... + def domain(self) -> Tuple[int, int, int]: ... + def __repr__(self) -> str: ... + def __getattr__(self, item: str) -> int: ... + def export_key(self, format: Optional[str]="PEM", pkcs8: Optional[bool]=None, passphrase: Optional[str]=None, + protection: Optional[str]=None, randfunc: Optional[RNG]=None) -> bytes: ... + # Backward-compatibility + exportKey = export_key + publickey = public_key + +def generate(bits: int, randfunc: Optional[RNG]=None, domain: Optional[Tuple[int, int, int]]=None) -> DsaKey: ... +def construct(tup: Union[Tuple[int, int, int, int], Tuple[int, int, int, int, int]], consistency_check: Optional[bool]=True) -> DsaKey: ... +def import_key(extern_key: Union[str, bytes], passphrase: Optional[str]=None) -> DsaKey: ... +# Backward compatibility +importKey = import_key + +oid: str diff --git a/env/Lib/site-packages/Crypto/PublicKey/ECC.py b/env/Lib/site-packages/Crypto/PublicKey/ECC.py new file mode 100644 index 0000000..415eced --- /dev/null +++ b/env/Lib/site-packages/Crypto/PublicKey/ECC.py @@ -0,0 +1,1182 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from __future__ import print_function + +import re +import struct +import binascii +from collections import namedtuple + +from Crypto.Util.py3compat import bord, tobytes, tostr, bchr, is_string +from Crypto.Util.number import bytes_to_long, long_to_bytes + +from Crypto.Math.Numbers import Integer +from Crypto.Util.asn1 import (DerObjectId, DerOctetString, DerSequence, + DerBitString) + +from Crypto.PublicKey import (_expand_subject_public_key_info, + _create_subject_public_key_info, + _extract_subject_public_key_info) + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, + SmartPointer, c_size_t, c_uint8_ptr, + c_ulonglong) + +from Crypto.Random import get_random_bytes +from Crypto.Random.random import getrandbits + + +_ec_lib = load_pycryptodome_raw_lib("Crypto.PublicKey._ec_ws", """ +typedef void EcContext; +typedef void EcPoint; +int ec_ws_new_context(EcContext **pec_ctx, + const uint8_t *modulus, + const uint8_t *b, + const uint8_t *order, + size_t len, + uint64_t seed); +void ec_free_context(EcContext *ec_ctx); +int ec_ws_new_point(EcPoint **pecp, + const uint8_t *x, + const uint8_t 
*y, + size_t len, + const EcContext *ec_ctx); +void ec_free_point(EcPoint *ecp); +int ec_ws_get_xy(uint8_t *x, + uint8_t *y, + size_t len, + const EcPoint *ecp); +int ec_ws_double(EcPoint *p); +int ec_ws_add(EcPoint *ecpa, EcPoint *ecpb); +int ec_ws_scalar(EcPoint *ecp, + const uint8_t *k, + size_t len, + uint64_t seed); +int ec_ws_clone(EcPoint **pecp2, const EcPoint *ecp); +int ec_ws_copy(EcPoint *ecp1, const EcPoint *ecp2); +int ec_ws_cmp(const EcPoint *ecp1, const EcPoint *ecp2); +int ec_ws_neg(EcPoint *p); +int ec_ws_normalize(EcPoint *ecp); +int ec_ws_is_pai(EcPoint *ecp); +""") + +_Curve = namedtuple("_Curve", "p b order Gx Gy G modulus_bits oid context desc openssh") +_curves = {} + + +p256_names = ["p256", "NIST P-256", "P-256", "prime256v1", "secp256r1", + "nistp256"] + + +def init_p256(): + p = 0xffffffff00000001000000000000000000000000ffffffffffffffffffffffff + b = 0x5ac635d8aa3a93e7b3ebbd55769886bc651d06b0cc53b0f63bce3c3e27d2604b + order = 0xffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551 + Gx = 0x6b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296 + Gy = 0x4fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5 + + p256_modulus = long_to_bytes(p, 32) + p256_b = long_to_bytes(b, 32) + p256_order = long_to_bytes(order, 32) + + ec_p256_context = VoidPointer() + result = _ec_lib.ec_ws_new_context(ec_p256_context.address_of(), + c_uint8_ptr(p256_modulus), + c_uint8_ptr(p256_b), + c_uint8_ptr(p256_order), + c_size_t(len(p256_modulus)), + c_ulonglong(getrandbits(64)) + ) + if result: + raise ImportError("Error %d initializing P-256 context" % result) + + context = SmartPointer(ec_p256_context.get(), _ec_lib.ec_free_context) + p256 = _Curve(Integer(p), + Integer(b), + Integer(order), + Integer(Gx), + Integer(Gy), + None, + 256, + "1.2.840.10045.3.1.7", # ANSI X9.62 + context, + "NIST P-256", + "ecdsa-sha2-nistp256") + global p256_names + _curves.update(dict.fromkeys(p256_names, p256)) + + +init_p256() +del init_p256 + + 
+p384_names = ["p384", "NIST P-384", "P-384", "prime384v1", "secp384r1", + "nistp384"] + + +def init_p384(): + p = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffeffffffff0000000000000000ffffffff + b = 0xb3312fa7e23ee7e4988e056be3f82d19181d9c6efe8141120314088f5013875ac656398d8a2ed19d2a85c8edd3ec2aef + order = 0xffffffffffffffffffffffffffffffffffffffffffffffffc7634d81f4372ddf581a0db248b0a77aecec196accc52973 + Gx = 0xaa87ca22be8b05378eb1c71ef320ad746e1d3b628ba79b9859f741e082542a385502f25dbf55296c3a545e3872760aB7 + Gy = 0x3617de4a96262c6f5d9e98bf9292dc29f8f41dbd289a147ce9da3113b5f0b8c00a60b1ce1d7e819d7a431d7c90ea0e5F + + p384_modulus = long_to_bytes(p, 48) + p384_b = long_to_bytes(b, 48) + p384_order = long_to_bytes(order, 48) + + ec_p384_context = VoidPointer() + result = _ec_lib.ec_ws_new_context(ec_p384_context.address_of(), + c_uint8_ptr(p384_modulus), + c_uint8_ptr(p384_b), + c_uint8_ptr(p384_order), + c_size_t(len(p384_modulus)), + c_ulonglong(getrandbits(64)) + ) + if result: + raise ImportError("Error %d initializing P-384 context" % result) + + context = SmartPointer(ec_p384_context.get(), _ec_lib.ec_free_context) + p384 = _Curve(Integer(p), + Integer(b), + Integer(order), + Integer(Gx), + Integer(Gy), + None, + 384, + "1.3.132.0.34", # SEC 2 + context, + "NIST P-384", + "ecdsa-sha2-nistp384") + global p384_names + _curves.update(dict.fromkeys(p384_names, p384)) + + +init_p384() +del init_p384 + + +p521_names = ["p521", "NIST P-521", "P-521", "prime521v1", "secp521r1", + "nistp521"] + + +def init_p521(): + p = 0x000001ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff + b = 0x00000051953eb9618e1c9a1f929a21a0b68540eea2da725b99b315f3b8b489918ef109e156193951ec7e937b1652c0bd3bb1bf073573df883d2c34f1ef451fd46b503f00 + order = 0x000001fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffa51868783bf2f966b7fcc0148f709a5d03bb5c9b8899c47aebb6fb71e91386409 + Gx = 
0x000000c6858e06b70404e9cd9e3ecb662395b4429c648139053fb521f828af606b4d3dbaa14b5e77efe75928fe1dc127a2ffa8de3348b3c1856a429bf97e7e31c2e5bd66 + Gy = 0x0000011839296a789a3bc0045c8a5fb42c7d1bd998f54449579b446817afbd17273e662c97ee72995ef42640c550b9013fad0761353c7086a272c24088be94769fd16650 + + p521_modulus = long_to_bytes(p, 66) + p521_b = long_to_bytes(b, 66) + p521_order = long_to_bytes(order, 66) + + ec_p521_context = VoidPointer() + result = _ec_lib.ec_ws_new_context(ec_p521_context.address_of(), + c_uint8_ptr(p521_modulus), + c_uint8_ptr(p521_b), + c_uint8_ptr(p521_order), + c_size_t(len(p521_modulus)), + c_ulonglong(getrandbits(64)) + ) + if result: + raise ImportError("Error %d initializing P-521 context" % result) + + context = SmartPointer(ec_p521_context.get(), _ec_lib.ec_free_context) + p521 = _Curve(Integer(p), + Integer(b), + Integer(order), + Integer(Gx), + Integer(Gy), + None, + 521, + "1.3.132.0.35", # SEC 2 + context, + "NIST P-521", + "ecdsa-sha2-nistp521") + global p521_names + _curves.update(dict.fromkeys(p521_names, p521)) + + +init_p521() +del init_p521 + + +class UnsupportedEccFeature(ValueError): + pass + + +class EccPoint(object): + """A class to abstract a point over an Elliptic Curve. 
+ + The class support special methods for: + + * Adding two points: ``R = S + T`` + * In-place addition: ``S += T`` + * Negating a point: ``R = -T`` + * Comparing two points: ``if S == T: ...`` + * Multiplying a point by a scalar: ``R = S*k`` + * In-place multiplication by a scalar: ``T *= k`` + + :ivar x: The affine X-coordinate of the ECC point + :vartype x: integer + + :ivar y: The affine Y-coordinate of the ECC point + :vartype y: integer + + :ivar xy: The tuple with X- and Y- coordinates + """ + + def __init__(self, x, y, curve="p256"): + + try: + self._curve = _curves[curve] + except KeyError: + raise ValueError("Unknown curve name %s" % str(curve)) + self._curve_name = curve + + modulus_bytes = self.size_in_bytes() + context = self._curve.context + + xb = long_to_bytes(x, modulus_bytes) + yb = long_to_bytes(y, modulus_bytes) + if len(xb) != modulus_bytes or len(yb) != modulus_bytes: + raise ValueError("Incorrect coordinate length") + + self._point = VoidPointer() + result = _ec_lib.ec_ws_new_point(self._point.address_of(), + c_uint8_ptr(xb), + c_uint8_ptr(yb), + c_size_t(modulus_bytes), + context.get()) + if result: + if result == 15: + raise ValueError("The EC point does not belong to the curve") + raise ValueError("Error %d while instantiating an EC point" % result) + + # Ensure that object disposal of this Python object will (eventually) + # free the memory allocated by the raw library for the EC point + self._point = SmartPointer(self._point.get(), + _ec_lib.ec_free_point) + + def set(self, point): + self._point = VoidPointer() + result = _ec_lib.ec_ws_clone(self._point.address_of(), + point._point.get()) + if result: + raise ValueError("Error %d while cloning an EC point" % result) + + self._point = SmartPointer(self._point.get(), + _ec_lib.ec_free_point) + return self + + def __eq__(self, point): + return 0 == _ec_lib.ec_ws_cmp(self._point.get(), point._point.get()) + + def __neg__(self): + np = self.copy() + result = _ec_lib.ec_ws_neg(np._point.get()) 
+ if result: + raise ValueError("Error %d while inverting an EC point" % result) + return np + + def copy(self): + """Return a copy of this point.""" + x, y = self.xy + np = EccPoint(x, y, self._curve_name) + return np + + def is_point_at_infinity(self): + """``True`` if this is the point-at-infinity.""" + return self.xy == (0, 0) + + def point_at_infinity(self): + """Return the point-at-infinity for the curve this point is on.""" + return EccPoint(0, 0, self._curve_name) + + @property + def x(self): + return self.xy[0] + + @property + def y(self): + return self.xy[1] + + @property + def xy(self): + modulus_bytes = self.size_in_bytes() + xb = bytearray(modulus_bytes) + yb = bytearray(modulus_bytes) + result = _ec_lib.ec_ws_get_xy(c_uint8_ptr(xb), + c_uint8_ptr(yb), + c_size_t(modulus_bytes), + self._point.get()) + if result: + raise ValueError("Error %d while encoding an EC point" % result) + + return (Integer(bytes_to_long(xb)), Integer(bytes_to_long(yb))) + + def size_in_bytes(self): + """Size of each coordinate, in bytes.""" + return (self.size_in_bits() + 7) // 8 + + def size_in_bits(self): + """Size of each coordinate, in bits.""" + return self._curve.modulus_bits + + def double(self): + """Double this point (in-place operation). 
+ + :Return: + :class:`EccPoint` : this same object (to enable chaining) + """ + + result = _ec_lib.ec_ws_double(self._point.get()) + if result: + raise ValueError("Error %d while doubling an EC point" % result) + return self + + def __iadd__(self, point): + """Add a second point to this one""" + + result = _ec_lib.ec_ws_add(self._point.get(), point._point.get()) + if result: + if result == 16: + raise ValueError("EC points are not on the same curve") + raise ValueError("Error %d while adding two EC points" % result) + return self + + def __add__(self, point): + """Return a new point, the addition of this one and another""" + + np = self.copy() + np += point + return np + + def __imul__(self, scalar): + """Multiply this point by a scalar""" + + if scalar < 0: + raise ValueError("Scalar multiplication is only defined for non-negative integers") + sb = long_to_bytes(scalar) + result = _ec_lib.ec_ws_scalar(self._point.get(), + c_uint8_ptr(sb), + c_size_t(len(sb)), + c_ulonglong(getrandbits(64))) + if result: + raise ValueError("Error %d during scalar multiplication" % result) + return self + + def __mul__(self, scalar): + """Return a new point, the scalar product of this one""" + + np = self.copy() + np *= scalar + return np + + def __rmul__(self, left_hand): + return self.__mul__(left_hand) + + +# Last piece of initialization +p256_G = EccPoint(_curves['p256'].Gx, _curves['p256'].Gy, "p256") +p256 = _curves['p256']._replace(G=p256_G) +_curves.update(dict.fromkeys(p256_names, p256)) +del p256_G, p256, p256_names + +p384_G = EccPoint(_curves['p384'].Gx, _curves['p384'].Gy, "p384") +p384 = _curves['p384']._replace(G=p384_G) +_curves.update(dict.fromkeys(p384_names, p384)) +del p384_G, p384, p384_names + +p521_G = EccPoint(_curves['p521'].Gx, _curves['p521'].Gy, "p521") +p521 = _curves['p521']._replace(G=p521_G) +_curves.update(dict.fromkeys(p521_names, p521)) +del p521_G, p521, p521_names + + +class EccKey(object): + r"""Class defining an ECC key. 
+ Do not instantiate directly. + Use :func:`generate`, :func:`construct` or :func:`import_key` instead. + + :ivar curve: The name of the ECC as defined in :numref:`curve_names`. + :vartype curve: string + + :ivar pointQ: an ECC point representating the public component + :vartype pointQ: :class:`EccPoint` + + :ivar d: A scalar representating the private component + :vartype d: integer + """ + + def __init__(self, **kwargs): + """Create a new ECC key + + Keywords: + curve : string + It must be *"p256"*, *"P-256"*, *"prime256v1"* or *"secp256r1"*. + d : integer + Only for a private key. It must be in the range ``[1..order-1]``. + point : EccPoint + Mandatory for a public key. If provided for a private key, + the implementation will NOT check whether it matches ``d``. + """ + + kwargs_ = dict(kwargs) + curve_name = kwargs_.pop("curve", None) + self._d = kwargs_.pop("d", None) + self._point = kwargs_.pop("point", None) + if kwargs_: + raise TypeError("Unknown parameters: " + str(kwargs_)) + + if curve_name not in _curves: + raise ValueError("Unsupported curve (%s)", curve_name) + self._curve = _curves[curve_name] + + if self._d is None: + if self._point is None: + raise ValueError("Either private or public ECC component must be specified, not both") + else: + self._d = Integer(self._d) + if not 1 <= self._d < self._curve.order: + raise ValueError("Invalid ECC private component") + + self.curve = self._curve.desc + + def __eq__(self, other): + if other.has_private() != self.has_private(): + return False + + return other.pointQ == self.pointQ + + def __repr__(self): + if self.has_private(): + extra = ", d=%d" % int(self._d) + else: + extra = "" + x, y = self.pointQ.xy + return "EccKey(curve='%s', point_x=%d, point_y=%d%s)" % (self._curve.desc, x, y, extra) + + def has_private(self): + """``True`` if this key can be used for making signatures or decrypting data.""" + + return self._d is not None + + def _sign(self, z, k): + assert 0 < k < self._curve.order + + order = 
self._curve.order + blind = Integer.random_range(min_inclusive=1, + max_exclusive=order) + + blind_d = self._d * blind + inv_blind_k = (blind * k).inverse(order) + + r = (self._curve.G * k).x % order + s = inv_blind_k * (blind * z + blind_d * r) % order + return (r, s) + + def _verify(self, z, rs): + order = self._curve.order + sinv = rs[1].inverse(order) + point1 = self._curve.G * ((sinv * z) % order) + point2 = self.pointQ * ((sinv * rs[0]) % order) + return (point1 + point2).x == rs[0] + + @property + def d(self): + if not self.has_private(): + raise ValueError("This is not a private ECC key") + return self._d + + @property + def pointQ(self): + if self._point is None: + self._point = self._curve.G * self._d + return self._point + + def public_key(self): + """A matching ECC public key. + + Returns: + a new :class:`EccKey` object + """ + + return EccKey(curve=self._curve.desc, point=self.pointQ) + + def _export_subjectPublicKeyInfo(self, compress): + + # See 2.2 in RFC5480 and 2.3.3 in SEC1 + # The first byte is: + # - 0x02: compressed, only X-coordinate, Y-coordinate is even + # - 0x03: compressed, only X-coordinate, Y-coordinate is odd + # - 0x04: uncompressed, X-coordinate is followed by Y-coordinate + # + # PAI is in theory encoded as 0x00. 
+ + modulus_bytes = self.pointQ.size_in_bytes() + + if compress: + first_byte = 2 + self.pointQ.y.is_odd() + public_key = (bchr(first_byte) + + self.pointQ.x.to_bytes(modulus_bytes)) + else: + public_key = (b'\x04' + + self.pointQ.x.to_bytes(modulus_bytes) + + self.pointQ.y.to_bytes(modulus_bytes)) + + unrestricted_oid = "1.2.840.10045.2.1" + return _create_subject_public_key_info(unrestricted_oid, + public_key, + DerObjectId(self._curve.oid)) + + def _export_private_der(self, include_ec_params=True): + + assert self.has_private() + + # ECPrivateKey ::= SEQUENCE { + # version INTEGER { ecPrivkeyVer1(1) } (ecPrivkeyVer1), + # privateKey OCTET STRING, + # parameters [0] ECParameters {{ NamedCurve }} OPTIONAL, + # publicKey [1] BIT STRING OPTIONAL + # } + + # Public key - uncompressed form + modulus_bytes = self.pointQ.size_in_bytes() + public_key = (b'\x04' + + self.pointQ.x.to_bytes(modulus_bytes) + + self.pointQ.y.to_bytes(modulus_bytes)) + + seq = [1, + DerOctetString(self.d.to_bytes(modulus_bytes)), + DerObjectId(self._curve.oid, explicit=0), + DerBitString(public_key, explicit=1)] + + if not include_ec_params: + del seq[2] + + return DerSequence(seq).encode() + + def _export_pkcs8(self, **kwargs): + from Crypto.IO import PKCS8 + + if kwargs.get('passphrase', None) is not None and 'protection' not in kwargs: + raise ValueError("At least the 'protection' parameter should be present") + + unrestricted_oid = "1.2.840.10045.2.1" + private_key = self._export_private_der(include_ec_params=False) + result = PKCS8.wrap(private_key, + unrestricted_oid, + key_params=DerObjectId(self._curve.oid), + **kwargs) + return result + + def _export_public_pem(self, compress): + from Crypto.IO import PEM + + encoded_der = self._export_subjectPublicKeyInfo(compress) + return PEM.encode(encoded_der, "PUBLIC KEY") + + def _export_private_pem(self, passphrase, **kwargs): + from Crypto.IO import PEM + + encoded_der = self._export_private_der() + return PEM.encode(encoded_der, "EC PRIVATE 
KEY", passphrase, **kwargs) + + def _export_private_clear_pkcs8_in_clear_pem(self): + from Crypto.IO import PEM + + encoded_der = self._export_pkcs8() + return PEM.encode(encoded_der, "PRIVATE KEY") + + def _export_private_encrypted_pkcs8_in_clear_pem(self, passphrase, **kwargs): + from Crypto.IO import PEM + + assert passphrase + if 'protection' not in kwargs: + raise ValueError("At least the 'protection' parameter should be present") + encoded_der = self._export_pkcs8(passphrase=passphrase, **kwargs) + return PEM.encode(encoded_der, "ENCRYPTED PRIVATE KEY") + + def _export_openssh(self, compress): + if self.has_private(): + raise ValueError("Cannot export OpenSSH private keys") + + desc = self._curve.openssh + modulus_bytes = self.pointQ.size_in_bytes() + + if compress: + first_byte = 2 + self.pointQ.y.is_odd() + public_key = (bchr(first_byte) + + self.pointQ.x.to_bytes(modulus_bytes)) + else: + public_key = (b'\x04' + + self.pointQ.x.to_bytes(modulus_bytes) + + self.pointQ.y.to_bytes(modulus_bytes)) + + middle = desc.split("-")[2] + comps = (tobytes(desc), tobytes(middle), public_key) + blob = b"".join([struct.pack(">I", len(x)) + x for x in comps]) + return desc + " " + tostr(binascii.b2a_base64(blob)) + + def export_key(self, **kwargs): + """Export this ECC key. + + Args: + format (string): + The format to use for encoding the key: + + - ``'DER'``. The key will be encoded in ASN.1 DER format (binary). + For a public key, the ASN.1 ``subjectPublicKeyInfo`` structure + defined in `RFC5480`_ will be used. + For a private key, the ASN.1 ``ECPrivateKey`` structure defined + in `RFC5915`_ is used instead (possibly within a PKCS#8 envelope, + see the ``use_pkcs8`` flag below). + - ``'PEM'``. The key will be encoded in a PEM_ envelope (ASCII). + - ``'OpenSSH'``. The key will be encoded in the OpenSSH_ format + (ASCII, public keys only). + + passphrase (byte string or string): + The passphrase to use for protecting the private key. 
+ + use_pkcs8 (boolean): + Only relevant for private keys. + + If ``True`` (default and recommended), the `PKCS#8`_ representation + will be used. + + If ``False``, the much weaker `PEM encryption`_ mechanism will be used. + + protection (string): + When a private key is exported with password-protection + and PKCS#8 (both ``DER`` and ``PEM`` formats), this parameter MUST be + present and be a valid algorithm supported by :mod:`Crypto.IO.PKCS8`. + It is recommended to use ``PBKDF2WithHMAC-SHA1AndAES128-CBC``. + + compress (boolean): + If ``True``, a more compact representation of the public key + with the X-coordinate only is used. + + If ``False`` (default), the full public key will be exported. + + .. warning:: + If you don't provide a passphrase, the private key will be + exported in the clear! + + .. note:: + When exporting a private key with password-protection and `PKCS#8`_ + (both ``DER`` and ``PEM`` formats), any extra parameters + to ``export_key()`` will be passed to :mod:`Crypto.IO.PKCS8`. + + .. _PEM: http://www.ietf.org/rfc/rfc1421.txt + .. _`PEM encryption`: http://www.ietf.org/rfc/rfc1423.txt + .. _`PKCS#8`: http://www.ietf.org/rfc/rfc5208.txt + .. _OpenSSH: http://www.openssh.com/txt/rfc5656.txt + .. _RFC5480: https://tools.ietf.org/html/rfc5480 + .. _RFC5915: http://www.ietf.org/rfc/rfc5915.txt + + Returns: + A multi-line string (for PEM and OpenSSH) or bytes (for DER) with the encoded key. 
+ """ + + args = kwargs.copy() + ext_format = args.pop("format") + if ext_format not in ("PEM", "DER", "OpenSSH"): + raise ValueError("Unknown format '%s'" % ext_format) + + compress = args.pop("compress", False) + + if self.has_private(): + passphrase = args.pop("passphrase", None) + if is_string(passphrase): + passphrase = tobytes(passphrase) + if not passphrase: + raise ValueError("Empty passphrase") + use_pkcs8 = args.pop("use_pkcs8", True) + if ext_format == "PEM": + if use_pkcs8: + if passphrase: + return self._export_private_encrypted_pkcs8_in_clear_pem(passphrase, **args) + else: + return self._export_private_clear_pkcs8_in_clear_pem() + else: + return self._export_private_pem(passphrase, **args) + elif ext_format == "DER": + # DER + if passphrase and not use_pkcs8: + raise ValueError("Private keys can only be encrpyted with DER using PKCS#8") + if use_pkcs8: + return self._export_pkcs8(passphrase=passphrase, **args) + else: + return self._export_private_der() + else: + raise ValueError("Private keys cannot be exported in OpenSSH format") + else: # Public key + if args: + raise ValueError("Unexpected parameters: '%s'" % args) + if ext_format == "PEM": + return self._export_public_pem(compress) + elif ext_format == "DER": + return self._export_subjectPublicKeyInfo(compress) + else: + return self._export_openssh(compress) + + +def generate(**kwargs): + """Generate a new private key on the given curve. + + Args: + + curve (string): + Mandatory. It must be a curve name defined in :numref:`curve_names`. + + randfunc (callable): + Optional. The RNG to read randomness from. + If ``None``, :func:`Crypto.Random.get_random_bytes` is used. 
+ """ + + curve_name = kwargs.pop("curve") + curve = _curves[curve_name] + randfunc = kwargs.pop("randfunc", get_random_bytes) + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + d = Integer.random_range(min_inclusive=1, + max_exclusive=curve.order, + randfunc=randfunc) + + return EccKey(curve=curve_name, d=d) + + +def construct(**kwargs): + """Build a new ECC key (private or public) starting + from some base components. + + Args: + + curve (string): + Mandatory. It must be a curve name defined in :numref:`curve_names`. + + d (integer): + Only for a private key. It must be in the range ``[1..order-1]``. + + point_x (integer): + Mandatory for a public key. X coordinate (affine) of the ECC point. + + point_y (integer): + Mandatory for a public key. Y coordinate (affine) of the ECC point. + + Returns: + :class:`EccKey` : a new ECC key object + """ + + curve_name = kwargs["curve"] + curve = _curves[curve_name] + point_x = kwargs.pop("point_x", None) + point_y = kwargs.pop("point_y", None) + + if "point" in kwargs: + raise TypeError("Unknown keyword: point") + + if None not in (point_x, point_y): + # ValueError is raised if the point is not on the curve + kwargs["point"] = EccPoint(point_x, point_y, curve_name) + + # Validate that the private key matches the public one + d = kwargs.get("d", None) + if d is not None and "point" in kwargs: + pub_key = curve.G * d + if pub_key.xy != (point_x, point_y): + raise ValueError("Private and public ECC keys do not match") + + return EccKey(**kwargs) + + +def _import_public_der(curve_oid, ec_point): + """Convert an encoded EC point into an EccKey object + + curve_name: string with the OID of the curve + ec_point: byte string with the EC point (not DER encoded) + + """ + + for curve_name, curve in _curves.items(): + if curve.oid == curve_oid: + break + else: + raise UnsupportedEccFeature("Unsupported ECC curve (OID: %s)" % curve_oid) + + # See 2.2 in RFC5480 and 2.3.3 in SEC1 + # The first byte is: + # - 0x02: 
compressed, only X-coordinate, Y-coordinate is even + # - 0x03: compressed, only X-coordinate, Y-coordinate is odd + # - 0x04: uncompressed, X-coordinate is followed by Y-coordinate + # + # PAI is in theory encoded as 0x00. + + modulus_bytes = curve.p.size_in_bytes() + point_type = bord(ec_point[0]) + + # Uncompressed point + if point_type == 0x04: + if len(ec_point) != (1 + 2 * modulus_bytes): + raise ValueError("Incorrect EC point length") + x = Integer.from_bytes(ec_point[1:modulus_bytes+1]) + y = Integer.from_bytes(ec_point[modulus_bytes+1:]) + # Compressed point + elif point_type in (0x02, 0x3): + if len(ec_point) != (1 + modulus_bytes): + raise ValueError("Incorrect EC point length") + x = Integer.from_bytes(ec_point[1:]) + y = (x**3 - x*3 + curve.b).sqrt(curve.p) # Short Weierstrass + if point_type == 0x02 and y.is_odd(): + y = curve.p - y + if point_type == 0x03 and y.is_even(): + y = curve.p - y + else: + raise ValueError("Incorrect EC point encoding") + + return construct(curve=curve_name, point_x=x, point_y=y) + + +def _import_subjectPublicKeyInfo(encoded, *kwargs): + """Convert a subjectPublicKeyInfo into an EccKey object""" + + # See RFC5480 + + # Parse the generic subjectPublicKeyInfo structure + oid, ec_point, params = _expand_subject_public_key_info(encoded) + + # ec_point must be an encoded OCTET STRING + # params is encoded ECParameters + + # We accept id-ecPublicKey, id-ecDH, id-ecMQV without making any + # distiction for now. 
+ + # Restrictions can be captured in the key usage certificate + # extension + unrestricted_oid = "1.2.840.10045.2.1" + ecdh_oid = "1.3.132.1.12" + ecmqv_oid = "1.3.132.1.13" + + if oid not in (unrestricted_oid, ecdh_oid, ecmqv_oid): + raise UnsupportedEccFeature("Unsupported ECC purpose (OID: %s)" % oid) + + # Parameters are mandatory for all three types + if not params: + raise ValueError("Missing ECC parameters") + + # ECParameters ::= CHOICE { + # namedCurve OBJECT IDENTIFIER + # -- implicitCurve NULL + # -- specifiedCurve SpecifiedECDomain + # } + # + # implicitCurve and specifiedCurve are not supported (as per RFC) + curve_oid = DerObjectId().decode(params).value + + return _import_public_der(curve_oid, ec_point) + + +def _import_private_der(encoded, passphrase, curve_oid=None): + + # See RFC5915 https://tools.ietf.org/html/rfc5915 + # + # ECPrivateKey ::= SEQUENCE { + # version INTEGER { ecPrivkeyVer1(1) } (ecPrivkeyVer1), + # privateKey OCTET STRING, + # parameters [0] ECParameters {{ NamedCurve }} OPTIONAL, + # publicKey [1] BIT STRING OPTIONAL + # } + + private_key = DerSequence().decode(encoded, nr_elements=(3, 4)) + if private_key[0] != 1: + raise ValueError("Incorrect ECC private key version") + + try: + parameters = DerObjectId(explicit=0).decode(private_key[2]).value + if curve_oid is not None and parameters != curve_oid: + raise ValueError("Curve mismatch") + curve_oid = parameters + except ValueError: + pass + + if curve_oid is None: + raise ValueError("No curve found") + + for curve_name, curve in _curves.items(): + if curve.oid == curve_oid: + break + else: + raise UnsupportedEccFeature("Unsupported ECC curve (OID: %s)" % curve_oid) + + scalar_bytes = DerOctetString().decode(private_key[1]).payload + modulus_bytes = curve.p.size_in_bytes() + if len(scalar_bytes) != modulus_bytes: + raise ValueError("Private key is too small") + d = Integer.from_bytes(scalar_bytes) + + # Decode public key (if any) + if len(private_key) == 4: + public_key_enc = 
def _import_pkcs8(encoded, passphrase):
    """Convert a (possibly encrypted) PKCS#8 blob into an EccKey object."""
    from Crypto.IO import PKCS8

    # RFC5915, Section 1: a PKCS#8-wrapped EC key carries one of the three
    # ECC algorithm OIDs (with the named curve as parameters) and an
    # ECPrivateKey structure as the private-key OCTET STRING.
    algo_oid, private_key, params = PKCS8.unwrap(encoded, passphrase)

    # id-ecPublicKey, id-ecDH and id-ecMQV are accepted interchangeably,
    # without making any distinction for now.
    accepted_oids = (
        "1.2.840.10045.2.1",    # unrestricted (id-ecPublicKey)
        "1.3.132.1.12",         # id-ecDH
        "1.3.132.1.13",         # id-ecMQV
    )
    if algo_oid not in accepted_oids:
        raise UnsupportedEccFeature("Unsupported ECC purpose (OID: %s)" % algo_oid)

    curve_oid = DerObjectId().decode(params).value

    return _import_private_der(private_key, passphrase, curve_oid)
def _import_openssh_public(encoded):
    """Convert an OpenSSH one-line public key into an EccKey object."""

    # The second whitespace-separated field is the base64-encoded blob.
    blob = binascii.a2b_base64(encoded.split(b' ')[1])

    # The blob is a sequence of fields, each prefixed by its length as a
    # big-endian uint32 (SSH wire "string" encoding).
    fields = []
    remainder = blob
    while len(remainder) > 4:
        (length,) = struct.unpack(">I", remainder[:4])
        fields.append(remainder[4:4 + length])
        remainder = remainder[4 + length:]

    # Field 1 names the curve (e.g. b"nistp256"); match it against the
    # trailing component of each curve's "ecdsa-sha2-<name>" identifier.
    matched = None
    for _, candidate in _curves.items():
        if fields[1] == tobytes(candidate.openssh.split("-")[2]):
            matched = candidate
            break
    if matched is None:
        raise ValueError("Unsupported ECC curve")

    # Field 2 is the EC point itself (SEC1 encoding)
    return _import_public_der(matched.oid, fields[2])
+ + An ECC **public** key can be: + + - An X.509 certificate, binary (DER) or ASCII (PEM) + - An X.509 ``subjectPublicKeyInfo``, binary (DER) or ASCII (PEM) + - An OpenSSH line (e.g. the content of ``~/.ssh/id_ecdsa``, ASCII) + + An ECC **private** key can be: + + - In binary format (DER, see section 3 of `RFC5915`_ or `PKCS#8`_) + - In ASCII format (PEM or `OpenSSH 6.5+`_) + + Private keys can be in the clear or password-protected. + + For details about the PEM encoding, see `RFC1421`_/`RFC1423`_. + + passphrase (byte string): + The passphrase to use for decrypting a private key. + Encryption may be applied protected at the PEM level or at the PKCS#8 level. + This parameter is ignored if the key in input is not encrypted. + + Returns: + :class:`EccKey` : a new ECC key object + + Raises: + ValueError: when the given key cannot be parsed (possibly because + the pass phrase is wrong). + + .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt + .. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt + .. _RFC5915: http://www.ietf.org/rfc/rfc5915.txt + .. _`PKCS#8`: http://www.ietf.org/rfc/rfc5208.txt + .. _`OpenSSH 6.5+`: https://flak.tedunangst.com/post/new-openssh-key-format-and-bcrypt-pbkdf + """ + + from Crypto.IO import PEM + + encoded = tobytes(encoded) + if passphrase is not None: + passphrase = tobytes(passphrase) + + # PEM + if encoded.startswith(b'-----BEGIN OPENSSH PRIVATE KEY'): + text_encoded = tostr(encoded) + openssh_encoded, marker, enc_flag = PEM.decode(text_encoded, passphrase) + result = _import_openssh_private_ecc(openssh_encoded, passphrase) + return result + + elif encoded.startswith(b'-----'): + + text_encoded = tostr(encoded) + + # Remove any EC PARAMETERS section + # Ignore its content because the curve type must be already given in the key + ecparams_start = "-----BEGIN EC PARAMETERS-----" + ecparams_end = "-----END EC PARAMETERS-----" + text_encoded = re.sub(ecparams_start + ".*?" 
+ ecparams_end, "", + text_encoded, + flags=re.DOTALL) + + der_encoded, marker, enc_flag = PEM.decode(text_encoded, passphrase) + if enc_flag: + passphrase = None + try: + result = _import_der(der_encoded, passphrase) + except UnsupportedEccFeature as uef: + raise uef + except ValueError: + raise ValueError("Invalid DER encoding inside the PEM file") + return result + + # OpenSSH + if encoded.startswith(b'ecdsa-sha2-'): + return _import_openssh_public(encoded) + + # DER + if len(encoded) > 0 and bord(encoded[0]) == 0x30: + return _import_der(encoded, passphrase) + + raise ValueError("ECC key format is not supported") + + +if __name__ == "__main__": + + import time + + d = 0xc51e4753afdec1e6b6c6a5b992f43f8dd0c7a8933072708b6522468b2ffb06fd + + point = _curves['p256'].G.copy() + count = 3000 + + start = time.time() + for x in range(count): + pointX = point * d + print("(P-256 G)", (time.time() - start) / count * 1000, "ms") + + start = time.time() + for x in range(count): + pointX = pointX * d + print("(P-256 arbitrary point)", (time.time() - start) / count * 1000, "ms") diff --git a/env/Lib/site-packages/Crypto/PublicKey/ECC.pyi b/env/Lib/site-packages/Crypto/PublicKey/ECC.pyi new file mode 100644 index 0000000..acf3164 --- /dev/null +++ b/env/Lib/site-packages/Crypto/PublicKey/ECC.pyi @@ -0,0 +1,62 @@ +from typing import Union, Callable, Optional, NamedTuple, List, Tuple, Dict, NamedTuple, Any + +from Crypto.Math.Numbers import Integer + +RNG = Callable[[int], bytes] + +class UnsupportedEccFeature(ValueError): ... +class EccPoint(object): + def __init__(self, x: Union[int, Integer], y: Union[int, Integer], curve: Optional[str] = ...) -> None: ... + def set(self, point: EccPoint) -> EccPoint: ... + def __eq__(self, point: object) -> bool: ... + def __neg__(self) -> EccPoint: ... + def copy(self) -> EccPoint: ... + def is_point_at_infinity(self) -> bool: ... + def point_at_infinity(self) -> EccPoint: ... + @property + def x(self) -> int: ... 
+ @property + def y(self) -> int: ... + @property + def xy(self) -> Tuple[int, int]: ... + def size_in_bytes(self) -> int: ... + def size_in_bits(self) -> int: ... + def double(self) -> EccPoint: ... + def __iadd__(self, point: EccPoint) -> EccPoint: ... + def __add__(self, point: EccPoint) -> EccPoint: ... + def __imul__(self, scalar: int) -> EccPoint: ... + def __mul__(self, scalar: int) -> EccPoint: ... + +class EccKey(object): + curve: str + def __init__(self, *, curve: str = ..., d: int = ..., point: EccPoint = ...) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __repr__(self) -> str: ... + def has_private(self) -> bool: ... + @property + def d(self) -> int: ... + @property + def pointQ(self) -> EccPoint: ... + def public_key(self) -> EccKey: ... + def export_key(self, **kwargs: Union[str, bytes, bool]) -> str: ... + + +_Curve = NamedTuple("_Curve", [('p', Integer), + ('order', Integer), + ('b', Integer), + ('Gx', Integer), + ('Gy', Integer), + ('G', EccPoint), + ('modulus_bits', int), + ('oid', str), + ('context', Any), + ('desc', str), + ('openssh', str), + ]) + +_curves : Dict[str, _Curve] + + +def generate(**kwargs: Union[str, RNG]) -> EccKey: ... +def construct(**kwargs: Union[str, int]) -> EccKey: ... +def import_key(encoded: Union[bytes, str], passphrase: Optional[str]=None) -> EccKey: ... diff --git a/env/Lib/site-packages/Crypto/PublicKey/ElGamal.py b/env/Lib/site-packages/Crypto/PublicKey/ElGamal.py new file mode 100644 index 0000000..3b10840 --- /dev/null +++ b/env/Lib/site-packages/Crypto/PublicKey/ElGamal.py @@ -0,0 +1,286 @@ +# +# ElGamal.py : ElGamal encryption/decryption and signatures +# +# Part of the Python Cryptography Toolkit +# +# Originally written by: A.M. Kuchling +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__all__ = ['generate', 'construct', 'ElGamalKey'] + +from Crypto import Random +from Crypto.Math.Primality import ( generate_probable_safe_prime, + test_probable_prime, COMPOSITE ) +from Crypto.Math.Numbers import Integer + +# Generate an ElGamal key with N bits +def generate(bits, randfunc): + """Randomly generate a fresh, new ElGamal key. + + The key will be safe for use for both encryption and signature + (although it should be used for **only one** purpose). + + Args: + bits (int): + Key length, or size (in bits) of the modulus *p*. + The recommended value is 2048. + randfunc (callable): + Random number generation function; it should accept + a single integer *N* and return a string of random + *N* random bytes. + + Return: + an :class:`ElGamalKey` object + """ + + obj=ElGamalKey() + + # Generate a safe prime p + # See Algorithm 4.86 in Handbook of Applied Cryptography + obj.p = generate_probable_safe_prime(exact_bits=bits, randfunc=randfunc) + q = (obj.p - 1) >> 1 + + # Generate generator g + while 1: + # Choose a square residue; it will generate a cyclic group of order q. 
+ obj.g = pow(Integer.random_range(min_inclusive=2, + max_exclusive=obj.p, + randfunc=randfunc), 2, obj.p) + + # We must avoid g=2 because of Bleichenbacher's attack described + # in "Generating ElGamal signatures without knowning the secret key", + # 1996 + if obj.g in (1, 2): + continue + + # Discard g if it divides p-1 because of the attack described + # in Note 11.67 (iii) in HAC + if (obj.p - 1) % obj.g == 0: + continue + + # g^{-1} must not divide p-1 because of Khadir's attack + # described in "Conditions of the generator for forging ElGamal + # signature", 2011 + ginv = obj.g.inverse(obj.p) + if (obj.p - 1) % ginv == 0: + continue + + # Found + break + + # Generate private key x + obj.x = Integer.random_range(min_inclusive=2, + max_exclusive=obj.p-1, + randfunc=randfunc) + # Generate public key y + obj.y = pow(obj.g, obj.x, obj.p) + return obj + +def construct(tup): + r"""Construct an ElGamal key from a tuple of valid ElGamal components. + + The modulus *p* must be a prime. + The following conditions must apply: + + .. math:: + + \begin{align} + &1 < g < p-1 \\ + &g^{p-1} = 1 \text{ mod } 1 \\ + &1 < x < p-1 \\ + &g^x = y \text{ mod } p + \end{align} + + Args: + tup (tuple): + A tuple with either 3 or 4 integers, + in the following order: + + 1. Modulus (*p*). + 2. Generator (*g*). + 3. Public key (*y*). + 4. Private key (*x*). Optional. + + Raises: + ValueError: when the key being imported fails the most basic ElGamal validity checks. 
+ + Returns: + an :class:`ElGamalKey` object + """ + + obj=ElGamalKey() + if len(tup) not in [3,4]: + raise ValueError('argument for construct() wrong length') + for i in range(len(tup)): + field = obj._keydata[i] + setattr(obj, field, Integer(tup[i])) + + fmt_error = test_probable_prime(obj.p) == COMPOSITE + fmt_error |= obj.g<=1 or obj.g>=obj.p + fmt_error |= pow(obj.g, obj.p-1, obj.p)!=1 + fmt_error |= obj.y<1 or obj.y>=obj.p + if len(tup)==4: + fmt_error |= obj.x<=1 or obj.x>=obj.p + fmt_error |= pow(obj.g, obj.x, obj.p)!=obj.y + + if fmt_error: + raise ValueError("Invalid ElGamal key components") + + return obj + +class ElGamalKey(object): + r"""Class defining an ElGamal key. + Do not instantiate directly. + Use :func:`generate` or :func:`construct` instead. + + :ivar p: Modulus + :vartype d: integer + + :ivar g: Generator + :vartype e: integer + + :ivar y: Public key component + :vartype y: integer + + :ivar x: Private key component + :vartype x: integer + """ + + #: Dictionary of ElGamal parameters. + #: + #: A public key will only have the following entries: + #: + #: - **y**, the public key. + #: - **g**, the generator. + #: - **p**, the modulus. + #: + #: A private key will also have: + #: + #: - **x**, the private key. 
+ _keydata=['p', 'g', 'y', 'x'] + + def __init__(self, randfunc=None): + if randfunc is None: + randfunc = Random.new().read + self._randfunc = randfunc + + def _encrypt(self, M, K): + a=pow(self.g, K, self.p) + b=( pow(self.y, K, self.p)*M ) % self.p + return [int(a), int(b)] + + def _decrypt(self, M): + if (not hasattr(self, 'x')): + raise TypeError('Private key not available in this object') + r = Integer.random_range(min_inclusive=2, + max_exclusive=self.p-1, + randfunc=self._randfunc) + a_blind = (pow(self.g, r, self.p) * M[0]) % self.p + ax=pow(a_blind, self.x, self.p) + plaintext_blind = (ax.inverse(self.p) * M[1] ) % self.p + plaintext = (plaintext_blind * pow(self.y, r, self.p)) % self.p + return int(plaintext) + + def _sign(self, M, K): + if (not hasattr(self, 'x')): + raise TypeError('Private key not available in this object') + p1=self.p-1 + K = Integer(K) + if (K.gcd(p1)!=1): + raise ValueError('Bad K value: GCD(K,p-1)!=1') + a=pow(self.g, K, self.p) + t=(Integer(M)-self.x*a) % p1 + while t<0: t=t+p1 + b=(t*K.inverse(p1)) % p1 + return [int(a), int(b)] + + def _verify(self, M, sig): + sig = [Integer(x) for x in sig] + if sig[0]<1 or sig[0]>self.p-1: + return 0 + v1=pow(self.y, sig[0], self.p) + v1=(v1*pow(sig[0], sig[1], self.p)) % self.p + v2=pow(self.g, M, self.p) + if v1==v2: + return 1 + return 0 + + def has_private(self): + """Whether this is an ElGamal private key""" + + if hasattr(self, 'x'): + return 1 + else: + return 0 + + def can_encrypt(self): + return True + + def can_sign(self): + return True + + def publickey(self): + """A matching ElGamal public key. 
+ + Returns: + a new :class:`ElGamalKey` object + """ + return construct((self.p, self.g, self.y)) + + def __eq__(self, other): + if bool(self.has_private()) != bool(other.has_private()): + return False + + result = True + for comp in self._keydata: + result = result and (getattr(self.key, comp, None) == + getattr(other.key, comp, None)) + return result + + def __ne__(self, other): + return not self.__eq__(other) + + def __getstate__(self): + # ElGamal key is not pickable + from pickle import PicklingError + raise PicklingError + + # Methods defined in PyCrypto that we don't support anymore + + def sign(self, M, K): + raise NotImplementedError + + def verify(self, M, signature): + raise NotImplementedError + + def encrypt(self, plaintext, K): + raise NotImplementedError + + def decrypt(self, ciphertext): + raise NotImplementedError + + def blind(self, M, B): + raise NotImplementedError + + def unblind(self, M, B): + raise NotImplementedError + + def size(self): + raise NotImplementedError diff --git a/env/Lib/site-packages/Crypto/PublicKey/ElGamal.pyi b/env/Lib/site-packages/Crypto/PublicKey/ElGamal.pyi new file mode 100644 index 0000000..9048531 --- /dev/null +++ b/env/Lib/site-packages/Crypto/PublicKey/ElGamal.pyi @@ -0,0 +1,18 @@ +from typing import Callable, Union, Tuple, Optional + +__all__ = ['generate', 'construct', 'ElGamalKey'] + +RNG = Callable[[int], bytes] + +def generate(bits: int, randfunc: RNG) -> ElGamalKey: ... +def construct(tup: Union[Tuple[int, int, int], Tuple[int, int, int, int]]) -> ElGamalKey: ... + +class ElGamalKey(object): + def __init__(self, randfunc: Optional[RNG]=None) -> None: ... + def has_private(self) -> bool: ... + def can_encrypt(self) -> bool: ... + def can_sign(self) -> bool: ... + def publickey(self) -> ElGamalKey: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __getstate__(self) -> None: ... 
diff --git a/env/Lib/site-packages/Crypto/PublicKey/RSA.py b/env/Lib/site-packages/Crypto/PublicKey/RSA.py new file mode 100644 index 0000000..fda3b2a --- /dev/null +++ b/env/Lib/site-packages/Crypto/PublicKey/RSA.py @@ -0,0 +1,799 @@ +# -*- coding: utf-8 -*- +# =================================================================== +# +# Copyright (c) 2016, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +__all__ = ['generate', 'construct', 'import_key', + 'RsaKey', 'oid'] + +import binascii +import struct + +from Crypto import Random +from Crypto.Util.py3compat import tobytes, bord, tostr +from Crypto.Util.asn1 import DerSequence + +from Crypto.Math.Numbers import Integer +from Crypto.Math.Primality import (test_probable_prime, + generate_probable_prime, COMPOSITE) + +from Crypto.PublicKey import (_expand_subject_public_key_info, + _create_subject_public_key_info, + _extract_subject_public_key_info) + + +class RsaKey(object): + r"""Class defining an actual RSA key. + Do not instantiate directly. + Use :func:`generate`, :func:`construct` or :func:`import_key` instead. + + :ivar n: RSA modulus + :vartype n: integer + + :ivar e: RSA public exponent + :vartype e: integer + + :ivar d: RSA private exponent + :vartype d: integer + + :ivar p: First factor of the RSA modulus + :vartype p: integer + + :ivar q: Second factor of the RSA modulus + :vartype q: integer + + :ivar u: Chinese remainder component (:math:`p^{-1} \text{mod } q`) + :vartype q: integer + + :undocumented: exportKey, publickey + """ + + def __init__(self, **kwargs): + """Build an RSA key. + + :Keywords: + n : integer + The modulus. + e : integer + The public exponent. + d : integer + The private exponent. Only required for private keys. + p : integer + The first factor of the modulus. Only required for private keys. + q : integer + The second factor of the modulus. Only required for private keys. + u : integer + The CRT coefficient (inverse of p modulo q). Only required for + private keys. 
+ """ + + input_set = set(kwargs.keys()) + public_set = set(('n', 'e')) + private_set = public_set | set(('p', 'q', 'd', 'u')) + if input_set not in (private_set, public_set): + raise ValueError("Some RSA components are missing") + for component, value in kwargs.items(): + setattr(self, "_" + component, value) + if input_set == private_set: + self._dp = self._d % (self._p - 1) # = (e⁻¹) mod (p-1) + self._dq = self._d % (self._q - 1) # = (e⁻¹) mod (q-1) + + @property + def n(self): + return int(self._n) + + @property + def e(self): + return int(self._e) + + @property + def d(self): + if not self.has_private(): + raise AttributeError("No private exponent available for public keys") + return int(self._d) + + @property + def p(self): + if not self.has_private(): + raise AttributeError("No CRT component 'p' available for public keys") + return int(self._p) + + @property + def q(self): + if not self.has_private(): + raise AttributeError("No CRT component 'q' available for public keys") + return int(self._q) + + @property + def u(self): + if not self.has_private(): + raise AttributeError("No CRT component 'u' available for public keys") + return int(self._u) + + def size_in_bits(self): + """Size of the RSA modulus in bits""" + return self._n.size_in_bits() + + def size_in_bytes(self): + """The minimal amount of bytes that can hold the RSA modulus""" + return (self._n.size_in_bits() - 1) // 8 + 1 + + def _encrypt(self, plaintext): + if not 0 <= plaintext < self._n: + raise ValueError("Plaintext too large") + return int(pow(Integer(plaintext), self._e, self._n)) + + def _decrypt(self, ciphertext): + if not 0 <= ciphertext < self._n: + raise ValueError("Ciphertext too large") + if not self.has_private(): + raise TypeError("This is not a private key") + + # Blinded RSA decryption (to prevent timing attacks): + # Step 1: Generate random secret blinding factor r, + # such that 0 < r < n-1 + r = Integer.random_range(min_inclusive=1, max_exclusive=self._n) + # Step 2: Compute c' 
= c * r**e mod n + cp = Integer(ciphertext) * pow(r, self._e, self._n) % self._n + # Step 3: Compute m' = c'**d mod n (normal RSA decryption) + m1 = pow(cp, self._dp, self._p) + m2 = pow(cp, self._dq, self._q) + h = ((m2 - m1) * self._u) % self._q + mp = h * self._p + m1 + # Step 4: Compute m = m**(r-1) mod n + result = (r.inverse(self._n) * mp) % self._n + # Verify no faults occurred + if ciphertext != pow(result, self._e, self._n): + raise ValueError("Fault detected in RSA decryption") + return result + + def has_private(self): + """Whether this is an RSA private key""" + + return hasattr(self, "_d") + + def can_encrypt(self): # legacy + return True + + def can_sign(self): # legacy + return True + + def public_key(self): + """A matching RSA public key. + + Returns: + a new :class:`RsaKey` object + """ + return RsaKey(n=self._n, e=self._e) + + def __eq__(self, other): + if self.has_private() != other.has_private(): + return False + if self.n != other.n or self.e != other.e: + return False + if not self.has_private(): + return True + return (self.d == other.d) + + def __ne__(self, other): + return not (self == other) + + def __getstate__(self): + # RSA key is not pickable + from pickle import PicklingError + raise PicklingError + + def __repr__(self): + if self.has_private(): + extra = ", d=%d, p=%d, q=%d, u=%d" % (int(self._d), int(self._p), + int(self._q), int(self._u)) + else: + extra = "" + return "RsaKey(n=%d, e=%d%s)" % (int(self._n), int(self._e), extra) + + def __str__(self): + if self.has_private(): + key_type = "Private" + else: + key_type = "Public" + return "%s RSA key at 0x%X" % (key_type, id(self)) + + def export_key(self, format='PEM', passphrase=None, pkcs=1, + protection=None, randfunc=None): + """Export this RSA key. + + Args: + format (string): + The format to use for wrapping the key: + + - *'PEM'*. (*Default*) Text encoding, done according to `RFC1421`_/`RFC1423`_. + - *'DER'*. Binary encoding. + - *'OpenSSH'*. 
Textual encoding, done according to OpenSSH specification. + Only suitable for public keys (not private keys). + + passphrase (string): + (*For private keys only*) The pass phrase used for protecting the output. + + pkcs (integer): + (*For private keys only*) The ASN.1 structure to use for + serializing the key. Note that even in case of PEM + encoding, there is an inner ASN.1 DER structure. + + With ``pkcs=1`` (*default*), the private key is encoded in a + simple `PKCS#1`_ structure (``RSAPrivateKey``). + + With ``pkcs=8``, the private key is encoded in a `PKCS#8`_ structure + (``PrivateKeyInfo``). + + .. note:: + This parameter is ignored for a public key. + For DER and PEM, an ASN.1 DER ``SubjectPublicKeyInfo`` + structure is always used. + + protection (string): + (*For private keys only*) + The encryption scheme to use for protecting the private key. + + If ``None`` (default), the behavior depends on :attr:`format`: + + - For *'DER'*, the *PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC* + scheme is used. The following operations are performed: + + 1. A 16 byte Triple DES key is derived from the passphrase + using :func:`Crypto.Protocol.KDF.PBKDF2` with 8 bytes salt, + and 1 000 iterations of :mod:`Crypto.Hash.HMAC`. + 2. The private key is encrypted using CBC. + 3. The encrypted key is encoded according to PKCS#8. + + - For *'PEM'*, the obsolete PEM encryption scheme is used. + It is based on MD5 for key derivation, and Triple DES for encryption. + + Specifying a value for :attr:`protection` is only meaningful for PKCS#8 + (that is, ``pkcs=8``) and only if a pass phrase is present too. + + The supported schemes for PKCS#8 are listed in the + :mod:`Crypto.IO.PKCS8` module (see :attr:`wrap_algo` parameter). + + randfunc (callable): + A function that provides random bytes. Only used for PEM encoding. + The default is :func:`Crypto.Random.get_random_bytes`. 
+ + Returns: + byte string: the encoded key + + Raises: + ValueError:when the format is unknown or when you try to encrypt a private + key with *DER* format and PKCS#1. + + .. warning:: + If you don't provide a pass phrase, the private key will be + exported in the clear! + + .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt + .. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt + .. _`PKCS#1`: http://www.ietf.org/rfc/rfc3447.txt + .. _`PKCS#8`: http://www.ietf.org/rfc/rfc5208.txt + """ + + if passphrase is not None: + passphrase = tobytes(passphrase) + + if randfunc is None: + randfunc = Random.get_random_bytes + + if format == 'OpenSSH': + e_bytes, n_bytes = [x.to_bytes() for x in (self._e, self._n)] + if bord(e_bytes[0]) & 0x80: + e_bytes = b'\x00' + e_bytes + if bord(n_bytes[0]) & 0x80: + n_bytes = b'\x00' + n_bytes + keyparts = [b'ssh-rsa', e_bytes, n_bytes] + keystring = b''.join([struct.pack(">I", len(kp)) + kp for kp in keyparts]) + return b'ssh-rsa ' + binascii.b2a_base64(keystring)[:-1] + + # DER format is always used, even in case of PEM, which simply + # encodes it into BASE64. 
+ if self.has_private(): + binary_key = DerSequence([0, + self.n, + self.e, + self.d, + self.p, + self.q, + self.d % (self.p-1), + self.d % (self.q-1), + Integer(self.q).inverse(self.p) + ]).encode() + if pkcs == 1: + key_type = 'RSA PRIVATE KEY' + if format == 'DER' and passphrase: + raise ValueError("PKCS#1 private key cannot be encrypted") + else: # PKCS#8 + from Crypto.IO import PKCS8 + + if format == 'PEM' and protection is None: + key_type = 'PRIVATE KEY' + binary_key = PKCS8.wrap(binary_key, oid, None) + else: + key_type = 'ENCRYPTED PRIVATE KEY' + if not protection: + protection = 'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC' + binary_key = PKCS8.wrap(binary_key, oid, + passphrase, protection) + passphrase = None + else: + key_type = "PUBLIC KEY" + binary_key = _create_subject_public_key_info(oid, + DerSequence([self.n, + self.e]) + ) + + if format == 'DER': + return binary_key + if format == 'PEM': + from Crypto.IO import PEM + + pem_str = PEM.encode(binary_key, key_type, passphrase, randfunc) + return tobytes(pem_str) + + raise ValueError("Unknown key format '%s'. Cannot export the RSA key." % format) + + # Backward compatibility + exportKey = export_key + publickey = public_key + + # Methods defined in PyCrypto that we don't support anymore + def sign(self, M, K): + raise NotImplementedError("Use module Crypto.Signature.pkcs1_15 instead") + + def verify(self, M, signature): + raise NotImplementedError("Use module Crypto.Signature.pkcs1_15 instead") + + def encrypt(self, plaintext, K): + raise NotImplementedError("Use module Crypto.Cipher.PKCS1_OAEP instead") + + def decrypt(self, ciphertext): + raise NotImplementedError("Use module Crypto.Cipher.PKCS1_OAEP instead") + + def blind(self, M, B): + raise NotImplementedError + + def unblind(self, M, B): + raise NotImplementedError + + def size(self): + raise NotImplementedError + + +def generate(bits, randfunc=None, e=65537): + """Create a new RSA key pair. 
+ + The algorithm closely follows NIST `FIPS 186-4`_ in its + sections B.3.1 and B.3.3. The modulus is the product of + two non-strong probable primes. + Each prime passes a suitable number of Miller-Rabin tests + with random bases and a single Lucas test. + + Args: + bits (integer): + Key length, or size (in bits) of the RSA modulus. + It must be at least 1024, but **2048 is recommended.** + The FIPS standard only defines 1024, 2048 and 3072. + randfunc (callable): + Function that returns random bytes. + The default is :func:`Crypto.Random.get_random_bytes`. + e (integer): + Public RSA exponent. It must be an odd positive integer. + It is typically a small number with very few ones in its + binary representation. + The FIPS standard requires the public exponent to be + at least 65537 (the default). + + Returns: an RSA key object (:class:`RsaKey`, with private key). + + .. _FIPS 186-4: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf + """ + + if bits < 1024: + raise ValueError("RSA modulus length must be >= 1024") + if e % 2 == 0 or e < 3: + raise ValueError("RSA public exponent must be a positive, odd integer larger than 2.") + + if randfunc is None: + randfunc = Random.get_random_bytes + + d = n = Integer(1) + e = Integer(e) + + while n.size_in_bits() != bits and d < (1 << (bits // 2)): + # Generate the prime factors of n: p and q. + # By construciton, their product is always + # 2^{bits-1} < p*q < 2^bits. 
+ size_q = bits // 2 + size_p = bits - size_q + + min_p = min_q = (Integer(1) << (2 * size_q - 1)).sqrt() + if size_q != size_p: + min_p = (Integer(1) << (2 * size_p - 1)).sqrt() + + def filter_p(candidate): + return candidate > min_p and (candidate - 1).gcd(e) == 1 + + p = generate_probable_prime(exact_bits=size_p, + randfunc=randfunc, + prime_filter=filter_p) + + min_distance = Integer(1) << (bits // 2 - 100) + + def filter_q(candidate): + return (candidate > min_q and + (candidate - 1).gcd(e) == 1 and + abs(candidate - p) > min_distance) + + q = generate_probable_prime(exact_bits=size_q, + randfunc=randfunc, + prime_filter=filter_q) + + n = p * q + lcm = (p - 1).lcm(q - 1) + d = e.inverse(lcm) + + if p > q: + p, q = q, p + + u = p.inverse(q) + + return RsaKey(n=n, e=e, d=d, p=p, q=q, u=u) + + +def construct(rsa_components, consistency_check=True): + r"""Construct an RSA key from a tuple of valid RSA components. + + The modulus **n** must be the product of two primes. + The public exponent **e** must be odd and larger than 1. + + In case of a private key, the following equations must apply: + + .. math:: + + \begin{align} + p*q &= n \\ + e*d &\equiv 1 ( \text{mod lcm} [(p-1)(q-1)]) \\ + p*u &\equiv 1 ( \text{mod } q) + \end{align} + + Args: + rsa_components (tuple): + A tuple of integers, with at least 2 and no + more than 6 items. The items come in the following order: + + 1. RSA modulus *n*. + 2. Public exponent *e*. + 3. Private exponent *d*. + Only required if the key is private. + 4. First factor of *n* (*p*). + Optional, but the other factor *q* must also be present. + 5. Second factor of *n* (*q*). Optional. + 6. CRT coefficient *q*, that is :math:`p^{-1} \text{mod }q`. Optional. + + consistency_check (boolean): + If ``True``, the library will verify that the provided components + fulfil the main RSA properties. + + Raises: + ValueError: when the key being imported fails the most basic RSA validity checks. + + Returns: An RSA key object (:class:`RsaKey`). 
+ """ + + class InputComps(object): + pass + + input_comps = InputComps() + for (comp, value) in zip(('n', 'e', 'd', 'p', 'q', 'u'), rsa_components): + setattr(input_comps, comp, Integer(value)) + + n = input_comps.n + e = input_comps.e + if not hasattr(input_comps, 'd'): + key = RsaKey(n=n, e=e) + else: + d = input_comps.d + if hasattr(input_comps, 'q'): + p = input_comps.p + q = input_comps.q + else: + # Compute factors p and q from the private exponent d. + # We assume that n has no more than two factors. + # See 8.2.2(i) in Handbook of Applied Cryptography. + ktot = d * e - 1 + # The quantity d*e-1 is a multiple of phi(n), even, + # and can be represented as t*2^s. + t = ktot + while t % 2 == 0: + t //= 2 + # Cycle through all multiplicative inverses in Zn. + # The algorithm is non-deterministic, but there is a 50% chance + # any candidate a leads to successful factoring. + # See "Digitalized Signatures and Public Key Functions as Intractable + # as Factorization", M. Rabin, 1979 + spotted = False + a = Integer(2) + while not spotted and a < 100: + k = Integer(t) + # Cycle through all values a^{t*2^i}=a^k + while k < ktot: + cand = pow(a, k, n) + # Check if a^k is a non-trivial root of unity (mod n) + if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1: + # We have found a number such that (cand-1)(cand+1)=0 (mod n). + # Either of the terms divides n. + p = Integer(n).gcd(cand + 1) + spotted = True + break + k *= 2 + # This value was not any good... let's try another! + a += 2 + if not spotted: + raise ValueError("Unable to compute factors p and q from exponent d.") + # Found ! 
+ assert ((n % p) == 0) + q = n // p + + if hasattr(input_comps, 'u'): + u = input_comps.u + else: + u = p.inverse(q) + + # Build key object + key = RsaKey(n=n, e=e, d=d, p=p, q=q, u=u) + + # Verify consistency of the key + if consistency_check: + + # Modulus and public exponent must be coprime + if e <= 1 or e >= n: + raise ValueError("Invalid RSA public exponent") + if Integer(n).gcd(e) != 1: + raise ValueError("RSA public exponent is not coprime to modulus") + + # For RSA, modulus must be odd + if not n & 1: + raise ValueError("RSA modulus is not odd") + + if key.has_private(): + # Modulus and private exponent must be coprime + if d <= 1 or d >= n: + raise ValueError("Invalid RSA private exponent") + if Integer(n).gcd(d) != 1: + raise ValueError("RSA private exponent is not coprime to modulus") + # Modulus must be product of 2 primes + if p * q != n: + raise ValueError("RSA factors do not match modulus") + if test_probable_prime(p) == COMPOSITE: + raise ValueError("RSA factor p is composite") + if test_probable_prime(q) == COMPOSITE: + raise ValueError("RSA factor q is composite") + # See Carmichael theorem + phi = (p - 1) * (q - 1) + lcm = phi // (p - 1).gcd(q - 1) + if (e * d % int(lcm)) != 1: + raise ValueError("Invalid RSA condition") + if hasattr(key, 'u'): + # CRT coefficient + if u <= 1 or u >= q: + raise ValueError("Invalid RSA component u") + if (p * u % q) != 1: + raise ValueError("Invalid RSA component u with p") + + return key + + +def _import_pkcs1_private(encoded, *kwargs): + # RSAPrivateKey ::= SEQUENCE { + # version Version, + # modulus INTEGER, -- n + # publicExponent INTEGER, -- e + # privateExponent INTEGER, -- d + # prime1 INTEGER, -- p + # prime2 INTEGER, -- q + # exponent1 INTEGER, -- d mod (p-1) + # exponent2 INTEGER, -- d mod (q-1) + # coefficient INTEGER -- (inverse of q) mod p + # } + # + # Version ::= INTEGER + der = DerSequence().decode(encoded, nr_elements=9, only_ints_expected=True) + if der[0] != 0: + raise ValueError("No PKCS#1 
encoding of an RSA private key") + return construct(der[1:6] + [Integer(der[4]).inverse(der[5])]) + + +def _import_pkcs1_public(encoded, *kwargs): + # RSAPublicKey ::= SEQUENCE { + # modulus INTEGER, -- n + # publicExponent INTEGER -- e + # } + der = DerSequence().decode(encoded, nr_elements=2, only_ints_expected=True) + return construct(der) + + +def _import_subjectPublicKeyInfo(encoded, *kwargs): + + algoid, encoded_key, params = _expand_subject_public_key_info(encoded) + if algoid != oid or params is not None: + raise ValueError("No RSA subjectPublicKeyInfo") + return _import_pkcs1_public(encoded_key) + + +def _import_x509_cert(encoded, *kwargs): + + sp_info = _extract_subject_public_key_info(encoded) + return _import_subjectPublicKeyInfo(sp_info) + + +def _import_pkcs8(encoded, passphrase): + from Crypto.IO import PKCS8 + + k = PKCS8.unwrap(encoded, passphrase) + if k[0] != oid: + raise ValueError("No PKCS#8 encoded RSA key") + return _import_keyDER(k[1], passphrase) + + +def _import_keyDER(extern_key, passphrase): + """Import an RSA key (public or private half), encoded in DER form.""" + + decodings = (_import_pkcs1_private, + _import_pkcs1_public, + _import_subjectPublicKeyInfo, + _import_x509_cert, + _import_pkcs8) + + for decoding in decodings: + try: + return decoding(extern_key, passphrase) + except ValueError: + pass + + raise ValueError("RSA key format is not supported") + + +def _import_openssh_private_rsa(data, password): + + from ._openssh import (import_openssh_private_generic, + read_bytes, read_string, check_padding) + + ssh_name, decrypted = import_openssh_private_generic(data, password) + + if ssh_name != "ssh-rsa": + raise ValueError("This SSH key is not RSA") + + n, decrypted = read_bytes(decrypted) + e, decrypted = read_bytes(decrypted) + d, decrypted = read_bytes(decrypted) + iqmp, decrypted = read_bytes(decrypted) + p, decrypted = read_bytes(decrypted) + q, decrypted = read_bytes(decrypted) + + _, padded = read_string(decrypted) # Comment + 
check_padding(padded) + + build = [Integer.from_bytes(x) for x in (n, e, d, q, p, iqmp)] + return construct(build) + + +def import_key(extern_key, passphrase=None): + """Import an RSA key (public or private). + + Args: + extern_key (string or byte string): + The RSA key to import. + + The following formats are supported for an RSA **public key**: + + - X.509 certificate (binary or PEM format) + - X.509 ``subjectPublicKeyInfo`` DER SEQUENCE (binary or PEM + encoding) + - `PKCS#1`_ ``RSAPublicKey`` DER SEQUENCE (binary or PEM encoding) + - An OpenSSH line (e.g. the content of ``~/.ssh/id_ecdsa``, ASCII) + + The following formats are supported for an RSA **private key**: + + - PKCS#1 ``RSAPrivateKey`` DER SEQUENCE (binary or PEM encoding) + - `PKCS#8`_ ``PrivateKeyInfo`` or ``EncryptedPrivateKeyInfo`` + DER SEQUENCE (binary or PEM encoding) + - OpenSSH (text format, introduced in `OpenSSH 6.5`_) + + For details about the PEM encoding, see `RFC1421`_/`RFC1423`_. + + passphrase (string or byte string): + For private keys only, the pass phrase that encrypts the key. + + Returns: An RSA key object (:class:`RsaKey`). + + Raises: + ValueError/IndexError/TypeError: + When the given key cannot be parsed (possibly because the pass + phrase is wrong). + + .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt + .. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt + .. _`PKCS#1`: http://www.ietf.org/rfc/rfc3447.txt + .. _`PKCS#8`: http://www.ietf.org/rfc/rfc5208.txt + .. 
_`OpenSSH 6.5`: https://flak.tedunangst.com/post/new-openssh-key-format-and-bcrypt-pbkdf + """ + + from Crypto.IO import PEM + + extern_key = tobytes(extern_key) + if passphrase is not None: + passphrase = tobytes(passphrase) + + if extern_key.startswith(b'-----BEGIN OPENSSH PRIVATE KEY'): + text_encoded = tostr(extern_key) + openssh_encoded, marker, enc_flag = PEM.decode(text_encoded, passphrase) + result = _import_openssh_private_rsa(openssh_encoded, passphrase) + return result + + if extern_key.startswith(b'-----'): + # This is probably a PEM encoded key. + (der, marker, enc_flag) = PEM.decode(tostr(extern_key), passphrase) + if enc_flag: + passphrase = None + return _import_keyDER(der, passphrase) + + if extern_key.startswith(b'ssh-rsa '): + # This is probably an OpenSSH key + keystring = binascii.a2b_base64(extern_key.split(b' ')[1]) + keyparts = [] + while len(keystring) > 4: + length = struct.unpack(">I", keystring[:4])[0] + keyparts.append(keystring[4:4 + length]) + keystring = keystring[4 + length:] + e = Integer.from_bytes(keyparts[1]) + n = Integer.from_bytes(keyparts[2]) + return construct([n, e]) + + if len(extern_key) > 0 and bord(extern_key[0]) == 0x30: + # This is probably a DER encoded key + return _import_keyDER(extern_key, passphrase) + + raise ValueError("RSA key format is not supported") + + +# Backward compatibility +importKey = import_key + +#: `Object ID`_ for the RSA encryption algorithm. This OID often indicates +#: a generic RSA key, even when such key will be actually used for digital +#: signatures. +#: +#: .. 
_`Object ID`: http://www.alvestrand.no/objectid/1.2.840.113549.1.1.1.html +oid = "1.2.840.113549.1.1.1" diff --git a/env/Lib/site-packages/Crypto/PublicKey/RSA.pyi b/env/Lib/site-packages/Crypto/PublicKey/RSA.pyi new file mode 100644 index 0000000..d436acf --- /dev/null +++ b/env/Lib/site-packages/Crypto/PublicKey/RSA.pyi @@ -0,0 +1,51 @@ +from typing import Callable, Union, Tuple, Optional + +__all__ = ['generate', 'construct', 'import_key', + 'RsaKey', 'oid'] + +RNG = Callable[[int], bytes] + +class RsaKey(object): + def __init__(self, **kwargs: int) -> None: ... + @property + def n(self) -> int: ... + @property + def e(self) -> int: ... + @property + def d(self) -> int: ... + @property + def p(self) -> int: ... + @property + def q(self) -> int: ... + @property + def u(self) -> int: ... + def size_in_bits(self) -> int: ... + def size_in_bytes(self) -> int: ... + def has_private(self) -> bool: ... + def can_encrypt(self) -> bool: ... # legacy + def can_sign(self) -> bool:... # legacy + def public_key(self) -> RsaKey: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __getstate__(self) -> None: ... + def __repr__(self) -> str: ... + def __str__(self) -> str: ... + def export_key(self, format: Optional[str]="PEM", passphrase: Optional[str]=None, pkcs: Optional[int]=1, + protection: Optional[str]=None, randfunc: Optional[RNG]=None) -> bytes: ... + + # Backward compatibility + exportKey = export_key + publickey = public_key + +def generate(bits: int, randfunc: Optional[RNG]=None, e: Optional[int]=65537) -> RsaKey: ... +def construct(rsa_components: Union[Tuple[int, int], # n, e + Tuple[int, int, int], # n, e, d + Tuple[int, int, int, int, int], # n, e, d, p, q + Tuple[int, int, int, int, int, int]], # n, e, d, p, q, crt_q + consistency_check: Optional[bool]=True) -> RsaKey: ... +def import_key(extern_key: Union[str, bytes], passphrase: Optional[str]=None) -> RsaKey: ... 
+ +# Backward compatibility +importKey = import_key + +oid: str diff --git a/env/Lib/site-packages/Crypto/PublicKey/__init__.py b/env/Lib/site-packages/Crypto/PublicKey/__init__.py new file mode 100644 index 0000000..c9ff59b --- /dev/null +++ b/env/Lib/site-packages/Crypto/PublicKey/__init__.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.asn1 import (DerSequence, DerInteger, DerBitString, + DerObjectId, DerNull) + + +def _expand_subject_public_key_info(encoded): + """Parse a SubjectPublicKeyInfo structure. 
+ + It returns a triple with: + * OID (string) + * encoded public key (bytes) + * Algorithm parameters (bytes or None) + """ + + # + # SubjectPublicKeyInfo ::= SEQUENCE { + # algorithm AlgorithmIdentifier, + # subjectPublicKey BIT STRING + # } + # + # AlgorithmIdentifier ::= SEQUENCE { + # algorithm OBJECT IDENTIFIER, + # parameters ANY DEFINED BY algorithm OPTIONAL + # } + # + + spki = DerSequence().decode(encoded, nr_elements=2) + algo = DerSequence().decode(spki[0], nr_elements=(1,2)) + algo_oid = DerObjectId().decode(algo[0]) + spk = DerBitString().decode(spki[1]).value + + if len(algo) == 1: + algo_params = None + else: + try: + DerNull().decode(algo[1]) + algo_params = None + except: + algo_params = algo[1] + + return algo_oid.value, spk, algo_params + + +def _create_subject_public_key_info(algo_oid, secret_key, params=None): + + if params is None: + params = DerNull() + + spki = DerSequence([ + DerSequence([ + DerObjectId(algo_oid), + params]), + DerBitString(secret_key) + ]) + return spki.encode() + + +def _extract_subject_public_key_info(x509_certificate): + """Extract subjectPublicKeyInfo from a DER X.509 certificate.""" + + certificate = DerSequence().decode(x509_certificate, nr_elements=3) + tbs_certificate = DerSequence().decode(certificate[0], + nr_elements=range(6, 11)) + + index = 5 + try: + tbs_certificate[0] + 1 + # Version not present + version = 1 + except TypeError: + version = DerInteger(explicit=0).decode(tbs_certificate[0]).value + if version not in (2, 3): + raise ValueError("Incorrect X.509 certificate version") + index = 6 + + return tbs_certificate[index] diff --git a/env/Lib/site-packages/Crypto/PublicKey/__init__.pyi b/env/Lib/site-packages/Crypto/PublicKey/__init__.pyi new file mode 100644 index 0000000..e69de29 diff --git a/env/Lib/site-packages/Crypto/PublicKey/_ec_ws.pyd b/env/Lib/site-packages/Crypto/PublicKey/_ec_ws.pyd new file mode 100644 index 0000000000000000000000000000000000000000..7d2b3d6530e94cfc194ef3be752733a66f96c2c1 
GIT binary patch literal 747520 zcmd>n34B!5_5UQ9Kv*YMMxwTLlvo3)HX5zqh|a(azJVD?*fG7$nBnnkDG>I}^ zM`?>yTWzVW{aL$cTZ;j$CIm>!0NAu3y zd)|Hb-FxoY?m72GE?VNqayT4L{BN2LM=h@W=aavO|C5v9aP&Q4Wnaf*JvN?Ln-Si4 z;`o_Y&GVGaopa^f%dYiYaoOzIbILuJPxs8NnC-c0wkI%Voafp()25%+r%%s(tLdL# z{p&Nf_Im#^`~O|@F28I$eqZyWeWlH~t}WeHx<%^uUG}Mze^k0tt{<1aAlDD1e&m8f zrO)B_$OZ40Zj)=@%k=d_`91ckD`xT;oxM>>(BYVNOE*X5pH3{barQY*^7P8=`%TAq zyy!7KrFeCJTyh1hM5%@|9gc2Nl)O3~?WG45<(ZCBpz53KOZ(rtr0&vQjwQe@xvZCC z8rxjb%W;s`%3hB7y$M+Vmi2NJ0es!)<@lyu*3mW8%Q09wYE_nHI0{mKx4GxE^6B%- zQ8s^^)n&dri6iBo$KfbFZSJ(o$}hw7KknghpiReqTqon2^3Mk{I!&WFCbREl7?T`a zN8_6E&*yN|omQq1^j=n&c*bdKk+T3|_uRxWI3C0G4i;3$9Z~8TJfXFd&V5|gh zMmxM=tp5M`pKsiRiT?5aNk-K>9z#8rUx~qps6B?dB!7NL#lz~55gUYDtraBQL( z%INNo-H>6Z2+Gz+j@}Sd4UwbkLRGJR7BW|bz0bRDIUNM#I>G0dF~fC2H6G@0gwz`2 z0J_tJCPP{4B5FhAXxz0foP|49p%LT2w9g$zR=pH&NEC-MoF?wor5eX-qbb72qM5pI z*7~p)9cf^FjeNnvM zbpppT5*wS7KBi4!wKjb8rtr~g^}7QI9k~1;`q8FzR7T*KgCRBYpe_$pwPsZOIjrzl zv>Q?~83#GxrJ=0)P*td^v_%Z#AHC@!U5I1xhojR(t%H`Y)L}<>H2C@Gnxc&Q(nYItf%gkyIT~ zyNkC*RDDPt3f1p+hSmG=1z;{ljp?eM3PdA%Nxskj1X-7X0W?DD#jwzD;mhIJxiXd! 
z^=zc@vq;t#GnN#u3#kvo-dE<$3xSMcBMutU=H_r#U8Hcsj3ptJIr2b$R)l@dsyDp# z>~AQ;cQ7=>cMxwOV^GzSQL){qSyz7Am{{-rLhuQC&*)KH7g9$>Vxb(vyCtmZT|ccO z!FZeJz7{eYLRG6UWUZ(>;QH|#(=@{zh2mYs`>|xOK10g+QACZ-iNvnUH4f|!$G({r zQX}@E1*5*@{IGXdNbQWozBxP+ow{#y%o&QP<`74}zV-ZY*4Bu(*{Is)C|+k|)rGw~ zTtBTJ8FLK@wRrN}dkxIFVbts*pYWA~HGIb5i@D#uSXjgIOUN1?y#%a* z^E1_d$)z$ooFV>8u5ZKJXA$oCpyC@weSPkX1NEl)K}F^V4MzRC+?KncnE)HJSWvUI zd`Q*X8RZ51b4=AI8Rb1|Wy-2n7c|6gg7|ICj*r2`wY(0`ENBQRyvpi=78g1esi||_ zg_*$5JZr{tIM=ykRvt|zhWY@cQM@j`7mZZcxvHCRW4t2U;ka$b;y}L7_jpFt(0@{b zxN6p*sxkY|_`xp3>wr{US3V}b88R^b38ZEGL=<5-x^VM+NE|S^qYgZp@yHMk&&KXPCEbh;QuA1h9?&d!FW*=@Ty0pP;ie~4bB7QMmQ%1&9HT3t>(dmk_<8$z4 z@t%DD42(q8$#dT9kV1A68iIZf4d?B?@eS#^f*;?Tm;$8|p>E`ambuD}G2C-pg)xnJGgNDj} z@*;|(x{98Sf&3!QPHR8YJqCqpM-(L>gzCSc6r-k|$B@OBeQuh`s=HIG`hfa$Rh5}h z@qILU25u|f%G=(@kW=d2RUcnV7Ob*|>j#(jwQ)@Mjtes!HD-Bsm6=uHOFZ$klqZ%C z#GO5?F87E{&cF;5wf3`dP4``BLieGyTk$T|ss@qGm<^wQ>ZMb;tYqGVo<)p@)Is%v zq29+LsmPB15`z#{jo`i{kOUE6Szg+5C%TUNedDzB-&wrQboYW1XQ&OP`;$o-Si?^F z*|f5U)4_G+x!5pIK8PA48qNnJdS~zoD#UR&=DVh@y!#50;>LlX>EBKTkvxoFh6;e_ zqS?1$#*7%@syeK`?Bn=bWzXX!n*I6|{=zx~9LKGV*-xQ_Pi@Su1;nXr%wAe2%L!mYdCW~Py!t}AQj6Vv% zOEGO8)BON$GaIYS%p3p8$-8u_2rQ|bV5oQW6l9k}=QW-<#%R z;VF?k)BSttR4~to26IjK68uti#_5D_)Nm}~(Z8qhchG~|av`BSMls;xV6IVIZ>+#8 z;~!cdfFH%lM;Wd_J%<1N37m>cz$6Ydc{n5>>8IHFO+kz){i7bRUlg z36}v*LBlf`*99@i#S)X|=<; z{{kzP|lO7$s5YZY0WmgP38TH>-HJ0zxFv34OZ+uL-YJcGg=RDJ@DRH`rhvG z<={uH$HvFuH`wOSb+Pk|y4LRTJMrVu40#~$0rMYneEDH{x^k zkM8lS0JOf7a({dhO5$Vj$13FgHJiw&jfGg3$;9rviN%4 zdUpJALB#}F@A+rzcgybp==JYfp8X!yV$HfIxYk^OhQ-;h;CK8wu)+93pjji(FJ7j9 z^uTyc2iW@dZDDExIoU^X0IHhI4_!BBPXPjGaRx5@$t!-&!DVgs5d6)M5l0tR zpb^Mm{1G~Uc&x9DT;y=f7?OR|Yo=K--?*Yr004w<1}|Cb7Y(89UvX(|FL00IwNj5lIvi??E2dR6bL=x*HVo&nCM zhf?EI4!sy9j@x#YJFB-=^p5Yx4Unhr;bOlqX)Kmq1GI*!lcy8AL1=D*5&KiV?^C?u zqIamEsGXWq&%(WUqEl_yIB*4EM|=$y9=aWi?DN+)Q+Qv5V*WJUFaLlvcz+ydM(nN~ zsDx+$Gc?`Lum(F9cqnInCQa8gu@cO(#E91LG3cy2#3J9xTqfICRlrm_K!Xy^l=UR#Z3FI4m z=xKZ?p5_DJ0to8sjgilZ8CmhCMU3B<-;A*`7Om!xO#wm6Sjwt3V&A&^R7_87=n`C7 
zzX5HF0{|AdkF8cUKqr4?Zi`;2T%UR#?0aYFd#AOz_pwgyT?GQ>vy*Hix45o&o$G;s zs)v;wP#gS<_RTJzevN<8`xi`~T@g@Q)xP3&{@C!0V6P_E1O7!HTs3=oDC*1)_F9j2 zoazupJ|{Xpzjpr}A8% zZ>-3G@a6a#uHV+r>laYjxzO(YRZSUzXm&Qh*6t|#3`%@0c)TMJGd}h1yYV~hE1n)u z)LNYOvt27+>oKoquvb0qU58s5v+o?Eg?k{{=XQXum2dTPRsDnwMf+Tjf+xUE9no)< z>AK_REda(h=gmX&8O=uhs~)3Qvw^qrPFy?x=vBjh6CvpS!!FPgS1-@KyPu-+C7p z&TIjd;c{rwRjYHwq!G~vIw`IjlD!*95FKQLiGSwFTSr6gpWJT@{~jZM^W^Vb`CBc2 zN6X(5`8!tr-Y9=3%HPTO`)Ec3p3#uh)o4DVtDzjz9mf+4)r%#d#t-Dz24N!*!yE83 z(Hqh;r~p16n#U8gh5C@X7uI9~EzSaVx!aCBfo%Ih8)nBc>CqrT(pM!Q%bltn*xt zY!24E?y4pO4y)hg^Xh#`Jt>2x)?ki79&wI$k@i0rnEy~#zkE)80eF>b`8vOAWjx1U zAI}N89$D+pg0l%zzs~i@hENs^!t?kxxIGe^mY?G{*9Kx^%m7>r%Q4{|L;WeAVjzG= zbDBsM`Gu`!W2kCPrV-VZd?D|R`30^YpASh!Lh;cBRRZJiRWS8hdXf_Z(w1JdoUK}r zgI?^c$ieakja}TJLcP&>x(AAC)>U`{G5<`cDds5hf43)0@5a_q8E`g?0^V@^f4o>3 zVKpvy5Z<}TuMVyR`8d=nP+3p^widtIux+>56d2GH^fvfi!#@oaz7i;G3`B?buE?tz zo?|u+@127i$_qp0`ao=0_i%JvZm?!o`SE_&@=V_Ss@iO>AJDw*)j+TH!NLaKNmIVa zkA6y@+rFCow)9WS*O3=c{@!7=&i@3T6$DXrGyS>n!>R>2>UnixuHSpmP(RE=2b)zx zpm3F6H3g&o-T~M0enrCS{M+7E>-C#77k*Mc5Iuv)LA|rcH)ykeTig#pwAml^=Wcs7 zsG0-4w&F#D(F=R~RS&;fMIH)g9=n6S=lP?doHTa%4CEMGP8%_iW+-b_^r+&k0pNrV ze9-8Z@rnAKA*{JjR-J)o7^*(~_GfrAUd_>%0mTAtt#~Hx09vCzM+&TF!cl)-n>+RJ zh1H7_#(^)2K~@U;^fhd-7{zc(1Tix3gU!o9qdCR<7YBDm)V{?*Xz}>3esN?c%5xV7 zUs@b|5x?;xZ*lOI#le^L56|M@p2fl4`bWXy;A@M6uj(H~iz7SWn;X11_{QSk>x(0A zEsneyt$;l_ zqISQveRX4gZbZS&4#TPhF8dcikp3*A_OiLtXHUO!!8Jzl4s1CXwH4Fnl}*2P&b)AO zvr$_$XWHz`=FXWGE^abv@usrP70KEX?p24oGeh{kD|u$dhoJ#+Uw} zx42C@Xg#LG_N8YHX>S6mm2P#2Z}%tV%>^G`bE6nt-4!aXx4XaS{g?P!#dUZ`Jb`Zp z$tnG^I@-1sB65)1j>4DiWjp+kS{ErsN9+6WTfv4NMs7-Fh{iK*RL zT!+aP-WHBt3jPLiw20luJqRDXaaafyJz{=(8+=pY!^qdF|o#VQ@E>hSOiS|Ax&}#>*40CNH<{UUC>O9F%!?Eo8Wrnhv!`?NcqWz>Kphh{d zc>o7T$lDJdo#z}u@B4FqW*46ApW&LfBZq}E z4A)KT{obZ|#{>%NA$qZ2XY_)91ba!`GWrm%1q_|vwY0u9D+qq3f_dB(W6!w%hn|&R zpVF&R=~W=LS4+O?{$K#4JASoZ)%$zZmme!*>91KQdY9i7q;JR?Qp`H$Fh2Ps^i>nK zTd-3ITVsM?XBw((GRYTzpv;BU%p4NA5mpQF(zA0SHSfBrXVE1<;Bo*$`WH$GRz-;b 
zn5qi^s9v0ZHz16otNK+eGyxtVa0t5oNvH~_E1)S1hb=U(z=&4oKZGF-Rejcj+gp3t zTFkw7I2FJ?&*K6=us;L!Uu3ZUJzf8MU4N(4{}uHS^%0oJv{3zQ(o;%`x_@i3;Mbu`I#w-e{?eW$=Tmd6|Q2q@=EzMts@-VCh_AU&C zs(#Pe&^3A(y_z^!MMktIFiUWUVJw>AGx35cC`NQ7xYKAD3nNO>i=j%5E=BQVX^f5b z0uvsYhhogch|=_84~nsPM$AkvE8M2}H9pOvBdSI-XjY7R%o z4mQ*+Qp1;s4)bBS-XwECTieOFJn=(4$r~OM1EBRrEV_tl469M-@pD9E>zFo78B8x> zEAjr=jcgs;70j11GaRi2UBUdYJLf@L9APyF19Sc4h`I^aYyWR(@p8Y=;wW59ckujn zw0NAZ&yxC+QU7IV@gKW8qQy&l{r^sj*COWa%h94b)Kyyi0laKoqs1$;(`XT^@%qUY zUxWhif4~>FpC`2VG%lul#kuWh@eW;oz0}`}`Y%I^17Gfl7Pnz0z)gH$ImZ8=d~pw` z;LFjXxxcHlxaM5!CnW>WCnR!Hu3uki3zCn!W2(P=jIXG$WudiuMRSl5VG&P@%PbBo@Kn zIl0v58K^*`#jhHK2&4GzuR7$Mdr1&J%t^caRlA|oZ=FH?Iisr1iA&EJuJ@X7g%_%- zo;v+b{J`{7t-_D@nozLCkA1;S0JcHPUuA3g%`-x-o0|RJ`gx}qge#<`(fN&OY02f^4F|-f(~044!cZu-|ury$H43D)DR*aJ~>Ou5Dl?ULmt!priKbk z_lp`Tf`3*+gH3mnhJ2>`DGeE>`!NlbnC^!)G}&}769`u4-2%W=;n>nlv@94q&OwY6 z+uoG@eHi@W2RtNXwF@sG6S9?n$_y5-3#$ur!U|SrMp%u7!nmuvFoKx2?_h4rPxQkh zJH&8amnZRqCcKOYAiNkcd}DL`Uc))}Z3A00m~7blTg%VHBg((W`vwKomLTstF(sZ( zW%$iiMl{o4lqaYt=q+(_S8@_G)lu*>Y%MQl1E;ZpfpYhN)cqScCj2HG-zoQ_11&Va z#dAx~hJXq4+xB$m@$ikaqKV;+K(y8#z07EZaN#{nKxImVf$XKM^k9V422u14g<1RV znc&sfHw??ZNkulC%dIzrRCoF-w6~<#q(N8^iOtRlMY}=WDL(;jkn-cyXlHB^DCO;9 zwh+Vv>nbKZ0dWB{DgrLQisfoFD5zFLprF49guRpp0l)(iJr@r~&?F<{QkctH6n9Ltjes&U9o&g+h&z0UE2Z#pr40q+QmUnn*klLsMKhDD5? 
zS}vaR@K|`3*;iPs^m(f>us`eA>Inh0nXUHMQxb|gB&ZZq8t{(RO=3FHfhIBa=}Q8sRZ<6M9WG$cNOZ(`?}o(6{e-jr+9&iLmWt4_b^ zrbEWck{MfmecjTh78^r?4tPJXIEM5G`zqc#0G?#us<{u`Qg-fH#*hngT!Ag9T=C0c zSJ%}4-6%GOL~@9?ZUEfRmv#U1`57xp)&MWp6<80vl`lVJK5$C?x4tljOvp{P_nnWX z-aB{i-N4JU+AFy5t@o~+<{OJc6I0R3%~o=rMF#z zTZ#g%z$(6Qq{vvgvBl|pyxSc$(ucr+I-}NE=l#X}s|OAN=3u`o&_v7$gQca{F*u8E z_T_=7(8eCrEL!MWi{VFYV8HH(nh1wm9hjo4`u8ZMG%nsM!Z?PXBD6}wYIspl?etf@ z25Gzt0(r@Ye%Bmlb8J{9f!9RK!gr?>tdjb=q(I&_cVlM)dCR<0!s_B8g!(*( z2RZ_U>-}oWNaZ|gWU=#DOCTEvKd!*~0Ij0it=U>A*Q|C`w_xWsqWVVQk`c$QdfreQ zL+~O-ut!g~gu%Fi`Q&i{e4j6!Imh3gGptiwb?mF?H9kx8ujO)&H)@1 zV`afV>OwOrnui)gx?AKnfupAB?N5Jr?1rZwdK$!#ptr%Bwl8~i{%f8zdMkOf>a>Y( z{aFFernlLVfmhscl=*{=gRYSg$S@{z1 zYV*FaL#p4;yadCwNTwLs5*&xv<6#AbV(jtB+nsdI4Fzt z59t#|vqu)|6&*~N&++dtUp0;8=cP^6b0PK2k(cUaLu9I6&Y|$R5lb1~z-4$_5yA){ zaDZcpTsIoE^B8r|J1#1uzM7OlzXB$l^w1#4A>wKe6oTPSvk5WGjl6>lAgpFjmS$%_ z7>P$`3IOfJE~I>-7yBS6PJlH8XdDkF(r`GjM?s0WLY$#xbT)HNjH-jFp6q-HN9)rE z7t`GxU4#Lh>dAibyJ8@(!NqjHHHfc(h-Uh*)t*rO>&{5l8wS!xLa~0>8ef`U$&SaI z2rsIZ7!mqpQ(W6Ko}tAYPIGNP8_zw!w+;HBpMg2*MrbihcT#prV&~+y(5=4 zswblwE^YXb6TWNgOGQdi+}5RyVx%ht+q$$-T!!MdE^QRgM{!%1Hl{A3YUH(5&~j<- zhD*CH936&DtW_ahDLsN$I|N@f*gs|i4r@c{L9)caG>8E=4Pu(k*b+X8-qv>_9YMDcNy!+K}C!2MJmI92dl|7qlb0>AHTj)R&?D z@MK3yISkpox2_|yyBA}P8BLHK8zI?M;+nQ8^7SXXUt!|AMs}-T?JC*b*3wn7TZU)` z2t8OQvnNLwWwrqPOIt|*Y=7XnfVCGeCjmVRjt3zFmNx=r36?i2$?{|}(^=l>1wwnr z;exoj?~wLBPi1*6r%3%)Tuk>zCtI`^K`JY@vFqP(hO^!!%QIsA)__v552z!JLRrs+ zveu9Z3g5CAAQo=e`&MFaZ+c^UN_;Nv`T)&<5)o7*46t)b)EMA!2?p54RT!C!hT1Yf z+*OL=whR!(Gf~`@0iw7J#cdfNiXrOSF+hmSu*Ks#WPpg&mN~Gt^|)Er83Uvq-0!_~ zN@QFBc_kjBH}`0j3j;i7cRD3@j_G(Sof138JO->%;tlQ}Kqo<8Bc`oZp)4)s!;pV4 zXsdMqm1g5`{l)Z7tgv!X=$S>J4#GUB`3HyuY6fD^Ga_mPHmye^GHGHi%>&HJeX@f~ za{z7)u?~_=gEje~14!nO7z;%-r}6%XbB^BzH3T-UI%vAzJBbNqt)oJ!Tj~>dr$Wqt zJV+jZ2r&ao2EGOcN1~tYT0K*g8yM7?@UI}+vX0nLaGK@X*K}9mS@1~1!LmR@aHP!9 z5F9DfHRLhf7i$QvxF2W;PMr%hG}v?x)sWA07i$QvxPclfG2JIc3k1hZZve7s zP>GI&lneg96FEin3GUMnI`XE5(2*B4gpO?05IWMNA#~&^4WT2CX$T#8SVQQ@G7X_4 
zKi3dCa=SqA3046JBYuv41=VPK9a>7>v;8Eg&Wfayg!%P4)|!a`SYO#c^?=PVpfEs} z^pp9!(I7YsBZ2Ak2-3gB)CKxZ!udud=IL$F`4TUDIoIeTL)?no7qN9wzECTog4Hf_#t3eOIaT?46 zI9`Lf0Fi~q+j0O-loS#=$|~{r0fq@r{N9X;5B#+`>~hRKos$^ra~>|BuM@B|(8HK> zzu{fux|P8}Mq!*8gOiP{mqLZx@mh$Zj1N8vQP# z3BEW7zxpm7e#lT4Fv_%G@$gS9(6e~>r@An2@$kg(a(nw z_{Gf*Oo-`T|81_9XH0iJ06Mh8=oKP3f4oW}ILqMEJWN16#=0U4i3d6!st&P4LK6%5 zimFFg^*}g%`CPvWIfJSR;3eEN2_WF#j)TI60z`EO5l{hyK)niIOa~!Q0q?qbNBavA zL*-~2|AY+?bfT`ouRSk-l535Sq%@RjZ(~JI9dCyqRrEh&Whpx?VPD{Y@WecpUnwOw z=1-1|{EeXoGlHgs?9q^Tc+>r>Z;|Qk(8~Z09yv7NbO36M?ZUl*C{d;YxvKjUC58xL zQhCobwDE$*IDS|4K8OgQkp^7VuTfqkT01obZJkET6SODO(7L6dJ)qG%f_7IL+AH5= z-}FOo&}i5w!b4}Lq1C3Kjn`;Jf+nF;iN5_L1?^OgHdxR+X{}wIf`(ioL?zGii3xiM zODFlJ!-1AyPcL)*$F49$3*3^1cf8;^^JV0g%j%m|B1+;P((pb!p6_b8((jP=Zh%&a zXvjtQF>6dBta3dNLrBcvOw2$PPD{uu-XBu+YLldfYz9pP)CEqposoUlN(|k22IEay zSG9+mfNB17lD9Y=Te%mQi0{L0-<)Rembtq@;MfXm6XUne?`@uUw1n3;8@;ei!vH@x z77^gLu6!(&1S$){OOd}wl7$6`ycWbT$2Zim%OmJ5$L@oggqhk}{_P>xJFh#I9mDR5 zxi+y^FgC|~J^cAd4#NJ&airx@kejs{g53Ozh9Eb8ry!nC>?;RARb!X=pOy^8iVH47YT!4~(@$NS z{PLIrSOoE@8;DPSQgM82IHGwa236{*-&iA&mVa8p7B=s3DB~FExa*|A~e$_P1&X zW4};C82fn|!q{J>A&mW{8p7C55Qy^+0NY95z#f%7?zI>+ZYyDc+6*>+O1w1Gq||un zvQ+kcUw$QTkN!#gOa^v(#OzXo!NRR*XBPJNxs^H51RUnfRhKp0%7P z78Acxc-B1Z(SlK4l!kUE(6Hwytiz|#5aR;0;xx3GDQL%PG%Q!3^-e?cr=W>`7Mtf0 zv^Ow7Nk(~G3fczLgk$px1nsFbv=5I-_U(5Xtw_-BPD6Vp1#O|A%^ED+xhxIsK|!;) zDb+M^XK-=Y=(Hn}om(h)7I!`sRYq)0`XWD4E{;3c>-;+D|f4JE>z4`Mcx?VT#^ zY3BSgYVfjP;uyLu_>scIvjtDj#B$Pu>J|o5q=909eI`~6%pIYCKhhAEMHwK?3ug#KUU-Srk{6DZGVsEX05G9^Qinxx zHh=_E93-V=iXIK22ghg#J?O0=^dM71=)u8$(r5HwuZFNNU)K;8<_j9a!fe)1iRoUe zAuP-%0a;9uD+DYM%F;C21@=O+7FHsi5NimF6>G5lV&!@rH-Q_#a)6kx_+6eZr1r_y ze~YlnyGRpcmA4B3UB3!Ih#SA+LLaOyKq7tJJOqN{cPyBNXZfI#V$T;RI>9C<11^i7 zjEzET$M-m%M+WT{OoA<>qT6)DjonI6h3S6jXc{=X$#A<%EI{iF8_9Inq0&%sRF=O6 zCj+SGIr?73vjP7+I#{Gl8Sqq3eC!I4quLz7Ib>=({M+$*geIvSX!&Rq!+Me8&GCn& z*ie}8G4cS~%s~xdYP}l5)Sj*(OzkNe!qk3CLzvp5HH4|{p&?AI`3-h>G^X}r4Pk0q zHH4|%BM@xn9RNh*-z+5?G5-K))pJl(L(4x?HusZ*-QoOFBRSJPA4aDeFOXJ8;a%ov 
z2=6joAdvPY1gfmVAkd;t#o!q}LL%_6zaPP7h=O6G31nlgMIfsIa9g$d2>^_1tV3sI z?LSNu#0flvf{^+)w6uyINW9Hz8G;J#qu5C&5$>BLH)dn}8H75h8=Mm3J}wu5Ch@EH$N>&L4Jf zP0NQia*$J95>^0bLM@>JC7qKL z$@*C2-NsPt%(ZZ|eG$L!FN`P)`fyYcR3EtfKmuVrId!|XFKSAF)bVyLz2NqCZ6bHc zh>nKb?O?lhH2mfrY}by)`srZ1c60%X+i%y7E{dqTDm9tRrvps(I1)KX69=d5)-tDn zzFCYYs0Q#S##01F4BeE2rY zk>wd-1SlP_5j97s2>P4nD<-6w9TC!j-a;t5ae;n`bgVXn^oXv%N9tFy{>X$h?oVF_ zLXxeu!xNH>XeuEMe738EGzJ@uT_Ys-GhHR5LReBfWyKd#*Mo*{MiSde;>BZDhA<x_DYE}lfwe&9R^WZ)j`mI38tc+PH>673_*j`QLMt|!mMD$BsaIPWz=xtImuGaPA zq`m_65%n=nbGR1E)IH^VvtRgf$5_2A&cOT$5f?JdZE*uHo@dME;F9$s&w`g~-p z-+t@$P{r2^tC#S4mcFaD5UVkwoGCBYTVAeyl(%<3ygl;r>v0}1-VX=WjgC6;eoZ)U z!E9{vdJ%6!^7ZC1AwR5k^7X<;zvTCRG52SwueVktx+wgf_?yifbdfar1?Am%tbqM) zVm9&J{;RWhD_^MJ3qiF125;04Z$$YS!pY-MaDCj#uS3@(*eItf{d*BL1_ZjU{5#z9 zr|1vG`oZ-+J|8LtPJ0sX`>5RbH6UsR=|@z5G5NQO{3ZR0yW@{C8l#+QjS%lo@l%2! zuab0|tvZcpSBkv^{WGJJe4;I=OUozPfQ^Km%qhZ_S?iXLbBYG&oT7xjZ6JQRgM1?O z5{|`d&3*!IS*UaC;s4L10l{7MUuX#C{SpnqyuU?59{B$?1Xsme4Z&40Q$ui7Ox2Lj zbdT2%ToofV1XqPmLvU4mUm$Ewo(ceGLS7cT!PQdD>A(#1WppHu*{d=pYen1!eWi{LG^d6z z3!m`hTJ>#x&KdZ#kEe=YBXU={7cY&;*(nI4K&+cJ1QK1NA>96ihCr-;6$r!ekN~iv ze=Ptf10Jl79iGAo^f|#LJeY}_LOW$rJxCW3PiKDs;>haD5=?6uT}#s#J)}=DGok~* zPjT#IFstR7Y|CLKJI>;@8pvKmag_H#`f%Xsx*)T~2Q<2;DlwR_6&Q&@5@9693(vtC zgd5UTb2gAd>g-5t4os{UMFgFIA`wByunxWmhWT1PEW_LYAC`fWXW_%b&XKoaUQfxr zYuSrjm}HXDyMxwKt$vJ5ep~)kG)Gq=NT~`(1sJ0H>xxEkqRSM0l?n z;(r4F1iS4a53;;hFX5$Z@6~suIeGI_;fxTUt?BM2rSM*T4o{a*#zzDigS-E())SQH_GZ4OH-try%p)?gw%dz-Og5Ez(^{-5X|4QP* zYVbJVg8u>VtCtY8QqzQS@W(F6*XQ-%1pO>n{igeB_AGiqE+;Q1HaW9p8Sp4>7Ne^X zhO^-uuBP~F$URkk#ljy2-C6*c;M&3;hFe1F{MZk=$3}wq`^(emsIKT8iO$Jmt0RH( zR27<(kaIZ?5T{;PX>-R&Uz<@tbboAWX3KHXi{I(+wV3<$A>H^MOS~_;s{V+_Vs3EH zsF<@38OZo_!K#(HXnV#Io3&;y!B5$wem8>I#IfekX2#@Jnm=2kF%QqeX#A|*XbeLClB02^{K9A)O&~EELh}w3 z!6=fq`UpoHN21q@7^uMpfkReG%b9Hl?M2{O%QHbba7#L#B3n)j+Hzo}4ccAOJSc7I zrk35nOc}J>&yEzEQbm1YZUlsY%;i|P}qNDnakaulvWZXg} zTA`bKfNOaWE2(KEI_B8+Dw4qU`JTOA@E0DmZjPw)t#f%1^63X@2G#Zb4!n<3F=HW9 
z#pN_!v&o;e_Q1z<!7YFp>OkQ|eviciYP4Pu>$_F@HBC%^T1BD-Fee5?^2fT01 zeE^%rG8fjYk^mQ@2`0D*0it}F{MMUyu%MY}A>^jEG0+Y=6r!BD36&CA?K5K4`FR)> z)(qwk_KQCd*&^Fem*itV7Q&xP8r~E=rXgAV2&Typ!gtlUg$E!>v#<2wcD-~E`grTT zpu|P1xwiCHbLR&Nw_T40yQQ)>ew0{`MN(3o--N5R9(PEQtVbd8uVg*0e()<^kMr0d zW~dgurGD}#dyVzj-!BU?lw6R+sA%DW%sSbHfFOj(L2ba^TA zUH<2nV);UQ8GaQD(D6nW7NDNSuWkXt;5z&Q+~Pmd0z5$$AS2slkznQZ`MIn-uA+`t zUfcCnz+OQ9y`NZb*gzHr^AN7qdgH5G@~@8ISF+y5KkyZ=w=6b@^>z=s$n~~_UF~qa z5#wK7Z)g51>uofLH`eC_To7Nm@2gyIe3$>U^|oYB7uK7e#;+4Mx z3D(>FKb7^yRn*CP!!h~HOeul~8sV*$wFR5=w|lv!#M{FBjMy%?SXiLMflV=rnbI@o&03Jxri4#^g(&7|zGn9q2 z812eSu{WWFS7hS|omR0pVOLSSd$0@iGK@}IoH@&AGeD#IuXe`5#&`J^wgc>W`2}NP zIe~<+AjZ{iQG_#crT2FOrj1Y9X-aj#Z z-q5%NW0l_4xG^gDYkH687ogM$g01TmLI-^!|X+ zTas?6-ywN%_VC>!zc75)6G$JvX(+OXZ!+L64&O7Kj^6-zSm)zM`!z9s`&;?Ml<|8G z1s#vy2DD51ZU>~;nkLT`C6Zs{#G%fNz>6U$r~W?0@|03e`{xpsdkQK=B4 zx=7&QGJqulPS}TIBBH^2=y!%KAB-E($iZ-QN~IB9xETM#L3bm~lXQ~tdmj#W&2cv16sAlT_g1J0pJUQ*B3!KZve z*y*Tfh4UD;!qPenN@;5j5(!JO#2D@O6vo66n~3kCIAN1wTpp5$ua962pjIkck@Sm- z#Zs{=rDChDSRxgxP~nB%{9lUy%Unm*u#dPbkxOiuzC5Zg zb^6k*FT3@HQrvY^jLI`E6vMdPqc27J;?tKBeJRzKGJUDkmnFDp`yH<(^44^JL;4>$ z%Ofvv0G3#?ZvUHn>FTqV;ptM%Pjw}$pSP;n52>b81BbX9RcmwnRbS-td#?C!AXtgL zWj5(<>ZEH1M5-cI3BXE!*nJHd{h>oh(vrXQAU(upR+Dv0O=o`a#9xa&Fqy(#~; zdY(=84;R*f|61{la(11>k0Q(bA-q-?><5uu8+hAyd4Aq0`ZiCbupwnXwZr?h5fQAk zo_Z>uTA*(~5l>Bx2khwng;8P4`f;6!wC)Hf1~PJe(s3NCanHpkdwqFc)$xVtnlMNZ=&e*#5chl!@SYv=C z&Om!-cVm42Qq#mH7)~=(qY$09np9>_NeA8ttfHv|)#hus~l&{LTa)Y_ih!gusSQw7RpnNOFl^en0 z4esEn{7p#5^|+3TG||g;J>E2+f;&{jvua_pohS`XtH;3;Lnx!peAW8Jh`@N_k{){Ij2N ztR~dz-%H}}$nS;A{7>=C4OMYZ+x?D97;gy2B~$=JOjFzxG2t6mL`H0s&7b&;Quw-= zdEAVXB9;g`NLSEtzJWwaf`A#sj#2Uf2ck<7*1*64z^x}?me@y%<0vA?VS4eH$T{XX@FTqp?TC+t6NPaV4(sZ}%VaFqj5$~^|C8qm1K+GdRMR+KD_G_f(_MyvA zg#B;p7fIH1|AD_^H)GM@tKysKxkk(>_nc`4@-PSx6C}>83;^}P&;EV0q7`(&%l;Jr8I3AECe?m`*SX zTL(hM3$Zt>AGS2#A-%$L;_j5v9IJGG+(fLEjhic_x(D$$Q!4T-^s;z!N-5`<9w_!L z{x~WmaH2+dg+Am%Htow%K&MP4&}8%e3SlFNf|L|M`F_M{D3X0zers5OXVJKXWS@l+ z1`f%|Tl(`6GSV0wWKX4?y!G%N@wNO67tura 
z=g>UQ6`>~woQK*7h*%^qCF1Q6->p|;D&Z7hM7FS}Ho=m$_X20c;c=>$0q3N{<1Bkn z#G1`$>L03T~ZEhl7aJF&+Ba(BVn1 z2p#sq#dK%C)Q%2ccvztj*D20`-;($cvHBwt7(!g< z3m?1&zY@VJRCX?AHR*3GNlWMU0DNK2iQ!>RtoYDc^eZc z5--o!DH7?Qm|BcE|C$m0m!o`_- zUXQ?=*5a;U#_k6u&Qt=wy$XcIQ&B~E2LXSN7W5!4hm)lxoWyD`&*WOLNO&zmTSU)& zUDg3w!=X2tjf05zcKAvL4-t#?!zx-T>JXVd7A;ofHTk9Div9rczxY1j8z|)W0g+>I zj{ZL2pl~A5LcR|;sNMGgSC5G~Pme^MXJE{7t!#^k7YEbylM;F*B^}{?rXz%^w&VMN zd-#38$MSdMVMw-MC<$!`j(}Ev!6)qW3(1Y%6!$G-Z(&=sW^>^qPz&ijTS!ktP_ z(J34+s$qv2ta~9Xxet3D?-yQiW{LznToXKyOnA>kG$E5V3v2;xxvC$xSq8sMr&-3y zHp^fnlJE|On~V7{lt^7;8MB@fmN6a|(>)qlfo)jE0A1f(>Q6(x%`$|Z+gcDza46Ac zgjf!n!`?3v+~U!4atmZiC(H%f3KxO4zGmEl2iJW$F**IdB%?s$jAj&Y)I%~!WBeRl zBH(y&%wiNr#2ih@N=9)+%xSbidI+iU@I3N{Gw4rC$V>Rtbg%SB@)GhgL9-8Fu?x+v z<1!=7;`2S4M?8rV;SuY|2ZXz5)t@|~29;m~II@tQBK!NH*i8tj-uQm#x-rU$?}sv1 z=^Q316+-}7sY>7n_4x$G-l|3+930WS)bCa1$=tZb%n9bL5cl>Lx9|^zj6J7_f+C-$x^z`65!*aIx<{Z z!l?4(Zc(6lw3_AwO!pQB@Se>Hp0qgu^@=WXg3GoDCm4x~=?-je#|e(r^_fzik9wOE z2yq>e6Byo)6P)1p2wo*8*qFu%NM2d-e>&ptbe?atba@dZHRs7Cy$TeevBmN@u)9RhUNOXQ7|BFke{v@I^n@E6moC3DxlQ40mQN72CK^+M0VeI2^_n$x zLv)xdIUG35Z&_69S*Lo{UXj^}P#zgxoD_=T#i%kLM%FiuqUd~7p+U*0(#+7Ks_h+n zJd@u;2R}>u9{L&W>>ZBhiC^V-k}=41>s&hOIq?#VN@6g3Qsql%dkKB9Gi~oN(4r_d zWO>33{~LPA;hEsNCSihq!v*K-LAbPGf{S$h)lz>u>JQ5VZ3{&*)5!$GDSAoYneFuw z@IMm7*Ny*u8DBD4U&Eai>$9{Cm~L7RpjSGL^|e%5tnY}JkH9sOs+V-$<{B_bdQ~RV zwRaIobpjWE2^O`8Iv$^6^(VRbExn`%dY+2chNKe?zdd(YBuz6~NE#>f=yksl06*uh zH5r_^@Rsf62-4+kyE!I{9fN}ofg3vS<3! 
zo?%y#HVqA$p!cJ>OKo;)uW6f?B40=6&x&~yzYya=g@%qGc}==-guQJ#{A+>-3%}rv z`b}pnNzvE-UEXg2<}vwxVBag{{ZdFKp)j{21&d_1gWN=}hzib+?X(dU7T#ZgCqpRC zH4x;*sf5-ULC)8$CkVGq>j{D^0Un)qF#2ZEd_-u_WQVKzBYV19Wx7ULzBf=Mymi{U zCE4)nQejOq=0J54^F(@sA9_xy2f3Dush zB*DU8*J!H%bfi8g1C6<)`%AnG7XlCgByo*&aEhN?Hk6o_E6Vq)pi}`KA;6P`Xex90 za2bW=ursahL8hc0E;M14$ei-oOFe96U2YQ=?DsAlXBjQ44>4)r*-p=S6y}j|Y(Ay?l z@cjqa&27A}_v`vwr2Zk)WAvth=Bxk?AvQZO$09oJRKmpbu1U~c(WM>H-3u7?uP@yt z3_X6O!m{$(Q{J1Hb9Hi~~Zq`4A^2Xx=|Nj&1C~ttS?=AJGp+1%J;HkDaUBaYFa6Fu% zo1nbqm$dgQY>$^h{ri%XXCI_yAFNjYoqb2Zyy!+?76^GB0kaG|k$Y5=JKE1iDo-a$&kE405a`E3pyLtN z$1o#?tO;MF>r3;+&u#I$(FX1(K)%==NVXg{6hE|9EfTMd9NxH6sB<|kru)%aJR$uX zYc;xlw$v{{eF}9_>XJIYi(LV|b&Z356C^p{B2AL%n^7AvA&_l2<6(`a%;zbBMTRF2 z>9gc-#F04HuOo7d%e=~I1@(QT1I)4EN^x6Itp3w&^_@~nRDey2^{*=+t-u-b@ zU-3GV?v%c8r{J`^;kgO-)LOWwq@?}!u^;ZKrQ()KQA3}lC+hegQ$5gbVbKs6M<*KT z2mta5vimBEC{MW*#bAfM03Ng#Kpv`ih97&RPu;Z_z!#~C=rfPY0{90m_`b$JxB$|k zJF9j5wNig4>M^U=h17lnCtNaK8o`}V0%Q$nrx%8Ff7p94u?~)()Rr}a3B}id7`m_y z%BhDQb{%l7U~zY{7!V)!+w$%j8 zIlv@S##g%2w`|*DuDK4FWXeax+;xMsBDDBt&@@jQ8Aswm41O-jSovvy45=)n#Rox85JDD8-t z7nfUorGVb@Mcz`4PG&9|5?c(DspZecGsLTL3$ThTR^)g(NnFWcQ&eVJY zd=i`?b~*MJ4E2dbypbW)I^PM&rS}Pu(`WHPL?Ok+P)H`2nG=XjM5c*OBq%|KfV4Fq zfMhC61^`|X=DwBzhGTv8S@I367*3^A4rX$}=VDPXo!}mMcs)}xZemu&0%Qd^)g~UN zbGaP!K0*$9-zu4n{C-^WeD`L6iSKhY;p20!&v140@j1V@Y2GmbIcpyGw>@q?pkBr& z=o(tHg6ajX?>uyjS|Icf8Uu79O70~F8{)$5+YIU3$n?Hd0_A_8f7{W&Rl%BVuIfg# z4@p;kDo@oc;BXC4n}&?S;hB-bLstGJzQkhIkBT~75q?i#l$>{WyX0fQkA-s^aOY{f zbD-KjWSsN5lfs4Xi$luxm)iMqsox{GJ)fBy5?nmsyBrVj!{Y=%QtD9vhTt&TTXFz| z3-@UER>Hr<_7Em!eLrFz`qT1Cp$sL?G(^sdrjz(yY>C+kAv$?P6HWZNlnOh4WM=$n zl+s6lPYyxz{R{YN7-YLd#s_`69|g4BNM)iM-_8PRH&p<%Zu|p4q8fbL23mnFy75#Z z$p$gF%s9}wUK~0#2K%>qm-qyK?Oq__ps9x*5r+K}Tuit6GoQ_{d9+o2oWJ%xjdZa_ z3JB5+L0T$n&VYeizsG50K}~RuJiPB3C4nwk!h z`VDtaQk_mM#yEdTf0k|9VWjm}lB2_X*6uMGnWZ`F5#tX$0X7IBPkcU=0UhFDsSVpK zw%=B5_|=sb+fPI*cZNA_w#D|3i1{NZ@S<}6jD&&7WhRqQ>5o}JuCqV@kZTzLO|A@D 
z5OO`r2KLzG%Ht+GAy*%F+dzp(SW1%Xp+5<^zKRRJulq1NA(0Ph+PB-Qz#Y>3%|S~Mz@A5YbMUiiK-1>n;n3F10-81l4~O=0ndYnXy_TPw!a~cx z2f|DSBH0@_(xK#rpQV}Pq3h(%23rZIaOU#!Q|P_?3~=P~Q$e|HmP{fdfUZA@Y-*fq zHOQoK^bIVY-%l0hf1n!|4c5xvxeUbWV^qJamG9YrMCj7(fx}k5)LPm22U#o2ae@Ei zzdBwkl^W?PK~jR$^|d1YA1)ZuIe3&|q%-wTjSg2!Q~Uvp^4Dy&q%ie_v$#D|?|8lZ z)*@ZFjzA+V4}$f8v=Zi?&9^te-cOAadh)VV(rW!-SMHfVGwq(CUAd`$lvPTT?9MAz1Vt&~%eK_E=Vz9hHP zv1UB+KK86zBie%Xo%G^kXqi9Ysuod8CUG0rzc2~rVG5H-59243nBJaAh*@*6jq^dLH^rsIkOgix*|AM<9P*dir_7^%#On-g0>2ZbbIlQ2JmLy zc&b?48t8;kEC8`28HLT6uHiwtP=B=iPIHm=gf{jcBVpUS`7Ms2r%WU!&nN)7r0Rg> z*I>tK5oQr9#F9U@$G!cI)7?`bJ=2{3+Be@Ga|*>V{Y*z}bKX^)70g1aNKA?U63x`^ zQbHsqf_)!@K-a+|AXa)2Xb;Hx7h5Mm`8T0j#3Pg6H02KmB$S_P125UCBzL&bd8WPF zn*w?m=>|(rd;j|bLhkqBV!D663{Oel8JMN($4mWntOu!S>$=6Le*^Z;doXcAU`=Z< z3q1O4qg=C|v;6}`mUv@8@AVXnp!X)mSI81nn-dPy7yO`m#Ih%2W-)5*i*ZfZrd#W+y`KqiQNm|oh)AWehj))z{%OL+9WR`C0Xn&HllqL!Pc2PQ!+n%TP znDZesQ;EED%sXaq7A*S}GOjIscMvpYWneuE8B2c*k9!c$!vi2q;q3PkNYJ;EVsLgE zPny0-#6sWSvVpg4`euxzrf={9ZP|+*4bHAdkeg|w^CVl}@N1#-$8f>-)$ixCE#!2z zeuKuFCV009p5=gOXW#ph_Pu>BvS#m{aQ1`eb;8+yEj+|N=0Qu5|5YMSwc$Nq9g%mE z1x!$dY_0? 
ztkZQW*6G*^L-gXR)Lg6~fnLqX#qyhLkeD?PFU&;E3<(5pK%%CUB%&5GD5H*kIpB@Y z{i(gqk}8+YsS^oVL7W1k8sop>087}nj{LMTYW4n1LP&AA#cp#=m$!}BscY@zIOUDCtmmV;+H>~y~^;bP|Z3b=#25z7pIE+GP0|ynBe#*3Q zgZe5ZZO0A6hWs_HsoKMrU<`>owr8wiyAtEjnjYXS*_(9lx7p=qwxNvOHlZd2KHUi- z62^Cn03f1;05q$k>ql4}cQXZe!)A5VZ@(m~+x-h+b?b03-3?H_6K|Bx>XvA{1%mfW z!E49r;KuYprjI473&V3M>=iCedNC=%!RkWZy{Y=^H_qvR%i+Vx@fDzsR8h{Rz7AZj zlgzXua=8_kftvLGs1*TVYol#5+Fuz9OuavP1k4971Sa)XD=0R}<=VFS1o)wdUz)3D z(r``KthUbE#w%pS>Sm(bawBcl`A}8M&t(&5vx)Yupmxi`as@qE%E^}our`=xDN64) zFazNiIPI4;c*$mCPFlPtSu7(HtTybW z#VaLNPy@I*Eneg3y-c}T!qD5@=!RIlHviD8$BwrJqh-kL8P<5wIpC^JixKbi1@E<9 z5Z0T1!GGiT8(XUBp~MzD;uL=hZ@`_Zi!hs#Ta<{>?kt-7VGv}Ja64eAmPYwwi*WUL zmVhMqh>LLR;pujyZePBPEFXPte@X-F5~c`yfXN_2Ojl(Vnjcb*k0MmRgf|k>d=`Mr z+;+%#OZwZCxeRsk^+u=wbk5+M8|xWAH zrL6z&(3mQJoMX_@bsrctWW7a^5)1Y(G0TYevxIxjdzvQ3`?uxU4u3-&jgh`&js)Q`+tW@7qx=11Nx^jVAxzAt=R zy99|$UEiu$-v{+!d{WW+&OBVmX&E{J^b@{5$}F9plX{riPYN}qrXQx}oyZqVQmVdJ zJ4o6}bnC10$s`HAi<0VY{@253I&mnc7Q+|zCBtaipOh4eIrmzdC01Hkh;mm;am(KW z7e#wsITzZhCB+Yi<{bqzEyWLqHXbZh6wB4N`uqd{+Eg&q0&2gVZOQ47(A!&xCsvPb z(&3x(>3f)i(o-w3hnb#OiG1HLZPL9@lP=wkYa6z{IesNZP#l61qAr_04D%UmuNKBB z=+*K$*9K33U2g+-6QFWTXJXrcb-q0^)lvi2kB`c7apGdSzrd)p30U8@Sn8j|#dN=n zdIYPFLd-e?*6DwifOQ5ChrI73nD})C?V0%YxCgHViZjxKhS{f-9Tp}xv1M& z;n_)dNn1zrC4OM0@a)bpPcnd+!m~Ta91QL*YlDG`vL!AdncVl_jv@?P5Z6}}pWXq5 zt++)f=mA_z_kGoPXnIP-4|V-TQhy`rNnTb;M0$KWLJ-no(_P;onWY{}#n|)}80@bp znWfrP9gx{(igA+J5o5_eKF=bvBVs-gw8-p;m|I2)*Xk@T{aGyplCde4)BiYh`p9wV z_V;lmKAlmD9fhV>3QxzUad8dd+hvIHWpJ z2@|JUF3Yc204VW;0BHVw5Zx30y~YN9N`U-3n@BfcD(!ruC-pJ;5T%oiDl?UbPpXm? 
z=EKEwpR=gl3UlfDgFj~dajcIyZ^p+=sO#v{;UifGkU#+G9*UibW985NBL4T2Ofv*M z2Kiw4geAV+fD8}G$xTrp`l6-;NJhw3VNz9>(TLa{Ga1$C33a2fi7}=W#R%C-X=^k- zx-e!Yic8arF<)cKP&_lexB$iTQCucT!fdD)ylSJ1LOP)iHQKyO)R0MsQu8}U_!vMb z!Q-NxU~&s|X5C^CGQaZz|Baq_i*;L(khb?+Shsw+f|Tuv|Hf;rsYEJ9XP8$Gv)0rR zF&9FG7YD@mK@FlQ^Mq~Dr1ME1;)zH^l2rDZ7P9Eph*kT&Yu9@_hSiN=t{Q zA$tP3$@4Cyv5xY#QW3CQVcAwGv~fbsTO;>{uX<%*9>kN*jSV zvSWcb(gW-=U_&4#%o8NgbSw}@g3;VbeuW#-1>(rT*a=D#h{N!5&!@{otV+p&!31%` zYR+&3K~|wP;b#`fSUaygICVG#XyVl4pPmWgy09yllPNgA*A>j!K48jW-!yy@sp{4# z9kyzT7h^v>@u!D@8fe@wCPGv&IfW>}{}>J-vUCo~QDkXfKti*!$wXQDkWI5E5y{TR zCCvs%t7*pcrUgQ)kK=;#Kd)~`tN)L^Hvy}ud;7;5iAqr^a}=dWgi55-xKEu^k%$bL zlCh|iISS>-oO$Lk&qFddNF_8qsf18C$rueZod0L7eNLUi_xZl>_xiop@BLpt*QLGA zTKC?2?X~W8&-Yqu*mi%~zL~TWOXYAi+-dJf+_vC`|zWAI4HCA@9G1rH`6nDaNdd3_dU(yp+sy9?;^CcnO`U310r} z|0ld`4jFIpC%kM9`GgQsDZFH3@x`$^+(x{_acQK#2IwyX$9^i767gn#!IcVkL*>PI z+GLkTilYD3-J>OY6tDs=k=&cc1o! znN4xH?Z2X^#4yx_=;5qkNbIAOkW(LQK`=-XaY(ljd@pt0CXzlXa1Yrp5&OT3_e&)E zzhl3|=KH@R_NB@G?|<6IRovaibiYTFR#F7X~pbImRZ~%sq z93T-75Gts|qTg6mL7wo17S(^@D;eziodGz)D2bhco~J6L z1qeT{=mJJz8D35Y68ri7GLlCDPOQM>ev&tj0hQ!aOZI1kQ5w;h;`26;b=_otzaH%V zer{1J7*Fh%0zw|ipok4Aq+&NJC{k@k;a{+8 z$hGXSsskNMhmyx&!j4G|Ffl1fQvbwLvrTM~A*xj4Jc{QhWe9l&x-bz3pwcc-|0iYe z+7bp|f*GQm{vs(5iwn`Hn_A3~PTkC0{$V~Pi@CUnv@-nho~C5;DBB)Q+s~t2%3MzA zCA-TYrOJ>~<0&R2FjgQOjVTJ|ZCNJY=vg9m%~u$n-!b`srZkfp(2@4~KP!s+71tFQ#J z@jMbjAmcGZjkwyV1^xemKsMKcekG7uzzi{AY+|XV8IMgQ#47y;!uSu6w~ZSJ<3B*= zfP@lVCKDWus@lo408!``3V#%Z5I+PFh!J2D?l2S^&J%+C)3{$8_TO%le-IrlKF^@J z{6o$cYAXLAjfK|yLH?2GU=V>M5@i#S=aKL^#*{j7vd}lWK#0!v$FIRo*6i2(PM*W} z^kDbD`!~PGHA;bQA1>Yi5AvLIlqRkDZ-0*}!4dep*m-I+>fJ!cK@p^7!t|6N+2 zA#=s*GY#7nCN<91Z}pi*?E6KZ!S@NLU5{@u32i1((oJb| z1hq3XlVkN~Q1Yybi%6^G^k2yHGhf=CjEhK%nXNxKwL{W}XPs()ueMYS#-BDSD5 zh^?spodp%uf3TpUrI|~S8|BbE?!y|Ky(vi+BRrd6NN||i5p{*rx&e}?_lT)sqx${? 
zY19jlM7{qi&5>26%bZllgjhRN#Z8_8c6X6TPdgx*r&7&|8ZyF+HY+5zd~-) z1LSXP_`gD)G6ZCUtyE0macmWpq`4AI#C4cP-$Cfjfl(CyObBXGrK{;7 zyhK*rF2O$%3;oadr!|G*U;Si)f7O$I!M{gnfb%<+w&#;}80~2k95LL393oZ%t zB0}nvC|abQ^QfNm1OCmy3r zsoF7GfPlIW1tL*F9x$tPiyla~FkWCLBKdw2d4RRYiO<_;BEKOlw7EP=&2efF52TO3 zMqBiTF4mGSTu`qa(IdBsML& z4q{NS7qC0ZlBiF1A6XLn&z26;60vY~LTj_dK!&3kF;<#Hk^SpLvKY$7llCUWI35j5 zwCYLPo=)1OiLumTkeKk;Me(vhP8d^P|H~E}c|uuy>931|_D?!c=pSkF|E@*R3250! zk8zF)N%Cfi;jcJstqp8v>G3~6Ds%-TOOO8nvLm6~lpecd4Tyyb8cfO~wpF;x*}N&r z7j%WQdjf?TgpY=xQwiM$FiW@X07nAw@(F&eycdScZ?>BSk>}~_l6@-?cqa+!^B6sb z^jmiGTQS+AzPf+kxD@!G1TXk-DqAq?V`l|V7 zTBR;{L0V8w@Kp=USE4_-YRl8D9t3t3BpN3OQl*IS~6Nq8-f^J>t_Ae@mn zVH`UjhYe|7n znV%>$2^2yo9pUco&B7KBj-zxW3Kx-9xEDUMsV&`$ZRgPTsieJm*dkB9g8C*NHN>0M z{-}p#%)th+0gQkTXLv4gD@v;iOjH}RAjH!F8<$SEArW#-n69f5nh{&>(DWu(gl_SD zP~ALi5!HUE_G$VMT~r64x_Q_lszXuTJZzB>M2BH&(9$$Yg9>zvU^Q)Q2ooZ4chv4k z^anO(@$c^BZgyTNkHxq&%?=VrnnPe9_rjju4(pzEDEuTN)XQYQo3qR72!dA&^F1@snCXm%X-jBfUf#y^kEqgCh|`>Lv z38tzyldn#AQheTw3-*5|J!p#0K5RRew$G*=i(uo^Svd=0Asf|JN06F<#=*BrX%Mb{ zerFL|+Ig5KJc=3Sf-dq&6f~bG9)wFNbIy&u0kW=+_?>;QR>8=24#cqMPQmx zJjAsC)gZg@ebCk%!BOppYH1<~XkAnXpjw(pq8eH+KNQsAJjb_|r z0zZh~pU6a1rYAwR6un5S0!6R0C`e>^l3314@Xt`omQK?WA)MhTQ7#(@918@FHAxAu z_nN7D8;xWN8L+66^~y)p^KtU~*oU|ICQB%oIf-|AfhdhIFy z6}@tV4qYVZg_uG@6Zd? zze6unn{rtm+zh?Y0{Xc+jLe~AF2#&kI08Xhf*nXaG>T zn2!S6PCS^+`d;b*cuCq#!u1L$8Eg6cm2C^knk(Lo#vJh$MvLzdlR(rO@h_qDLlDkMtjxBk?KEo$(OBZQ< zCWY{Oy532R{fio}uRezK?S>C}2^Blq`H)B}m)?>O5jt3z!G^yUvF)Wx41#@l! 
z)?W(Dlm*kJV0ul!T1$boV!;e4SS`M{v2VpZ@(FC;UXziC4m71;xlO=sNr92zFMwH4 zuyakoVx+*fv0ye7Y;6;;c@1De^YJ8X?ot`#*Vrew#^#Y|Ch_D!(D)^zZra#vhGqzn zl|e?c-4t4iXGOiSU4yu;An_zgPo_W_`_sGN8z?4bvn60+Dq8|3-eF6?#4BtGn0SgU z0TU0hC1B!qwggOE&z69RtJo4SF_bL<6X($qdSVtY)sY5dM8Pb+5jRP_jR#Is0eV_6 z0>wyY&|tHBL>vcA`U*vs5xmEe!f_pH5vR!1k{o&Bb>|b+Zv(AC(`pnDfnk;{yikKB z9qmY>+f&%{wwQ1Wncz;0=rI#6gb?}|V;Ee9MH}4GEWY$CnK=Qks+x%&TEz&k(Ha@V z1fd1z4$(s+X(U|&DojztzCWaBh$8l;Lo*Vv2Hn^MbCwM0@#zH1~6dL58bB%HIN;;$&;WH8)CI5m6|4f!(&;+qRSuNJ7 zB@?}(I#QmA_Ey5LHj#EXv&Kn@ewy7+aZxwYu7<7DiL|R_OX`t!b!W?6fYL>SVnzd$C5K~5(YDl%skbB$kW5$VYCRGAV$1aX5NjM#} zI$eIWD^N6*AFYBQqf#YCzQ|rJHbE^m70h0Vrwk{0DmURJhPG5)Ef$Bhr1F%pd2Xo= zsg$nJZC+v6yuwsmK_4KrAio%#C&W{Z=SRt6xPtr|=GRUZN7q!V#VV5tCLMt^s}_ry zZ79|>7Hb=eb&W;D-%D;2iHqE*R!sPN+28}1>f7~_sg5HhI@RY;YBtsMr&=rr^O1#d zktLl*hfyQfSd?!pDyYSB70FMoay&ymP#Ht%+@i6FS){en6htl7Ru&I$e4?#XvGL)y z3JveErSG5}v2D><+frQQMaedMe#=N%yro4X`4Bm}OgM6ENjH)sPpfnyISRC(9?4Op z1*J$%3v?@z(^C3r50T?LUSEkQjmXyx($}G!nG3tO;0r?zP{Lk!SqlepjSKjoF!I=J z|A|o5f`l6fkAhl81Q;gFQYP4m5dpap7~^EdDrvz-v-q+hjByQ&JI>K6EqQ_@VZJ%N zsT%{q6OK*|LU!7L>;MNDk9^%{*$u2Xb8b%4m zs-8@93;YtAyumcL<$;Y8BNeSGR3p}J5f@Zioe%q;4@V%9vOn_#oG)BK3SW@PQ#+rF zy)OBJr)n|rJmY+>Ady=&U4|zg&%@@tR`I+j0-4Cy$UwCcR~~MHL`Iea0zbbRX94py zQn*$L0ARa9epXFa=gP-3va&ouf1cGBu0|f$DxQx(bq%DAC320)?ZB9sN{m$^SCB)h zt%TBz>Wn2v9Zy0#c^-Blqxa~bT8FPuM4qNm#1rFJ&_LpULueM>z{scYWjo(PU_1V! 
zUpZ=9;yEJpI3F7tFF+K}eE0yc$MH-h?1#uuwoE1jLx9gtff=4FD8wi*WE7Io9IVaUdZOTIb}mgN>04koHUh00?`0l9D3OF0p25+4F9j3M5wjxpgy%~z6q z8hS0n05DNA)!CuK+~`P#BZ_-f*d9J z0yKJ`j1O0K{5>VEU=;a-0Tz!C!;TQNG>#5~Srg1B69fXszE*0eKy?1C(UNUGSp0@@ z#FqmjfHU}x#&IG8ZO9HZwqT$GO$Ltb02y{XIR}!V!zb>F~*-6t8&7aSQ3(l2Pbi@V2R1Vojm1yoK8qaD#}p@ck%^%T|T5a zchCsFaTa4$gMGnyR?q26GO^%E=c62T`Br#i3i`mc%61UsaRhmiZV>xE5`2suU=4OO zB%kiZwIbnDaI|pLwj|dQ z8GK2n`0$r<%8BR~sSf`khc8TVP$q=8s)*?n&s60zBA8DjD3kxDw)`l2`W{Bu$5(9* z4w>}-+gGh96W4pbYK8D2mfE!Lm^G3?t#FPNWh)B9#OH@*(bk$FZyckY4Jf zU6;rzq3n3M<0b;}5)cmo!RSJUF>T;>O$4xi1e#(pT)@|NnL%b0T03=&7z!ofb74!Fh>@V9_@)U{=fjUk5 z`zuOdc)8IkP=K*6UBErr@r599A7x2z0nrXq2na(g>3D(@WLg9d7+E`0#?sD|ukje) zi@!mAIB~=q&-NEHmBo`)X+(%LDvaRHRDuz(KrFGLyn>{YVQst~N*B-(Ttr&?EM;Vz z1uDg)D*}Xa6HbA+QLMf(O0I(O#Y$X34cGW5SOk=M8UaL3ABYE+02CVUI}3t}LI3?R zg^zj0kH8-s1WzadN8r$X2@R)@1)GU-09iyB5gL$V_K79n&I0>l9ZJBRbN~Sk;$p$I zB2v4H>F>$68+i(v4VzF*5NqXlO0Vy;L=z7IZ6$!f0ZQXpaxDRrkXB5G8l|j+oH>A2 zfarn9g3P(FWUd*xy>G7!dQkO(X-;j4QNjl#mzCH-Z3{se(?LITSc+SgMJX+w3$Y&L zmA+4%g(=Dh^-L$!6Rwj#QBPKXg(}EvdI~bt18?b{poHx!A$Au~1PLV?8xB!2?mkFF=UBC?*dRf4L*g+^9Ur;S*iRbav&I6gD+H$L20k=`>1Zs#?`k3f};n^~< z=5W={ClFOy-T}rNaCsgKk80;h7l^h^!ve~3>qU~TFySe3On9<7*Q$2GcH%b;qpxN^ zoH#znS63#<2I(`=F1QQ&JHmycRmgi1nP?9=@@D$fLHIx>e3=Yg7`s@eF=7Z^7)H2= zv?7qZ1FK{aQTaW1f_EfQ!uTDIoZ}mR=96tH%2@h_BnnNlG`>KNMA=}m@>yPBoW!$A zbcwPzss^T;#q+HSc!C0;I}v`uKgtHDLh`g16w`F<3_+Q&Cv+*|&LJP5EDILiDVwQ! 
z%7`9heI)cD4Am5`NP*y0&|pEb8rfeRNUDutAw0PS6Mi4aa!(5w+W0n3%o29I=$!mm6CTld}MN7c!YpM1dsH!D&WDAXcv z*i1x6JdH6%Moyx0luEUY+Kv>nG>GdMc_bn`2(Ia^O(>uS2f3LW4m& zjk(IQsZ5653cDJTVQ0r2bW@&;ycyP;}X*|~Rx3hsY3U*}S!D6y_MI`YB z#Nu83!eEKU$TzMa!DkFt@SMcZ5k&zurQw^1j1{fYm55)aT;Q59wjrR$s2NhCV5h{c z-`p%>Grlhe8!Mg?clm<4jMnjl@+25*EP!IO{+9!M}3QSQXf3Sp;$rN2A@JPf|fI3Xl%(Ba3xENx!)!wkc5<=iZFa}tS z44{K&_6K&?{xkeVJ_V12W~n5xk{&2gL6WB1Yk*`a0McgHOCR zU+)r2GKcu;;)90pqDNLU#^r9&nu!qZ+LOI7tXhLXRXYe0nn*%a#nnsE`>C8yUkt*7 zCx_R-zAo;74kWMy2=S7B(D$=wFZcms5)5>6Czueem0(8E?G!E|EqiPaasvV<8afYW z%!&kw;jkae7ov!XWz^1R5ZeJPJkam|G(hpsi2p78&+KlIeI|HvP@DC58A51rqIU_1=+|IZ)#|NNo< zMSm!`7&wMHacZ_9{z(zctMLoIKp2EEKtl8-%nw(d;2BAW7{jdmphMIqf#OI*@uG>K z=?`kx6yuCqLoBTJg$+?wSaJ`dV1rxPSXkj)2>iT5q#xpoBwLD; zErPDl{qSzspjxD*V2B%_T0+72!i{Q)=!am_Xh*0REIbiQC420AAss^u$>wyd^Og6= zFmfi5;Jg$xpiHu3$;)+vT=Ca5s2Bd)22Ud5DMDESi2D?xk=ZMe2rpqcDJ@6 zPL!)cv;q|fIZ=*EBCYNQpx{xk4CA#e&C*IQpr0JUXay=a16@*u=dle4N8l=npZd~^ z-hkM$_@E3X%9K8sv?KnFZPFz{(P+KM3AdKV6G&c)7a0%1JO#Cg11RwY%he%5d_>1# zk$^q(j6Mn3LDY$TUOj>Zp&pxa6W%~1MW+32`g!_Ryy##klwsr^vHXefPa=O-Qu#9l z@@I;){Fx$=KMfk?PYiN~a6=^-A8Glsh%tr$gS6fT`4iEMJb@;{d5HX(z=!<|Q71(H z%$Jfs;XC?`{Q0p#{)CwC5ItRvD{zD%jr~MM0J~xiuoMOjDJjbv=nc+nj|=SIJ%K5( zyAVyu>|6L-NLZC9kT@%~^XaUF^@TM47b{5Eg4%hkk80VVz71klTyOg7nYf6wrsCCQ zu*8nm;YBOZ=@FK;C-c3~P->n(>Z61BATp`I9Yp*ga4a616A{FbN-0XFaQOrm#u8j0 z&*chSO&tV|3PC;i0#|jClLe0!91kc!8FVycHNmnENN81s;hkcta60-Rg@SYOT)fu; z4-V#GL5Q?mNR7Z1!FL?Cq$|=gXG;jV>&uo9q^HN05Pa8xmL7|=)KTCF?v)zA1?K=X z2Ew94=;vs4VaYeL7qF`ZTc1Vhg_)?sI~~kQL|T9AvGRPFgQ6_F$xPlfLPN9{f1mW zB%yZl01$ALe%Jhmc$fk%alIf8EG@{MSipvJ1YF6gCi!_Jkdv<5V8v)Bxw4)lXqVEZ zM?x3a6A>2-7gUM@C1?naQKa2tC32`*7OYzPi)>EDz{UfiErh#-(Z!pO&FH{9C>{T) zh!>Lx#ILk~I5Z&&909}jjHsc0BCRDo2%&gMCS02XdI_~2^^F;S(CMVJ*$BlUW>b-t zEpBA9+Jcp$z2r(Kkhegk4@Kex!WW@!1|9A4;Y{+7P>faIL} zcQuX4_hZ{f(Dp#m{?GYtDoJ2!zvR0yh+;EG=oXvV0aGL29YFt!^WErpni8Zy@I2X# ztK>bK(L<(CI>Uo*7YEA4W%n{5RwFUtl6wZiC6(dvXUNpofRxJc_%q~-ccd32W9>i5 zcLSa@$#;wFPVrR=(R>h!x0E6VF0065+bcz$cTck2lh!? 
zV2Ls0ykCem%R*M(3&ZnZocoPwjQbWtNZqlyH#UkXR`P*^C1Ct6D@P ztsx=JDDdEb*@VYa8!tJ=kRxbC6bO=QM-%HBw)JS(H=dlO0*?nM_IwZmA4n;FI(!m( z6%~;0AegVvY?mYJu5=|C-~xG?d~s8XTiJ!S7vUn(l101JE=R=5E)UTz9i~<|uIn0N zFLxmx_}5d1>?lgvVI{$(;ll7C1QHr7$Ptra8f(vuQbE+b3&RNP+?kbA#mUDFk(XTJ zXGA;9qG)d<;iO(2^**R?8f(czoT|XZ5A{t`>yT8Bdd$0>U$f^JqCOP$G;)%*A+iY1 zH)R9@GS_GdrClbkXPYc2e+;Ls;3-_9b~!ga;K~ztOc2fn7FLL#1eBl@UK9lE3%|!e z;{pnC1RQce2w{$(&>Ta)fD5}SJT0tjIq8W=1o!8^3$FY8zXaYNWL^5{EQU9{jFkvU zZ*-`_4IeK}%E(6ys05)eKz^jydK?9c*<_z1CIx@U@511AoM;;;R^#bM!>u=IB82)H z*@bPxhRX!2Frsuu1Yc~8Y{+K}1>z)=0yW|gn~G7FzNJC>BS-@oOB;!VP;#N_ip4BP zNLgrUdY42IBs|YYr1e^dJXT2VP%uj?bETRiT+q;^9tndX#S11vq6V^#R<{=<2{lTo z>ecEd=?>uam}pJn*N~xF1LPI@IHElUn?Vr2iG_$YGndwaoJ>Oju4d$C(;D;u@hmAV z$pcE=VJe4?K~xJv9#jj4B;`Bm0-PcOLv;&qb`}ZN5#nh?Tp^D@;JWjSE2RB7j6ha| zhzW%@Of*-?YP7Q5wl(^7DACAm4%$?fA2-zqDFL{5hEI-V}&V99elHEhSWNq8%{jXs2YI zIM}>c_b)t`46$xpzCo>+p`7tF@6b`f>?6iJ$e0~)>@P$VCwAqL0hdZa>`(+;a8xKW zqyyfANIS-P5`PL@*Hrx(f+QLxKw&$K4}vd*l+U6|0Hk~}3NRIrB*TZot||5K@;SlV zr|^Z$8)Io0tWAvB#t<-QHqu8rn*DJT0{g0}aoAK9Oka^aeJ79|3uF-4kgymw2Z_TL zNe+@NTO>J1c5IR4Alb7;l7r;H7D*10BgsLsq57MRh2ih=kFIv4gnv6O7+>hH4fgh= zg{^Os{G-y1_=A99l4-H16#{EksW!M+!ph8JVsm_mGs?3}aAzzD{fIv8hi1gQ>i)*6G}U>gF870 zY?OrW6YB|b;Ug$b^TD%ikeeG`CmReh><%Lq%rn3%VwmYkS6kzL`LNFr`b4w)h=qnj zEHv+beOiOrf<{h=wC>SV8NyFt?~O8~OinOHf!%QDIBld9SPxW7q_qYWafIU$dB(kY zPs4|USh;&MypCx~)?_3LF^T2tN6d04%6kBm!$1tftZW34}wg0tgkx z%sw{6CrI{_LtZ69*#?-28qXM}fmHELP8sf~Ty2)tc*-zW`f!J2>?Q|hZKv)aa3RNK z&}2GI#3bd?-Q>0t@xTYP7-7~_umX$5#$sS0LZw0?tfQl3nTWMRKVX0b8-|9QP>cgv z;SCj7*Wn)^t#d38Q-%0fp(+3*P=;iDZ9E)cKNmdZgclF<}#t`hkF^y;%vnLb}8 zM0^!L5UNy@fB)%whH9SF`P=l}BH7rFlWJpb1grO)5@H-!v*^C3`CkhsZPIByI_B{h zkMif^OFG;riE|e8{MP+>IsoObwu-5^PfCd|JzY!^|DoFM-z%?ohsO*87;5a zBhYF{#;}qBH!J^ob!T!8Gq_*1zg}5-ULUPHeHFH}sX5WXYMpoM5h2N;1_@=Zn}(ke z)vR2mUz*kT=E8%4&b}em=1!J2)}Q7K zbPGD?dG?65cgO9ma{~u=A2rscrG`sMNRIB5oUKJtRkd`U zyE*Nso0axK*Flagt7L{|Ysh6AE8Wynxq8a_yS#O~qz)r)Xz=%W4>!$pGJL+|*o5+O z+Zi#Ow2)_3TR+cG(x0bQ@bUe|oq81_L?LLea>bmyYI4>=jbK>~ek9Xgm 
zmVxB0us6QX>{s``gqeE3)HIuPbaBYFuEh=$9)DbNessrv7bADe&NJRKDZK8{SYM^o zu8I4y3U&HeuFOzdx8}!Au5wT2NuOnb8*MM^JEcv2xN!gTuuvOg6{S1FEjC9DxtINB zPssGX%O+iQwkjL7dw!@wiNloEy_UUyS+rtaOv{peS391G);jvFTi~X>)4pyBu#R-y zW;}WI!j7qvSG?K3Rjuc+X=l4eol;fV5ZCjSp3GK1zmS>P$2(qAFdH}ULB)#~rzcvN zf7+MbN`LgL@br+PSFU#6H#--!JELMgI_g1Xb{lQqxi8lJT;Id+!8$#Un57jfTIHFz zOznB^p=yr*>X|bJwQ0L^zvuA3Z`~SuEym+aNJQX@S8d`q83zw4&Nd0!6dvT4;N9(Z zk7=1pcU*gyHgj!!jk9mY1@9xahl^Y8SW(5yYv5?dk>up8Gl5Yr?K; zeXklMn)e9bKf3pw!gte+tUHtlZ+1Fm@~nk{mbaqMt!w$W7Pu^bJ^GX0L-~NMD@*i) z%s#bxn$y~@&8gQD?uAcv8yRQQv(r)AQG<-)eWn;uRL8@ z5H{PSet7W9RbJeR@IFiXea_XpCcims=#x=+wet*6w<@)v^Dw z9&O_G3Qg|@E8Yy(QC*?$+AlLHXh5%axn+w_o*kV!!g`XmjbI~xd#&@L-X@&WPUXj* zmWBCux;<`2mvPL*1-B3T4pnyBcz(Q*%Z()womOn=)r)T_r&y5UKS#KG`d>HNovfdC z=vB`fLk9bu;q(a(=@AiEo3z8waQvwH*)w_1-jxpSn;deo##z6GSAtSZ@t%pktCx&z zH@D{}-KzeJ&9ZxM-m&hP@fx*_Yub;!5uQDvU{{!eYmIk(?5C{P6J#PnIh{YRI+rdI zX@B_puxL&!-kkZeKKYfxtBFdN1|@vpnT%fcJ=|&frB^|wR>Q~5aW|SPR!){b(uWq3{>!_DmsW2p^PGJ&f(9N~Sw;iq5nTJ!uf9gZBmFk|+|{Z|tF z&wu+mz-(7cv5Ar0^fKES+YP5|zfzwT?c#3v-LBo#huc5(zm_&OvPC<;T~(u#qPHzk z3hVZ9z}b??7Qq+u7xzkgTT{7Wby;#sNA;R=&l9IycIkxacX_3m5uud4Zi*nva$JS| z$w^B}>oilwnPi!UX@9&@|9Z!Fqdp26fp^d4+0M$zt8-H-OUvfYN~tn&`D@*QfQeD9 zZd{Yu;#G6)x&Qr4tDC&CP^GLLefz&~$@f#Y>)Y*7AMI<_BVD$fIA&^?t`}J5@hT(U zL}|y4!c*n5X0IqcH0(r{wdJrbenu`~DL;-M>v~P4FC*}C4jt)bGwI=#lLKD)rq8-E zVPcrq7S$o!?|glIU0dEDzkTt+wN~idZ~enu z=RJ2@bR7{K%Q-q^kNMLXPZo6Z8S&Ub)iFxsIb-I~s@HF|O`YCV4SlZaQ#Ev-U&V$i zYilo7-;3V1YrtQwE9*-O41LOW9`3xQZ>{`JmAa1~emXcU;cphM`}Uvm|NJAx$r-iD z`FD#J+3+^i-Ps{~w%%*j+*?Nnc+Lz7=wVr5`?+wg_3KtjyIM2T_Si4CG1+~#$p1roGbHv#(%f`Ipd|>;7Z1uIR_!8tAjB#$;DY zoym)}Ll>QSyxjlEqq_UcdiDAuC!2g|_jtXWO<#N+jkj+rsZ1KHv^Q(8fzm9CnU~so zzK=3na-!qf`R?U|SGfjE>X% zuGBitocVL(TxRZH>DTk`*SKqq`?ks2cG&Vh+n>kB92&M%FrjYfg}T7Ef7R}bOuVMh z@|2=pMK`a7g?XJ?pLD(X#J{ zTPKzYP6dBxz4`ba_oFs>S2WtMTEYud4^Vvbb+cAgYVfd4UFuc+JMlEidWAl*U)tu9 z`F8U&89D9RO&IcW+1eRylY5;q4y!$E5v--(bKfEJp&DziUYX~<>GHCIEG}tV 
z@wo;!Zf)b9y!(##YMA`z!CSfwkhxIWWBZnEmg*~u?XO@KG#>Sa{XhK2cp4y?%!Um9Cn~{pY}T*d|x-Qn`e3Az|?hN zD$hGyF>q?TJz#s9$Yy912G+|ZZ&a8T{>tRfvnE?-e-q{i~(yM)9)|I)Dg^#CXzSgU>J8XSdZHGtQ zS>|umTO+ut2^IQlv+r=*C+(;(Z}qH$$@Kw+XLg-6To!nAVd=7`iQYA)ad&d6)r#!i zW^%T#)437yslQxq<*idk*14*+ z{nwL0N3z=t%dlB>;IAR0-YD`_*I&6}_Ppk1uEU1x6&isL6@JzYw@UeZ-)K#F*NXlX zKZe^Jt_ZwU`!aXno_QZdTW zk5faJ2f2!5=6c7S(6$+C+UH^5S!?Zj_r7WuevXal{&{zwzWoMFc|6%H{DGjqbZpKQi;p8N7}&kP^=!(TQ$`aOjPHGZ z?$c)@H}nh6zVYy>wsQA(>OBUm>YCeb(TXF}yPMl;`+nYa|K^n$>$mT!7#Zr};kT?{ zvsdl`!J)kGU$Oy?_74Wva9Xijk_0Ia^irxBdx_Dt*%GbDFi^{I+8xL#c zdFRpP_0@Nkr#&w@zw2A3!^egFOkYhJI;>`z`S&S<{QJg^=&$o_+>&o^_U5H1-_BKw z^8KJY`+Qwa-kZ(B6>?c~Si39TC-x+j^t;Pl;9C-;Or_9_V?Bx!zO9 z=CDTmg4JuMI$W0T9Kjhf{k^f%!NY@GPQSUZU`EENPZhF0f-Ye(-iu0>Zd~wmWevyZ zqptH?*$-}ikM}!p)?IY`;qyG}x*ZV0v(+4bd|y@8OH+SH+Q*{sAAQdC?l^UA^uYOc z)!C`NgW773Ilo}bT+JC?SvuGDES#BPtr))Q^rCyC<0d9w`hKB%=S5W$dunlq-55Rg zl>7VSa?u&QZs=nqvK5jJS@9&x8 zKh&QY`}JGQ$d)avV@3|z8of;Q^H3{~k#ASp-89x&;#MG25!U9;$hF}$d-m>+)StIu z)`JJ_ceD>qE9mH9kPJA<1ZCYLF`flCT*=OgPtn9ei$a~JP?<0H+U*zB0{)6-InTzY|SFPhs`oBA` zQR-tkZ}=uv&AU5439ep>+r$j&`Q`II*DWT|E!4fVyeFn@y)q&AYFwMwPruln8U8Xq zwpBuE$X3x~r_+wc)-XK)O1<9MYr(YgWbaGPgmL%Mcm_$J}(tn$4(bgnJlbnE;Gck6$+{f3g-9W(ZnRumMz z-#Oyg$kCUwOFB$Ri|P~;wIjoC&dJ^<&BJ7>9B<#R@35hx&-5vR)9s#3T;Y+iY3#(9 zM>|S>T>5MG3X4bmgd4BT5FPpX#&qI@-q9Wzojx4bIAJ+p%#f<-+f#O)iJD^`C)hhC z#q_@I*p0)utvviP!xBfdx%DB!a1dW^kN24Vvx{Hu6gXzin;-7uWcJRW-73Sg`|tNG zxTZ3H#`Lx2sok89n8o**K7ITE`3tl5ZdvYTs#UdPiiT^@iuONcs=a53^!+W0Vn>yD z%=0?uY2d9sZU@gN*sDX&*1L{`K7F>uZ=TS1mHPS%+uu4~U3|DcAT#AtuZ)Lzi-d%@hmY($7uyYuiC@&KHlY#Kj()4C8G3J*;{> z#IkxQGd{SoXm-77ucb+&V>V7n%kUezV%zjZ6EjPOZe6UIYxyj^<-^Hwd-BYkr@R<6 zcMtD?(n}NDU~~KG?OWF__?ol-$K^rG+A94#tnQxT=xZj@E)P_*Eb6*JuE+TM3ysuA zuJ3;;$mP1ng-^G>%t(6Xoi#;$Ndq~=I)NVDNl{(e#%_E`rwU_k5{x$W{yaI zZPx$CF6P^jjKtW=!=)~6X_u}|Ika@;#g;o%FIML+I3yQqH0p1+L7}E~TW0Qh{=8TD zr&n67%c4$p>+$vD4`EjF$%988JI#D zt&gX5uu)v5n}^UjOK^HOC;!H}V|ypXYzw^2FY0b(>9pS9$BtpvThC6*oPGIHdzG8+ 
zXZve>8s5VtX8yz%%*H`qDs-R5CUavxJY8aX&}ocp?3eH{p$2)Q`+hTz8-FEqO?^?o zt&@kxY!bLWRXX6hxby7zk(YO0iyiaG>B^H4Z|ffTEmZ3>V~*X}Cp$M4AMMf7dOmj- zH!^K&7I88DNB`=647Qq5e_>Jlfmd~UR-<}-UKJ5@aN0bLo?fl?%r~#pGtoR0yFE8U zQ~As|v-_uxP0a10^zF9r;2NWZVKR&M#Z4;SA5vU9T~=-X1n%v9E84tV>}sfc`18?j z9j2UddU0c#!-fq3_l)|#-Bh%@>+zbNW84n7_zgSfIjL0Z%C@c-A67ZdoNY4lO~{a^ zI&&V~Oj1glIl4~2fZ5-vTh(QkQ-fXan=gxY`czctHtt~6R@qyr!8eN9KN=m;^;X1) zte=ib8!vQA;2fDOAEiCmc5anl^NMX#M^#tEN_)1Sm&IGJ=dSJbL3iKorJ7$V=DC#wYRk-2DQ)-t*1E3kZCCclTw-A)immlD zw{$-xupH2%<-CRc57v&i{WM9pe6QSMj>mzcs!@72aenJhoH%73$Z^B+Lp(>;H}`+Eg)nb&D%gMi=Z=nCO$TD#QB7r@@)3 z)5mF!wKSVN%zUcNPrsx+j}DGp*SEM+)~Zd(hjth&SzFn&Fuh{W%EjSaxmGXov)qg( zwb>Q)Zkf@PP9q!^9sElz;zy3-B7?1l^?^N{TI$~(@M_Ju*sdxg%0A6-ANwU)DI{dS z+>UmSQeW##H2raNe5=WQ3ZATsD)y9*kc9)2LVR zkm+YF+pV>`$vbXTbxSim{%_qj)6LhO|GQ1w!PBdUdHyx@>O24ZY>V97)2bKN`}sxW zbUbo z>9>uwyInkOb^G01)oQ2W)JXpq{z-ca_BaK5b#|ZGa$u(Go6sBK^A-*K_GO2h#lqVs zXW4u_G|Y7R!8MwR_e|cVbnSg^vbEW+qAdzWQ(kUZd~=^l{))nLvU6_bm^}LsbVsS( zxTHbdW*l(rm~NBnzEsU$u>Nq(Tirn6>vv_g+=nr6&`E_U*7ay?jTs(zMN+F8dh7CVwB{k*hjzqn<@} zTK_JENxrKGi7Ncu_dgjoq{VE})7n9r-@e>^@x*)gsB`@_UIt(L@@B04w1ao19$Fk* z9;;t<$-C6xbgD&mX|e85?Q>6_y9R5t3sRdmUsv63ePT?!$Ru@iN3IQRb$jHa{JmCJ+RvVOIPqYz@`^=|yvF7{%FWg6Z{FqV zp|C-{#%%q$nK$o6zZcK_Qu>T=heGisL)&UIHDV%TB% z$MNrmo}L|IGFUHJznfC=CZV@~$lY;6-}F&jGRSAX%>w_$!|%@5HXnQGfU}ByXZP&2 zHXaw&z8a}$KY%x-d+O4=*9sr~?D6=RC+D;I&d78_+i_c#wf8yMBT;|-*WSU^Pkv0% zyyzhxzeA5>7Z&#USrp&wak!n)jj10B4jTU$Z(f@EI6ml3YNV~8X0zP{^?sY5%(&6{ z`1aA>W7V&{cu;rAg1c>g#G_i?(^ZFG-8EPJrubo}v+2$jogUABwWsQKcYDp5eG^`E z?-XS9D0BX#L+u5ROy=iJpW?Vu&0yD;vsL`p{@xc?Yz+L7bIGheMlJ8j^&y^2;kK@c zsYWS%KHZGJ9i238(MBg56K+YT)}Akn6}Oq)UitiMB>$;z=Bf7+au!ejs&IeCgHU_j zmd}>Acr170Bu32 zkuMygI8(+A@5<^ALW`ofWcKwj+@?Lw^kGh_rHDv6!c9|}Ny3d<9Z|lbOvmW*@dd5k-s=%*ax29&| zK>yaerlwE1etoj*XMd$$3wpNNIJokDMQ4pdpLQB&s;>OB8dPS}TlmR5EHY)aYx(UY z(Mm1eiR7e;^k9*cb+V>c7<;^vrAo+*Ht@wq zsBJU$wAoPqJwj!D`Ln$%^im7|y0f!>_qA0o-t^l(+Hm9OZ<~{S3R{m=@j7AXtm&N6 zjW=BP@=kqUt680%ugY|-WzNfAn_+P%e9Wm?6J&O$Pt~`3G%sL$a@QQr!rdE&=WIBz 
zcz<-Oj~=6@Yz>P1P;&Op5nsCyKje1KpY&=~#t_p5$1LN1TJ`lRa(wZ0 zT|Wg^r!7gH7HiI0czp1pAMvWA)V9U;JP=%7z9G>5{`U^YqsA2Mb4YS;`Tg2h!`!g$ zm&yg96Q?Tfoqyu|Eb{{?lWslBUiTz~+NyH{6j@i#agZYClU4jSJCNWDBh zhzIXxe18_36+E(=taU{FJnwT&)r4>$~X|BSt)N`r#5NX4W` z^QT#{T8jSy?X-2axZaCZ><03DXyDdT9YLphHqv`z(iE6^j;MD=Q5^(!{%wGT6WQ3{ z?MoclNaaQd0g{JV*h%poFgbv(!Kn}6^nx;0JtCbL{@CZ$;9n|D ztFgsw^4|aG&ftR-F1fdv?LFhQ-_*275nB*e5~D}z0Im>Gx~W&lusKj#0{^O;FhFTG zH1KTTDpkYFH%G>)&ecsa|s=dh-)^k7hB zTy0X{+J~}bA=@HOZ%(3jzk7$7Rm60V4Q~LnSHTabBR3>q+v*W6dcFDkxzMowx~XcV@WYG zFI>lNK25R-j9>BW;Tg)^b~D;JHXGv(x5Lr^k4REkK5Px>PsqjclH@x~@cRk=lB?k0 z-C$D(uBZ!KW!R?c0XRvMe8$!0^9AJJx-=1JWD_i!&1l?a*@rc81xIR|IervHkm2hhl>nn>4b$i( z|D1Y7w$@KsTDK6~a(^nRpF-AYp6Sr7Sq9%)!0LAlA+7Ge(M0zq9o1TIg_o%6;)#>l zh^c(NT5%VXE6f|>gkhS_3fE%9=Ou?l7z@{0fZd&gVCik2)xP>TWM3t3;L2(X5sCjF zxZ%32N(!ex=U(Epp#=R?jC`Z#dSg$c{Oy*#Po)I!y!`UGyq@_@y+Ckcw{KzmvhYAn zg9nV-eNaEiu=&`^x1@=cECbBPr{s0z{R!l%z`n|Q>aC0GjSU5CK0ipVo=@e5bFo0} zqENKnBf%f`r5@A)kBBuj8(orZM4vr_t-||l0oMRs^JC^N^j%KT=MxQ!y$bV@%i-zQ zIxQ)Elqe3d(&V>U-vekEHuT3Fj8_eO!t1L}fcrTw#T7k$Z_-LF?UjTd!j96YP_I}y z&%QADLnqKh%c>eGY!vUDm0M9&ARuYezBCk4N_TrKfw*oKNbho|SVhe6D%WYsNZN28 zG=yTtEa+t@P4?rjx>hf&&(o1dMS7!dzz&AbmfL`4>D6k{+>Vq(*EiwtBJ1Z;Wuq)h zWQYMJ-Xkqyv2@|BlR1y8ixf2g9Xo`lM#^2xP6cGkyr16{_$Zv!3i7UWj6{i7b6KF4 zS=OC+eXF+s8MS5Z<^DDX*`Dmi-hXn1b3y0-X&%fV5t;ZKCGzRY!3UKng1`-25$ z&PHhM9io_>tD5GjlJeKw8BPCMu+$wC(n4vi57nch^};Y^dX>_`oRT+G zKXct(}^e$jFg%?{UnG<1FFRA|DU{ z|7LYpi@t2o(Y|{2)L?5?d+F3DrrE_gB;6E3Z(GpdSj~tJPEzC^p77HV=RB zf!bIheGkGL7rI(LRI}&kUWO2jWcN$#drbMmZBR~i=LUk;vV0`hmc*9UCBcs_5Td39 zgZbac`KS=5i;jkih#AG!1$#iCdpE`6)n-5_qN$+hOp(isOUt%*5l&%VfsOLy1WV z8(yy^g4`&3>CgeTgn-D=y*?bu1t*y3v~~y)Mtz=6KlnxbF>O_%_bToM2o%>Xc4825 z_%PU-+-Bz!nViOzoSBZdH;rCOXd!bGk~tmA=)`CQ{P5;W5j-9+12Sl%5mD-~uLjB7 z?&DI=3}Dk?l9Nh`_Zz0p3G30W(XL?gji*#s-2$D=g;=uZ;+0wn{?Y9zP^r`=<6=)c z2JzSrVtXQSgtVxSS*>X#HqTFObnl?6LKLtS!KSVQ+ms2v 
z?1!ILbi`j9Bds+crA#is6`N*VRd-rF$qsVOZm`W!UQ=~suXW1+dtZlXaTkxanDL`#E?Ge-j9w?U2(b?TH)|e%MN2mHH^&C_AJ^$@h9~?}pj|@C-4xAD8vPoZ6l)k3GV_`_$`+D9iUtd>0 zp%yOHJk_s-Rj7I+ws=L2y@%Zy@9XGrV1vAiV9nx9!EA}ey76i;SM&^2cpW?pB}Cfp zvNuX)XKKtV?m_<+>h;$g6?#gU7+#7+hp=!UvFd^157G~TP@{50(5P=j#vGh$JL!z- zosv@cO(5pGJh!+{`-tC8FN>{?Pu$9?zuO70kXU9jnHij0eW=Oc_6*pwsTzRPbkGG6 zY{t#G4P7isfQlFS9R+`U>j3ft?)MpFP^f235^I)LM)Z`<&{rP}IijNr0t(%4v0F_H z$#nI!fMPY!2LU)e_J@#i;36PDr0uuzc6a=dG+Aqwy$ei1zijj=k;C7~;~*HX+*+

oPZse8^e089GkJq=5 z7PR(LN6|lHqwmu-RF*BK#MH}~mbmE`JM(>>`P#5ei*2uwRR``zl}{*sfE0s&Y)!6X z1|=SK`n=RmU(f`IH&=ump*OM?S8lVuKB7dNMEN3OcG*uK54U{|{H}(wLFuzgwzk z6L_Lgn%NXQZXF~Ui(k2Y?7Vm^EE$I}RI$ObbZUNu1#YpMv-VZ(>KA{T}0{MaECWG9zh5!wWj^P-|!%R7IppOa$(%X2M6<1ZO~A`c_#wg444E`22oVk z*DPrDQVs(bytY1onwo(<291zq3Lc}fBKFu5zhO2;Ca)6eh0_-1wd|_#x@=!@D^&1K z{7>--7XK6@3AxSQqICWXqjQB9+ijNNAjnjb5CRFMB)oHMBCp0zpBuXjDVr!;syNr! z-Gm=jS&u{1{GI|BKO&W&~S^;hgzwXWYWJJ$`wZA+<3c`Pe(tGwQ28krGV z4~0`fb(gc`ydlIa<%>&~d~>y}P3xH=&7Qrz!U$W_xoUwyms6V%`-jzqok|@~!16Ia z-<85$&Mw!YenmSobVrVi@!OKVeRgYOWumQ4!P2Q2I(xR&vhE5&Y9DbKBCA`tP~d-a zf<`S`9{7sSe~iyM>g6H2+=M3H6pZpnzyDBbp=O%fy;b+NaqH4xb@|hL1GSh_tehIHyk9)Br`>Z$j!#>@K z?c+?Y**t3`h90KoKshUyBtgA112~(i{ikLyY;(s(vjXvYL8Gjk#4Gi9mV{r)i9)hU zc1CLhr>ztbaLYKcmjR%*A2@?i%doOs z>Vx}-3piP}YO_q&c8O|3!0(YsV0FuT^u16^DfpG`KTu~{wwQ^HdNl#tFLftBUh*nJ zeYrXo{3A*Mf{*bXGN+veNiaAEhkojxAw55~>b9FX2l` zc1DUYv21P+Ttn-(Gc7JQ4M?9vmP@mVA$}`fF)Wg|mMa8No-KWD;c$^DPn%L|9W{9( z1~5zHd?Y=9k2kLjtfjR8SS9?ck-#lQ2H6vDk@*;9j$8SS((d7Mzmugf7Gf@!(?ti6 zQ>^KuPY1r;j@?WYdV!rEW6IP57E1#E@Tl~nLtdu(?In;AICF4LQytiH8Rrab%EXc9 zkb^jAo`4%=&WvzbMTmhkAvuuh^*aKgRFq>W2M^{^BQ!*#g^wiMa|ykSOsWO8F1m{s z`7%MRCqGMn11F|1*fA(1ctibddL~(c?!@*^8F1e2qVQfuU&Z)KU8Ni%5JyKXRSM5^hD{wV=UdVcOA4rD8e5d2#y>FyZEco9dI zm$&|rWt2gAulNF^LugdX1*cUTUYhSFv-ZX5qLl<(~bIt~iS5G~FNGO;Fv2*D*Gp z*?YeMg_Z-xnC@D~6Tyxm?o*ezN069c=6rCN1|Fi#d|c}I7;}cQ^WpC8qf^~Yvu7h7 zz$s-`P%k`Ksn3YpQ~)`W9LZ-mjYS}v=G+Hcj%_*)xY`e|(5Nzr)sx=!3~z{XF*gyq zUE2Z0h#jg+Lzfb_dcts)TK=*~AX}~ta9N@$UbPxrHM~B&=0*z(XGVf^zz(Q8MERUc zpWL5^W|L32w<%_j)G0mQsovM;?XZqFHk-1#AEH?|JQt;G3L!_J7TBHJCZDMMFV zt~Zd~!&VKBlk8A%bmNhO&Y`;;A@P)f@eo}k$sZjhX+M8@e;6IQZ)5tvfSZh_3IQ5MUbh;${p$_X(5$x1N!`;w8Ec?n3 zm@J~rTbgzjKEP6h$1&t=A~25c2>0|AU7@S;VV#EQ^`KGA{nQ+)n2eAJAtdr>7KGd? 
zW*s-2NFrkTdCjhXs&p zAW3qku><7;dfJUlvx=a~P3UfSgl({_jFJ_=ETm-BbhjmLkYC&w!xp5t8N8Ak5g5gM z(2|o;v(ar$sA(!Zix*wDf%=R2NlO*vLksDHBQ_On^)LGRyL;l#0WS200#=pWxOgvvX^!=v2D9s9R9c;hG#adFRa z7LZ2rU-!&|y_fd-uWC?;@3JzYnuY)PRKlhT3QOvmPU3v(d|s}22S>rrFJ-FL4u+AN z9l{egOx_dq>b<$(7m!_*>HrDNrQ<3vfk zU061yt4lvhSXRm#tk?%&b~=Q#K`ZgSkIm`OHA*IRv4oZdf1E+} zTxxZeeyXgM-5fvqJosriOqi#J;VOe;+s*KxEO#9BB7Cb%E7GCg77eeU5seWgLU!i- z@@L?d>&ocSZXAVtv-odrMr0;XM$q)-1{f?Cqv{gT#H$b$f3GSj{<92b9s`85TTWs8 z%D%D`T%tapv|oW%!jRr*o7xGQbj>=|kQE;h$Uv381k9o9<^aXhzTKQDFtQO>=2k9? zgFYN4;8sKCZNP{EB2(Sb4@&W|n;^Yk2UPbz1Jr%pyH=!e2@dz@djMbH$nNI_S`YrQ z3|<6j57}7>ztewI=gDm&x>=vrSQZ)ccH)u;F%V zbwkM#D1sJY+xV+Cu2}Q5`MT?LT=adf4--J_@RwBcETpsx%9i%+)iF=QsxLe>9-%H+ zm?Sna3E93FzrnChWz7)yqoCN*W4sC^;B_n1Ks|?woQ*YIb91ayO@GTIF958P{D>NE zLX4Aj6&Qm=UEfV+nGAjfb}r=uVueJcR@4c^chboey~aOa8q^~uM>VT)ccolCzcD<5 zsFkwiw?jIJG@{fv9R=)e8M7FNl5OY+=QDkVI5GY1gs=_=p7Gn$S#UBcOuIodHC(Fo z0$`TG*V}Jyw*PO5D?P0`VfQnz0-$@ z+TT8iCvJ*C_T+a`!>fHn_pEc=IE1KQte^#h6Z$X8Vyw`)nUR5vCTWWWYd#SVOCE7f z{rdmb7BJ*UecXc~g6zeNzHX1XUyrs^H+4&-$S@jEKVv;>#fWeC+idyokuUHeio10Q z>KXBb-CAhT=-o-9%qu%V*m@jd4b1XgcA7~Ep5%|o0N;8$G4%qf(8Xkuk~=BHQzq## zSOkT;2(}I!k;0?;k2Qs9sS2b2CSE;wi-9vu77;b=b?YW9WJn7;%UT}cMY?V5XYPl zxql?aX~%kV&%PD~YSSTlzs^|{;w$BpCijm@Jb)FAhkElu2jK5yFT0srAO&N#xDYoM zjDt|wE&yuyB=djsALD^v#l$8rJdA$g-_fXsgP9P`)+fB#ryrSYFfdGli;sHvEMy=1 z;|#Ulk)kpwaR^_bcMSbHy#_-7jco55)%q_BkyvBLCm}Xs(0yY$7xi<&vd_4T!c?cGsSNx+7-K8-Mr7vn^AaWEV{RG`0TP65Mb&fC0g>&L={6jh^{sHHM+nhYK=Ps%we5`oTQZCznnDi ze&^-KQpH0-OJN$w!S}0!7`1_tFL_q9679aRa_M_H&_5&}f_sm)9i`#pxp3vBpQ+L#gP<{?Z*~yv6J*)aJUJy!iTC zP-ONXW+GUHbvq_laJeV20CZbZ%>6M7P`2+xGoIkB4#P@^#Hs@jxrSoou%M!S--Dt? 
zkSq=w1pPzuMzSj9mFz=4J_uknp7Ev9YPU7*@h7WSuFF+(!|cd*innyr6S zn!Ogov%lyTa#&iCXZHs%0(G^({(FWGNy(Y5_YVOP_)Rbh+2!Imj!oK&0+)^fsD~m_ zg!Q;^K`vkLT`+qhI;)iPV-h9I2SX*TlRYpxz<%(ov8dBeWGbBEUv<%SX>7nW7bHDR zE|>86L7~VVI*HWYHuSNhOBb#Da z{~}wMTr(?Q;@TJ*f*it?> z?_D`|&HBFuP+$caiYP+dfkFH+O;$&4`3*=A(zS^EC;v-zDaU*cqHY6=G1??-zi(CK z9gL5MMdu%kmdk!*hrh|XbnH2a{t+BS)H#3K-kQO61b`9`KB`BLV5g{6)L!{*wnGZb zZo;nry<`pBK9$!S9R*omT%l&+saOSV4c2X1@xl5vQlA4K^W#<%Yl^u%#25+Cl<3?x zl?|!yIG{GV#YO?i1|iX!GcoBd%iNKgk2>JCVube3x8^{V41z*$G70F)V>%K+XT!2K zmEtF2d-0cWI{V0Z+r;6$YV*G4(s+BK(_rqsv=p%)!&RkkXepNe?7yQU-VFEOJLkv? zzX-w3FT(8r$qiwWxL<^kPg3=P4x$cX5Q+(${GQ?qX7fpOGtZT%RSSm@6Y>=89z zScGd%rEFa)OnfHF4W%At1@Wl<)Y<%AdvO=r*iof*EpU#&^=n^yZx6;%04~u8u~*(W zEW?#!>1$XrPVL;)hp>s1XlG=9n$p1rNgI)}YK~iC78E?!;{P#BaD&fBTetzmz?Scf zXx~a@U(7$(G-`(aR3Z$5Ir!d~x<7)k;}4IyFnalZ!O>`C8+EM*C?CoX$w&$BPC_T@ znV8mV1@-?zRR;LElPCMa^!iXs+!oQ(KEkd4#glESB^hcZ5ihgx>CT&R>2;~>D~bkhnAnXBl$g3mAUSJWdJRiA1Y<65m;A;le~;U@+cT9ZaU7!n%oH|v0o||?LbZhhFuzSt#H6j=#6#T z7H}2^?JV$plNCADBuQw!gw%hTiV3rEhQ7q4wf-2pTF_6%vg88&VE83+7-9Na&+1Hx z78HA%c_|V5MX=S;i_E^qg(Vv+E^#8BUe)AIJa{y($+WI&t&md8cX=K*wj`KJ=5^|E z>rV1xp}^h@k@Dz@a_8D8nUEpGC|XtNGk4yNUxde9-E73%QS*?%bh+V?n=Bz*&qyZH z3NYs8o0b1W%Dvf7{K1P~@}u`^=Z2pT)n#vj4Nfe5AocpNDBa>=^9K4izVo$0xPhU- zUBJ+*Z%FAhu!-H;P)MYvRMBS=0VMuQIKl;OA%%O65$3Z%hj|1V6lRTbp8l3WzoW7RguCjW%jS4TM;DL=uJA75yr#pqsXf)Tp9Km|({8CC;KdJ1;$A z7g!{QB;wgr zCN0mEs11KcS7{&n&zZ_?OlPL;SeZ$t%C+s!E6{~`%k}Y3 zU%Laz;wE!X1nK$U%S6qrEYEs*2C(V4R&8>CWHFOhOCJzu;dw2DlEnAXE|t?#MoU2= zTs+hsB!M-_cLaiR4a_SfKO^e9Pmz#e^I6lx;?fs-Gm%Zm*uBz{=Qi#}!0&*+ME2?-~+c4<;Vz>FL=;6anQ(J}>{8o6b_b$N& z!olw>Z&dz)c)}@r3@1{EN08x0%pcXgXV$<0VwG-IQxMUbOGvJW^{E6~#5;p8uVi~8q7JiuQ6!27Vc zf!g}E8iHb0H-}9=6y53A8`CORg0C@^8;Gom_SNP-)-YZ2x0iFZ z?T%F;a^}Yu8Z}P$NzN6=}US{eU3alf>v$sl?99r#uVRz3orh z)cLQKd=TZ@W0bm#-{?G_3vpYHSW2@g;Tmr+I7{8b=5lnqcNKzFovqsXj_8_TtmN~= zyx+^d_e$0=cdl^YHcQP@^{AO>H%*W46!KiqsqXp9Qybkmdct9vQeE+B2qx&mQcPIs zYyLY96$%!3&rR!E4eOrOBdL-CMg;H+g&6xkYNsoXsB8N8HTvdn+yYa8Dv>cHWc%R| 
z(`S!u_jk^LPnPOV1i08ufwAQSe`SEfw?7xd?u`c%kbAeoH>2DPPEbi`FDgy&C9-L8mnUS?KoUZ4o*!Dkv10?3wuO&mmCOS%F_aS7 z=q}s#3++NQ%a8P+z8wN^;hkbSIqFLI-3%%B<^Lcr7W?X7@4gOc@ z^sN=6c%PD~!22rwq`aQ!=m56*{MsLB4Uf{tjUjPKbCGU=&zJiNTO+)m5B*&8J~K~(KO+WJkIoqYyxFeO+t8T08xDTut(?gC z-DpKnP8aHEs-cy{8*1q5b~$d<+LsB3Iw})UQJ*}Su=#L8T<;ry!#RI2L_Eux*hjKS z%a5P2j*IkI96;(WaDcKJvS&@}(Bz2OldyKLT2ynmH!uHlv}$=UF~#Q=f?m^CUt?;H zbVr0Jg52$(N%^PbQxM5xV4nsEZ8|<9sS(3ukWWA+KaTlr88_2XnOoP_7CX%LTOoHp zB&glx=^XEs!~=8+t+`iU39H5$$R~_kG>I>9XM#ju=Y2SOC8h2$Ac+u5y|;dL zd`TtUK~gWa&>&&zfeyt1-genY?`EABN@=Z|67!9Z73z=9AEdp6UrZ2YX~*^aX7jjJ zE(w#lz6nG2z~p~`6?qPf`zycNfRp?0_8Uh{Mfi#tm2K5+NaaR@D!8w0=y{k^LtB&o zn9h~EkU{uDnQZf?67;uqCNLrsOn}K;npH>x9eq}?xwUk|tSHHr;`Hp+_B^4(+FV;Z zhZ-<{lVI@6{4(HuMvb9n1JhkSjKa$xi9Ikw@%z$T)O>CL=_7uAW|+<%Q*C$$0!Y#n z$hTZ?C-HTV?OBCv1|xhF*`BRB=YK6u&Uf^NaQUt-for-Qw2DWpl_XVie2910B4B>E z!-fSx`->374T_mNFel9UZ7uGy3M79J)R^A?-Rr-;SnQxyx6ZxQWbEC3x2B?y|5NgHVNC@GrOn`Vl%Mvp+| zX?rzvfV2{1MCUCOmaL~ZBKbD59)hn(bYZ7Y`X3^fEljnXnpIR7*fYeIke}PcL#u^8 z0`XBw(nEa1TZKWMjoHZS7j4#&Em;(38>au|_{hZ$GY%}Zq@MjeFy^1ndf^ehYmTjM zLR0xk^n~{C_JJL5s;tSkZ;Q+d{db*Dj`UW%h(0@C7e(Es@#%dZsRCu^(OKvj6t^#- z#437c?|+EYZv7aAR0U}kavO-eayhp*1Klc)BL<;(qqk4P2Y0LI6B~0Ca4p7W%j7!n z>WIRzG{%HsLIQXj)xR*4*#I17|~J9Bg%P;UG9P zn;{s6?Kaz{5!)N`JF)qfPL&;?9o7akyT!3a=ZmUgkoW)c|HKTu!dIaotxc!4wN0H_ zi7y^0k5^hOgp{M(qGo(yO>~mViZp41vwCvO4X;qJML0GAnC+-9OCIn~&C~wl7t62> zS9@UlIZ43>69>V+IcFyDDLaxN1th(N#2^^9s6(2~~0Wb$YsOe|;o zkYxAeyyiKvtXkxpkxhGfWPMF_S*&e0RL4QLt7a(T?(Apt&DG}N6gTfu-(mvQAGm;v z!@32U6z4_Y7`0mh4_g*wM52*`A`hFZ(Te3cE#ZG;%ypdhzc>aK-4Uj|Kt9fAVxw8$ zWTs@3t`K7`nd^y?jSbbVabiDE1XCTSo@Q^mXY$(UexC?_P4f?E>7F6E+V|zj>?ZR7v_`+VYCEb zNS;*Cw<*014KSgP6~iR8STeJ^N?dPuLn9@eA$=71qZq$c5!*&OSR029I3dbR-}#8< zA7$S?*;T$d!4t0SIS$MeyfUr_mls=54~Ob3ruI16YAkEo(d75V4BqhZF3g1)IwtK! 
zQ2+MGHv3xLN&maXISdZcO19%L>l^#-ax{bcXW|4VMQUJ0Pr=<63c%!$(_#{{z@M4S z3+$r)?L-bIfT&o>lLE@S?b{=mc8v6`@#v6&`7^8~t)DXY`*2rSmU8brKYQPak<$rG zvJ^r1;N~g$v5<?7cWU1v%-rvomZx6QKS}uKi<@Z0Sw=*&PKEv*TYrOj0RY30+cf;72 zg3~}&eMVnl@;cIn$^9i;KVup(1#>H#OaX=@E22K$G`AaKMb86cT!lx#`5>h9JHMcmWdz-05;Cl|Qf6+02_zA9pnT7*Zft~WsQLK4 zQk^OeeY2Ga2$PCo*lY=-m1z}SayY65=;2924fnDCA$qx3RLJZ`^L?f&S z4%%$VSWqe?F^G`4>*9v%dKS%euC=&iGvnDokr>w4o12#9qvh9p_(tk|GxKVRP9QJ) zRrAKyWl!2+HUS%EBL$guCP&iy>KAV4%6|J&vQ2~-uG6v=!6UBi7Wno%!4e zcc7D5<}$Uid{$f-FLxX=;92NuNzO{-2W04;2=)#hGXE&dU&b5by8{n z!V$C^FVC)4n=V@oO`k|ZlNHLj`g7X;j}m;m2 zYcx|t4_!S0L3Pb2-qMhp4WfwR9FY8XJ=m{z?|?ju2Y6=`fhUkj32GO7SA!-H2S2Fw zUkO7ns?H2+C%Z==)lH<=YyM z-_LbIxjB{4Rv_y;Q>J>RhGuGbJOY|dJfH#JgeoMEOG(Fzuy|9Q{*_-4{V-@`Knn;X z>kWyp?sap%2mq#;3V&PCzGs;hLS8&zT+x52Xjla{uWpWtrj@i=kD0fmi?_uL%>%sF)lcmIIzC{GWv3=tnV5*+aUg1uGGA*y%h?w4P3!{Vs<3ci9x@H9sO?tsE(?G-P)~;6W`LVz1R_bFTJ}_e}zRy)6dp%i4%(Z1dt0Z1CFx zzz zI@LNK#GSZJgPx5}W~@@#G&N)6Kyxx`!z}%E3)?DW#@% z2eMufd4?%#1i~hJsvwr3f-x4BRuBnx;tBxqCcECq2P*1z^1fYtZRYnsHYZ62sJySr zJxCe~IRmz#Kab>Ah*i3yL?bC?Th%s{l*e|{%i&0~PTAKc0^`e+VzC!{9Zh?BpG~Xn4PyrxIh#iMBJ@Ftlx=~?O+3}#M?vLcQMEH zRqWCH9je}#Y^JeXvK?*C2LgDn7e>pV`j=l^Q;2$#DKH&Gk|hlzU-8AoUONA1@tIQi z#Ct>Gbgw)K4m4r)1=jtTuCVCxNS&;XKraW*i$&5_1PM91L>JSg6du(p(>U?N{oX03 z6VuH0hn69;AIn7@1gevPbG{8~7^c=JifA6@P1xWfL}7@tyylA5@7S#5py--z-(N*a zRS_x1z+Qpa9j(1pJELBbKi&mXTFTL>TO*e@eYuHg+dpeac{&ZGk1FTca~btm9SA%t z`!Iy5GG%zj@~d!E&ZTo2s$57VgyM)vvdl{q)t^{$1a#Fm*I5&1p@Qw1W?48y+i*ia z@%BFG_L{m1L7Y`$Ls)^S4&t!We}7yhS{(0b45GfK8iN>s8p4+=B_I9A+mv&NG-Jr6 z^1VX+=el|nfpW1g-$Il0n_Y~)?mxyK2w9t>qM5l_Yyi?f_v*6jCO+Jw$A8_m+EgZPN#^)753+S!JD zqO+`y2@#9$iw2hJMkXDF0_(V>8|L-oNvQd)26SPup~J(RO_pKwjg11|}LV zZj#KA#Ww|mN_(I&NnBq51o2*6ws9M4mq^5Gl3BxP22oz@UT?$o8N97GCy-G za5;F}s-4(dwzSCtc(OneiS$Kbio#Enu;Ju2tHgSB^D(!;9~CQGB1Ygy)z!WctPHZJ zeqEV@O^WT1K#aNHu*UHn+FejYFM)0cLj9!jZa4gU>gt#oev--^#8CJrVI>{m&9w%6QMmlK3UzpCSlB2wv*flSAS>m57 z%E9&zI_{zxpdTPBm@#=tP(89c8u-Fr95$-jNeC?j8u9od1NS 
z_&`o!PPH>7DB)f{Y6@6D@8R<^DNFUEdBFzvUQ`K56@7=jOs_aE$Xu?^QwZKeX|Frc zSQbbJOJ6aW=(kU$`z^8EA*ONSF02VUFgQSNYkWagv(^h`^yBlq>4f5Iy@UY5J}7bJj#d z1u(E2M6OuH`{GCxQN+SG3RJ7X%UJ!_MBCaBIYx=Q}TC5ErOd{g4o7Y4IZyfM~0ToBn4dS;2*3e6B@acexBV6 zkxQO`mNd+0-%BkGfbAOE&vG94^B&u0sC)A3!1640<@pe?MlRkqaOWq=bYc!CH}cKWSZ@&7O-! z+BfedJd4o#KKZa0GZ{F%spVh?Tfpi5m{FU;J!hNzo{#^YQ4XkI2f3!_hM;vJ(quA8 zJixEk*dNJobatl8KsJ@BIXUy)G4}oZCl2XZIfkV}UnOPU_~4UEp;;ZkYDb*`bd7Sr ztc*m_BDmb6S4rEyCGypn{O%wC^-%>!fwO-JQv&;W&#-ZKV}s?1J1=H&-^BEOZcG+y zEaAXaDHHsmt#~%%G`B5GG)`?+GyeJsBNP=1Yq#kVKTq&-!Evp3#C*eoRz2$uZdl1Qpj! zt%{lk3$LHNS(`@xhLin_r2%W~|zysc_LKOOw z3A&YVW^m}m9=U;=6n}-_NPXpq#hyIxEEN`typKD2jLO6wSH%M(^}A-%ZK!*kHo>k# z_0HDtV1)_#g*P5tN4*4%omB69pCzB`{5sQprCS-lZ$3S(x)$=$hZzzTGFT0u%l=?iGUOXxn@E>vLcE`$OwUNR4<91|Wc_H<}tg=e*`u~x34hW*? zSP*U7wr$(puWj45ZQHhO+qP}n=KMWO7LiJB)jgGYzc2Xi2xfva&e!)bQuKKbyr0Sb zDqTPBf)}=r?vcmCQShzNk81`gZd@vBP`;pJ=m7{TGZcOXAaqb5=95#RT!xqPQR^<) zAOS!dfK%4w2GNRLP+UQ{LQ{$;Lqaq9($|J5CF*VB8eewxL|zf#0+ZIqCH_PNUAqtQ zK)2FuEN}V*ys8WIqE+T1K1Z{-GJgV;RK*~G#87`)Hve1zCFeC_ct}Uk30P}Tz*9>m|B(m6B1OC!3Mf|@5_{uZ_HB9bF;s&?SAZHura>CK+Dx)dSZybAG&=VkL z6QGH{(RQsPW}XW?piBWNnzCbZ6Mk5K?1L8e?N-Ud(mh=& zi>`B+m(KrJ|1)t%KikK>fg*oLj|}%oeSly3PWnK@_urUa?X2<0kxN|ZiTg{pRmh(4 z>}iM8BHR_A3Z|kQZ=DoqS`k4?b>+L)o2f-tGVRqcE6|{EFwCh0-En@GucDv#`nv^z zYfaCA?dSe=w_+EQL|)opJq(ga&tsv2E1P?1ZTc)u7(y@Am1C-fGR@iqMp6WMgzkTp z4=0?1>@8WoTw&W-RK;3tu_C_#Uukj71_NHvb0;&so#6MT|KE6hAE7CQ{ zLt(=t#A#hY`iHxqNe0%jzVuL0eP!zf!5dzEQ!!Fzz3_A7pi%9mjnxBHo7U^`Uro*z zZrVxb08up*>@QrEh5h#^af^AE#hbdI+SX=ajs^~%26RQ5n}u)Z#HIuEHZSHLVy&5v z{~0t$)i%?-E6No@-;W)SV-q^@rUoS=YCq_4bk3MvKHYp?gzU9FP@<#mP@;Q=#~3YO zp#hrC8W|L&5&BpK<|r&j(UyCw!=Q-DNAT~fRhcH|sAi{>9U1~ms()PQ;T{+bVk`wapDqodlXv1+_@eZQqq!!rSB%ZH??lM0 z3t;!{#7YfiO*WT@Z15e7VS{aIa#GsDX}=2NfUCLWjh+oDZ012}mPgU1axkYz^}%K> zy}{veB%70$Bfw6}pxc0IFSJ?C6S=VY?HWA$rr=L#-s@2RxbgbMQ1A2brLB#aWZLSw z)U)i?sM8N>*5#m7{;@%?*eq|G=OjC!J4v4NmGKcO$!wx<5@jIGt<2E3n*7$^R`EKr zQJw7c^x*z?qN(29z^FG~&t_lIt!_?X_7`vu0{k6O>6<`~p 
zIn{QQM)d={Ybu3c-vX713@9iRq*5H_>0b}P*Pv3fAE>M6B;F@*~DN%SBKgE z$d8~pfbx->qL=C7vnXGbSxO8Gmfjidm?)hUD?-i4wC~!8q(|->fG!NHlNJ70a_(ip{LrI3gxzblca@)6{SkOPakSH0XFe!Z~8omvh$-pV(nX_5rV_GLiM8!Gjhofn$*M-ri{ zR4vpQy}seU`CKs~&EA?T$!eJd9P|C1*dZFqcipZLLMVr6k&Gku5Eh@SCCx>7+rDS6jDu1+XXPJE;A1Xtli(Ab};=2ec05!Hls@3h)KJD z^jikz02q$NMMN$7%b4O%pwn#LJ~%>})_Qg?1?Q%ivs0uDjHz?LU(FqW#zg*uE`&Qa zN6Pk^pdAs_yZRz0Hu$(xD*7R=8ru4HDm&Vy#~ho#lRtCCFIIXJBtGE>Sv^Oc&~pTr zqugL{{o9_8>x@}7X^WVt?vB}2;Zu31w!g^*kj#ReY$xAgIeAt5aWi1Ck{E7l^BR9W zjb^{coE8!1H8MAcV7qHsA^Zq8%0M_jE~+yc0jvy@=hai9(Y~7ZWlHuRvR9~Ro@nD# zvmK@lN3+&-ni4+wd? z1FWHLGw+Ny5htxNrLA3g5RZsw#s0i)=wjxL~4gt!~tpGHle~&-*Fj;3$#5s;M*>CW``$_uw)B}%$ zeidUCu(u6xh_nC+7&(Odl|r9<%N48-Ha0Y^@@BuZW6ekJ z%X@hT@vrr0!>m2X(lL-77`apJsF8?>)q)gY_ZUU#FVCt^5%l`|eK4Nr48BH3p6n*b z24ZfIEL1_D^SWP+uyS-D3EV%rdXw+=H={pt(bE;QEzml~$N0Da6HzazDWHysvfcON zioyBmM~nHAKf4lg2wcBf0eZUO#QY*BX;oap=NWPIZy=}3$I1-=J%3IICziTR^R$k} z3F7&UYU6mW;aa9Vd`ukWylKcgR@}dvPIy>RJk{Qe%BR(7B25nHiXBk;z3u0F^|=Cc z&G3vuoNH#592C=xA~7+_q$DFc=|T^4WlX)pNd+(ZG6$m$^W!U5LD1Ac4Q2jZo#)Kg zTB%E1Sxx-T9tJU?O`nF%Ki?J$b9JTr*=rfiOYi)&#}7E$Dvbw2qt>}!AkM`WcDSjh zVqQ(+C(~S4$`YW)>QUJ8}G(VuV77S|LO^n%v2p~Ow8ATu>J2r4+efFGYN zQfFqe{UI6J2kx((JYqtV^8W}IqAMA8&OD2;I!SKhP9Amw=+EmXZrLdR0jFMakpysW zN1ed>uu5UX9O@upqqeidG|TugGH9u!6!13G&!y}r73F||qY|FxsUG1F#N ztyvPuD5_t4`oKa0WDl+@gmJ!H!$@TQ#Kryw+;}vF_m+@l#SCsb`lj?r^r}4Q%J4?$0aV!n+Br) zD)`d`@oceVEi@k$+GtO2>rYD~K+&!0ko*ur8y?_Rl@Tu_aX{B5HR7DDhxQiJ2#E<5X<*&gK%wzk&AP!62-MW=>@M{XZ5}wnGCptay2hOk zN_qR=NtCSEpd=ubTU}_x_;`XB7f3YG`kxiHhQ(^gVy9)8t>8-ig5lp~Jdc<)e%CI! zbIymV5v#QXwt!EeqE9w9G2xQ<2N$05O3&b_P2K#p7*$-*-vDsFT*hOz4@t&M?Ea3! 
z!y_Y6HsQ&@Bpp+gYS2-?7am3MeJF7dyZL#$Ayymt z`?*onXqdIY{V9)`qzj4uvkT6&CC0MsTeUaRJ$LZozsH0_Y=Ov=!(dJkCn$SB*L zAglcS94v2e0x1C{UR^`;A^MLRU&Hf;#;oFujNiiDr|2!40(n)(mx4&* zv~#mZSmyB$7lklAv6*$5yAp>9kp)%%6TMQUsSb|*7(?>ztQi1-UBi1qQXeDR#m_!^ z_`t+!t7PQbkUs^bki)6L9V)qIygMi0zzTpRd;Jw;+a4;oxwKi0RJ5P4M}OB+T3Jfj zGhX%)z!IU5mvIVKu}muC7||sw>DOL|yt~vYDk1S)%J_T#WJ$=n?guU1)BYD(`@u{F zMPIUn;h^#gL)`3bQN3oVlEP{1Z0*NPaXWpwZYX>jQ$lK{ownYRuCkJEbROi|Zkeh4 z4|61OssP`J>)3rE#;hrPjCbZh^9y9x%{Kn4**vYw=Kj7r(F3<1eHhu&HUh9$wS$H^ zHKwDGOOR`VKE7l;{Tp?oID;KgPZd0QP%+@~m=NB=QNU6E4`>ejlx;hDBeH_Vrx?=o ztK3y}MOAw=xV2rp+2h0R539V1gB;bXm_RBA_&OqoS@oZt-1VP=Lz2 zw4+yKvoK6&CY)F~Yd`hMM|i-8!g$LJ1eLSBYNl(~;D9eP=s_qgw~U&A)?{!grYqIw z^{cju#RD`o!3T6N&%#|4+*_X41-#BK*}f&;&S*G#B#f z2p9?oJ%-Dvj!krn+Cj2~Uc`O>lR5m?j;=Af8zw)$bnK-e>URcU0&p`>;D7yGoz_%F z#3#^Nt_v_s1`?8(DqEVuq-(EQT&mGsNz^T*p(pJ?*>Mxd_}OD_klCpyh+tBRMr3Z3 z*a{pIFFMfOIXD_Qr5IBi@GHJ7E{0h4;Lm?AqCPCS_ud%X#>@8mK#LTYP}4ug^VC5K zf2^XS%Y8NvAofAVTo3U{q5Y+Rr0Kl=QZYtKK2|Q~mYRG1$9Tf5jj5Ytd;w3GqfbtB zKwobCGnpew29m0kRmX#oq$<6iOLu37GR|BSj1aAW#WdcqscJ_w8&%3F^7GkIEr$u) zw2!S8HW%IpS6p!|;DqG6)bwBL$m!olSEy{O!ldrLK$LnH ztZZ;(lwRLF6w)rAT__uTejiOY+>k8TXgN0?0CBrO?oicK!`aLmN@}oS3CI|M$PA`u z2-;J*Xz&!++i62mmcrY@LM?NKacVq+s{GS`3H&)ux|(}6c@`4FAbhRYI272qUmbaSpFA+b zh?M!LOCMPtI6-br0E#|7{qBqVVvq!999ReX-Mgf zd4A6#JT9CerT@(7fVodn7Nvbvz<{BQ1#fZV0fu9Od^jOeV@b$PwUf&iljO5AwNp-# z@vj9g$IG9K)c2@RvavsA90i^P?+r#F#KXgK{zyJ85eQW{YpSL6`S;*;X_{rXOZqze z7cU1n9Ir@%Jo^=}DDA4npl{J?+26vDvIY+~yWnorqz=%M+&i>&#<`H~HB`&r1Y%|@ zz-AC?WcsmY9E>ABWy%5-K$FsS(=GXG&1YfpT&@2I;t6yhctiQO%9F2WhW zo?2x)_$pU;*R=JgtzJZs(#(dfS&kmq&oFz8ue?AcPwpd6Re<3jhAX!6cx{zcX31S=0D(eKcjJ4K*rbDHCKN)<&8!gStgIzE zmY49pB*9SMTh0N3@7eOE*Wx6}JJLYaO}@|DM>WAcdHdZds+OIx#<}JxD0gSSb!Ht(4R@h9AS%_jm?7 zT$|NP+|S^%0hrTdCo)QvT%+xLv23vvi5hum?${3^qm(cDyTodi9y%NdOUdFVK#zi9 zUAP2KRIvd`_4`6CG^L(LhaQlQqsPF}lK$K*&Ip6K2GA^?%MIM=+TzLTDOA;Pfjrh| zu69OlJ63vf?cW^)=ONHjkiOVvya-UR)l@Zf0j|~l>v5V{R=wg!dXqfev+`ILBB)yu zANdL2!#Nli&qtH^oxrs_oyWEo-I 
zT&N+K0cvkEcX7cOr`{O98fa~WgkT2r#E>Jv97n1Q4VpVbyj12qA6RcnEhUQCqE;(aBnO5-t~I#|P3 z%+(h*V7eO#VLQq{Is48yp5o6w9r&o}vu-0X+uA#};Wid_)gvIvr#G{4+EtjuI{jz7 z0kO0lAiN7;;ca|MB1*V(#~WaG>g@T_gMHu-wX+*BOv1oXE2c`lz~745PCPw+`Uc>U zrs8tHSPd`Nan9j#^aEV?RPg(a8}71RfT(--csM{;U)nTM3Od?F&5KD3oymOrtH&f7 z4wFk#)=Eyc&xpDhZ3jHh<5N^ZUu-^$pa!^Qt%Jeh_fUafV9b`o+`~|8#_w&xV?y0y`#+=i|2&gsLk~Fc zpvOMl@Do{@<5^4y1qX%0PDK?-z%^}5LamfFK4|s6YA=nEZOMR7AZSr6l;?HHmC!=M zCW1TbUv58T_uoL`4~%&A;mQ%l)!-YBEwSN}m@?AEh5M$yfSnus zt&88a;qT+4ckYN-;gXwHc5S!9b>=ilZ-qSoy!Nr8Kq@Z=_4ILR+b1=s;}nwd1rx=p zFk&%EbxSwVyqJ33nW#^(2n2d0>{RX!{6u&V=%3!L&CQJ!hxj01KN7}_&T zy>4sFeLokvSBq=f5KyRy1WjKw_O$2G;~`sD`>p7!fOtH!Ma0$2PO7m6f(%w$zNm3O z^Es8CpXuNRjg7LEPWQ*T1cF%&tB)GMY7ceZYTwbJr&CKZ{Y=|Z));9=w{x|6L#1>5 zq4!8l1rCHpXlrFKP6b_)2aNnZ_kU(VEuYQbXhFxI0z?N%U!OLJ;9{?moQ z=Ctgs6``!(q^(H{(tU~>ipWd&lQ;_fU-89i2BU1D9T6O*@g4)Z=!8W9G!c*ifVZF} z8VP7eQM5`7fMX|xc0zo1|4zv1j5hOyszZ-Gs`u~PTp%*~vHOv+7277Mf$5+hP@3*(bvJp>F86(X8Ioy7k zdR!<~hG?@QyCX^n6F5yXtT<$wm%?NjYu>aPA5Z*O|G)`}neRe6Qjm z$fZMaVe967K(mmMC4<=kr*>hK`oL7Vk9)D!+u?|yIXC6~1{Tz=XpMT7j#;ZSrPFuAHGOKzt9z9ua=x!hiYwJ)=!& zc;Wnpm-})mhNVZ0!#2Q>>)0Rk?}kxXRGt9jG`+>#zyh}9&nb`lV|!6tzURBP_nN}Z zxUw+5tjSAo#?+g@@s^I}i78dwLShM>WjrUO*9QK~E}Ru)jj!?UCuDM|Fi$Dt-D~_+ z;^e>Wg6|3rI7lv-Ewcz%8XTa9cb+$Da@ti{s2>|&2D}A7vp94ir$k|gP#Rb-UY;~+ z8;;HwLJffWaTpTw^Yl6zhcMv!r<#82j*RK+iz5tk{fjsM+<%mlyG*`_e;GWIp%1G7 za}Hn~5Z3^nfP*Fn%*a&2JghC)32A`q{Eu>F?nCo6=(Ya77U;>~Uko~gcW`!$qv5rs zcaTpsy^NyrvFpl8xcrD)S{12XMu#EFWE?Np$XA(C{0Mur2JpuN#uN5$Pi4W*C}4$2 z3kZ0W?66oCWWe|70#{^^#})=qk`RgrS_Vo(&lB<3!e%ASmI5#m;B>Mcxr&wo%FqX_EM*<`mapvCSJwOCJCd<=e2A;9OEyp8Y*MiBxuB%u??$G3+>ycNt zUBLqQXhEykLk8{Z!3k@Kx-oC5%FXsCW}zuy;5c$YwxINdOlT(&SM~}Y5zg>jZt_OM zcJ|N50g4y&`-O7@vYxz|GcHP^8x2v-jl;+ClcxAt3%dreo?ll)8d|PuXVpxtuZ(aR zxK7X0=y&xn8wowny#P*>I9a&-C+be*4{Ia3Ebi`%yT3;wjo;2iU!(o~A1eAR(*qS> z8DlT5Y&fwXx~$f{%V&iztCMiNwe!-LbIfIcjr&6z<@acc%kXn#^DHK~kWC zY}#@h+%==pw=^Ce^fGA>Y4d+Ru3mYe)0Z~%em}_Koze+swRk3+OX;R%eh*YV8{{L7 
zTS(vW*ixUiM4}^4wyC3|iF@GAA$ch;U-pvrY+ujn(yd>ju|X$>lh+yYON;FOAiHo7 z9Y6ADFT^uAeBrK&>-bS>P-HN59I^tdaVSG4TO`8ix+|iolnkr@chuTLpG`{7#!iM_ z>^%|1*S17SPUd@}5%0u@O7;ofWhGy2Y6ak^+W^lu|?otIb$kZUk3O!a&D?6r&c zAV!_~AqlhVhTkm+@@?Hv4RS)Z4-A?-4q$M6K1ePR_*XtjU9dg%;j^&4V|{ZU>E9^Z z4Wh08Q~m)=*UPjKI@GIJxVN|t%_!Bo`!A=(L~0{mEML65O|ui$wqNuc(62MQYbJN{ zyezI?%veMPpVFb}kN;(YW=QTw{@;GZCme9$n`Q5bwn&;buM-ZTn`~dX|1u* z)+hrQaSWL%2h<@SAUW%e>YlMaVnw50XG*^EglN#-ctNx8bf?bkE&-a@wGLyx<|M`w zKY_QpECkQXgy~3PQ|xx06+Vw+G)12M%1A9FB497%sm@O0jqs_D#7#%9uMp9nQn{^% zf6{01;frbU1)J5DoYOj5wOUc1d&a|PA8rSaBDgl69GrQwx>9zNN08RFh~V|DN?fRQ zyyKOXb_V`Y%H&$KT;UR@t8Y*41NfDVeM{`YY4Vlzd4c{Xot{85zESj8fGDF)EMkb} z-ezWPxcO&t{N%p6y&DmngHhlmdzh!3pFryJHQxCrEtxxwU;U-aGmV6^0iF) zg7K+V5QVPXNv(3&4V2RP{p-iW8MfS4gd*Bg+n7L(4I$f#BP>W z69%n>nS(%#vTH1KzlxfznQz<(LyARlSHD%7)_-ZBP7^P8alL&IqVZ*?I1sB6h2O(m z^a1z)hbJAG-6UD(bbuUYG(ZW-!g|wdXEGQ<2W?J7{9 zyO3a=?+9gm%{uf6?ukZlo!)58j*+4mZJ4|Qf!{eJO(eZrImibB3C{#LU5A_Gj?=?L zd)>J8-pkM*eL%KpH#BrdmN`jcWKPdrinnLcvL?e|XGqF^VU+Zg=9ektu(Hb@>+wY4&wck9xVmcGeftgm*ARyl9a!lM1+3?Kz5|nwL)B zkK}Od;SFQ^U`m!s3c6mGKVb2>ojL%Yj#~~Ja2F3HDxF7Kmbn2>_!a_)c|^KSonF_#5_!TCr@JYN4Tqp{%Ow-uWH!3C4p40<;~ zm2Awv#U9$p2VfrhNiz|n-Hq@VDQ3B7QFED%7sxnn)j9f>t>DnyMwhU1HAS0 zUB{}Td26@!qd@c@LqrMU|J8p_QA#d3mgIDsPP`F`i-6t7!IKgzu*=P+v2h5L(sBO0 z-FdLM_dU=5qFdZAI0IcPVclQTo?Ur|KQ&%tvHuEG2RZl6*_v^5e$c(6zR2{b!74rK z$c}G8`r_8+fe65l+T_mJ3$vwpkpp20)&ZLX1buB1NlPjf8@8O7A%ri2CeCc}YV_z| zD3F`IbKtMuR^0!qAt=qVbL?i#<+jRyz~|FY0a7VM9q8TTU8YqLy)W;oDifz+et@ni zQvF`eWF+Zj+@$?DD^J;vT6_ZzA#_g{IHxb^mKmh{-LxUUGWNtxRi5z|>c#bofM7db zS>yc zacRe37FX84|5i=)flXQy#8KH2YTrY4aihti|gaoujQ&l2zz3}>xc9iw7=YgfbGTU-2arll-JS#1Np9jkcXJ=bAY>D$`&oI z!^7&lM;yz`^;C(H^wR0x@3POxGr1tjmG2Edrr#=7eKeAxLH8SuOfqi|+V4wsV3kQ1 zSE*-^^MmSjqa2bda8(c&C|>O8E7I)6^p^*;FyBZ`Z&PV)Or;Bs&3Z<(>(L%-Xf2;9 zYq-+AP2yXAUmHtk*Mm=zk}6V7jD!PW-!NfjS8drs7#E60*w+1C6xYMzieu5r+|vr? 
zpzc-K>2QAKB z-~bB4zDk zUF;NpJ0SAS_e~8VOA-s2rDz55C25KBL$abD>$-FkTAQo;W^FM%={T=5P0XOHnN_%p znf2Y#Tm~njH4ZfL?6&I$3kmXTJ7=L|c1LDOs~Nj^SSl5u!@rwWxz!ri2YEww1dvyJ z3oRa-?vTQy8*oI=%DBcl;C->N3Fm&1BAQgGpo{>xSOaA7j%=x(DEx{ko6d(OJ$bIXDz9S_#G0>Rfi((t8fslfeW*1X*f zgto(zIjZ!?u)QQKISohwciDhz2nEi{Vsu02%)0hy~GJ!$^V@0L*QWxz>(%fZF<4$&c#W+X_i(Azml|Z)T?@rpPtZT!0 zI}prseX94{8Zqess2Bjwrzwxj${=?3g!a@2gyXGJNpMHb1+kzitJ$8tQ$tO`fJ>HX}Xa zOp@%b0P=u^m(rGLhdN2m8MszBM!oX~)6%$o$-f32ub?dDh4~+bqe2$mW9fB#10C+> zSq=8vvCssA=rOGbO~;NYNRI3Wd@1MMO2XcLE6{iSdZNWL#9zKfHG^JfrNC7fIqE9A zW_4l(CtPj5iHt6WMj^E+EMQuma(X(+cK7rz>IrOiYZI5iZ2U630svr~!sIuU6Xi>? z^s`HdQDrRj6J_-|(zOREy-VgA_!Mio!&uyGusXM*$TL~x1LT6_u83#l^EUbnmQ}#;FrY|Upa+U}-J}!TVDu-7_lRf{B1Tjaf zOUa0M`6Aqy3R5lR5+!88zOUz;NQm?z9%n^VbT!)cUFm^^K%%8$caEdhGbkYO93a~x z`L+u7FERCfOpe?}ngG}LdvvbFSmc^uxRj=Qe-SU6ea}_;=i2hUS1x8nl1sjrpzD#B zAFi0jnmmw%fmh2IEWF;oKVuI;C=Z;M%iP&c4@&q(HXQ=_**ezFVWKc2=swGz z`f^^)0oI`j8q!(XERRZ)-TSj(!|t2?Ys0X+FDLR4igXd?5RTfRr}!+IMpZl~Bn-^; zdq#7o6kiG7OX(1!j^h(>Jon6_%M(O9Hg%@F3?#p0(nQQdddqB{G#|~TVH{0Ot&t3 z!7J(YI|^iCf-aB&e9Orwhj?fT(&OjVK+A~4!W8Ugvptqi)@N-ei?!7a2rTAqU*|wF z(9krs8iO4}pJT5B2jqP#oK!6L&Z(o#r9LBs&Iu{$^tfWuN1f!)kowTAa=&m zk#9^clUugr*74#q$}Q@--u9<3p|?m2T#bSStQ;M=gX&{=o@#hyAIXSOlC07o(JQ-} zTX5~9Ptqs9@E)3dXpYhDoap=W*?;?`@XvnP|F8b9Yk!Q0AX(Kvr&K-PM@Dbj1f6sY zYSdzX|IKmv;;n#hc25sl>K*{@4^f2hCk}8gU*;9N{bLeFEGwf5ZZ9xk;XSQqygiev zk~^tz1l4~oZaG_FVV3A;6kh>L{E7(SQVBAdezC7Skx)I9g!F(=6a_&5{}(?khocN+ zV^M!)UZA@nrbvY@>1t28`KgX>5s-$fz~x_!z3;Y5_nS>g9#K(`jz?vZA_7RTa6m?9 z+2Z1mSGVt(k|(w5Pjxt0dbV50hZXG-OeE#RL%+LSQ(7gv_^-ICY=73({I!xb6QuH_ z0tDJOm=7VLCP?6H=(}UH zB^%4llz94}`ZIB>zqHl!;EwwmR(GMA4W3Iqd?VHmviAejZI=*(Rgpad7%s2F-Xozy zY#hbatY22~f@k8FfD|d~-3#v6Sxox&KNeVvbOH#sXE3lG*<#K%WwPzW`+!=sb(XlW zvGL(O7}>ixrOANr`4{xTs53a%T%GF zOgNyd=yS<4RFFWmX{XsQaSkOM3;kS@KKQ=`PAf3&`dVIza3LI)6Z;T+LY z@I&IC5H1&bM7_Kz$H|tjWm2-;sIi7N$&=$il=BXErOA03&j2kX1%c&nlZ`osJcqbe z=L+$4Ofh3dT@z%++H$Qwf5vt-YFnDm-phSAOmcCYv`ar`m64~9sQ}VqB71n2BZN1M 
zLFnCsa!tC|T`1#Qfe0aq1dc|u7o}IyIH!N|tgpuG?a|c7Lw|_dV9D>zmB&r3B8(at ziU7_NPZHw+QNw>K1JBts5}vPageMt>hSo6sN3Ry@Log{S$Czkr?N+U-+NQW;!8X-( zEKt{w8evgMKIcLCOwuBiKZZ=r(i zDaa+e*5tM#%kNL10e2%$LJx1wuReafs%Uo~CM_1;ud;&PB*k}O`UI4`u!g=@wS?EU zEV~WzDIweSZBGR-ba}XW5Td)}Wq~`LoPAGN;v}Ia9+s>giuoTe|A_VbKZG)oBjBU3 zz6E50g>Ly;%#E~UPx|VU=w!;s3G4KA%^U01KAL;L3K4_7P*{AB2k)G3k-*B{1Z#sg zg)YbALIiJQ^zl2Fh4aJ-=7YSer7x*}1y4b%`?Q#3)-*W9I#3K^u6qrrZV#mrlc~%| zVR#20w^BliIWUMUqk>(JomvN2Gf#!jfI~#rGf0}L%AhYUlmRLYFZpV9;!yJI38*_w z-;Aw_*=hT;*h>b5Mne)py4=?InYU7gKm9H9AxNyOttlp#Q@Uc8$?0o0CO+>*j8C74 z5h=yHACO~M$^1Us958jvk82O8aZAkjDo_^EAwvZB4M;&{_4AB0Fd%eLCA7hraHLwz zxLFFREcsr{D0M}!y&?8C``g_C2mJgQI0epEkNbwyQ<1mQ7sSpH6-SNptv3A%i5nMi z67*n4!T)O@es+)SL+^?sA|f*@O!CDD-v_C1LMQ*~af%72fIdF(#`dYWd}Z~eOUtjT zpmH;q*YL*o93GPt&h*MUlSCr49lIv#dYaP&T~Z$pp78vMIvm{r=a_;b9&crly9f(< z97;QX?52gkb-Z9(V>aYp3`Io5U>OI0bOzQ>j0!kXhFza+Ri7p2|o4)hl z1zX5(U7OW-ytMg-v@ffr0SUAX)(uEj)4FCeD57*yleW0&)EQVqc5=ysgpR$x5l9l6 ziqXp~r&UpQ8KZtmgk$L5n%C{nuQA8Zd+}MJAGbHI-x}T40e5PyP%y^!f5ujEJsMZA zs=$?9ep`UPgVC3j!JU$F7!xpoIqvWp5gGMn8pP{yfOHODg@hsTaXZ<_X$T|bg05-( z#;Dcl?F991jwd*_m!faeo^OYR;;^|KV~cvWqpDCdv=>?V42J{9`n9}vfRbCEYhj)N z*nzc@eKRkadlrYM-IBGyS@-$-ogNb-ZzPGXuDRZvJt+|a=ijB=vZ0J;=!(-hl5D|m zc&mzbcdqC3h@G%xG?FPiRk+zxB@dQ;cHP}H)9%4sUA+8#*f2!CD1&v#BSQ@7vZKjt5DzM~90l~~(mJeV0RBjSh9Kvaf7 zAimMkc`}Z+K`s6Bz0T)QZFI6MJsyH{QTZ7aNxt%qA8LDk}?d^X#^D5v=nar zn@2(re_%7tIdFgl)we~}dt>O?)Rjj5OV;Dge<-3(O~WZ6(}wUFlK{I#C)_ZT$cw!j zFBj2!k>H{3A|lJ$N8XJ)c$KK%y=e@Z|G=eM#7P$fkn}PYz{F~_PIozr(cge?GKPU9 zt-F%J7ae}*cp*sN>bAMyFt5}PDiTp`v_D?aFrHck-~meaOztvD?nz|NZ2>LBdGE%vMfU9 zSgQl&m>Qe2YnFrp_q~p@$XJIh3Vtc3)_u5{PRy*iIyqE6MPu1YhDc33Tzs*L zNYy5w!F~vk63j;MR-vN=c(`mGDz2{1->haB?ORWKORbs8Djv95n0Q<58%h`0DSDRT znoZ0Zv%{1@3h&g1U-MFK`M1H+-nR`v=%i%P4(olK#)ZS_{mIB6Ur+zJbw^1kBiPPq zQnLyw4Jn@1{Rjh(_<5>f(0V*c@&=;Sp^p|HKhes%@$ z@XV(;Hl_k! 
zHnjfuN?=N*-sQBt$4tR1%_}x8!l<3TT|ss98vj1r21Q(1wZesqEG0Ive)F3M%Py#O z&U=ZQe+vcI#8|YZ^^2VYchS&iE^uOrKt7<8IOmWJKbPM%@gBNX9NeA@*UDGX^C$#M zJuSYx`6LQ;8(t9$-`3Fw&8I2m!|)s#(bgR(e6QcV=pp;J06Zhs2f(@R)$1nqBD-O` z+tle}rrQP*IBP=YJ3m-(P2nSgC=*sSV0W!3b#nm`^&^ zKP@q3)Icn8#PLR1OpWX!krL-H#QlpILGhnFrxjf-2&CN!$qF6DsIb4Rn`snNts;kG zKsHuLH`Zos%`Y3=RgCXhtJL(NAplz=4B;l_wggP>7xRKvyAyipb`@-lu;^|=a)gOc zG%7gY6n!v>(M^@TH2SL_48}!r?{AtF1mdbU!+ju4+{i!SkUPv@-J4P9sf}FhE^E&F zR)Ja09qtEj8osGWRR@vMc^9*u>&#`zxv`U1u8!5P3i~#=XsWHHs$Bo4oTE=m?kFn`}Atz)Oh>y+&L)s(%Q zyPt+2Msc?3ib!$w5{YyC@s!O-NuhH?SL?zXgNf%`zrlBcx}?McYfR(+CIWG=TK(_` zR;GWXG06WBi#iy?5VQ9Isn&ToX95j|TPcnpTugS>bU({+&9NtejA_a9WwJ#AhanGKmaLQ1~RGA1%MjmOWsHSJj;hmwzwuK<{Gh|NKE}2yNU6 z&K*5pYcKLuSer3>h|`s2l;6OwvqqrHI_I~5oa*9_4^KlEYRr&1%w-1G7p_S0Nd$Y^ zNN!ayx&oeLAUU){&4d3(+Bs-fqAXA_wkNi2+xCfV+cqb*ZBK05wr$(VyZ`XMqE>hB zuEIstaEliqr?aB)6Oo}9EzqmoY56BDVL-vMebhrL_h&xpAC0U-iEqAeE1?YhK#&YL z#o|ONH+H2W7-*R{EsFyA(!2uZvNu2OJGO?XaAP-)K!jiLMF+o3 zKmPI{E!c8L>=Oq_Ep`UICT2g58YhQ6tANgv_~D`t{xkW#cdF~!o?O#7#igW?a&NWp z;*4k|k1P~AD`9GAKoVQw-CgudVh&VuRXkcaK(%*j@Cbh$u4bRS1QPgq>`R%QyPIbGq{mK}RpW@W zGs}jqQ$=-#=X`wz7pgkR)Qu}jjUQKQ9*q68cki9)gE(e#90}SRtO}0zdC~K=Nzx^c zYS4~fn6q6UW^d5K-tG@d0Lwd7r&$0OB+=HfHjPElighFe*m@wjR;3Nf`np78uyI$| z^g>Z{oA=l9e|6x4%vBMpmYJsk8wes9G#@*6y4 zO&kx-moQjDg^v=iArG$z4Va1 zs1wbwV`b1;p8yiyvGcIb!n1Uu7$>N%l^R~LuGWK?w|>~JG9a%1hR0Q<5_W4{pSK{P zDAl+LmMW=N!XT=2e!zQFiEeoaq3LO+agYMWw}4k!sipAGOd+x_83fP$Z{A-SP$zMA zfwLB}Fyqa!1^XHslDrX{_NWnSts^WDR%d^z7Eit-pK)Dr3uRH?%w$+<=8IezqB#)5 zj*(QH3qw%`kLucsa;&@|A)U4**T2fMN0d4gdn)wXIoM)sQHJ0U%{2*@q|@x)(=t)o z=uVGn(F)WY1{)=%&fW22cX_f~`Fv3J>UPR^$#VNoB0*P`8~8Wxr*B(PS3)T2EGJrfw?BA+C!jVK=?1V|cwlBw3hn2#c3&PIg9=Pr6*UL+!=fO3e^hSkD4#id zhhZy@HTlegM5y@NcV#rUmNvUJ1hW=cyWSVzMkxTg=IDO5IryI~-wv}5uWOtqpz}-v z;l<8=D&>z{zNiuAiV<&_vNjAdiGFNCl^>4FgoorcfPZfjdzF(>oSCf{lnU$jZzYU+b2x&Dwe0T56rBFZgG=NN@YKu6GDBJv zlW7QS3;VBt>rn~PiI`u!#^JxT z_9y~CIXl`}svvb>R#qRk=8x|mugvI|=m+b~XI z|7<-}FExVIvC+XxxZ_;@FhhAlWc!Aid@U4j0Jf{rkE76Im|#NIc?-!%_KK 
zPfm?UQ3R&)qax|5sHn}mbS1Yf7HRD@mC`_+V)$nB%`f?n@u)}au`@0 z&gHoXP^E`(ZeR+MDq+9*aTagNOy$w=BTbN}+^(TN#!ZRj;6n8U-R{rVICN z_W$x9SK|ri(kC?CtiG%|Y@h9IS|*$MM^?#=-y_H1O===IuNwicBIC=36CZ-w}lgKs^5nP^_ip@vIC`ddG`)`7ubZ_9bK2euYu z3PAYei5Ww(Tx~SRtC?8^QM50ZfRb?azR_U>u*Wu!kN{46N&~)$ATz(ibzw0~!RKws z!YsB09{;d2D^tzjH3T(Kt;$#ZQ>1PHi9#E%>qLTag?PzAF~XGHY0TTXIrT@1Q-cwb zMui_ajSh~9`cyK9@3M1!NM2Y4H?d*$uy%%Zn8TKe9>hYg4n2>J1}3)9IY*#too?;> zcmJ{Dy`0N($kv%j=V?~;^eoXsSPe13An|Zlsu6l8DPG0-6-7K9geTNI@Zj19)P0_+ zDJ#JDZb*&d(;8MRfaWf#<2`TKFr|i2UB!}r^ni(_=gtT2`d#2NGfR_R2k?SXwKqU1 zRV*j*ebj@iB+9N(?1=)YdiDuy_FI)U1lZ6&OFra>_0cqc2?fQHIrJ+WnaZ=HY+zu= zT3VPnf~|xY;1G{Ut50sw8&>c-E0F!ht)5(;X_vixUBhQBJ@p%Bi~#a9Fb7#5m8O&sgJr#S@}>yy0lXpm1VKm z@!jWYz}xoiY)p#;DjM!z%RqiV@*nEvC}t4618^vikae?#E~(G91`7h}Rb|PTmNMnV&)tv54?iP|B z%OYk`d9L^IL(~;i*Pu?pMfc_B2Jbh~k2p^9*2v<4Z9QuPH>M+{=7A*Obg_ofk&`Xkt zGyeJA68@3DLLp@8YA_iYXaG31XUBP3)7xa2vc~aPafq;0wDj5woq4fj|v(mzmkrrxN%|5q%zE{ud)Mt}anpjC5y zqc?6_;4Pw7T~otV9V3wOXzg~K$YfH@q1gRrlI@ga-&XD_upL7v0 zS1OT&NRVb8Sav}@V-LP0X)v#FC2|heg3ggb-z*bQ740`g^;Qnw3gj=bEsv)4*Xblm z=G3H~C&y+@fc|ErKc&!h33T|oS!MMBT$dZctIS=*65K+Yn`VFnW>ldE$+Uz_ni{}V z6ychC*}_Ig1m|A-OL^IPB&%1MB2OL8p0F|JJ|7FboEaf{y*$;m&f+!qynVSzuMg z&7H}QFTinq*E+h|H$|0|J;8B9>O>Ygi_yB&XJskfQrH4m^2b2_6+~)ZBjPW~@|@*g zQ+TwwV~&i9aa1@`P%`=oDY{rBv3F}zn%w6?Zf9yV+Ny67Fyf~?7rjCo)!Vt*j&cD1 zz1bz-0`k-qLL)inWv6vU-<(4X1V-ZCW;y=#auErKo-k2JF%VFhc51|M_NFes5xQfzHsW^YW=EvBp4!*oTkGh;|WK$V0_ zWikw#A)nSLS<&%vF1C<`zhZ~Wu!DkTKY<343k+$t*M4+UG4<&w3_V?gZ^$`z>d#ph z7;qngZ#mXEQTJbUW`kyYU33`zx2$~_xJx+*5E64A{vO6Y@6v%)r4!~10eA11t*RH2 zcoEO(?Mc2*z6hQDcF(8-*0Wxt9a9u?&P(g9qSDb z-kgtkN;NdF8`F03|6l%BkX{BjgdP}t!^KLQE4?L zxLaNx_nLmO#1l-_gc%(0z)ZYW3?okv3*ENDaNj&O%NlYrn{>o+#}l^Tf>c|(s67lm zLkt0R<g9-biR9H}_|37a;uyhdy!UJr+6kq>$3RS*1qGbeQQ6%~tDBr@`_ zuLK95Wfd3u|z7^>{)!ieyFi9bInAyp@Oyo#juTsDYR*4tsCp8|dX&p#jmE;g`t z8B}3qrSH9x97|60J1_-IaNDkjYfN#B@D&T2FHnBH zQKdTqK3ZpfFS~iW(%ZQQ@mB2?a+-BxpD@wwj-ct^w!pr(Txp!L&~O{)GBJlu&~8SD>lw&UaIt$x^4n86#2$93}qPG2&c+lWsH+d`AY%nV8rjj9Xs$>dhc 
z)DrDBf-1_I?el%9*4wgC%V->UZZkahQ*9&v{ z0V<86H0oO~$nmIB@LJ#M>e1yLQZI(2>t_f=aJCs`Lc6t?lh+sCw}#GdlxX5{|487c zssLQq#Ww`4GYv->4y+A!{V@-C*{3O9NjLO!u^*!}SN0FG=pn8PF_nQYNq{w`^8KW2 znk)z>9EE_R0#kIoGYEpkBOc23b^p?!3-0TmlfA)OK0d&jEKrzRYlb5PTS8|hVbnq{ zBXeAAM#>XeY+FBM81Jso5FUmUvJZXE2N2$|h1|4FFvND;@=vRUj+y>3I2x}{2HJ(z zjNE^BIL%Jb;r8LDuTZKgY@Z*4%Lau7s*iov&L`o*L{66nDqE&k%<(E5GL&0WAX#1i zk;$rqNs%EhhMesS21CN%KQ3-u6o8c7^2aSp9!q$Myuwo>pm416#e*8IOX3v(xcxBx zrzJ{;dv(Kmh;$(X9I;30)H{J(gcjdiZo)elF{Jcy>d1|^z|tH&8qmSFG!qjA z*X>E^Sl|X`dhkRKroo`@K*X9>+G2xH=jZ24VGU5_i^8i$TjlgR$a|oeB_QoTdsQiU z4l?HSi`b^hn81Enzi4;ObY#qw|I2C(Rp8g#B)Avj7+;;Au?Ps6#A`45d%x6vzeVY% z85D2(tZe}WWs7m9>^VHejW|J6?Rx|tWNXVYW__lG$``Gg|C`w@|I3cR2$uFtb6w_uB zY%i;lp7gbEDT38$R5W=QB%jUf*ySb7Z`_NY9vJXbL8d$ibZ}+FEy<>2W?PKnMYQL> zpaQItu<+WJL}!ugW|cUF$s;LMNKLPXl|FsvA7d8eEwEi&WcEKO zdxfN-m9@}ANNXm6KT4{1Mya6gG*=z}xBvI*4S7CC2Z7(2bD z&fK3;R_u#|Ru6j!1B(qHJ_H%-_)+o5mA4F15?YA4aKI~-^5uPa{Ze)06G>xQ-lAIl3 z*#Z6xlXxdB4-HG49jLwFpXxaKI^_+EX1ur!XgbQ2M*Cj2S zXw0`4i_xtsKtg?t{rGsq;hkwfM=9)u#Wj4b*;VmGeMuv7=T0Si?VaTWVFA9~A*XYo z`TnUX-xvypuVkC)lWYZhds0?q!SvoKrnGN+o6|8Hg1R|LDJ4x&FB}*kE|i`os-P%t zH6cC-F0uNSqVXbQS!;5b6`{HtkFPFueR-Y6WNNX0Bi|}d79f0j2+&2s=nNZRl~sy5 z8}lwKp(fpL54(+#MPS9_yCT>!1CJI~LFIzVT10Xc{;`CW{>_GLv?W@<{sr3myPY5? z*^QF75zw$2t<0Yg7AK}$0~KpS095virrUcm(x$YHh5}s78l)m7sLe;0qxBmv6ffO! 
z^II3?Zy;G*U8G8Fp3kUb)*a2E>Hp}==3i9ubu{m5gLwH>7g?=RVoFu*CQf1V{E0!h z`dYni%|LG^L5%b_?n~&FXF0SH)eA6R=H^>Mak#M+q)!l`qYKD|G&Ei{_s?WhE_)tRX$1j72-p zEdqFdR<`z;sVK~_#lT51B{ji;(rIUpp{hU0&BSZdR#dKY=y)*9wHHuc`iLAy_nWhl zzC7(rM$e1ws^eXsx!i^2daQp~bsKR)X(@@u@ye^9*dG}0?UCMg;L`j0KN6JpPg4v#jcyR2C<=j7ThdcEI4Gkt+_m6h%l_T{@z6v=qS4IT1z( zvZ`JgLhh9%9uhu&JZ;8J&Jo2uYg+L*Q&d6kcG6X;CcB9|FtqX@OQ{;o*-(L%J0pMK z6K)nJ49~ihZ~1Sa9j_UrPFU?V`b2b@`l3+l+)#Nm=!t#b$ev7}*hS@4xW4k{7w~K2 z^1|{~{x~`jU-4sVe?(MKw>>eute>DNX6S)_C)BO+X)nwwYR?Kbu!+i^tQz2-m8$ha zxc!(}lyRnZu9?%PW&PN39<|S8d;oUkf{fTNMMOTVR$ngv|X`?d7E!M0UFU= zWW&dH)0zkB`&5;k-&7X6V~z{Fu_2p?mgvHvmjBL_)7g$ke&3|2k}cm#g{rDdP-bUO zkJ;l4KD_HwSF|V^(_^$zTadMATBS6(Fq;K}sa_$mjJ)ew%DLC?i&9N!tP}lso8zQR z3tzDmx7)htgKu$F=aO{{*|b4Y+N?7o*yx7F-aSprMd_<06jH$GVSm2XAJT95C88J7 z5>c;SwYcX>Lit#ZhgokfQ>4(Qbd%9Ahi~Ktkx{cBgOOz_xUA-Vg0~Dj!09?fK})TJ zSj;(Q7s@gi>c_ytSP_;Lm+nX;KBM1U&DsB=ourL|Vn$}}d%;-Pj(z;pjDPFl1Mx3% zJRH0$a4(#}y8@Oa?pQvgwa$vwifbuZ! z-J1A?E$Yz{p9;Z6nC*-X9j*b5+uX3HTcOT%3%EZ&854G2&5^dx0(T8q5HztFt$qwh zm-zYtvOSnM+n=QGz(JJZu@E6ezDJ|~m8{iR`& z&9fOYq|*4#RNZ`UNEGk&QH3h%<4*xsH+eFRK)YS6vHpKL(RZ3feCm#hWnnp@KfEi$ zqqa;D(y0poOuz(0s_T5W_wg)3KYe}kKt7vV%VL@RRqT1u zI{Ym;*r_X1KnE=sg%hEWvS8Kr`7tSp0vM`^SOTc(_O!7kSf4V1F@VJpg9nOECm&@izS?nLz;?{-Mpx^l`UPg$?9>{X>w zMI2-bE^ztE!o#CWma`wnbZ>XEtgaGnemv}8wucQUkI5D#hgX^I- z%z302p_zIRdeP=EBWzn0pJ90l$x(fE(ykWshAX6QnZovXGXw#ar_m+)SFM)9|#dq^aj=Rp>5ay-}AqIB};1sECVmwgVx=o z#LP}fP<0{N>)~7EJt(nIsrR@6Qa?19YV9B$pjI55_CifO@ZqsGX5b_FDK~|?=_6bp zX?RHpJp$Ua+ut@8@eI4J`=m#o*gVHzZOC7fM}&t{?JTLq_C%B+?EFwEvHA6iS;tHr z;IXO5!2WObmcbzjgepzP%JrwW`tM?P0a)*bb%Ne(d*|mB&wGNiQpD3Li6-@+-`=~-p4?iNv3LWM(ja8=y8Ic}*qor9gF#UcMBeKT(1 zG2)->ys`0E1Z1pA;WSJ!$flmFZxoLk z9EusP3s9MfV4AOgh~KNp(qV}Q1QUD_Td(+Iy~U15Ji{U-PufpdGgBvtE$bdhK&eA)2A z*|Dh5+3n0U%3T>MAcTa!sRJBe^bM)!mPj1s2l+#0IUzWcbM9LFxONOmc66f~_C&j; zm@G8hyCy!h8jBR?_{JTCI}$R+jmwFB*6xfgU@0z#3}#Y$f+}ZHWHZ@Y*CLSK-5boU zg0(l$_krYAQa|AU%57k9nW*GL1ePbnX~LNN^N$-xn;c#fmAb#R5|2i@WPSMjVohf$ 
zb1M*x>RY^a;jco@zZl%bMmaL(hR=TE_Zbbu-QMEBCfu3<=kSyQyM>T@RA68eaw84D z6Tn?K_vXcjSZ)ikI_6{Qp(G*s6G-e)4j*TWqP0j(D857>Ys~A)82Fsk1#lFk4shB{ z?damaLLT{w`)Toog2iCqV_~nSU7=PD9}(@jnQoGTxLjL#Z+hYAQ;jNg<}` zcuU`F=XGD7qoZB@0q0Tc#c}qEVf1A&ZP6|@sMc%`9)=gUQ+eNP2%R4AM$%&y36DlT z?5;8Tvk0Ft@Qi5P$|q`dG2_xsEs5#ID; z(xd5bVvX%$2=Iz|L@(R~*s5A?0YWbKxB4>m^Y-hKzqFMu>=+S%W79N$3crX~&5PbI zpHoTx%l=qpZHC#5%7!nr&7l_CH|Jc_#DZ?UW3X1cV+>Bb*CV?~D+LoPMMP@=M(O&F z0!h>@krWyL$r>_pFQoKU81!f){>rN1|yTKs1N@| zO9FP_0WO1jc%)yB(fno~-}W`f3UWe%1MyHTeEgf;ENe}GZz6QPJ8%iV;VU8Lt zL4sSXx7VW9ch~uoj8>;Cvk~-E%-HcrCg22wHI8EBI32Nh(VMq^h9%o+cE8NxsBqi} zK&!jcK;}lRn7qc>N3l^Ho32rvQ^e8tAF#8Oj~Rsu+x$o{S9*P!cJZbQ&IMzIX&;yh zWGjZNH%MyBHFm}+JK85;=}^FWcM3?d{m-U}-2~eeE-_xQ*8$Bu|0wJOINujhg%jegwe}$0Swn4-yP?!&r3&8lak!PMD%R z4L`o@54-)#mb)`Tu!=Jid4xTzpMi`cv z_=Y7W8qQ@U(TigG+X9^6I}5#L5~(?%){`XVezl6m-Z&YW7J)RLW`d!~f}4JF4o^+B z*ExjueJ~Ahx~qV!rc^_tE9<-T1;>1%YiBE+u-Y38Ox?f2*Icz1yKji&;UM3%TuQ7t zyaD-6%x`m0LScyoa zqrrnmao_KBTmA~E^ug=fq|7@#ngSva)0%hw=4h^<5_8iZnPwOy2=hV+l-x>!o%<8} zr2b7dWd4aO8vdqrYS85l1rLYQTWi9K?HtVSD-0HE7knKH(oW+ufzt9eYt_g-y1saO z9M{$;e{nrxP2J;u6)VZY)Go3Q;aG3>Oz_0~A#duGkSqQWQnHdacBoUn5x{3I~zKOvcYm>Ki`^1n!6 zKnM&`vWY%3y}yp$v8gP(FbT0^k~dlyYXwz@&@K$!q*2{-GhRNbS`5#U8B7sW{-s*8 z3=)GZEjxP+6`YIrXULki;c)lT+eSH1x&qfUS2p(^JK^1(DSH)D~fObdGU$q}uFI^^9a7;9` zOVvR>PS_;%SZ|@JS^x4R5Mbf`I+WTjsh~|2q)qNHUDve7wNsMp=)g$_K zu{};)&Qkf$1uODAQLfR>drndmZotdiCaD%K1shyX=H6Y;jA>l1G4^UV8_`!a^95{&>#r5)uP0#3FO zdP?gv4q#i}tK%+%f|&HOKeK~~clHcMDVWJ{i9kI}7KC7acK*YqEB#kRMe#FESTJSY z!3&9yzPF*W0V$uyCt+QgFNE(yIIhAg@%GP%k2{OYM95^%wBXS8@FleL^)L>G2yj;b65J{l*m&1TzTAErVrQHFhRWsww> z0a&zm!^I{~ub@L^E)8iy9V+^<=Ma}}tqcOqPArQUSIJ##V!igN;1LIjx^0Aoz(LKr zzK#W^)X0EsVSO{4#>xgG0#|#M_KmMRZbG=7LfwwCB3yf^7DQZua63m#Vm>bQZZ&HW z+)>V=36Ha1=;MODtqk64;&sFM8BLF5p>L~%!-NT1QbkPGKi!ey^btf?0BSZxbjokC zHC1F9P>KkYyagx8Z#;|4p_9igrtePGQFLnkslSn)i*yz>|L;V89b{_Bq2Og~AUKX! 
z%ts#>@rV_)E!Djy-0(;Ckj(qo{rb<358DiL94r5BqZ|q^0B?eS;R_jkfLa}5$3v1Su+kZ8n^c|yy7n2 zpq8oO5IZjHb?mpi$+yU+^{TC#Jr`!UPGaCic5-sWYVooPhfciz7Qtr8FND^T-OZt6 z*vz4MM(?|Iy)z}AeL%d>KQi3%3&CzBG$amLuM!x*-k!dy!+tstA@otk4 z#GItFnF7&SSD|F|ALTCb1%^{of88s|Z>*?|^OteR(eg@QC&Mt<`X$;W16KXlbTcOB^N7oS|X~jBd z8np+e{bkA{vb(BGjoC?}S?6>2hatxt9Xn{0cvjp74uKO2)iK}TnD{>?+es#NsXXg@@Rz*_Izvo*j0%h)$SYG7{ROaF_J^>H$9bKT>gAx7r-0Cz?VYOt<*QGvsPDR*s~ojc`?HgxqQomq!to=< z`>LL5W`5o% zHqwduDCtg3%bR>q-q^GS(i2PdTHH_fxLAghddaSZX*0Km(=SrBjw@_sfp-6K-*IP# zfBH@W8q6>JfBBErJ8&SFR`&yzc3uHa>}6y+{5de^9CLLR12+(N%}*@F!(JXtc8FhE zH!$9A1BF=_O(4ye^HX0(h zED?ARjXr$2n^&Nx+Nem*jLmQs%0Q?1_%Uv3=mpDNXKH>M27L*tv#HL^MjtN{7)y~%K}>ROjx>a~h=*P?@GhyIhvXiWX^Ix|HeVvE2HEwoXT0pbnM zrtME|U3|0*ZAaS>YXcej$+UNwW@l-$sA+zYADqk@%&942GG|YUz0&&_f!#_uwgHSJ zTF%#q7R)s`i+!E*o~!k*&Rn)?qkp*B7K|d1gV@7O$2!etu_QkiyF{My5ucln#7z5` zG88i}o>@J+l^PSrImMv>XGvrY_Y~5M_@qw5TRh}s_IYyYG%GwFvaiLNCKpk?dR^I* zP*m;-3Vp;)xeU`^P5AOpLzOcUBm*;SN$}s#&fG6AVbc+%?{HU3Z1uXCTdA102&JUI z5W*J*Q$2>YVP${TxyT!;#u-stHompe&N+z}g|=b;2?q!wvQKuLmGxXF_LqZ4ryh-a*p=zRw=YXES9 z+HTt*FCqw%l77GacNh*ujiJ25`eXx4hz1sp#$9jDcAI zOGAe7@yl8PwBPHGx2=6%EUPfQQAfji#7d$1-1p9KQLiUG*{nhI5Dt2lT#%=diZRv; zjl6?Y)O1{kg%k2VP6{`Wr_;2#;}-e1B>9FKD}OQVNRQi_DxL89_|=XjjSDM8GDQZB zvC{Heo7-z-mVMP^l(dEl`ahhZPB?#F{m$bg6*vVJB*P6b?N{)x!gel2W(fWZVCxdg zt@fO)KvwCrYK2nx8y=x=4}LamS6Y~A?_NhLNH$;BFwEMX5ypjQsOcN}NTIXw{4hPJ z!*|5)fKI@2z8brUutYY<6^Ys|%Hora`+BpYFfKK4_s^C_vbQep!0dHd!^AxzZ~0on za2EH|C_KDQ%}MWflVQZ(7UlO6|1mT6=Iq!%@EK}QbN+C7`A3O~rZ zeULul5yq^dDl1P;+G@JFU%t6kF6^5b^M{OP&cvDqLd^1} zat&A!KrNlp*c{}b3WcOs*+%aUDc4vcww?xq5U<3Gl(*D9r#SscWm!3Q0?t-_D=(#hHLvaf*K=s(9_`E0E z=Q6$v_w|bK2@ltF_JdW}<0bIq{EKk(39_$j`wG_LqBf8TVhhSAVM4x0v98~hg(O#I z*^||`2s`Bw#svLZJ8h)_rO|(FtT_$|RP6ZW+c7f2uhVI^?qO+%Tv~RJf#E>~S&%^U zekqxc0o^HKaE+5IY#*(bfGOWcVFfXay96!c0Q=u!ZZJlxQxseJoCX{z0F5pDT#>Um zTSHs51ef1KEE-Hob?E?CezCCfioruhf_b=G3j&gxH;3OsaZD zY7l-ZL(z_eS|Q8xpV<;(-{Glv!!bqmEel-b=LA7D8QoaPJX;~Jfu0eZD#~d?Gy~e1 
z!R%X!hhM-68`6rxU?^RUgmlaE|K-0WKIX@j0L*QA<_cpVLw>Gq`-NHT?0EwbkQ3=8 z4Wg2v$QvGv3a*i^?S~_gJ1ENU%QGUto7e8Kr=cnf!YiRath2WsnLpGfXhCMGKap|5b?0s4u;!47!83g*q&6Ir%1HEUc6F|(Y~ zI7Plf03_6-8oUCxDe(Y!8|8$AAbzThh_iIv8z?OnwPRJdd8(}NjY62P4c3&-41)GX zQcu5TKT|((o^QFT+R;|hatqB&D|Fj2{SkCG!IP2Cem8Y5UAr&v4;#L_S(CU=JHXE=^0g_dz$t%AWI=XJ7wjM*wl=IA{@ZFe`U4VF0ZTlBdI6JU7SJZw~J*=2wzn>tt4U@6KDvRrn*Zp&IElpy0(GNWH40{3( zNxdo!Ig4adYo4@q=FHu9LVbbhNqs4C5@B(2_$0Q!P}duYzpzP$D~chf#<4IBkF@%; zB$|h~6{uIEr6EphV3~PWKQA$4l3+4lKb8|d#xBDnOJdFAjj=>Hx6X(GPG$v-ryOwG z){YFJE7fVjBNN}@g<6!fw4R78rmB{?Rl=Bu;&f*>`CcFY=PMjuJT!%>qusavm!EmD zToipWS_N~m>sLx)38DxyB#d76USoyQF*>he+Mq-9SNK(Cv#QlRLqH}W8S$m!fLqEf4`3M+Bdyvnmf~`C}V}7{}GWCG$~N5$+v{-_yTd%8%gnw z>=P&lkZlsAN8qa{TGPmW7bKu;O(^V^VsKmbfj(dW%Nq5f${Yaw5Wj;Sij)iMHR4=HvIf;zSU7E2HP+bvy@(U8>@?O0m(kvPij_|=^i`rHr!EsP&_%{qMDc^^%aq@8}k=>NI;8?lZ}?9fbex-oH+-f zgxXv81Yh1h>CG(!!M=uVc9DnRQ1@jnGyP{R&_&4gcKHo>z!RmqPg| z%j+^PKjBe~G$OIh&u9S0wc`O3jiSsS=dc@!zdv z??k$ynJRT5^@oc<{WgKBDN?K98TA1PA}+U;F?6^_qR=!S@`VrKoptX(#DFMHv(j)n?KYzxtd@DywW3AQHeG% zH4z<6-BJB>Q4}|J{f&s?azBfUIov?8uWik99a&XtBf$K5M0eTG`KfFa9*hzGzmHzY z&%n+zAlz?^*Jw(&3Hob^g8lwZl-Ng7IwK?tPJwb(9fgduWxi{SbrT&|7;*wEn-qfoaUsRpWDjflH{sC-Vr|e~i~Nn5VX=-Pc`V<6d`5?@;(3 z!OOAhTA%Y(J^YIS?y1N;@w@=W!=W+n;@lnN4wJb7kPTK53!a0RVtDphzqN>X7?-93 zby;WjU#{Y~heijni^-tkhx3`slBgBan`K$cc6OkcbY|VZ9OrY_%C=qYJ&-JZhZg2K zd64Z@=ZNSJfaR%Fky2Y0iRiyTWFj;6iR-xnoQzZ(V59Rn0Td0gk*UuwjGWdb&i-%? z@TU1DRV)`|m;t+-f7n{vEB&91v*3}?(O64MDYUp;$o^mcPh-P4v>>WA2c~9xJAad$ zb?i6U5a=ds6an?vVFH7~PnUm1jMpi+z#?qCZvw-o*{?ErnhdYOShZ`u^m3@z&kZQg z$aB&ZY(R+X`9|cp*P&cF*Gce~)ZDA7cXZN;w{ZMSJV5_gV$jcA_2TnUd_%tQaxV8r zlELkx{JydUP!A*#YSD>KTyr}tGj-2hwuCdAepAOWc8uY`S4XvFH5~T4a)#D^`iLS= zD2`Ki$H?kj*aX^ALNRYq0+n-b5vtnwjk3yxYFzw}v~y4pM9YC_+qP|+)3$Bfwr$(C zZQHhOyQjIc-|#--<|MhPs*dfuzdnODrlG4A#CrT8mFOUSKMRyUP-02B0ix--euak? 
zsg}GTCU{4M5c4EZ=55(?vt?U6C4Tb)yv1ZAT1U>2Wy*}v^qc!65w2dDdN9jzQ%z(S>ju0xNfD6QcPg6|W^_AyDov>paTBtlc zhKiCxwlT~CFLl?`D*_@;J8!C!;oYgbWXml>g-f|savS2=~qAn{6 z3PdTt^%-~T03v_Ga}J)==5)rZS)aF1mE*?MRhj{WQ@ zdwXwGsV89J+uXkRZ4QMLB?*J>rTSdGfO?j2a)}*_E!P#P*c^lpZxjpGy1u`>2p>d&sa==9X_V2Vp_EJY3jX^_BHfjJu!YQJ%9Qd| zfb>4a6qTOw_3Zm_A07^|^FCO0o=02=%m;gh9+9K))-wxQ=uCY=JdKNn!H_OO)1AWX zD=cj9OH4{Loz|2_EGipiuE;9ekts^YVPzm&DV6|dq{%09MgQ{1wW32C*}X)h3(NyP zBji+?U(mA%rOBD?4Qe;@DkgPFa7B0!JEXWESi8=h{l)NnxNaEtvi^@^UEZ?Zu+rEt(C5w% z|M=h5fJuzgqWAjD`F|OZJWgjjRA>#YT(-mnHTL#oMkS3eo6+x{-QF3PVPBw*il$PE z%QV8lwYkHdc@#{4dqI^8A)r3^*d{n=D(j;;OV9E)M1an58liI$-fcWP&G)dLV1Li^ zWKbK5b-Ob>Mj7p%=$oGI6DbKQQC+W^<_&_H3_*AIeSr)q7&6^6-o+6TFOH6yH!p2S zw3ESQBYehm3&5_d1R6To>PXw=_M4w&{y<~^P{>yrRl;WXbfDs8ri?jR!#8+Nc?r+& zVl%dnf3}0lYU?B%NR^c<0DD8@;Vg5Qo6x@GEjMZU-ra zVia3-(QxO(*Asl(aVT%HPx6M7_z6An9Yg>AX^JXlYDTy)dCOsiQxiBwN$SjY3*BchWjTCpT5{J+4RnHXH%nVfIdLz*l}Fnt~Kv;Y-x!^{KL4 z$qgcG_0JjN$~UkIWMhDCP|CRqMOh$%WigFpkT?Q)5;X_`9 zy1_j4%enxO_XUFoyg5s-R%T~Q-v%b=`u3l{tZ;_qe~lmI1ZUaa0?0P=1crvi(b}_I z8Q%fs2^dX6nsZkTyC6wkrNB@!im(H z$DX!zA*j(j;Tf(rVK>+S&s%Q3eCBebNmiWA8?}uKxnfy7ZpjO}aP`!brX$7i&`$kB ze1TKu<;f_65>M370+UcOd`_$I&mtV?Va=G%66SQp!-Pf&JzcicVqH*AvY{y7A$E3#L+HLEW9$D+8{S5d}aBM8^ciA_G6W zonQ(msnc>2dO3}2J`08Fo1dik&szfbHcvUFd(w;`j2+~ruJQcLPki8qhJ2r#sF=6% zuQYagG!#eGm}IuH?GuRNlQ?=vxji!PRoRcFj`E%+R6jFkdtB+sqX-NWzB@&qHpgv;h3O%=AN9VEc^S}e}gI?37}E64C3b(jNFHj=c>?jwj#z0Ax#*{U_L zQ#J^HLp;i6aR5U;RU?42bSEpZLfX>OxE)jpOCl9s4WLlR&efPLsA;Yb&jj7YOU%tp z>V9JazRdRndyF=i-4aZqv^R|@oJhQWI=?%8<@4jQ4i#r*s>-FEKE1K&g6@kZl!tf~ zeNg%X8i=rRgjzH#l}fb@+a-`yp@y;h9FR zb*|&OA4?Oh-B*KugOJOD^MAL%qSeDt&(zE(UqWD)oyS_65S`A;30bAb)rigDe<-yK zoPSPc)E%@C6uN<8zd#Gb@$%WDi?b*sU2OUc_D!H_TW7EUupb6hCm zxndC*+XQa%L*E82Vs{IP5+Cuh{tFtBRiHwQzjBs^z8^ASNl9_ z{6=Z8H9P;0I2_N8Cg@RS*lSuxAFdGid}bny;MQU=s^P=z3`IEj-4kq!sZ@_m&Sr!N z2zBbF*mF^cIV?dmP zLDU#g&&^)81JCltH7}zS5q1|QKCrNWI%5%dm6JponF$-%^KPYCs*X}ECd10w=vCoS z+17_6S`L{@2{LgJT_S9Y1=dHJ2sOdv9EGiP0yKg-0Af#@j^%_?i~!xfsEU8=wr%bQ 
zf4Whi`{{l^k8y8Xu{wkhp57b=3W*?Yoy1+9Axs7+6Yh}i%)Y4ja!6lix!9JdU!F_l z!-T0`f%RqT+c7eN1BC}Z)wZ{)NQ{ZLJw;nhI-LHj6XN|R#T4-khMKeJs%c?rh z=l5yhE)HNnY$}GACmT~aII5z291Uk%4&B{}R6xvI5&&a75ziufpIvq^9mBpEB)JKafR(glGhMv0VbR5>jjHC~(D6Il*`G z#vuWh7i}Px>kzbU;j^qhZH)Cyj3vEhnitq&zE7d^Y6?O~>9Vx`E?YWEkNQC6X*r9j zExxR>b}0V(vIuQM*pleSuP(a_^N<%hCp1Pn3o23I#Z z0;#?6ga!RKNc{PAxv$RCCMzZ~Y~MKR#FDk=Jp1eRS#y!DzcigEcpcNo_{E74P0o7K z_=c8x{>x_e1k>wqGHg9xuiK&sfn5~bR)0`K_H6Wt3UL56AKB9vfT2_JrLrqRtSji5 zkNi9DRUXzmakcn4VZg}uboEh?pJg*@)G;b)aAt1pLfO`C@iTJqgfD8al^A3>Mi}jx z3V0gNVZ-$rA=+26))u-0G+izv8T3Yq2^IRgNIPmn9OEtGu8H&h>jZzEZqv(V$8}a z)iR1>EU#~=ig_^KLTQ+OUh8H4?`F^kM~lXr(FPl#L;gr}e*e$5Y^`BxB!d5p|g0g2;k{vmHA+s^unmeK}7AK%6PL$;YPi8Mi&IDmgFd%mJv<&f8{?7MT0li z4n;dxnrJVXYfOIQD#RIoOr6zJQ>{z^zM5nDH|-<6Q$t7$LKr|9U{#nWLAXZwPi3VF z_`StD)A%%2PRwKoVx0KmZrFy;*kcWe!La^4CxhN|%D#gO^x6shnZBWX=t>~yg8W;V z?i#1@P^U}7{8Ka~@;sr8YT45u1>-eR61VtNB;sCx5d}MyM9-?Z|YWhxzJKRAq`_1QF;L?+hPrv1DUrEJQm?KiSTuC{NFX z%w36{rF5iv&fh}oVqAKarK+8UnGdYl_aR4j(B;HiS(uGvZo^6Z=BULdl%U&dZg1T< z%FC^rNHCo1YJcyCfF%em35KI{s}^9|aH7P=_TSpKsJh`-j^C7MpAmdk+9UaU;bwHj zURKisZ-KMLL~@dsGq^N|Zoyx?Vl!=8&O~Jc-T42ay*!Q4asC$q8N7skV9ZNiQGPOex(MiBhdX|tI8Gfi$ z4h-Q`_}6Gj9PkMoi|#C73Sk!EF!k>M*lhn09E3DKJCSH`Of;UUii5ut>w7}DRd#6R{qN(R;LXfb&x&V)Zdx>N-O9`Ob8!@Qf;ki(v$)5(c z2`7fn_jh03%q7^l32|BO&RQ?&;% z4fM>AU1DeFGJ|A?=I5#JWw1AJr0@c>%%?08Mty`#Zs+vyy`k^17`5&YEl@wvlE?Ut3aN;4Qur+6R#F=k;G~~H-SwhA1NgD?&GI1+-CG$Fl za?o%oag$dXr|j8R7hi|4Nz*eJSedm5`GI9u{a`8kBR|&VEK;3t{m}8rt!c@?*$-We z&E=l@EdV9EJfj4ZxpZE@5y~+}*6VK^_!L5ftIKxFrVV`^7ZZaKCbA>^b4alLq9I&+nGs_7LB9n6`Y-Q+lg1pe6ltKud7ecg zPIY*~#Ju>}`?NS43@a?ru;M+W;VJS)4Dau@^}q zyv}4Od5;e5R7TTJ>WK-)yHj-383D5K$&$us<)dRxoFJB{#}%5yhNH5#du=m*JiwGz zH*N`Rh);!AK20wtYs>o)PAhtmY9xd=Ye-D7E(IKu!hEOaiqTzJ&#p0xx#$(4HADs^ zb}Ix=*%vbz!qZ}?Tz0CgKl;GL_Q#=EUM{*QTL_un)0y!;iV?iOvvgRAXX+aFgm<%i zXVJf%0~Qff4K~Td3mv}nh1KxMOtQ$Q1C0a2``|_FS(0;0P0DmE;*HH-hp>~=v6-A6 zZgNqyEtu==R*Y>o(;#(Jn8QzT*Si2Hlb$nSd09`CJXmIDSeQ 
z1ihe9#sN6QGYPe7yl@TC!b$c|b3-F60`|)wEUxnLFEZOzqPPmLjR1+B2o;s8YgW1sSE42{h!1ev+s62&=IKhOL%IuW{IC6k;sa5MIwK`g8_KjWk zstl=^j_I*m@yfgX)kez-MPt|6*^VYV6EqG?Xw*4l8T_ zRqc()a@>sz6e2oQ=Rb|!j&NFOd!{fI_O3eLN?&?JHSH}ewTrtSPQNx)cP37$d9jDk z6EA@0jc^QV^sGTEmeuV^ZH=Ha5%_b3)BMvi&RH5S-&z*>>E3mUhj0NjT^bGt@FFH&tWhq# zy?bwtmwBtszM%h{!JHC39YU62LfLN51+61!2ayH7W$rhT1^+quwrycuCNJR^=zX6k zfXZSe>kykO04`wk&N-S9i=;yyIp~w8cdBacdYm6Cey6urK*d;>J!JN(MK;mD1Iuw& zbUYr|P2Ud**I-cx_dj~mF}JAOESdq^ZK&bINZNk%T&59OQo~8!idC2-Bx_OB*VR?( z+Dqew_vQmuO>R)>$l?C-5ql0vX0q6#S>i~FBH_&GGjULwTIFsyDBI|n3h`WKjYt22 z$fU6;x37GowX(kCIZ&vLOOR^R;3R^DiZUL)<6$$h%#NqCK|@}gv3F3kb-h7%aDACM zd#qQa*)t>k9NGUZ*w*Cr&pDqfkgy(f2+H2H0itPMG@ak%0<@mIeJWO^aE$xZN|Il{ z_Gp1Q*L!a&GoPLj_7BI}!C9(%$LEb^jRu1W}>C}~|<<{O-i%*P0$$cc>QyT9F zt2OqV9l1~sTCgT9Nu1veoXyS|YfG~A(6F5*&+TGNv_C$`r_8MkVt{TFJ`G`a!NTa{ zGWP1_14EzISA!58KX7!)>yB!eNFa5D*+q|n$sH8OtlGsRkYjj` z-RR3}{dLQ>>cXXcgRF@)q2YUxS@2I=IbcQZHpQOVCz!Sv@7wQUhce5$>Aj-}*jAc7 zDJ2W?K68dK%8YGa+y`M3C)!4yjBDz2Kt~x{LQ^F*wIZUgN-!<2Fq&*C{=wX&rZ=k< z+y?!MD~xgoJsYhE?l8^P%4h?B1Y*ZlkrRUdy$#-^CwyJS{_kxp)eOvQg={&HvwH8&;yH@SFe zW$SJXr_>f{k(!1cH9)>CE7#$ruLL~0GO2MG6j(yVTLXxiJ;YbbcEV#dydj%kqWlv{ zE$MW*s$V4Ti=QRnZt)bz7>Z^r5<$cJD|xS4F7B1BOFPJ=rt9*`*#qu=0mZ>_+5FF< zoQGfW6qvYq9m2DX<^tA$55=1|rdh_4@-P;<7;^cnO5G5D=XyBN&dJb`v=wb;Y51w* z#U-BXJwPepQbzf?LI5&F;WyyC07oJKM?E^O-8F>9?uqw1G2xB~!p9XG>g+y_M9@gG z^?3t>2U2vL(3Mhvom?=1QzW?WD}bb+0Yu5|HD0X1AzWT6m8M6Sz+-N;cd31Y*#TIV zqA_or(4x8tZhoInqMtiPxiWc$#6(KAJtcCf(RJ!5di{0-L{ZufY(jeG6_=D*C|C6Iy^*QwYbh*4PtDlK20hF=bCx~%U#eVHRyUUH7)N1Ats z@$NS8>l5h4E0U}{wDS{z`eq-;1~;xW`r0pC)KJQWK#Ew>Y-=b~vZrIH^(W^=yf@~9PiG+@Nk0%zE{54L z7t&FQ1RiEK0hDM|Y3V`4Y6J4MT3l_uad2fATbO}3!(sxA{P+NSfqH9V6D>6PWCD;I6;zB^o8?EfM+2ru5>F;cW3u8;7cCKr(&!7Z94+pBG>2mPgRfj{!!Z(56N)(8RxkT>`C7sS$yT0Yzbk9pe4XW~3S%WI ztX~XN%Z=^F?@0_jo*s$Ttoe*9GVxYKa>t*wjmzRd_Qb0}TnXP1gwKb+>BMhWPS z^U>iua@G4+&wZF^oCH*+5OoNQ5#}R8Srlt9y9BFA1*J+k7Hdp; zxBk5@x3`@pnNiVY0nH%p=)dxRQKDLOb)u!T!V@_N+IvipLzBq-3t*lE{NjLqe9j0y 
z1z&g+yMBQvPB``&^Nqu&v%$|@?}IsOPaP1m;Cnmyj?J19VHbzcZF4x#G4rV^c^|6m^F&&=!yc`E~P19!N-}^ib>PR<0 zt1pm%HR1~MCpnAFJ>d^6BtO%)tqcb_4=TIk*EpMgT;%tQn%x!%@7d-mjZ6g~-fODn zZiKgCe#mv%C^i&^omo3i)%jDiJhik*iD&}v*EH6`08gHuLrX|sASMD`)1)SnvxBFG zuf1*hC$delpx!inTzWwdk$|vh=WJzp6V_4AJ=q5{UD~;<=(imr1Y81h`ryi6irvPA z9LWtalw&E-bE)ipEtkMa_XGdxo}n~V1YFW1F9AULCCFZC-5W!V+hARfA<;bY$psHB zI?JFS?npm;rfp-%1^^-p&bMHARw2RhewBo2@J)mVa8M}?gWkB?X<#+r1SLy)nZq%D z%f5{C%&xC4@yE9ztuU39U2^$XcJAL_3#)=MMqw=!<*QaRkJUPf#e+4>y=i#Fs;q#X zfTpILPZ};VDX6{WTUxGc{8JF03$PeR(j;NQ?met1Y%Wk=&W!%WVhQ!OL1Fm(&hg|aTXLjTe+oo_Tqs&}=UjzGyA$f*i z2*kaD60?>Hyv}bohD4S^=B7fk|6)vp+3c$icn|EvT5ybpx1!|llSzyBVgEkVqSdZx;+4M}-Ki9a=ytI9 z4}up*>yPxHyLRwSoP(z`$H)%KUKJbftg^HvkVTosanAnCuMlIPNKF%o1IXsyGj%I< z)EcaV>3c&>BAqeAi!D1#*L#)kfu2d~_K&^Awn2@|H44M$m2gs3K zGukYe3|My(4Wdo^jIu5S*7SCkH&r<-3d~>uNo_yC&;|c{E4A2|IDhFStjr#Q z42g=vZ*5!yfsDn2Ku!Xd<9ief9{twaQ9WWID`;@a2H20M7&yaH+IW2s6P2Sy;JLvH zvaD(bYJA^r3rG0g8WnlIa=DRrP5S@`ZW%Vs4Vn@xb0p1Q#-UV(q`Otnejq`;SX0u6 zqCkLu4a;1;Kzlt|fbE7?mXU&Zszv(mrV@k7c_kCWTsPd(z3`zdezZZR%1@R$@n_2q## z1!>@SbOI5$zMzvuCH6KJbY69P`zaP*%UrWz`pdy>T zkF~{n`HKBn+>?`8Xq6BR43lS;lJ0#)N)lIuHDceBJ;fb%X%=b02KS^sZ`_RiTD=>yaF%dy8JK63rG)+(FU-@r{^maKG^EkME z8S5-eod!1qZ)^x>8qYrAGG9V2r^mm*%cuBcc@LfSpLnAuzT=p6zrja==Pe>`V`Q8* zJ0d#dL^jhA`aLlikEv$d&LZY_bl6-EK9poIfE}b<;l!CbUMKY+d_AV)Zq%J(Yq`Ye zw=d}2{QC~BKOqG=@v<#mSZ`92zd*oeQt1jT&0O`9JY~D8&lVTh>T%UsldIJOs1D?r z10xlT2aA_~GqL9z?xJ8QMc8VQx|4^1lJ52XsybO1-u()T9^OXx^Vg~P_ZjRGWu`CERVK!SuWJZ1CL@)u(=T-+sQreAQn6~p=9sL z^vyB)ktmpJj|jk)pG8F}M`bg(o^2op>q@``icl2>wsadn{-%J6m3^x~T8t7kCq_hT z-)zo-iL8SjghVS!>LyJSLvb4rHE!X87&tj~bNx^z40G!mFu+8Xg+mP>_xnyOysCv~pr@hy;K*)jv^%o z76Z8^4hkRJ<;t%PpbQS4Aewh^udnh~Eo*RhMjq7(jYC!nQ9#OOTdj#l=!!=@Zhr5fZiHdv@Nh| zUbzHQo7Wue&Y8#|5AIGt+2QJy)VBLe{u#|4a0HNabscARxZzQ46^Jnkqj&H&hWkbF zdOwKKH0?b35S#huV7IwvAmqFzZGtXR zkzr`sRU-Ujc3(dAH@?vG1o7rI}@kBz?c}gO-B=P%D(X zIzck)=AZWsrvtd5oOLi0YwyV}qV*?8iPV55hn^8+Ixefl5RtFlo1Y1gILEyF=jeUh zyezyW+-{!-LZ!Si!7j 
z7V{_+1XAhme(N7QaF=iA%yT$aLd(!@OaiJ589;sJKw&uGdmm7Iy! zdM+Oq&N~8sr)!mQ{Y9bF6^0lKv(8{gQnzx((NVF-ccyBdjm>3pdmWnHd#T5rY1rL< zfP8DF?Z&Sy8$0#D9+YxWn|>QFbbShQ@{W%tlcR2s4Jv%@5~r-^8(}onD7ccT!j1DD zY9lm+&$TT!4vO;rrU@%3Y)EX%6!Uyi?o$?Hc6Y534pBm^N$G3*FH@`ZpT~4lGk^S! zRV=DuD2!l3+;X%AJ0`%3_m`V1{)o9oC_bYEg}7up_lA1MaV0W$hHVO3P_FnbiS)qm zu0>2s7#+y=slc~T7{6R8WBYj8aLzZ+KgLW^Kl{`R0ltw&QI7^GLp%N&d)lbVo+NgQ zsghfb01IJava)VDi|nD`bhf!zmith;w7hIM%0+V*yy8jDF&)l+O@D%2xhXQ;yB`uj zp4lM6d8(HjY57mWi*#`+dBmny2uU!ptEI}`yL3u1Nh+l(sWyrnGLZ5k_*=b^!U8V32Ch_(&- z6FJsMyoGJtcwMIp)lm+9^ZYZHgy(5e1LX22%x=lp+m%MRpK@R&gm^9FKd(%>1U0{N zTDu{f7`tg!Pk#tVcT5%t@_C?`&(e-{lq|r;+6pv{t&1+y|J@&64;HZsuWA#j=bQcK z>pd3ResYkKVqRa%$Q_0U_mQ#D_>xfNum)uBa!*pyCfs?2p_zmx9hX;g!j$Ycs9O<=`OFqzk$@K@f}aM*Kt6iGB-GM9o%(3945#(U{& zv4acV_TLnQ1+*hNcBQKe#Taok!F9Pc`>zs|j;46@*}5CHjgXdxfS77K@$Co3+d`8GNVLH03M$8 zO`k|eWeM-;e^&Low!93j5~75F2B8Z^l&q6zE3pudjd)7lvkS=x z*vqrA!{s1bdjuBbfNU&Pkx{6ih3eZJ12Z^T@~d{S@Sy{xlMa!0376ucFw+N)fd&?$pfJj)&H;VVvWn<@m-b1grx(b#Vaea8=(^id7o;L`<~H%y zzE99sR(s6U;J9YEu+ryv>b#ZQ)?EIs`*G$5dl|@qDQ~F+`0+!0>iMtySGP1(48B>Y z9~`PU1+blxf-sIJ!x2#X17NIztO7nbSG1L^D324BkGJF#6

g?Ny?d*#_NvTUKSx;2Zj8N_wk1PEljAbn5#*K4Prql)oQZU!IBz#6Pcp{%tSZN z`eek)FIQ5wS;avER1%Uf3K#u$~jAxl!J`)JvBDAmd?h$IIQ1O9z|l+hb&zOEDe3RBju zMp{&?lRXmWZ>X+bL`@A`5t{UPvqEa#nl*?gv{hcxG9H7+mCl9Ih0RfmG@E?O8oxOm zYK`v4HF*gp>TAL^x(6~NDT+;cgBo+4Wvn|n$zY>{4-WsB$I9)+654qD#&T~W zgJ+8PK+>6%3vvb85WGBl-GfRr_6qq zp0OSv2oY8l*`+uB1Fh9ratr$>GedD+a2BoLRFJm*;>RVewpClX06?+c@v^yy(Z^1{ zn<=X&%AFzNB#o-!m|d@X+@aM;5)1~wYQ9X*m8h=;*3iBYn$XFz2wu>eB4;N*cf2K6DY1iy}%YWM)ak$+y}1lmEcHq8zCcUA6Nc=Aw0@`0g{5rIeA< zo0|-GQ7O?gx}_C7s6T>tILNNk7MHHRgyz?58J6_?waI$1C&$ zZM4g~?By8Dff1MTiwj^OYX)vnN$k(<9mFmTf`3>$`#3N!1~LIX4D|%Qu%0+vi{arq zySubU*~DpNfj{BUfQ~n`4V(~0PQm_=Jc#wjN<{N`H9&@I@;;c1z!>CiX#Z>~xbGny z1sod0mvz3VKEP?fgga2L`WJj~LfJV_^m^joj1N?#<38%)mbY*pLrQw&!m4~uXwRY+D+bA)|GKiha>~(Or&&kZd?Qa4ZXIUapg z2o!n0fWLumIH{nAGb-h`?wfwK*X*Hwv9=zGA;!7zOjIQ=d} zHKc>PSAtUR)Bzj~D$bY*3au9TKI&=dquMN_Mp@a5NJ$_xdA*bJILAi4oVfx#g=rnP zTxjxu>a2<=fjnL#yZ@M7Z`n?$=?pmVfa_t3U0MH1=R z5TCA^+S@8YSQy|`0z; zSWx|{gFzTUqp($xuowyXhh-cps44U)0detW3%kx}jp>BN`Ni{JCH7ejs>-_LWS?~* z1Q=XKqh2QHIRmwq8cK1VjaCX|mm%D#)dX2;3Bx>>+FrRS!k91{gX!t_IEt#}5oIuSY9+S+wlOgX>=r0QD@bq9 z|8e`tqWXSLVB|cUnJTDRT|(o96aq_iRe$mj!O%8jTWl&%rit|t((dmdvKW8bSSLP~ z=q~Uc>-_mK zx(D~ORZb4i@CLfKcR3;3`J<--=9Bh8nj2&{g2WP?wFE5q?zVM9%6N($bu*H4^eRBo zrxj_YV|i)|U!QR~uQq2Z(kW!UkFE}1yIm=z1%J$A4A|RYupl7BL}R{8cA(;i?hhI3 z?250)bTimfF((OtW?S?wuyMJCGe*D&kQvK&V)EO2~AUvRvnLqC)^G08_CnK6lX~A;R{?cJ6pYBe0`zb%Ixd=SLQ#P4lxMkyp~5iPQS;t^La5=&(EI|7`$U;juHlu2q{apJ zN7n}s-&Q~^D(k}8HI-S3JwN>Uyg%8-R`)O3wx>H+$8`3@cA-M`5UzJ|CUP^&z{>hw zUENIF;|*gwLTry3tms$w{q(16;_1&^%PdtOmsWz#*w11tleL(}5b}UX!Hm~Ilu`iJ zYaEYt<&fDqApJO+iOvCwYJG>e#iEzcz}PaTHNfK6qU%M&0sHV%{yF<&@~fWmeM3}V zKi@UN&cTv*wY5cr+-V<)-ZcvQ4VYV<-g*Ns8`Cs-zl?s**LX;1-PmY~UL{~0{p|R_ zGL?lhDO0c1uz7y*vO5bv`_`XHx|7j3?~EmoYpVv_%efKiIJ$AI`o$!%XFmlVS8g_y zp`CxB)V8pJ@9=Mrk`N^Zu8YgQQ;y1*JF>z{0urg49L+4Sv=VdmI->? 
z1*Q5)+bG!z{SF6$bQf4^gMX(5Zj7oJr{qG$TU{9F2vit;z^`|ykA7FM%+3lR>+_Lmo@-0OwUNVdGF8kpA{*Tw>}RY^|1-FzpBL(s*QnAlBNM31H&0C-a$}3YBG+pfJmc>AWib9NL6GPOTn{Z1CqLF$5f8Lx_|ehJo6uy?P%H z{>)KzAUn~LQ`k6!k7)l?Y(9C>gbL=C9X>+zyAuEhN%N()Jsklv#pip{1b=!mok2|! z)wM#prY15Ra6G8HxEOx6I}MayzcHQ<{UJwVesvZ`KV|_M9-55sD8v6?dl_L;zbO_D zOjO~sJBUBf>LVoMdZxCbo*Z&qeGAg^3=}bZH+J;90C<5c80aE*D%N(_7;H7P^!rz- zHwhF})iCy|!XK#h9v!<``uHz{{(bcj!W?7Jfr}FGsIdj~B3i;DB4URguLe^+Auzg7 z-X8AW@LCA+FJ<^WHE}Hu;9R^E%|3GxTW(EC8V;V_%~xRTO+0yYdYnVU{#?G$E2?Tr zkI5?dSl@dLN#+&8t%0B<8rwfw041F83hxKx3m2e&pIB1DwfTIetz|pli!)G*Q7cB< z{iIVF!37WAkDx6~M;Lmi)7FI}yv&`_N^ee6Q~ka7c>T$R58y1pD;0Wn=GglI8$>KB ziVq3vf!Fe)N{zx9Qn$Zg{}M;Q(&JFMno%*Fi6&AY(2T;C%T8T3Oxp#eUjYv1A$)a` zKJj8V_U`|b)dczJGZ6@RVKZV_oWJjxA+-ZrRo1e3*EXWnGe3Eydl1Yut(I-ch7A{MV!rjH6U~gt@D_Ubx7U)}N1Ol(zp>ch zz=X~(z(Gjr@@m%dS41!#U_72Fn&g%G@#{!_swdR>I3dKffOhHr%n@_zsRxL9``CmJ zT2zx0Qp_faeyg&9Oc!y%mul*N|F`MtbCO1u72&zBWzpz|=3G&2r`tn92GQ%e?Dtw_ z3*8gB^KJ3F+`~VIv|xAJWK@cdHSAW=ayg2k$6ga~M@9neWc4}mX4C*y(U`DVcmZEl zXvCRzl&-79SDtnXIH|S|Sk%OZ@fuO<$8jbGBRq9(1o4QTIxL&4zDr5IB)J|k9Y*!O zGkdaC7{bw36X6E9V}bb@l*m78+BM2XdBg3G1+h-g9P6;)Ah`3#vkv0-o)Z(Mj9o#8 zBgD~1LZb7Lr}j8R*Zf$zNgwg8ZhKZ(cwZ@lJ<;r#U2TWs+OAJ*E-<3lX|nJ_bur`- z7?B+tJ{%j?>we2)>fOxw(7&Y83<{ePajtnmf55fCyy5BG|x?fs4@L|ILB`kHCo0tWxLO5 z{exR?w+q!ATz;Gn>eDC5bGK|ky76w%_^)lJ#<|;)89S*524$Dd+~>q%vvdmnJ%eyr z@q52<9W(pp#EcBqp0f82o?igo)l5)&k>d@lm^eXvD=Xkw$9YHdm=Me#Es z7){C&p!eH6SSqY+lliaw_q0Q+e?||gJ$1gGeLGtE{FVyauSRS6E_w-f1@P#2Ya;Wtsu5)|!Hree*D+DxcAd%R`R+{?@wf8CE}ea?9g8dhWDd zv%qw7C{B~+Dm&eNl{zQiTo^n>PUT_$y(raK`-6%4Ktr}@L~5VKM_$?)c4?=C>5&%{ML3&|4@B^m`$V+s_*`nZp+|e>opF- z>UD+7I-zB077-WYso_VlW5So`qWc6veb3GIfL$^TAJK0AReFK~mKFTR;Ps~**#Jg7 zT$Y$BPOQ*k9LY*dt?S(!f72u;8DIK0jgZNk8B-GMyl5pzEQB^_6dgoFU4`?=>f9G# zJF{OS*iT7f9|aX%mw`9}U)ile10AV<$}^4hN_gNpmapa2o)ZOwc)wFlJ_)b`6+OSg z*|s;$i5T`3Q49->?aPw!PG4jDN5XU+Iuej?)JUx(H#y?-6y(d_ zVNjiMWHl-ARuS`(8?B7_CPB0*~ATviz}>jY{HvvPYZd@tZFydl|^@ z=+N@j*%_fRFPGj){9(AYw)mv#9kE<%s`yHTB+_Is$3Gr%ykt$_r4fxsGqz{~C4^cD 
zl8KLU&!sc5EnpSB=4lj^{LSoI>%_sXQne5zi@jaQ#W`)z3(pBCU;)pDKv(m|$}j%D z3uVZqW@QIU5j8#&3pZRzAfQ*zfV_wWo&>pY#Kb9l(?;{boXISnHEioCe9sQP2NicE zkK?&E*tj6$tN3;lr$=$0kWZ)bzaP~$#$drG<{J!&Lt!#R)0jn7EURDuESwE^QA+Xk zfqzjG4@VU;@<~2t!KiFz7^eNVTA$yEhUGrcm_rxs9$0TqpPLz)EF$pP&u0O>!jaqK zq)Q!}o=utCW|$WTe;hOQ;005HcMGB=@VQ_F>w|n^oVQ*9q}U&40%;^9?<^i6Zm^lY z@=jlSkx&;Euz;{eO8{dT1(xf3C~Sp=^R-Qmx2;yaYCOdmZX236kPAGnE=+38&I>9% z4x%O`Q6FjFMC>(eH$G&KEujkOR~V>5l$*(IW-7%D`vX^h=#cB}rs^K1Ws7T(P~v(Z zSbZ3nVNB(IDB3B@_#z*4P*)kvGwHng@<_SYOJYcuNZ_-=1Sp!9zo*CvE@ zs-*QOiBbY%Q_itMVmb`@>I2{#K276A2Dt8trEoT3jLPiY5WrqQtf+#XjcN zM(W;#;5-0F07Z>}x@;9(2DS&d;h@3MB~j1sDD=Zhqa-|nkFouHh;=eNT`vzVV_b#v zV!hl*RETmeMi=lO+=2^ffbBd4fYSs&a{Ii1uHZky;c%i;K~3MykGc62zaR7Yt7+yq z-70>tTyU5B>ouNQ?TM9IP0!-8W^D`CRD+qGHZgB&hw~}fABKY9FBgcf5l~`^;@3H$ ziI@qMZ7UuCMSA`Q~#id0AE-X zIqWx#7jEfKBYin#!S6j<^$#m;WZh`}Zn`m{a*y^oy?o7T6C(L2mhcV6MW*VFmbfl5 z>F028dk-cFIL@HoivagxC7e5}E90wQ8YL-h@XY0e6npL3{$JA@8BuhM zxG#E<4>0+xPzi<#_lGMk_$8MO)IaebJ#}I~Lul z@p()sX+v*Kd5~h2YVXC`Xpl;Z-Y)$AfZR2KB~cs+q1pB)BnAw(c9w&#qG~Tv?x+pp z&YKM+k2}>2G{AB0a=+5!!27Q~z4`uG-?$`3%Eo#>kaU^9EOkPuR+id6!ssq4XCR?_ zTQU=F7jlAO2epan+@ozjrKafch>@rs#jUrFTUG(<>W}4`w$G-CJMsY{R6;=iWYL9{ zE%&>=qKgPu;_LtAzy06nV2mS67|Xxy5jvz*j}Ny)OyKz!qOs_k9TM4dU#RdIqx!n? 
z1pPW^meDd^K0&`-py2Iq7{vmtCOVIKLqe>f-H#yZdZ(2QOQNXgbdw{mlG>mPCD|Z5 zV;Lv&i!Az5ow!T31hmi_m=3c{t&;{vDrKft^s`R;IKCgaa#SH7=c%V~8VoO_v3|XIZM)VG*i+1MiZS^F*)_ z18uB8MTbW2oDMb-H^0h6RNR4_w@<-W_;K_bd(=No?XP}uktm~UIm#`B2%{qqoAUjf z`An+eW{w>T54-14J`~hDrH*)uhxJpjJTGR(3ueuVPL8S^94n$ya%ND`cv^k`p6Ur| zUuI#+3xkLmmQ7dSqU{KIjddzK6q1BN(699{rU|h!lsTBdnn=q60RFSstaoLV%Z=7c z0Cuz{2x)>{)D=B3t%j0K+(gk^dLC_TtS*_6Nz<#d8MgLDKaEC7hUNaWPGj-OY=}P+ zc4|3QB-Y<8Sf9*Ok_9u?gJ!p~Ig<>ETL6jOpW*!0TvV8!XLfPOGVt&RbHuUzJ`$*f zwUCzzckTmzFhvU~vi_1FUP3=E$859MEQ zVFXknyRHc(M+Texh5$ey_=f{E_;t&C+n?WBoiF(ZzOYB9dDE@`pm@m~!w!HT?a`6w z3k2gt=9h@N4ooVeYCu>vyIlrs`(%KLkGUCZ&IP?PnKDn63+Z#SCR+K1V#liJ8J?&qTo)O#_Pd`w2w>r>&TJUw$0jei4L!Gyh;gO2jjtBxye}WS(S6xe{+2 zsTsRJIp~&gChs$U0sUu!ty0g9F2IDHV`^Z4hbY5ff!GR6)iy2J78nW{Xf2{NT;KdX z^ZByDj!%?1&j??X^oFC#r)Qm8qN~ImAIm$5*b_@(SYR@fmO`?Sb_($rZk0q{yLwTJ3WA59##DY zitTK!aY}JZIR<5~FQY~Z^(wWCg+XF@Hv)0@0nLXq4YEQE*{^~ZBSBCi4|8EE2BBN& zJM_iikah0P^%1s0W2I)4`r#I;NmQuv=tM8Kbi6{{8d$Hk{V?iv1xdhr`y{DWT}QEJ zdY+U~tlrtc7JkV9(F@^$HV!QKS*Zw~i4 zD%x;)5CQ)DY)3Qko!HEhUCVu@AJk8B3zxWb#yi5j zE-e&k*QKsTln6gu@U5SUW7ZDQ8x44I3AHJitxDx04mKLz@5T>$%)t{RnJxeNQ9gXQq~a~ac+ z%ZV)dR0Y2&x>oK2O|pjbC^DcQ^V+-(UPA#}v9VZE?Wu^&{3r3>N+J1miilX;g#FX%#3V3xZVmjB^%OC>{jpD8eEqmR;q^4bJH<=l|v6}h8uEfRSoFm60B`=ZUK9(g|GH$O+E&%d# z75f;&q%}mwa2D<=bPHbh;Y+k+mSlC1UGm$A^%Z&5-_>{0CfRWfW)Kn1E9?7D^sXnl zohvAt9kV!Qpm^c)F`80)nZ!@cjdt04QwM5!V+<;}YFtX(E1>d9TYoG;q&XR08qx** zM22Ws?Rp9NfrR=w6K&`NiIqtQ%;HKlT61f9cnCY2)j}VFj-oviP+^bydYN{1N;c7<77} zxSL25-YbnwP}*bz=znb6utoYjond~o&I>pU-nQ8FueX0r#)2c17ION)K}!i<{|^2< z?t4BeQH^+CF~5YL9!yShKQa;sSMg18BXN$Smq)q6qd_*Dx#JK`5U``_;BO zQqx{p`j^1G7T@KGBdP`{+Ov?veJ1MUIS)eCi1%$PjL14g$h>U6xB!_= zQUob$u+4xBm3o^B0^<0n((;YTW4+g%G6w+e8X;0YbIk$1OY#fa`yP4Vf@8gekRZYhZ)4p4=uSkO`@x#e%Bc zz(mz`XdulW?#xK2d4Th}1aIGKVu`rg;I)%@xv7G%q4_&uj9?(^A(0J3d3x0wESGR# z9Jer`byp?nqhS9wpBa5YieI5j=ju5(;|-sjv1NDR)CJ*m87RF>dsJoI0#(LCLQp{n 
zxnE98t&(_l;&ZWVyz}@Lj(JbU!IjzW=NkFCmzdB0FS>Xs)`mwB^Y3>ELvfX2K&SAdZAro{L}n&HO0pQ!Mg9E=heYs@gxp*>eIE|4H~;BH5gYcWfQmojjs!~MD$N9 z)GLiojxrh($LcgCAh?v)o-lkKOM+YxSc0vYfKT?v6(UY3b|~m+F$jFcP(Pj0|JuP+%>XSvGENyhD;0HR#7y&=~OBf)g_R`gCxw z;&bYkz*mLR?ghN-M|-cY2in|xrs1Z;Ha$hci&!^hp65l&-5IcOtlvQHEgZHSP)Do4 zAiQWd`CB*^3a37Qb+d0~ocWF$5rm&TJK+;v0=4*y@uzK0$Ky4ALA@?YJqQ^4{_&${nU1`8OBT&HCNI4tE@yNPj1VTh-7ZGQ_+g)?*2lj+$Rie765#GGm zF}jWGmT6J6h%tuWlF0*wed zUI#W1!`O1htsJo!SI*_Y$Y-ngq|{sHWf#iJi#;c~bXtiDzRO?=iRpP>1d$VXIU<>v zjs(~8Lj9t{M5tbATavdCM$2qQ^hT%BF}+sa5{e|OZ71q6j!-Gk)*I3K!K-(6{LB`e z$P^X{y{$ahle~$=EyIR5OA^FMQ@z$*{j*$Va(mBHI5%lf6hTA=?iRf_N)_SRk0QkW&I1e&Sht5NFk#Gg!)Ln-k5 zsN!WEdRxkgS*mTxNTycZGeVS?fA5yX9IK6o1{Vg5nd-$YCQ>XcxaXSy(=W^iSrIg{ zeHL8o0hA$e5i5IXzbH{DA;EGz?I^lBHc8SySLwa(zA&uXIoQsCw&K^!hk(Sm7&3jG zjPu86F^C|zSTv|+Uofq!D!w?EU+L@?qL?W_Hb9;V8ypz?1{(5`gOX*t)jng~)fv#1 z$Tzj{f$?u8&uCN^6g0b8rP38epUV=?#BBU94DBgMu|enQCa3XUC7{klJpO28g9;c}JpIV(D&|FrVHN z*OgtvrtBSosHJgCq`z^kpJO=9xc^f2=aUCreO#TF2$(~4ZmuDzYPH(Xe@+humlFRt zyye`LP2BHdM+s)wmQp%0-S$on9?Z)v=343#no%_7f2iMqHz!Isca0Go9)GmMzz{fe z>w4k^X;EN`R6V%?#>Bj3b7aDM<#7ar933~jTo>=LV1Py0F6yR&pHL%ykI47CtUa&# zD@Trh56<`%d@V|D#Qg9Rm7CcBMdU&PW>SLn47Y+#8fOZid!3JMba%Xej%zv45zc*t z6a=rG_TxXIjEIoH-|WUaRp{Ap4DF<%xb4Aiolj^&zoCL%r4s?V#V$OO&`WcpOBXZ? 
zEiq__2gzRxyt8Y5NS{rC4Xg7VZOn!LD^t(@I+E&exhgmljCRjPdJxf1`0|^`bl=(e z4m&r&RxfucAsOd5DY(8^v&;c%w85Jogja^r3t-lpU9a`}-Mg-wvuZxag~4W~q*wg5 zXV28trs$xhh}2i<6+Sm}w6MD4Rejc|YbT;@ks=K1gC>MX=|bJw@O^Rxjr?=Y3Kxf&6@vQz@;|=jC%|1Ufu!n{8df#V zXzoM?begXpxAk_((Awf!5eS`1w z%5p%(_T*wayLw)fq56q&NTSM&2W znQhtdH>m@0GzRPykDRUU!NMXph+|yz#ggdSZDPz_#n2U0yGNR0x4gf2!5hAA=WzKs zMb7?PD3H?$q8{Mc^h6DxR>8+2e`-Ekp1*OBEhUFC2;D*WF5{ya|{sS{@1Jf^`?5Sdi`raogUO3CM@vfwTw1xGKN~7bV z%uAvz;8u>Cjmk=6_8w4IN_<+muip@7#CsT(U9R1$-_jSb0*m;yV+YDHWRe`{h=ENa!aeyZzpF_C5p%MA#TD_@27z+mx6)160+q zt&CsHgV6Sneadch^%7-PG&Hz`ui`_0@kkk@*?)(mWkS6oM3NgKxa^$Y0Jpigg~syH z3$u<%Tw#>}@U4L!G??Kt!}FlpyjQcRU5QEGftdAV4D;VZw*ap@dnN|B!)67Oj_2zo z6m3|Bn`YPo1&s_t*d)_h*eLHd+$Vt|YbCmntCR}xwE1rxv8V1{)9gI0lk2=KWGyAr z0d-DwO?#RBY{YoTcN6H`&d+bu?c*qgSI?8Yk4VS&UBQBjCHz=||NC=_e!klfrXmlf zHEZ8o^QP3drp6~-#>T}dcFxq0pO%P8q)jQ4V^R6<4bz*8s+kWxh)6%Nz&1EzN;OTI zlKv}F4McF}W3O9^H4!;3QHAzanyj-F zh%^mtJ-n$`)ENl2tC*F*xWBK8E;PA@IV!#(%MF?YbY*XNJq3A?RaX-P<3glF4eHJh z(!YOM)n(FfFfFyla+Et3p58kbC6WR=#ikv#ug11&W40~+7h__A8nz=#C`UW1)?QJl>$p&D?m^{Ib@-xW(o2h2`bk&^2mCOW=*k z0AD0~Cnzz&iEpDif}XrrqwDIy(G<>shBACGhnfPdO}bECVvf5xa_W`O*&JgcR&0;n0q$Y z7lKCk|szq)v<*w(H~N!h|p=*5q#q=3REWv)P9u?e$rnLEF39*I4kchpP+ir7$mgcR9GsK46H- zY6z5G*OX0|>%qZ*f5uKz`fpo}=OwK=nFVleSz=(91=wK|`a9u_E*Z>lxqO@(8Yfo| zq*f1~0_nJie#StTp=Eci#o{#Knx_9|AZ<%$xLX5YIZ`BxN$ZMc`PvShd(0^-AcOtoTHR}`qQ)_}byB6xj{PeLb zVnH37kB*O2H=ejW;J-JOcR^d}%`D-DJ0@{%93aOt7#vW-8$Xz8SUZnvj0iXJB;v@ zeE*fu<+)-3u>4oko$S??B7LN0rBCz!@_#H6 zb4o%y0>?y@6rQIdxZE9#AnVS!g65Pt9Uu&wgK~=?t~RXv8V=%6ZbmFK@|(R*%kWi^+c1NyKd(S z516ATEQR5`%i(<&Qq7{Z5iTeLRE@%3D-3(qUk0t(7`WUgGN>vDrG{u5NBKqoe##;JxFG!4n5)kL?#-nJOmJ}oG~P`G4bG7eTyp+b}Uq@^qh-7 zKV8-iPH$f_g0C%+n0gjNukGY1=Vg9UDQ=O!%{v+Ce)ub-g%|y8j4>2u6J44wxlYR? 
z`f5?Z`?ZDdP*x%%+}yY?7O$n=B)xL58Dq#-M#ENs_O{dGmicMV2j12rbbv zv3Z3&%^G=9(z!njv*m!?^aG^51aK2#x95Ql7#M&79X?y$<{Pha2L&>4ik^-utl^yy z>4LGYXf_$5GU(`!QHGX(!+Ee0vZ%?L_HLdTG%@5msX4=rI!F zYivKPu^Nz7DAi|>dwI_Ig4G(3R`PCSy8(@NNQ7L5Fpcng;DS-b7gl}?IP5QFz8+_+ zWh(vT@dYIFiIlz#YO#rP$Bd-Pj_TCdF*Ye5@i$gX)^S#x6;uO5V&mqwBO;llT}i9c z0Edb0HskGkQ0oTq$c+%)f#^KP1b{aS@R*HPe5$A@bwt?7t>WeRR!htSk zZRz!ugqTk6D{W04d{*gA<|Sl_5Mot-dL}Ivo8seCz~nL@UlTozNn^&N6-08Pkkmlr zwm`g`G?z|Dpxy^R(wmVUXOjWwTkftCZ8z!Il*jp(5X6-$U;%9sav`KJTy}Ls zyVZ)Oph7^(T>EOkj-x>DYvi*pTueP;l3h<6t+!}kvWk)XX2O@qh>H-5)vz3HR608w zNdP5^hONAmDZN*@@5`NVjW3>&R6WNde-sCRy$wYW4kzSN=94rVYt*-;Lhi$mSlnRu zS~V|(T;UCLmW(=_+L{|@#_XB?Y0Q@E`?jka9_~q{N1nmpSQI%(cuL901utn)6HwgA zgmlJAnSIE|7!@&;?BiPV4|xmHey)&icS_oS+hA5)`&T=+5NjlLUpJtZpg%CORKg{&M%-NmSxi4-naM*9Z^Vo? zCIjSKGI;|E$QXWf^Rw(AkHVAs;!g9KZ;XG-!_9WtA}VN&@*N!7XY?7Ck zT5|fyvQg(7%DZf;N7CEGTDbqg;3pLHi6H_1{nim}e5a98esb|=?Veh|{!d6CI9ioY z^fuCuuYguvHD~*0-}n4=kIE58!lDX->c8~KPBkcee0L$0%byummZG_Z-9%;CN|CeW8C@{isoB=2c+{2#U;5y{%C(%nSuRM?qV!=O zljZL@nTv^N4gd4CM1(E1Znp+u6ib+p+iEHJ>nl&x?x&Y}K5)|mugu0&;wJkT_-C00 z;kDY6xI+g3qZ(2{l6}{ws@RoD`)}>%ZbFi&&Ld$IHnC z4!3Fg_5HR;VkNA1>*y`NhEISzNVjXQUu8d4$BO{v$ti{%XWk-K!+>tlpIk>qihdHSBJ5y|1qGs!85C}# zZq}8tQ2P0t2rRg!*Nvx24WRnBo*=tsoRZ}yN1j@_2}c7Oq39*uMxef{-;ycdD1&Zm z3FT?B9m|9Nm;XQJu{pgGuO|dTAlOWx@LTCjL;Fn^Kd-fObf4&x=y_8f?HA`kYu$P%cA6;BJeRh&u)1fcySs&vhTD$fkIw(C z)w=%E~|c!rjN{_8@J}7K@eL&l1_%a=Z=Bpyxzni`)o(z0|Su0b;F&C zawEWrF&Hwm2MR+HdW%nf+SuKho*JgjLeUE1$KWqjh%zn;OQ<~r;bBWL`IT^%?jZ;0 z0Oj3mzQYV4-U5`zuF<>f9JMTSgI%x(+0)%d`XrGCqP3x>{ zJ;892ky-=K&mn^oX27d`1Ub!3|BXR#WTJOu~c3-wp9R zl0j`o_}Oi_%4SVG+Gy`3Mt04*TTv$UeQ@p+Fp@dm8E_vz%J2Wd*a!=7St~{6oCHGE z4>P@2H+{1Y$>#!Rw0E-lvSlf^reD-_bFO#YyfX!^s$9H0Q*rHhv!HqE0MLN==|OB|z4XKP!J zT&FwI$1B7H=JdtV2yznMt%p=CMhLnUc(8GJUOnZ=AE>2WNJoV4Y(%L(x#iR^7`U1> z?$~D)$LBZYo0m&vJEleAi`$Wq-QvT5XQy~Id$dK9DUGpDjpcoCb|rwp!0g_Nt7R&K zq(#JQtF=^iG0hlj06ci06!I9|rP+;Bxmw|KQB%UF;RuLCJ>X2UDl1FKh>-m2vJ;rg zk%ayEG08>RlSvlO9rQ9a58>WO#OglWOSc8J;3jJ?{hYx#{)HIOG-x6&umeY4C@ 
zSr)|S46^}US&FsPYP1v}SE(Zs!J-=b>mLQqognlI#HhlyV2&3Lg#-KV8!8F@16&b$ z;XQCFRWk4BW1iDv#$o40ne%&*L;6atzi5HZIEAR(xlFj9Kt{c%RLlfIM+j{SFNG^Vi(dpLG=x9uYuMnkj zBjJ8XBXo#Agr-49H$yT~qvYwDtArq>;<5oF&m-Tt{~Cy>}f zVS)2L&g zyjP2n5fpJaTt;MCVLl&QDHcDw-9}EBQ+0Bc@0YnFa#J1kpSEAYy>qe+phz%Oq|cOr z%n~{#zZebzM^5b5E%gco>E9$3wc!9G@7xC{7Wf6XZpU)8P%#VGu*MoUH~WJ(#39bDH-C(AFuzf+h4v|1kPXVU3;R7ZG-YvzYCoHDXtv&Pyx2 z2O0XGOnW?45Og3{A=VUQsgH3^DS|%pNq<%EIwe_Wm(hJma8l4$%8l#y+4U4n=LKue z&yLgMq5mc-Y?OIwIWIDCE!6OtuxN9BpAyblc)+UpAnBLMijY;1vPy2^9#~bLm59a%o0^NBQXs(ySq>I&Pa{Wb7B z12K7UO2V2^ySW1Y70+ipau|esjjBGI>NBKfN_?oD~aWjPw`)YgCZ>8!owP8NC_!W-K)T*tkLV!m)tbvPdi z*M%E`e9QQqPz+sCJ|#dgbXKlPk<|!&1e?S2qh&e};C*bqQs|f~<{XnTO%Ld5Bo3aZQ;0^4o0pwPJZ1GH0Ue(#X#g%taf}Ei<%F*ZF9S&mcHPC zHjF{6NZO>3{lmBEeM_pW$jMng1hi)Xfz3SL$K@#4N0gsHDuO}@fXb5!Eg$yU zXfQ#NJVSGapyI)#Y~W6T4gpLdFKnWWGJnzwW6=l1w4h}+iRZ)+ek@$f?oTylMehXE zr`CsuipxU(c@rYd_@=x0v>c{vl135=)3V*KMaN=Y!6;r&W)Ua-@u<>y5Ig@VBE_6x zmtmgYHu;phVsDC@Q|#VxFfw2S zzsrW>J!Jticx#QD&vz!GENzeL%7>!}m68FI?;zmYJT<~5d?86%{MCE0wT;?jY*7MF{Us24>zMKB3fu! 
znK+~olaFtj)x3O;8c^AQ@2~(g>4ipYhDx%&;Gn4RvB|hFTQ~AGAw7p=LE?;TMu3D0 z&TKsl(OWclYacYIjFJ`?D;gZ~1Kw?5Zbq|bOrja_9??Ie z7-Ky0INGDTTp##;1-n(_N7J;D*=(IEbRlELL&CLgwyYwyx6Kad-JbLwQw22jvFkwy zLjS^$LIp^>QqK(HzPux{O~Wlib2_Y_=Tx8vH5E73W`)Zl*GXJZh?Fn=xQnNUH=LkL z#y{h?jY%b_t-2N>l^A3f3}2D2>T9gpYpMzfIdR1ES3__r>w=w7Z7D=%_Mp$q zCm{>N5XRDf=qdjYzU>88cvb^!Xs2uAO7!x$GJVcS!mL~OuY!#(wTTcV4@J~52Fk%2 zlIE{M8`%Kq)gHNd8R;jMo2}eQ_F8}CtUdy+caO+SMnTn5-4J3;!7pv^lxArS&q%u* zn$^g7n_wz?o($=!R_!;(zhC!#Mq-<3Fc|JTm>NOe8Fds$Yd9g78@j-*7lL4a1q;I8 zowSKF`ct?VXKobhmPs2<6{q6xsDa)&mcRg|s}(fgWq#cf(@LQ|jerfUJOpR}!v#{j zHE|q4t{H1gi7nlZ5KV`kka@``l*hk9?6+&ren4oZdbh<)+2B824!OsXlqV-H<4JSF-So)H?)8(N2o5O3g?WjWuK;%-3ontmJzl;M# z&a$?oGq;enSvb&0TN|L1^Nqc+px)X%c$;(f`UhZFjeZplC82EkB4PX(k5=~2XML$y z0aX4VVDrEG&{Fhk`_|JWouU=Ti!r4+2u$X}o%$cx!>j zQ_kZgKKKaFZl$Rd;uqhZ}GE+@~fEgktJnW?RUFNBiMd>7YuX&~UW@ zFpH+-8;x5Vl_< z9U4=SXgwOH}Rvr%-^A>zci zd5;FHlJW}Uf|e^*7B4N)OwyCm9k^xZ8-5TveKVsaUGpZ49gi%4m4ECn#HH~W1FS^% z`D}skGsAWH$p*{Z1W#B5M$$^6H!3f&3Q<NcNB%kV>KKKGVGYU?H5==Ty$OFJv88 z-iZh|CZRIj%8uwjKS)7#9I<>_0zWR(Qmi955_sW-y{cci7VqH2q5Ip&?Kpf<-;8R! zKZw^{f?=*_u@G0Vz#QGg_rY5lSiYiI3tP-`g7cpLzx-cZr5d*MQQfLd#qTmVez%Ej z6cWeM`i0|f^e_VL!cMg+EVaLXnQY~-jb((jHn(3I*Nd0~+w`CKF=|2xm+|)jFcdz^ z?l5Y5X^}1wuWCQr(S2ET8qM9j`;2z>Q(d_34ui15W)~fNazB_&`%hFfHN-n2G0{af z_OT${Wzdi=LdP%2IYv=M(In!iB2eaHm8TPMG-|W$`Wzx!MfLhG8-Ccpf0qvx+K?&? 
z{-RxcQfJ!7r@orW>-&rVN-XRtpM(XZLWMF1#vK!eIMd9}cb#B)X1_P6tbV};^9AXf!Z!P>_y4MoT9KBq2qbU~?pGYxaiev9M ziczWo)>%oaPm0du#;B5^&_Dl}y&0_>eD?XTn@&Ek!0~0TqNm@nN!_5`AxjT@Cy+{m z^J$+Jm?(vXjNX+|Q+}AR?7%ONKlLq;*Vb$7&z4UIl^ba6K0f=;q6c4x`norAbM3^* z@O+Ib3^|x#d(GtJme@~iFmZFiWb}s7jfE8Zj-!I1 zttBtQKQ;M_pv^}y8kvb&?~OHWYDxS$sqLKI19_lJn80OKc%vm&>jud$w@T*Z-rnpzttM-ED-_A zfB3r8K$n}daq=d&lkHN~XtY`956^Nve4vBRSEqQgD_ zSz%#Le6NIM#A^PK?<07q)T6OtM6i<4;A4Hb6xXDrwiPfcnzg>r(7jqr`q;QUV|PO= z!}FlWjd_9w`*bku5EFMf_*IQ1faUieA$M@q`5VZ^;nLx)=VQyoUsB(&I))c6Y!F3X z4OoD&#B;f1(>zPRm%ve3KUt6!JGAhEA#gPfX5YzdX2(27|( zTUP%>=rQ2p%qnSG;-6rjrx=0yAIBwR?CY_Z8XqK}b*7?jz*2;koV>-Id<^=wEK9Cj z(vP15+(ReiH-;JE%?x+{RqLk`21?=9lHvZfwViY|UUvlm)oZY4Zzd5|fk`a&SUFU` z(v~0{k~@XulIfc^laMz-9(?A1zL4E2-b)M6Xc6)v^qHz;l-z0S7kZIu{(B@eaQWj| z;O79_?ZdQS9b`&meNSk{rrQ04CaX*D#p|7x+@+mpOAKe#Wqri#v-8b|U#+b0_TDM_ z7s~7y(F?@uz)6ODy?mTjIGs5s+SE$WqD~3bV^sbQ1F9Mpc0y~tSSlMMu{UuloCi4b zvZw)FubJ}eX5_ zH}Tr*c1lRbVx8Y^O|eErx^F*-%>|}Ouzu04-y&{cphhbh_RwFE0-NQNW!jz*DNyNF zi9L&-#~n+{{idI17I}Qw!|;_;6z$IHgqp-ocCp z=^i!!&(1b#S+cW0{EoOisApK!o5^3=v!Am-Ds-6c+eneZtp>MqlJvonY0VWRS84bt zz|_s^6=5L6a$cqv8&omsBTPf)RP0*JtVroOR8m75)4z{1K~u33WVvXMRWP^i$*Qdi zH^pM8OSS-uk9Ze5kOexk((&Em3weY#j7@=J%Y(?IpQV2@YA1`bRCUr_=vLeF=uh@t z%)}BNwOXlFqW!b8o|=Gt)XLS5s^C}q)3y+_=?zmF)_6_2+xFKF55k_kCweM zWf-=>wI&(OXiQjbwa|cC1Kdb2iEKi{REX-yU2%amcoy3cdP$T(i9-xR1K37?B&eCF z8?()mavQc6U~5@YAPo|4UvD-v*Gy^8y1*8;VkF`sr^U_$Xy+bgDXx-ZeMiaI@!2pW z2>C9*q&yhPX5NFcq{S|q>G&{-&Y6k20U6AXV!WrWscB`Sfh-{70SiQ!v2GXrgNf6v z2Pr1D#k+Oe!}^yDB-@f)az(9QNn8|5@@&)l?p2X;Wgto@CHIU(xPI(Y2&%q8c2`anfwk+Y-y=At=xPK)c_TLk`_N-VRkK@QUx8FsMP01KnPm&yFS8R>IT%#!s|(0{xA2 z6Q~a3ZW~p`tS%Ku5znLz)l63&1fBN*-dC%R_DvVSRmH^kN88Z+`~!KyCCZKt)WZ>O z83Lh^G}LGDB8{%j{IQFjDn~UH9jB^^goAl5XUY~C(N7S)#MI!v*-6N+R`*73Ik=|l zg|yYCS*P5|9xQ7~hQVk;>g}_z|7l0leb{8%1kUEl#aIt%BsF_=X1;)K_9;G4UoA#= zq#0ZcAMe{a7Q9=7v*7aJHPC25fLWt-8VRor_oGG=(E|k0QRLVtk+DVHA|m5$*Y@6p zUjDA@*fOmfsJjY9D$Yu$wTBmT;tJAI`wsD!%bk1klYZbFE2HrAS78Xh 
zdh*Ta>XY_emaLWv5D~|kS`j`6Z{Yy}0)n*Pu&zhr>L#CgGijO5xleBwTA8~2ZP_^o z05+M6#-MG@%fwJ}R^iU8B$HfMAI*#yhrGBp)*zE$qPUsx(MRey`Y3wEJ)KUp?y9h_ zJ2I|;%7l)j7=C^bQyemG(pmFqL<9)81UE!Dz%rq{9rRF_-gy{dO`Rm;xUnz>;TX4* ziqk2d&yIg3mjhrFsUtwmY;I?Ag1WyvWwRc}j55gY4vps7NO8NCD2b+N%T{Xcgrhlg zSG5&DoofXRnsj^r&e|u~hk_yNeW-0#Uo!8u@(=Dl9`9)A4vTBaEfiFF^=Q+E8B`1(?`Jv9I$Ss5j?xBIOlAs-0j?8y;PSg+0 zjm%+E0XwM@xIhk_q#dVjN})AdK_V5%wB=(GALDhBvhxcW4#nx8LT+0C#KF9b!f6#a zOAWU-MkpTVrHeUzuc`+J>p5517!+D0s2Za{{W9XdNTYODpkmNMjf>A>_B1f z>j(1jnq?P;y&5q=vy_ zig`4PC1#=6Yao_vbg0~#qI$uu<@g%w&P=_z*CaFcFK3}wYXH~&8}zR0MC3v`ckc{7 zEG@12-X8lr)Wrth7fsongE9f9t#A0xtJ%+!fBlg!kh@u>tcgxm#s!EFqjMR*05e&U z`jgVHcp1Xaq1~N_twHT6KA#9w1b6tGAVqP#C5RFSXVvv9>4}dq6k+as2Y_z){a`=z z*Yz@)J!a9m%)kZ9{90B;*?kUmKvLEmSzjLvd%7QT9kS|v+lEW;3v6yH!woRgW1?l5 zd6>k>dd6h!G7n+Esg)sxTt}z7RGy2)w&H*A{*^%LCItN0iYyUsm&T&^5 ztqIcGwr$(CZQHhO+wZn*+qP}n_MS7z+`*SwN&Q(jP@QyFJt=FfoH^KmRNp>ikhTFp z&}!T#0dJTKxet~`-e8bEME4mlwK~k9-f7*pcNW4)+Nh_U|At>!Y-dWdw7Ah->pB&& zPLN&?raJyfpz0lM#;xbuw7T1x!~W20KAQ`w?M+zQb8;o&TL6Zs22%g?hEcuPp1 zpp5eV9lDMEm%O)+ZD)J=qaaqqb?%Ep?UXF6jd6_#Oa_wqYuDoU5PLb@3n0NWfzsL> zjb1X1+@)LKf&7|NZLV&XHI2NjIUvjI;}Kj=VPZF7$EW-a4lq%7@&phZiXm76c_Dp| zsTn|W?HKsix}T9N$BlWfdk%<3o|F7@DKgV!+9-dBg?BRJi^r5!zkXC2!xfh#~EGFmU00tKyk1i!2Y%}GP2)`uCl>vWS z=b0{9KfG)Vpx}Tz=rcZ}LsExggr}el*n~D4!~tb6!q^3ZH}IR%WoY?_B+YjqJXAO*_Jr%0WdJsg<6u-_DKQ*!*Q_^c}nJs{C^WM5j5kx8?@7^=gbIB>RbJ)#B2WE|9Rd33J1~n0L^4|3>sSZxPSS`aO0Yf;zp1H< z?{CAez`fowFU9Xo+f$b2Zz5tcdqB;9bUw+?G&$`Ix4smG!=lDk0Wuf<@c}_&$mk&` zwa<|8-bEGU^mvjz!cfh^2-a(2KMk6U{I$Txn<;RVG!9j$WH7>B?cOU`CP@FlM3R=0 zzcK?wFvyMO@zaXM*S{oA`D(LG<>{hEIXby4uLs8{2M`+Veb~mapDn;baLL=uZ5cCF zahvbh>61jXUh%J~VCtVCP?<7j4a@r7dSR!ix5$LkWHHvfD1BArPa5%W}&Ot3$M zxyvS=eE-8$CZ-7A9eC}y68GEFjlpI!^UMg5}# zbJPmaT;#gSbrD+JF&g{0PO!1$XS<&}UZi*Leo%QW>TKDpzS-L?Ac_swEH)7Xz!P$={S5V!#Iz9B%-0f0qp3pBV=lnJ8^1ZE z$Pr%SG13+cM@6Y#Sco|6k*CmuU0-iqeM*O=AEvx2<<7Qzf_^*nUFJ*#b0c?6X89*} z1bBr;yyDEIk}&};hYv%IuNO9{RpFOltws{i!G`*Qac5!{Gv-tfKr(sA0>7q1bg5H) 
zT}oAK>DEGM11^q{&m!6FhI7GSX0~qtYll6I%L_UbcTQ4$yIFFL$)E-AhM?NrAp&ZT zXqEFWvmx$_uaHZg-PylAoPxiH@)el(v8uVkih1PJj@6yTu*4)<7^+8@k%|iOZ6Q0~ zv$9$;zQv938)dlwa7}%g)vC^mw=SAP^gng+ms>JHv~3c~Kc=&%XaSHWjZ6HM?#?jV zGo`iXUn>AAa&1fhhLIoH{jE7yOozeJ500oXq~XxnCa5bLPly48ObB%ppmZk>kwsmw zoMvgDNjQ+g1fdm*J69|+FhNOU;HYoZ(XUoUKY1zmy6s-r3nl3Agc~zZUX_UTrl_ow zZT8CwHB>_cvct&jp6p*C5XX@gr;d}4XQF3<GT1-3wjI zQXoogI+SMa7f4$ECGq={)InqOHcSzJATR!AeQ68jd8Sv0AUaI!oMYXls>-x-M?({K z&GOORRlt>fII>CGOrhiB+B?aM-HE#u3R{Mx{MQ}YD|$*@jZ$8XUZ}PUV>QqUXAtt# zz>E4gFwpfu$ zU^tN|Ndcl;d%2Dkx70+joR#4>?Vf|Yf49ITT5POydrbZeVeVYnIR9DTkM+`Q$36NJ zRtB1TaMvgSELbxI{t=G-H`!K4?NgQ|adBr5uTpMTj46cd6kmUm-Hi6ZEKAPsw5;Z! z?V}q{@4g7xQiIr>SCI@;$8T3DG|SzFw0{6HtnriAWx47$?!|Dap6!8zJf;do|D$8Y9%!b&ld z2YpwE(P1DZ#7pt(d_56#BVHKVcw-D~L{!mL*7w=FT;1e6AQQGv3~i~m@@bZOhf8$Q ze5&+(3LxcqXqh!U$Zo?J^~kg6>w2gukLvs%Forwsk6=xj^_{&UbN zW$}JjPjBxT6GYiMb2ua?WXqL@=?uc(u=1YWqtAE)Z#bv#kr3xZfD7o>3$%nsG4q2C zysDej@3&}<`bgkl(IPZ2=XL2SMKqz|vksj39~QxX;9F3KXqh+;5$0s?8>-_`-&c*>Mgu5 z$ir64jGg!Y>i>~RzIQE+IVxI!{+Q*<0VJ)-Aj8m_D~61~9`p zve_>?AxSSW_KeAMfv8H)16<+d*L=TsR$mw2=*>g1U$elGuZ>`?aCm0vWZs<6FXP9?nYhXyWWA7BZ-ynNkO%V#KEhv4_=cY zSYv2U0_g9I)CcQGQah(??&#f=e(L9}k>%7*%EY!5`hb+|xRb2=MVy7LV_|t*^-FyU zN_KY2us;lu8=g!YFrQUjGBJ4g9g_%<3bbg8II<$SJH1Lo^EdKsEUR9I8b}B420c=S zpt;`4W9uXOb!TQ{WItpZ)Ea>;24Lg<_thdvfZ?szCUtW*g>|mvYtsMZK~J?T;Zd}! 
zrn+|TB#w%k9lMP;(x!Wr=)N%$boKQ6pimvd)j&$=F?xdYm>mrJn^`4e>TWYHC?YA= zNbGpayl!n!sm}(%6=ZOu1a3yu=k!6`KwCL;9rOC4!v+C)SbS;u5d5S`Cus_M86PS* zu1=Ny1oY+dNk*j-7r*D$5ZPAymhh>Y&)c}bLO*{5c5Sr%qR}OX5j~KnohTV0TV+@c zo0;{HJYVciiX-0W5GG-<)1j1(ilK`Iv3iy z3VNdx_skzSd85!Z7c0vkxWDP=Exp4ke=V8FowDu7gR7Z$mX}sG=cT)tz>hba!kl#%0C9sp<@GMr^=Ft#+#!)+2_Ai@ZxE$xOLzya3qoT89!kxt z!3FJ8gfijJ9|aaDg@f4K58BInaNJPIb&kLGYEu-Xx8YvY$!B?{rLDwjG`ItSvl z(8(`Lv@XQ+cW8mlwlV%bA7XBpSh3dtLmw=Ba>Z{Ic>okhzV@iif{#DOjx=Td!w zuA5sL$EB_HwfknJNTj2Oa4nv!K`58x96sUxcPfW3e#@JStTPe0uf8U$$rx6{#p3gI zH+UW`tN7qL_{{!?Ly6z1yV^rGy7dj0{vR_zqv8wgJ6d`=BEzzy?#8#Df~|@+h^&%M zTzGQj(y&KT^yEPG>B=VPCZe)U&y|Nhc_KZseBSdkRbgtdmJKW>k2Ey}i-e*Kg|qa( zs>M>Sg2g82t3Y}rl(}Ap0`ROS+(Rfm-**faJP7jjdaDQPs5HbFHgX6I(YKwyZ~ik9 z^y$%68T?S?mNEy>QYA)%Zi)cFII2Qr#wu!w{iF9i<1_*Fwy;UY6jXw&_zbg~m)eEZ z8UUs&o5j3yKyx4e|bqy6Oj0j%U}VNQf3f-l*{EB0Z->j zC*a?pH=-^b`0*2FN$6gGabWvMwu^obB-Hz)fATv;1?*eon2Zu&f$W!h*W{Y=6+Yzk zbCTfXOI98g&=!Z=01yq@J1Hy`PFePvYPW>cI1bpjXWh8cMR+gQVPD;2YvW1m}~09j2`{j`ro$m9nFj zvGA0x2vW5vq%~S*rcRE%?}Al)WqDEJ8Z?;z$0f|#*1mf<_ZU}n~3TMaog-Ry7AbB*U0Y3pXdJojoq z8e^@3u#@?V6$%i3|17?%%X!cbt@w*oUZ0&E8s3yQGqm5|Ak6{fe5$b*0y}moO z1~CmV_+F~mI)@xbuABHvL@^Ohx50a3gjph$ROcolq^j<8x@tSqgt)7QwQ?7NOdv!V z`URs#Qt+!!5oap#xtEHTciHit;9rHHFJQ>AHFCp(H|2B>DZ+mf$3~Dx+Fr)g59Ez% zv1?4NwL}fa_)3j9m>DAUcYf}oAN8w#fi$Wr*;?18(jw@@*M2L@GH@Qg#8so3A>x=L}RWGVlf z(w7@0MOkRx7XA401obORtr!Yx)sRshor+N=;o1o!JeZ1lYF&VU4cjEs)))G(>*`39 z?X*#kzXhC0XgoyxM7?)z1;q(%&~t5)D-&qO($g>>-63km!foN)FRM3mq1oLH9W6T< z?nlEZDsAQNdAOW-@@LVs{WdW5J#HG}7F7#HEdda!Al@zO050OL{pAiW3TLWovj6^r zW4wLl0*|gAMJlR&Jsk8T{gpn=3qnB}dVWu1V{Ig@Xbh-^1u%xPjd(lCu$9 znh~BVPQs~oV|W&R_YVn2=vjd8U^TrWWOtVqH%c|95GPp1EPbw>iFCGqXaHUyLZJm! 
zi!hsr+zOa*dmOB>ZR{?T6^f8CDw6{d+kd<(vhwKgaOAKa4KE^gTnYr9{#N0?T|xR( zuO^_jNbb|j5)nAX4}O$wbtd!YPNg+aqJGSz)wW?vhC#7vAk?3sBx5=;2qNsl26Y2t zZEP5^5B!t+e)@YRvPrKnocl3fX6g>#?JupqVQC{htV7vuaO%M61OjLh_615NLToL_ z5nvG1eO9;?mPuUu)-G<}o6vh!^;)_xv+-WO`__*~Kv0cA7q|>#g1=~iae+Kux$RFX z`zxmWC3WDai3eD^MvNnl9V&%b+RQGi*$^qOR6l#u^T4-{bnih?WVwu_*8{~{!{h_B z6>x~VsEn^Wod{&~D0upd72?GJpv7|p0&|J>FP8DLXViUQ>SXF_a(D z;o77u5M2aBx?)I4KU4n#$kmdpqJW_(xB(J(JVzUcd7_R{o|W?u-xjDq@1e`GNry6z z34_CyXU9rP!^bU#xJ-%GjoZUtSN<#i#dwL7R8%YQ1d~gp*?;)fvO;`QF?twKPqx*4 z;_iw8*J?G%zlp+gI?49}xPid%WY%H5ncjIF;sl-{w8wGd&;|{}jzvC6q8d9ZJ<$+| z$ovU(-4{ib<;uT?{d5g%!SyG$3VEvk!h<&zh|j%MbmM{+5yGXv{#R z;lsPFqeJ3N;NbDIQFR#Ghe@eC|AfFcUImiqe7E!u2#u<*#S;Vtfx2-plaWM=uirK3 z`p1qu3jY{`Fk}t$^31?DCYJ3&>0^$ClaBZBoQ(S$ZNW#k4(yy9np;^~_EHN-(4pCp z`V1r)x?cF*^HY!fd0*IiwlO6b4lS&P@X+(ofhn6xTnM_hD^K-L=X>7e(w#DK^>=Ej zu0(VC2N5h>ot~kq3e%~RfxPKc6JVBtNcg2Zf1B)QuE)C38n(hR+E_BOdr9|#)`~jm zq4>baF}+ir^^h)cX|KpfAxQ19UC>psZcwy`+uyM6CZO@ z8!3`77b?lVN#cY|$wm9rR|BvEpX4E*m{0VB>vItV052|dHQ_* zR2!{v+*7MusK1##0Pj0%wpg;5eh-uVJOi({L;6f{Bd@59L%2l%Dv*w6OGS0yK5=NY zwPPs-=pp>D#N2^S;>6eFA}r|o@Uq5MBmQd*Q7VY;bzrj!=vDr~@;>rq2sEu5=1@e# zH;ZNYkVr`mf&w`AnzNtzsOiu{0;!By8bTEBtHFhu@RAr)d9|A%_|Ap-+!jLy8?C&S z2!<$K#yaDmeG%YwY$*+k1}-dnnkoqmzE}q-`>)RI{;aKW$^P3$mFv`=@be?zo)t75 z+aBW?TtNRd_MNly_8Fi$9}-&|*23dz9)}_HprmA?1(SFb-_Htg<(va!=T*C3pt#&rViIdD8UzLmwa-mnAP0MoRJzf~Ux%5}85 ztq^W_(F@UVnv8Fy{}=2vQa1it_|_h_5Q7Lt)MKPRnou8#(WcWDXmM^R?f^Pqv`q2K zXn^;=N*G6{Hp5;G)*`x|a^txjF9LhA!>3vzMtk5fyc zp9YEXwLOFsQ>GjPTH8NAODmmnjOv>|@&qxR4qeK$u%bS+08k7AKrYZ zb0T<=70BxHgNU`ML%!(d9fPl#0*?~)*2qnWg1KJ~*#tnYHWO))nAZ{NTfa!??Z|F{ z!xrY-LPN{=yT(%XLabck-hS3xN~%kw+~mJV8pO-$PV~J%x9M^_J*ckJ-+**I(hBVr z|5avlg|mRCfW~mP8En19{<(E!6S8g1dz>j=C)BY1SK&a*rXxZCe#qzL;ux+ORK4JM z;hdN9#yJ8X$Uv9A_M|gU>4@VEeW(%sXqc-;FkSOUE2E-G&!tR=fMfvk^=_AlaSfgR>S!so(7eSLmejC#lV1tO8ySq3*;^T-2XQK>?r?0DG-a7NNF=-U3iY^ z^bbg1zap~la>**7xFeTW?_}Jw3!MhA8h1Ry1#9Lf-9WV?8eP~Si~}cNuOLAHslz<$ z4)XzMPI@{F1Z 
zFpe+8IDicErb&?*R+RHOktTWiAk^E!t5SXm1(u+8Np~7kUM5;2?SlH=bSd%mc>o+; zSyZ0mYA-&xjllnSEjy&7n4==pod0UuubA`Aj~5?v2Y&ex{V(%Z{M=r#p=2C^_1^x} zZ!d`d3m@|o)&;|>&|2&7l!;xc`V9^aLLgC@iwRj@dC42szGbj3*wHkt=}0HepmMDGp7mSnEAuKF9@ja!cof726-{Boj#VH1CxGN(qTYCVGBhK`?0vV6c` zyj6yz|3_JlER|>iKgvpN2=_tZQHLWEPD4xN!ClT| zH0`c-z8h<}C&&l%=9efiPHtqnOp7P+a_qf~JW*$4Sz;aw)~~C5>B=| ziyJpP07!fvP9_MKhy95oF}&Q5zifi5wFB|Yf%S~6bA!CKp@lbDzoA5`{wea@rq9JZ?geyJPysGng6QYrHxcrh|PQg4)u&C3=?Z-$I|~Vk8DfH z78e=U*04+?exZARq4>IoS`mFgW2peLECa-mRa<5ja@SIN0&s@V0UKZ0bg70@Tk3M_ z2VT{%E{+pGS zSqh;K(x4xT?g$PZY9W3J+@Add&q%qZou8lU<=sC?eNTqBe}@Us&Mm z(a;WJKgZPR9-)6GKIpo!W%T6}L7>BW9%kip-bWa!AxOY^&yeRNGHqSb2AuSar?Lb6 z_VUs-!so4VVb7~YRZUXW(!r=E+mAf*ih)r;d^UXg^l8mckBh$P3ALDssJ0%B(=0`6LAl=nrmUqnAeKIEzK78R{{uOT(w zyrl%BxAgHZfKG`u60jjzW&=r=Jvw*QQrQSdcSV$VXPFq^bPUNd_h1)Oa0ivMD4q!O z@0f9|Ec}ISIf#pQgRbir;}bRCb}^X1$c_cFuh^QV91J47?_Kvi1Bn}zds)EnFrIk9^RapV!nAtq+UU9_HdIbzFUBvM`mgTBC$C-V_7WLtF(d z2$UGNP@eS(0L~y?&~m;D;+Af~-o-;dx76+9Pi?j@8~9UV@*5oHE6emKRzjcSn&M8>ZQ2sGT%*g42g=GRH1@u`;5n#4kPRbp zPqo<`D0ik?%3X0%!n+weep7?A`T~!^H%0b=MG!&*9a%!oV0E@DSMfIxmj1y~-sI`A zTKlaPY@^~CJy9$DUS;DN@_0!4#c3+fOG|DDW9Bnxx-efy8kjxT;#UV8*P#brw~L;C z2LJBbm^^Zy>cC`oGMYg<02P3K7;#%)3~_$bK`9p=qV(wa;PTOO1{i8;5?N*FH*LB{21aU#*{nZu<<$F5mHBBMA zBr^zq9B$V62GoRgLX4@F4O0ge9Lb7KRs%TlRDnItGU;HTMqgb{;YzL3oSLu|Qy~I~2BKPC7x#&4Z*oLs<))~nel!sQ0adU1!!O*a5!?azR~{3Ch5ZC~ zEc7H4G!=3rRG}1_O?8T9Ci5BkrNn;Fa1q19F{_TWS}Xgu=upbL++`io$6u&b6#Pvn zqD$&(ks!<@tL-n%8Lbo`YRi!nrFohH+?bF1vRAB`!lbfUba1j#m1Bwro zLmp$N9aTP*SWxWM8Bsvj;kl3w?P3?qyy3d<2+?-BA~bm5LBj_S2 z2(Byg!2Teb#g8*t@q(_7A_ zrpjel*~-$)WKO|iWt8w%7pl%Y=^_%Ji|N`l^| ze+>%2d8stml1N)5f5}$K)C{g7u=*0Y+{mm3e6BJGK-=Rs1f=-~h~VmFgOg067RC)c ztBME82r=uk$e7f;n0@oRsA7&tW*FFzQPGB3E-d_XC80TKEqsCJwLad~Q1~jJTBJhW zUzMU~Q6?+BX#tN6r!5LU>9$O5D+8e<6ub!yf|y?O@8C2uGJvyAelN+F+&hRgCB@8g z7*=psyv@}@)#bBJxf>_O=~-~Pv^azqqU=j8&zU|(Q8AxCi443PBc^v7!%%S$l z;|Gb2#sBL6g$-q?w#&YlUxLOwt6p;1vve{Nl#_NsQS$z~k;jNh&ku29cK#VCVWR3_ 
z2#_??=Qn&fc2*SV82bkB;&dOR7893Tbq+_(W=yTa7r$@ECTi73gUi*;eKAe5i_nSh zc24~hFeh9%eFEMamZ=P=*jidY{?ZW+Gbq7#R*?xD?B2in1irA4n~UP;Uwr=Dm?QEn z`btxUc>k2#jfZ46$g&p66#r@fvD}>kvg) zZU)W}KXgEl?o0)0d(Nk`>u9FzNloV-P1r(1;-Kzb2STAW$>>` zG2qmaP$HFxVd*QyH7}CQz=zeL6i7Mp8{T!U>Unxx)tF+uq^wq)&f*^~k+-l%?KI2= z1-UUu_%4EmZJL1|6jf=tRINSsvDNyV5PKj=7f>@N3g-zuaj@x3QBsOs%i|OZ_q1V9 zv!-@mjMm4TJ?&7%V6EiQ|7e>@z5TLTa8I=On$(msW3+gW=8>fC zp?j(H%J8cBWsK%uI!vO0C{raLBomISN|!m0uOxCXAOd1I?J3L#w_jznGFRBQt4HCH zRO|aeyy>0~GMF+9S`tVv^BLtmQP7O5mry{d=O!1wwiILxt^$(|S}(VgE)kt)$)83C zL?s?hLs&&#knVkjn_v{SW5hCIg0$Qum|m$wA@wM?N(uz!#2>3G;F_X^z-$O~NVCC3 z!d`Vz9m@}eV_gb!HO)sqmA34a3fnu16ki!%5`}Tfy?V^__+Bykg+C(yEae5w8}o;g zv3W_>pd=80>tuFN%qjcwl-D)TPF1C`4SJ7|H_S)lMbq+@s`EMIRznx38AV1oz7eJ= z&WSi*yGslIa4Y=XGF3z0!+OUOxl^>B-MM?a6-t6PYR>jmoXJ!P7bpUu%u-;G(06fR zjLzSh<_0w-ppR6QujDItUDb0XGri-T%s02`i7&?ldO=CUI!YC`Ei-WzSAjZM<0XM) z=5@4K+jac2>_fgJE7v>c;BCX1)f0RNAL$$}z)b?41`zxo(A_q0K@4t^6Cb6U1`m%S zjb6Bg^`wbIzmIZl#1^Y)0nQt?X z#cS=NT65Bw8m#Ako$9_X%|*Ec=NyHB5cj3sp;-`m7VcRr8Zq8kgn}XFIIxF@hWHh+ zou|ur3^C9nA0~J6X_9F58C8Z&sSmJ&(vOzh-&x|1BjEQXY`M<368Da zn3D})`$ccYf)R3&xCM@kx!30ChwGF?Pp9UI%^udB?SyhGUA&-D?bf=et&bwPiogtx zBIUj(sv(Qk!oxkwtrAo#p5v*-@@vjA!)k*zp5PT#cV|SvpB#h`#U*Z78dsCZm;OU? 
z5ixHU9y!|l(;#xu+rP(8x-7F(&XNYcj*~v!^aY=PcpLuNHJ5QOA|yeXO|6j@!@i$R z+oc9n31ab542iG(23stEgT6!tM4K-1g%HJmPtrCGCjAL_gZh}H`f+nJE^U5etAp*nYGmDsRUNF7nJ<;m^dWg&vtO>CA-yYxm6CX)_!jOp!Lc%pVmOA3uhr}BZlzp?q|3PeUbbXFvTuw2}%-X)D5$+=QT;8Io1!gUx z`$CeJ43sy;Opn2yI*y%B4$s6nXHkgk4c_;I=2q}5Dx`XwMvX8 zMJGNY_^&SD=5^8&pz@xl7T8PW22rQ9d4c!$p}tgonS~w}hUV_v@0f6N{5&S>>Um)=R{X_Uq!T;(1_oHa(mo#&o=jE7P z%|G-wBCwOiR{7@24*!EI(hLbxF`?d-GbDba4@5@Ir5vo`lKm|PmagFiA+D}y_UTk1B1RM6dgUHc3w1;YayuoX$sksJ4aJQrPgetA;=HU84DPuR zx+?xE@I{e?&a!iQJJ$%zm34Ohq~bU6>;2FseYQZQB2#3@7neMW0kUM|h~f)Kz;V<8 zev0|T%iwB}qOf=WV9}|k*=5EOK0VbRI>7C~L)p@R=YI-1UHL+@WiWbtei+{2Hu@Eh zj@v-ibwK#(?2s>QNo^+jRVp#Mz`mGWDnqs{x;^bd74Thi&%COvSwfM4e^@FE0Qid>?rbGz=V6 zUuM@}#6K&`tDQ5N>0Hm8+xFeJ91Uu(g8h0jk({YYxVIZ;tI?TDRi{LlHbG5f>ATQ@ zyw~6&96PMVe741OZ8>e9-nZi1HuyL! z%(qVk-J!6Hw%C{{=>l#pZdiB&!ol#49CpnXhKIgGnHY{hm;U2;jh`}H_ej^pwt`s# zl8pbak7odvb=yjIWpmVInas+@SQ!_$N~gX*uEB{R@R7b!XwBi!oqe#32YAfzo_u}& z(qg*$x>Tji^7ffB#dq}OoRP)Y;S4ByV|1qwB zROMP!LI+6fb&!q_XDdJnt72B)`_u#fUzg5#2l5p|3Gh#qoM&f0l_u`b+FT5FT3}^+ zg*X&DFyFZg;COFphn;UsbSq}G!&s*myNC}slc@aarj3L7G%S#rH;P(cJD?}7+I?vH z`aAn@h4=RbW;SW(aF1AnoLjIoO11T|yg|U3dlV(|IS-vh?k^iWIGq=TG8YVPkRZkf zGix4wY34pJql1(9e*|5Sf4UvOLfs6*QAIA+DD3`{AvDfFb&cJDZQDsn>qI)Wu&awZk zgi~A^doz9x^&l(jgRvKMY6(}SK@^ZpR^90v5@x~P!w_HYfM3B2%A#&NN+e1g#EpOT zFgKZF(w2>a4k;p&-US9W=a|m(NJ%ps07vpYKg7#i7W2UtsJ>G1f1iH;$}9OgA9?GE zGAOH3Smrs*x*pr1$t$VeBiYPYPV;Z}cdY!i63YcYSTiSE&Pqxts7Qs{`o4Ra4>mpM zUXf0iG^pCAvsVLGMH1Hnr7LF}w>E*TB>;=!!+*{b zZrAI^wTV^JZ4U9!j+G0)KH7UUIV27-__tpqxf|w3Au(9?=Jps>Y0|Nu-0RxFV_~--^>31mqE(&g#sEkYq6A!eKXBwixm2bn zphQV_(UED+kcZ+Ssx+f(Tvlr>Sdn#3DseB^Vwke`EG6q|XLry@@P z6_K7oK%fS5&uWl6CF8MuHSv7HQglh7_Z}`j&8y?}MiFFSuVj$~knAc{L2@uUca_lC zWefa@ojEHdRz#7o#OL~ z|4{j25}Ujvdq=WkbJiz7_Yntd{o0ikch$H33ki5k_yo>lkbVg0!hzhKu)tnjs1$mBKeT*V zE7iEf-eml(@mvel8s`90A@kkikzG!8JT+gFLmq*K3PKkaYjrOAv>GEo;_*gowiL9d zd5ph>hEql5+bsS!QXgMVB`Ft8PrP1O;Jaj^-m9AHe@4+7+5G^2s~@jc4B}yC9YQMZHlcch$}4R|&RX@J8s~eUdqJ(wGJ5C^p#zWbpcs@-Qi(d5wDk=co 
zU>nCwFrMJ(_ws7<;I0XXLxr0^J2h{@;$;7nHkmeb#y^zjrSXJV$3LIiNh@OTQc%)Ol2-_=U9q z$_xcE3~sYI;Exo+)V>KJEyJE=3Dvzra3xfgMJ0gar3E4{wzD>T-OFYR``_$UkZ$bC zrzQUXd;Tvrfr<&I4y{@7e1mwD@qpcFP#pd4NW%!b1H)Pkx0qaXLN6MCx*HQo07hl*KQ&uZ+6bB=&nqzK^W0! zjEs9fmc3!m^rD1M_eq7-$bVzsmWTow8 zfd~{vyw&(uh*>2+O~Tv9sEj~UnO!oWE5Uy}#Zpsg(F2^*+fUJQ=}zmLD+gwrKI^`q zBz_9_$9fF*oT8!9q>SVFxeFid0cyJRA^Wq%EFxMqU+RtUz#2h$KOx6Z?^us_CGzkg zoH#zYB0K%5idPtk9LP_LXkPB^m2ALJPJ)BOIA*2u)punM9kJ#?M}{V*7Li27V?_DK z`JuYjRQW6*c=+{4IG!atx9Vit)$WCjlR3nB&AiaG<}UuRg5xz8u0}w&!#=;nrePS9 zR!?2tjAE>k_mcsi$ix&l8VKd_IE^9}V7urgW)Ida2 zc|r3Y;sk4OxIOj=nQVHIXBQzZNx-sYf|TKrR0?%P%6Mv~%YKsuqnfUViA$%}InJYq zl-31HVs*;K!67Iq+9mR=s-B4;_1|PGf~tJ@uoQvhST_lWXhEkhoF{>w23mdPNHV58 zg>WYeWKXPww7oKk>IEFCHYeu{;qrLnggIRg2Puv)&3OP5->0T+9HjgyXtkV!s#yQa z@Tc?M%UO!CH<=Ao?LosGi}MOa->BpSB+5KbmAeLnG+k7MG`uXkZdVCH&&vv(PzgVt zcp$p9?p`Y0J;5peg>Q$8179ublg_`u)~-=S+Xv6@ z?}EsHu?@-bNgkmxTid_8MVKFOQ#627aO1PspFEg+uVLUT;F6>A)5X}_c&9?~!* zliu^vjH;5yFeqjUaA064U!YdOonhkMjkufJkvsj{Z+k?YB}{$q0z7h@Q2zG3d;G~0 zZ%^(-->CXo-@x6omRVK_s0qWcj~et!1pjgh(9G@%t+b!KiOOVWR&Oszj>khom4 zhZ`;-4$qi3pCpgU1egeOcgK;$7OD-{HaYGJepBgMTW`s#E;^&-y%PDcjRJ*tsW44M z_}|i^5JUOy!qN{dM4mHef7QJ~bKZTZOO@ zyfJt$?RQ^eZf+_pi8E~C1W;ljf2%0)b-Cox#4gtC5(KRVHqaU9Ky2~A(tW&>6YeYt z4yPAeJ12y~K|n2?+C?+?=IvWKhqtEM?TyeW8&CE_(#)k$FaxpAwjdTM^pxN{TNm1h zG(vCReTpH7Ycqt3KAIiWSo9Na(M;&0I-B)ECN3G1)z^{xlc>yt?IGd3-AkMZ+44K= z)?7#T((-aMVcfZ`arHG%B&TY!2#z$8=Y?axqa+|0a?xNf(I&oAK)Rht=bd7ZXP&8U z-vkXfS+Iv?(R&+t$ySxcCHp^*M)qvUZexI0G%U5ls-C_@QMNt~lp5saXYxv`WP)kE zBY6FCukm*!4XK$)Hj{(W6qmOfWYF4C8H-nVyHoRSw8$6hdMwtQU?}H|w zytk!l@xj||^Vc+M-2W6jhiCD-{Hx;N3-GJWb-z#JuG_ai#j`*J841|<2||JUq*UGkFG z0}I^1pNdqy7Aw9Jd|OJWgTmg}1~R+Z?I_^i)i;Dy_1gyn($|SJUGuO*CPV&4f_~BL z8S3lVA(qKTs2n!jg-_dqOxz{*yZi|E9%9MLCyhwmMMMP`KH1Z6HkPcEeBADK1rTXk zX@GY?Jh@F`w>qtG|1%>*Y%&Er*vLx*$@5XZmV>KGo>jE2U&D2~J+~peX<`V2pLjF$ zMjG2&Wbnd$hxYN*2{L(?HN^XxX8k1QU5$b=UUm`hcZaxg7E}mzMI$TDU;WACb7^Hc z)*tB!K#L2eMUJ$dx>cl*$@*0B&u@C7164&HEfrwxEN+*;k*LKnN_Z7Rr(Rf4_lLa* 
z5z9@I6XQsX1~?(K{Vrp#g)vG6ctT*Ymf(SmJ+Ln^YSUMOoPSFOV)1g>^f% z3Hx}h3%yY5?ezKX+O+*+BI%ISf1BQkDzf zlG?)CMlNCvHA>W~z0%Am6>4Fw*TBHwx^zEylC|-O&kc01Q9mxXZI9_*SZ%{c*l@I~ zQ7#L{BM;n9Ga+UYswx%*{tp6S1j4(`+hgvTiH~A)mPQ)Ir$6>3_NO+D$#>@*f13{6 z^kBRt-%j#`KkU47fKJQ#b8SCva$8hmNUi9j2RlM4L9b;7a1Dk@P_Pn9qeVz(-V(ND zatzS($**W)a;- zsSK4kxo+E|`ErcePoF#*yTU+A^mUpM5z%it$H)EtVYYa2I{I{{o0p(q`OUpI9TbCB z##KYU$HUGb!Q|WDNj!-tR}#FB zB0TSqRC054b|va{gMv|JbG{C~k8U$YT9Dx&1j6#xHyJ=&rTqz5QRDEMEiSm~N@+#dVO)R#!>v2ADf|5tw>BkAHm@u(yaxkv(F# zhf^>n5JqFRWLkR5Dm!$DG48MSUDUM5{wc4&ukZu{i?W@d>cD_F5IIC!bBUoC+0^RxymS_ew!HqX^T~Pbi#N_;mF>esefK{>>!Dd{ z-|hCAr>hEs|L^>7gLzpP;$Eu(RE6!pp`#1M(J*%KJMs`_%ewldxj3ReZBt5OKYZ0V z>O;w4nSmHj92Eh4j(lCJ)TS6{p&q(erDaSZ-nC(Px4RehO$qBb2V#49;B(8eFM&Jd ztcAu%_jdj=Bd&6yO?dCFx~!Hw7~a#e_x=(jjWjIW83$AU+GjTF$XGU4F?vt zWY^d_M6dIXUQ+!HCOKm){rEG!BG9@C?{j9Dg1GAHJq0I-BjX~`GY>3b9(G4ohc2T> zT}O|W4AxyaJZ-c4|b0OPsEdp|P$Ho!K7cFfZ^e}};H@;$2A6`+QQrwMVn1&}a_ z8%%y6TezXHOb;uHI0Q02(#VgS&otTUgaDVEgJFM^gUWBrT&qU;YjPP%$3{XjrE>2S zBGbDcbQaa{)%-YtcsjqNxAL22deu1Bt$>xo6ZqhbnoZN09Vju7&9-fGc_siSmVPe2 zfk3WJxm!HO4lzTL8P32gVCH)jf!J=LFD7L&>d;k^c1g43ZDhrVcySnZer(dOPS%ZUtToGK*5Dd} zzoX=#-`uq@V&cw=>o&(KhKn6KyKc+VAa5i~d@TdH`DZ#Jm+*aQL8K7Gl7{Z+SVdpQ zhS(5)8i$&B4Af-0$C*1f@`<2IdF&{;lFIxK$ybu%AD(`FdVR(!d3wlut15`DN?INR zzsAQW&mDOK_A-v&t!>OY9<3l`LA*$Vd5Pc+zwrC41_?;psU>Bs&iYn7yn*)9WA&b?(6GUk*KhVlFwKd@()n$#1FH7$#$Rb zIaF8qJGp{%wSegSH-;r7Dc81@dm()-$iKg12Q2w;R{$Fj5EpUa{(tQLdWNg%v?JOXH6ZmC1r|${} zd*OuZNcqpNw#Uxe!j;zN+@1@+ks@IP!vN58gMXB2;sq{Q7fp%$SSnpn1<;zU$<|{W z;0#0+5V@ub15s0kXAfByzrH;~bfvkunArC;>o`En%ck$l??Pv}+15@`Kwh51*Q?9+ z{EGr9PwPZ3Fs9>HfdkSO_7eB7OGGC`iDVr-(gAYgWZ4ZJulR)#1@CTa#YsE z0D>lqMIOg6*Vq4OwT110YQIlzi~)6YBQhZ&Q^r%Ct;-{%$HAPZ1MnB}V0zQZf}IVw z&mLJZ*28W_4&*3gSUm-QW?37crs>;+b_H>8OSm<_2=2ZkR*j1tmg2Cztz#a8x#%X z;po71L7brPhho;PcH#esURgBfHuK;WF8l!ZSG~jevESp4!?LY@BAlw34DqWwY z-KHOr<=u0nDQHA4HZ#*{0BcRm63zpkSyYx@k1)goZWvt$iUQ1_n&`R?@ZG0#|HTQT zZi2(jIj3isMcwUiHs<;ytD6>Kctru$niSRU6~{{cn0v 
zXgGBwr}THiKo;e3%zu*#oK)(P`f!o|ii6#y4kK%N(#-ST%8bAyUpNWqDrjUZ_;?oc z!Vulnn7OJGs3%xdN{jk68(RfpKmD`@I|}$al_=*SR1yseO^Q!eX8>JEHWHfU<&h?a zx+BB`ZKj)uOmun1E*p?j7Le@MZHClkxlQKRmt?j$=(%)v8Vh61Bl3U%jkB^g(H^z1 zA2IlEMu~DKjotO+B}bFO1r}u?mrCx;>>Fln2^GH5V~X@&!S#ATdkk`luHlj)`mDN1 zuD^W&L8{ct(0pG4UYl9EBWT&6tl4;?3%veGx3J0BqLljXD4#PT=pVqpynlLLwG^x- zkDMz!+l>e0qq)n%dB>9@!r0vq`tv2kr@3y(;p4n%Jtqbsz;BXxa6G!B@Gk8jOiB(- zdj~9n=AI;jSTpCQVSY!ji|xq@vMOl0ovH{j^etm9fl*i?ef+hC?63M-zzvP5dQ{r& zkT4gA&$0>V_%iNG$czUAgdKi=X_cm9?Ik!|baAf-xylyu;$n)xzmZy0=k#$%O0uy7Ez&>QlkSQ}MuX3Ew2gBqWNY@0dvX}q1%&){}Et>eYA_K=*I zdLB42^bFvFS(7&Yo0vctKZ_jUyGSCecBd34v#%%oE0uQSp29@TKFCDHFJoNW}IhLVesWgmm~=2tPizvdFh zT6%rcD$s0$S2J##E2_psFBFzb_o)t~)MN=WlIbWqSWMJ)%}<1oNAI!bSF*r3mOo)^a`}!u7R5#5~IHLE4i$^$_JL3+*ReaC*Yv177*s3#h}6YGD~Jj z+~lM{D%iS>^fx6zz_H!$TyA}`@$Nomb929iwJTJthsw;I%4)D!lP47$<(c0U{(08x zH7kP{Y%?YqUY}le{UCe^S-GX)hHiD?YTkhEJ|22jO0dz`u-a-q{Sq5l{OsI*r~?#= zs5ET11}0CD{x5MsCF;ZK!!})8r_&ZcN+5T-d`X5S0zl-7kGzpK<}byIIA9nHR0LFp z!z7VgBT_1=pHRo+)S>UwEaL7XQI6;(-w2Q5i(47%_+n6o?q<(PKx){TvL2dVMeClI zEn7OWTuhoN6twpue`<;%Nc`+Sm)7T<)H+Tu`}u3c=XX8e;!<;v_1QP_i3=ZTA$Jmw z!gy6Y1rBdnBWcLtGm1)~;*N#*FJ6CGUpnJe%zumJS@2c#D#HbWQm8-OA%W;M25Q~& zuDVLJ=+@q|kNJb!lvh@Ju@pR1sdZK`6n4E#R6z~c-7U1CllFj40tjCfJZ~s%3nCP~ z?LkR4lR1;P&_2)!H}LQ_we{|8{kf3ZlM6C+K8Zz%s zitc4}e$Vh}67bR(_PFxVDxdY925AT5@#|o4-pEae_yQ4_K*Th5fHc^BnOOT%1ns`l zQ^oQ?yXd361G+N@{Gvs*G5poK5cZKBRcD+9(uV7RYvr(V9p8BTD9>xX8KQYOSQv?h z+Uoh-H1$Z>@NbZ=g3>AVg35p;|BwnAlOhuM?T>B5l86U=gq*3(6>LGSd}HENMWL6c zo#M~}OSD{eB(DS8^yw!*qB-(%{#)K;-*<8QcMu z3HX}%R8J^bj@(rfKMF+-HtJ^LZ?4#YLO99refA=b=^e@QDT75Q9m^dJikjj2lvWUZH!WPGnw#hHT6hRQ z<^}K=-i04{$#PUtQ+LP`)b|_8@L>LU6$L?*y*dZrVi3i+;9mugUHJK{Sbt}NoZt4d zXei=kKY;#&0@&sRy!a^QTZ{!pm2=jtI{oFvhY!TX+(U^8jIdEKAp~zxo6aHm`Bb?J zuRQqjGRakBr^!PttecI_F(X_tpJEWi4(^w5?;zniXQbs7Rtis~3fql@GNcnYPnBCaP)HQmmQfORM znxna;%wNe&VSNn{5X@WaLt}M#Yy4SjYNDo!}<1x+*+6pXAOAh}j$Ql~t2G 
zQ%=sSUxtkUU-T1;+#Gi$gVw8z=P*K>a@MvG&xEQ>EX-h@9BEr8pO~)xrfF*=kMs36o(-oDpikR^H4CT8+Kw4x#`p?TDuoU-?ZnzM`-{xz#t%A zpBY7tE!QKL#t@-in2`>iO$IEaeLZTBoMd&=TA|YsTBXZ*nn;dx$;_B*py#Vu6B(38#Q72HXp(R_>B=lrty-!vz-CiS?2$cAY~uCpM zRez>)lOoE&*T|Z(23sLm1l4>tsnVUK{TO7uc#o2rB)X)8Nv)3U zN^ROiJ@YP-S0ddz#DH3#MiG2apvlsE*=6KkBUV{yZ@tOX&Ms$9e|Obop0wroR;xR- zz!_4iahje+KD>cXyT~SkmT};jEW~&b9qh0dEs={7G2xPOI-XDUJJ-Ka~0> z;#20PoUBAOK&xJoIoeb~5Z5j&W*dPWgq7^Xt>L2FE$n_e;iM1U5E7B-0GsSjZFHAv zqx7=-1HMz}NkTYAe(!4PdXuk!8=mJnomx($ezA7yf*WUYyT$w5qNaA0R`Y{M|BfZq zraqVZmNm;RE+wor`GR=@WMC@IdqYk69n*U;_(V&C&1Fj*yON5TXIcg{V8*A&3r$;o%8?@Bn&q`qOT+Vw?AYc|{)YU%Cr+`XF)eum7Fv6DIu^!EDQz8nV2_|Mv- z4{`p=gC}cb&@(PUKG2cIL(&yaz?nh?7?PK-_MMzj*cxrkgsN=hn$V3;j4RmAm@(EM z=%JJhfClCK1d+G>o6Sw2Uvd-+bs8HL`032q%a-c$TL7U;!3cBS0Y>Z04II<(JSDL5 z6D&Vhg$N;IYHj962k2$gBi&ui2!Xj|IZh`l8Ihk^&FjjQgLW{}$-mFkqLK#3OCo^^ z-J{sJKgR@QQ@z3i&!9(6C0lHSV5biAHv zWv^X?H2i%3xEFG>JvSw@+_CUQ3}aqLA(LJ%Z0sTtM~UN&dkz5eRhoKC)z5;`V}+iQ zp1P;Fs6{EB{?a{=t^CGJH!~(#q1AVIrKuQ{P4q~cOH!D488$o0D zf<$gztb^VEN-0-npdbBMpDiXA4K6!-{Gyf(2Td@IK5rkHzk}~U+FOW>9bZ_vh?bM& z|6)BF(Bo@zZ6ch0ypxzi-pw>;2!ld#o29gjtYO4BDckL9*CghB?=qCc&G!-PDxIc` z!HnBby|p9RLl?&Np)+Yj3P_U!zhe3=6Bm14k{>BDrYNTUY4PjcdLfMYl!&kkZzfTjNmwL#4I%l{sQyg?R;-{!%REH<2 za4nsX^D)zPV!{gve-WZA)d!LOCW9Rsxd9jTbs&jNo5IPxCXSVylP2X#U%+XfAg1o% zGK{g9F!f!n3Il8v)r9K(eI$7acO=qK2-9Aa%~_5556n=6g4Auv?Xq=})%Lw%DM+Xv z>1Q-nC5Dm2Lfk@~BE1b^@M)y{1viF;0L2~Yiv5%E;BFpA=xnRdxhDWYF`pLR%HxJr zX1E}-?tLVP>L=?JV99gROM*Y>YcMm>Qpf6OOcbN=#le}-+`EsX5I53B{xzowWP*=z z0ctb+Zm1&dLxxP@+;Tm{o|Tdp(d1K}UbZV#dh{AI*LV&qRH?!UDFJHrft{L=V-nMdmc{VW5wI;iBW`vkHEG{Fk<$F;r~-RJ=fC82QDX_z zlhxf7X34MLZ6;d*`(PZG-R7-`mfYESL` z_EsI9iTiu@T=7wJwzBt9r;%8d`7o$Ks6$hZor3+JjbtR4H6810(9($@Q$^z-geJ8) zRGA1V?6m?Yg`CRJd3K*=+#S5X9oTc{<`OUW@q5WVyV#k&{Ipe_&(r@q|DV}JxP-R! 
zGXI+=_>ZBo9LkS4=17PDuwre!&_B?p;AJ*U;~_R{f7ca?5cYbglF0@=7<&E2Lqsyt4`!D6rdq#Q|MoqCY^IN!M6I(c+& zr9U2dZ>8t+B|(f%?n+LI>3L9g&(e>ySo`JheF-1n2aj(;Fr5a8ENP|jVZtUJ5i z-@sArTFwZCIT`3QWlTQO>Vyo?d)kavpg{^i$A~o|FYURTQ=SFkz4JL(F` zTRbE;#asaPX2E=qPHabDNESa7P?<=)5 zZ#(6?c1ob3rXNk8NW3!GO)i&dfvpuVsUGd@pZsLL zCR+%OIEshnD9?B(z2h5+2&V{>gcM>^>5hw&-G-mV8u|R(5(?kIlRTk|gsNGE?>gmJ zw0mAS))F#@t_Kd?m+9H=R42d&UqzN#-dy z6_=mC+ZdVS{ckRtuz5n|5~?o1c+r-VVFhq+G24enEvs4%x#`=;W#TIfYc4MQWJI8t zK7>mdBjZj)Xk45-R_V2UORURDgB!#z?Qy<+$~Zmm=+x5X<8_wWj>n?QfWK-56U23J z4F2tk%DV$fyMmXG+C*Vcu~1O>XU}1_dUXs?4Cn)&!qvSM*wZ8y@0+#?i4; zdx7NgO;`~@-p^%4=&Hxa|8DLB{d$&a-nn>&(ct5}0!6;|2m36JGnWF6X7W_n-=uLS zV+7b;j1X4cXp|XhD6S!+CeTT$*2$@#*oom`REJqRFZqb+wcHe6{@>ieq};NFD>pFP zAVYZN+&uJT_&nh%rT-}E0Ix#eq~&aryU}HR ztne!Y&-zKy3ZN5sE{{CTroR)@5rm0Fs}MF(+^aztNv@8jSTYIHjp|$rlWye}DlVI3 z@C5QCeZGRrPF+n6q=}nc;r{lJ76@hGXj0(o$tVaAz zi2GZ0X#~5iM#0FqmNruki~+>1F-R&bIl}C+642bYbeGm2+F#!iwt699XK>O`MLO}r zR0+2K%Jhk(w;bb(W3!HT)>Gf4<0~Blf+AaDxX`CvZfa|=^3Ip~`MZ4|>1a6Z+yMfA z$tArWt-kd`fcA5f&H`7TcOwcz8ZZF5p;~9h2zlZ^P z%(ZPOyH_>;vw3;pwxI_CyGxo}Hug%{wj8$g8bIdZkG;=R1d^lU#kil7kBr^40>r4< z4q&R6v!_rd$eD-z1B0n-QzSZ@i;(2IubWFZYrTEFsT=k1Bscr{S#Yk$ZWl^izW=lb z7pJS5g1PbPTHUDDaDFi2d(+QDvzGrT*&(qO-Gl-_`YJZe5Q;yk&o<6Hk7Emq_(Lux zSg~5j;WLTF+&3Orl8|3&%Gt?6$Pse;_BeEj6PxjFaUlni7iR`<6gm6VRI#3A46fjX zKht*lpnS)e&nlTaS5!I2(UB;1GS(L`2bje7Vw6jn<*6Nb)T^TiC`xS9KSot2OClr~ z8Ap%JGM+tI4L#(-KMLb(mghhE8Nh#EcB2^U<`CcP4j^ZRL7xzLLpJ2n!5T#0?`pu+ z;N&oS-##)#E?tMaxJoTg(Xt@&r2dQ*+6}S_2(|jG*h7Bq_(5zlDcrTjT5vJ2nUFwi zJBXZTmd3j3HuGY=JwzOb{^WG^nEtq(w9L~npFAH$I3Qk<)Jwkfl>TtZs7zLW`XG#KNX5TbVEM3uYQ9QvdKWKIM-FQ03`?xkRv4iC2hT*Tm5K<+vuK-w;M(_p^y(4@aX%9QD{tRQiM{w6nCZoKICRw!+i4L@HsD-I)|5JmzoYi-q!#*4ct|!Sxmrix)Os$C z+1VR<=ehmmcQ=A!xkO#p6BW}9jaWSxMMNc(ZA+uGBd}w^1pI?_Ls_!@h0)U8H()g# z!BbDcZOhzI@&1m$_}m2iMF?ZTErV@Yl@#Zka2^-GgnUl{5!1}SPOHmcBSR$i=rZ>+ zX}7@5R3>i#mL|sSt)qr)$D7sOwkGCkLoz93of0m{8^Vu9AZOr>?Bl>J2k0xwlf%Fxw2ZB7b)$%-5rn6t$6*RAwmLH7-F7j=$NVe 
z{tpon<_qP8EGJJ2(d5AnK+^4x=%q3jz?bv0!G={Gv02n3n=8Z%AQC1m)xL{$*5?vG zvxdMl&D=NhU!cX-RJWiE*-*J^fpWbBTc^aKFNw?G8=702aC{bl$}Kv*k3EQH0a`0} zq_J-}EV49AEVFr!tE!EO70>BdY5)-F-n`-H1rP1fusiXt+6VaCs8O@H82yAe=9gOG$aUF=K}00|elttn#wsnBKc>n|BZT#g}*=t_c z(^@L3Y5l+RKf=gqwISG*a2pO9@Sgpe{hM<&CP`-1qp!$U*oSCxwKKuEiRs6r`tkso&QI9G-rm2Lg2AZ|q`=esyu zu$g+pk!z?(=|S5dg{cY;4ztb?k9%?lZ(|Y38rhW@oPMKH>MtjE&K$GG^$qph}sH z;IRGaGZj5jsv8vJ3DtB$rVfPYg|}(KeF}m`l1KK{BmJ+FjE$G!E7}r^T)Hl7Oi`iV zqg#wq;`R@_*b}$w^vSO=mQXlk*zH6u$)B4RJ;Tb{-F-x|l=3LwJV#Gh;<=`88x?<_ z3EkGa!7%YTbqJf|^5Ggy5j>Lk;QYmoFH>J4@C>G`njg9^pjf3Ak46@D`R#1EV$8`1 zsBM=loV}7v3JFqniWu^{ayoAo<}7sYm$cN!tRf@~2%$4i=Rkb=k#!E|(O1Xf%yRRG zNlO&6aR-jIl{8+hn5j9v6RAWH+ysMhBd|%$L`r|0U_}eP{FmowRj4;m_s@JEUSWzH zhYC|vsb`wBLe{j_sYpihUe1kCvH>Ice68~W6WC9G0G_1yyzBld667U{-Fs954 zz$-gi&U^7Q-pss9+1`lvW&^bySId*QG;Z8Sm+?W;1Y51&%PX!;M`^o}&F{-V_e z0XoIFa2U0wD}b+=#i437IPj(3UD_m4VQ!psayQX^(4N#25I2Zm2$0|hZ)=peBT zx)Aku&a!8-+QTK!t45?i-3?rjRqgh^QWaH>3xwj zU%NwA&n~&H66%^#`^1)7+;ZThw0TDzy-~E)_k4s{3%X0!dsN~?dNg5cLUbDsf6*$m zs$qKN5dVenvd;vx9@13!yr$XiTTTVjAa4lh`#Pn%#kseqS#tVA|9rK`vJPJEn9Ts9 zz`|*u!>HpQ;L@4?P{Xc`!TH73+pIz=69Rjf&Eq&UIRgIDF)TxESkRoWvvoVd)R=M- zsl<%hX*-uj##a-oqrIH(r?@JD3a--qK5yeP(k-&ihH@-&soF zr#VUt_k4;AQ5PAff_f3@q0PXU(2OOH5o>|jM^R}{5lDQ#FXqCcqHRH-OAIsv-wm`D zWgMiztoKEj?Z&5a=BMdk@cMbp0L4k<9+f{VA+f41)O0vJ+`(!-nl_-elKUnN=LAR1 zW$M=iV(CXF7Hpz^`B7ovnroG-hs!w_(L3Xgn+`9UbtNj%bgZq8a%3(yPJsO4)5E`e z^{qx~Rs#PNA=usw;P6rQFT=F~^`C;CT*yJCi+z3%cpBDE1a`IcvNzSQOsghJi^Xy; z%k)(AI#x8yZ!NqjU#Tjc#1**oR`qD~&Ua%b{vpNY!YKRPnu@Y-%${n%L zwxmu>9EjoeJTJS6J}tHp%rUo?!=|zjNJ;awpBRvWvncZfbid z?O#s(sT((cK~J~6O9p9w!Pc082Z!p8sr-Qv#%*xbceCGEf_{c6JV+(GxA4>XJ5@S{ z%0ws=Yu)d=dVwJ9^EIXzszth;(I6va|#qQ64Kw8WK?rdL8Q5-6$65SzbJZqe3VoL`GlP zlT;uv&Fd+2P&ysXXWw|-#$(OF-fD(*86GN;XvdtC@1uxgbjJIRH{b>>H~^!^G&upI zAODcDRvevMLTj>U^GUrD?o$M8NI>&;B`}_#j0`=L zZ&?XhG%={sWp$AzCg5pEM#$U)GbW=AdYI6dCASIm?1JhN&c%^Ekvc4jiUvgl5*_P~ zTs&z2S1!Xh^h^5uw+j7i>zdN*Q<$NS^FEiKaf#DrV^In8l*DFfa}Ln>LgE`E%Bgyr 
zkIYB)QyMTaPykIAbQkvox5gr8R%X#<&i@mHFb=D26hEHqC@q%oM8uAzu4w*_L-NN{ z#JXz!pp^`+)MawgvNvW8m68-}| z#u5f{Ut!1vMDT!%R)|`$Az%8)c!sQO+=)D!O!7_YY;r95l`TdZaQA4 zj2nuoVsW-%KC1DZ@V{$x$1>FP*~+6@ivrnI$eSHVYAN z7ZWa(6SUOY?KV*`a~p5e%qir0pz4tAas96E0os5I@le{JDhHI!Pv)~gDT<85-!=>k z1A?#`IjM@-K=~Um4i7N|+xn9O#{cjAUy-$58y%9vGZ0@6-h}(G*v@N_0^(B1dwG0e zczA%UeTyux6D3bkks}7uhwplNhdmt=T*YotQ>kfmkbW;2jJ9j+TX6~WpF#>xHZCqg z$WC7+N<8)GdCNxaX11xJy~p+Qpf?A_Qmhp;{BvTDDrbv&gf#_($zn8gthcZy+6zi* z7%T)!H*tx66by;hedB9O8aIHI(1dBCtELa!JO)*aHNH76_g}>#d+T|m$40NRHQT+U$D9B&0sAboRjBhyiuorg0FQ#_epmD3U9}+etu)59P z@V#loE#4<1to;m;gy|KYI1vBOA0>9`3~gF-y{=EaYZ2;w783ZN%z7GZh>3=LK9SG6 zDIwlM8S4?y2*`S1p$hg_Y;`haA;WD3d9r-nl#Ym|+g8Vx zpX3^aS`Bbh`N#>o2)J#{##dC?sI9sT@aycy)OC$ln??iFOP4j`q z^Uc@dTxzJf@jp3M5Etd7vk*@Rz8PGU)W&8$f^TG*q)t9mAKa&X(k^`q0RnUwGe!Ml zJ8D$`Uz2H8{^%M)DpCyv+oq?%TH5z71KTbg##wmc@&+MtvS+7)INH3Im}PXy&Kl@Z zY_toz8?UMM5*Rpd`Qec-k3g3`t1W~Z+_A&O$ZBJP!gXSLWn^n6tBLz6kDs!%^o>#X zB(!44`xUH3*lzt+!P&Zdr16^VgX=(LA3WCq36y$P5+@eA;Yd-mS-DVPr_`D!_uI3V z+lhZGd!5G_@yn=SWl$s0OL`OhFNPc=43t zzwmawh3^J|`onD=|FQmBB0qaxrV8kvHEbF+2pM4QNgi4gc*P`&&JNRj0OtOW?SOHaV1D9Jlki=;{(gDe`9Rf7+dr z;xc{OhN!A?V#+hV_8ka!iHNi$q^h_$5jhu1=r|JwO>AUFWLnyEo#72G z4V%c=!0K*2-fk#ICNG5Q)n6HQdDcbg8A@KR%SA%9q@=f>g! 
zG!+Mw2UelmtE)-05&rK1R6cmT%{qpWFQg^xcSacsSix0E|8|&dJ4Yz=n({%@3Kc&z zY1-vWId!wB><(qpneML~vRdl(;0B|9)~{KDHMf;kYDenCt7%0(0-B93XJFKKR45)y zd?@1h_lcCiP3Clf3GKmM{1i%rJ@?dSP1;s zC%oD_2GK_iufDD}YTK$(oS~1; z4^U9v0~5sIo=?;*_5!4Ek(}24mZ6vl*@`ALh77Ut;6 z{LAhTJC2M|fDhO`(UwJW%`S(pEC+9NbHMNPs|>=QCGHgrv`*ldYxH zsn#)M_)BqZ-&b4Dp)=2saDt&Nu7@oP$hi{+k3lN*R&8w;i?kNcj*Wwun+ArQpL+!AX_H#^g3W$ z%blCNYI4hb_X^b~O_}V9#8D1=?n@HrnI}-68m~TMlr=lHNmu2OUMt`V1Xv2_zV9~0 z0OR?u$uPAHv8Gd#$iT){9oo6guK#?QdeTQka-hQ5)7kJp+Rgz_f%Lpez%>T6Uz%Az;mlaR2xcn*;5H&qm- zhshyGc`?;g&Q=Sg>dN7J|JEDw2Q*U`mc0_}YmZs%`VU5IAuS0Ixo* zJ$QusK)Al$Ca; z-~F~2@m+*k%~U5h2CvX&S1ucZ?S7|Xl>EQ@|FX*B9${#nM6bMqpCjpoS)cajY=B-> zG#&&4Tv(qXn;YUutP%cniHTJR0ay`A+aNnyKg?KPHVPGRc>LC zG&-u@A$`{POZ)P1F&a#3YhHTmv}OO#r|hLD6cgFI-)Mp?QPg1N*@Mrt$Gn~IFluc) zVUL0HpS)&MBZn<^$|b6Kc46i0;Sc#`vehMGDxG%=5!baiN|`=$y;gHEWwAS(GczQK zH^$6XeylO{_xGB|scdo50GSvG#y!j867U$^R7zjVb1~fu>t7Ck)8eGSSH^ALm()mQ zL)Adu@|3!hGcK}LST2|oSM!x7LNKYDf|h!T+^9aEf^X9%U!+b-DLvJ!#eh{f-FMb3 zOeNTsXnBT8N)~(T(l3Ktv8odGc?9LEmQ>!u9pVKw@9aeLdA6FBdx$~~2=@R!q$(c8 zIDU4YQj+82rd^FoMcuNV@Fki&_BJR^1-nk@2$6y`2FN~BJFV3k10wWWS=6r_ZR{*p zaCwR?)8P*X8G$yxJ^kLQ!P4*(&D6!dt&hZ7hy`K#W>qB0ZsdoVylV$sxDZuwpn%4a zJHfb@L!cx+9oD;}U-|03F5%8^XM=~97JV~kSa954*)|ZrAtZPBt{lvJnfVQNE z<|ul~L3y``h;8;a?o4YYPgy(;-u?(XbhaQAdm=o!?rNUWR^#y@hvW*@U0 z)B};|eQY2M?SJi)CZ02h9LtlanC`eGpfFzviUq3n05(Xfq=iT|9!5HbPP?;u74sML zMO(|;DxUySGXtWWAh~mQ*i6Uo)aCG-bOy99pnA>ICG7b9kAyiBUhc-?U}g6oScF*v z9g{o{MG%_A8eXxq@3ksV8<5?2>&@lO%c-P?2*28X2;orbw>ecTtL$X!NnBM*fotli znM3axNXd-l3YBG|AugQC!L;|`GR4QZmiS2bHgl`nb@G^&HUKqFoB~+uEA%RD|&!QPQCgOBne#ez^|ud9=;Ih~0;ObWRgs0Ced!dOg}pNsInr zVyeS0Mq~%RfLfF0Rc*Q`CuPp<;3}>y!*VoKzx})_aZeMp_6zV7#wWuRH^( z7ugDl3+N0~z!2t8iQ#5ok-40`6|Uf6tr^*5S#T24C9As~|2x=Xfpk^c_XOf#8Z;40 z-$Y%yvJoLED2DPLR3&t6(EFrro02#$`f9+KxXI^}R`t+cKmch@wQ_bM6kBO~&!fUN z+aiu{fhXXd!Pd1gkX?$}WjqpjYL!8=Xx3>67@d+(J$)-e4pr@Sg4gc}uN2ONGb7<4 z9?bLC602^)jLSYlnUohw`al;k!Jh&A5l-igkxl6?*I=t6DWyP4!ztzSm6EckbP~)a 
zI;#p#bfZKVf`QXjQ-goJedG04^hpNPySm?qr=)^fT5Yk!2=`L|4W2?d-TAwv~ zXW`Y5mYqyhY*56oBvL_Ar}7=}^ZaH6CpGL4YR^N#%#Q`IuGh2yOTbk}%X~zkcQnje zf!*v9#QV{JJlch_H#3;{K2znjuDN!Ae37TMmxm8L9DktHl62|i7#+}0wK5476pr62 zN3l``h&U6&S4vKm3Wu}>^m7WRgs7f@R@DhfB&z&Q6doy;uliXC7so0=cM_^z8h5D8CgRZGRBdzcZbMM9R=T)Ggq}4^_U(=b2QsM`|5d zCHf_utbCPlM$u=j{cOJPWn@ce@+bM6`6HOS_^>X6cQGpE`y^rk(k5uJD*$=2G!oSl z?dFLIjPnG8D>urU($Dw;e@s5Irnm3XQ)HVCyI|3)JH(FSYc(ear$(!mh|+l=^L9|c zEk7^rLv6WngT%fZ%^*1je3i`;k=Y$VM9L?GdyH_q7*lFtr1Plxz0f$pqMcVVK2I)d z?8jY0-DpkwsSmDoqWhRsKr zOGIwtCiNtl_3?649ZroPu{DwX(J8(#rvDjcrEsA41*QL+@;V1~0OUgXsE`2GxsXBJ ztM+;-7Q)-Kkl$yyEA{TCpHCN*G?pY0Ml&_7QnMBa#vs4IQ-a0=!np-v?vE&{f3x2EjQV}X z%HKCxJn9~%W$DVoAXJ1bIlrq(Ts2*5RB|f|s+SJvKyhZ_MK=}RZN%xhqhvp8xc&?L z?;&tf#-0tDqo+#?W}uC|L!7~e@YO{N^fy5!7DMSPZF!;$mrn2ukY$eZSOc)Q_dEu@ zzgGH9Mjxo8GSlHl(|nk@hPda6wwJGeB7UJUk0ynu{3l`Ey?CQqlFJ6JMVg?>QhEkPVyeuooOY zH(>kAe`1RG*=YMczk5PlBl3qDV*8DXylE@MSR5UnIoL;-Y_j;fK@{HSu`>{_svy9G zj*-j35w}Y*5crAg&;fO9r+QtW53$A@7qMuDKGdrCDwa^ zd^0DW|0PE2T?l`|9S;sR;m+){7|f{JS*v_#&5qYf<@>+6Tj=NuKk9rkR9Z~ID6PBF zgJ@C!N8^AIQW1~&XVs><0k~4}Cy#TUS2B_3lPV6FK5ZC!Fc%fhn$x-JHNF%ET40Cx zj#GG+#RsLIFrx(0{=2ad?BEgPUcH>5A)xy%Or&!;@1oacXM%V%tE!(66#aeI+x=P2 zM=bgZZc#e_CeBbc-|%8*-`~Obb@$Ne1Gh(}^aTiUa++l$-I`;WZ%3N{JO5uuf?rv7 z2GwiHL;Ky1e24;X1%45FvZkzgu;y4EcoFrJr1@Igz>mRSUB0W0na?y5KF0CxZBuS> zU;=loO1??a>BCRNKW_|EIZA|6jG(s?WkK9!?3Tx8WgRb%U=Nyf4JuH&sM&2wg-KE1 zz)0Fyces-W7hr<2XwW6TJD09NI$90i^lJX<&4~f(EC|YSR@~#59xom zrrhq`J^T;}kZY5hUbXJPLk`xu_e=ErdB|@r-oKIAfwC%#zl2+U@Uaa(k?AF{eLCm{ z+ve;1p_qADUpc7-6^zBE*8gq&vLo43N3wOs#lmwng(E%SY;OKImQK8D8Ax#*Tf5Zq zP$8XPU$5Q*#*;RHInOSYT-|@~7_P7Yk1pR)txF-_b>4kyWoXiaX&_Vot*oZIBk4&cGwI2g`uYa<=I0(h@4Y{ zPr_$GMQ52Dq23oy#|Z_};?-!1u$KkiHO7H~$-^5O?H~4kkv;~0w*Ql}{%LfuUqm$E zu$kE$BD%-nVV8caI+{EpNd#yI={l^H|83_t-G^^q6!(HEq8YGKzo&Mx7(=Gb(wuAkaDdv^Td6|WJ zm_N8t=Rs1pFM;*|)v^c6Dym<)-1P9wr$dDfDcgmTFCQXK!=B2Mk5P|qjqOmOV@7in(fB!vY2vXvsMXLsdrPY*HWu__Mm|Bp>xhi+29DF; 
zVi!@$V`+NBP2Mox23$W$!&ZG4)29!O*g_&7ydne$Cl>@cI%%rl>YK4(h1(W!K(G)HL%nie$(kJx^qtJB^GVX#8KPecv9*{COG!LfKxHLo~qI z8rh(|?icu1atMHjG*MAm#~6v=xh)83`fxmxrBrt!R$b)TZr&3@7l5r;+T_o%z1+Z~ zjWjEtZs&)?)ym0GA36n+qu#I`ST8;;-u#yGlCSrHj-0_a$&>3SyGSKH2kxlyC8XMIKASNRv=wdI!Hr+oW3`bpC9fBSuS9no6z_|H5V~T*Q1c>FP1LR;MeO%~ zUf1jJDS``~I1j}&A3iqF{cJF+xU%RmT8?)M1tu#}Uce@82Sh7?w?tgy;hbMM(B&G* zc9UQYvcW$qR9d2hjR!FUj)p6o!pjb8k4(4GS4wI zYNV;)`7J3L3DgXxs2kK(;Ieq6T?H|scMNis+#LGR4Hyh^S$558_+aB80YpY2XfOEN zJR1U^-SpJo6cn)9g0-#|8=+LrG-451RhAkNc)*k6=(-VN7oTchbUD+o*0HbV9%UHl-6&*Nm2eDZO|X0H~4eCuJi3U-l` zugC7CW>6ACAKBSP$HK$~kBMKX%Badf^nG58ixs(li#s53(OYN8ORMx3+%Au|>Q*@H zf+{>Q@SxxnIX{Yqiw}25*P+knWGdF{ZqCFm?Dh#I{?Ic^a2!Nl8vZ$S#nONu&8gc) zdI~|CUM<~^v84h~JLW-MXWJOh|<;*8z~4tNi)2Q7^GXEb0XC<|u~ z&{{77qk!A*i{it7T%Z?u<}npVc4OmhCI)3h5&!4MD*5+KK+08^0;xr0G674GDAaUP zMhNiGIN=nc`76uUITOZd{?V`@m|2OmaEjQf@Gm~HyIxQ3lG*`^4+QBg_wr|aToz5S zY_lzvb*<5T6@qKcF7H^f2}gw&HePU&c@nO`FV1Kk%)yv)FgNr^WIsniq1gUizKNO0 zWOqg#XL~(8XLl~wq+)i)l6rB}PT(dWNvlJ!T||LIfY|A&QEun64_&>7-)$gNKO@@Y z=f<@r8 z+TG(4!Cl8~y721)Rm;Wu#2{;Dk>-s}i4REos6486=<|z~`C&=3tq$BE@|w>jh~T3G zVU|t{T}RWPo8734WShL}S{spZWcuW6Xn51^|#KNfj~1GT*)v$ksGir}_7 z49bKksn2R&z`jnEeHNvyf=HyI{omEQ7yy0Pr@#V)Q#KzV=;Q@2dW@DV3to9ejg{AiiuvT}N!nXx(6B$;=Xd*g%iprJh=8* zwGD|`YQ*xF3U=fGV^1YvZ|dj4PKGZYn@9!&dIUbI&-8^(*+{_H6Gt0GkY;xFVrD~o z!2eM&dylBWV#s(xDj~n?{-8;cIj>1eoQdSnzEWUsIz!>gHS!uEk-B`S>OSA^ApXRf zPyV=EVfLNitiz8(OrZR^s)Me<2R^Eql*EzERBpi23g8q?M~XA$CSBH4a42Fo3~ka* zwYsmH!|7tK$8x$s=Bn6&#@fr8KIYCt<0mLKL@SSM#5-Fl-Zs+kTqM-U3lNFKv-Z&w z0|OtsxUJ^KUaV_!^H^08%*g!Wt#kufn6p1(DFhbV^dCEt{h)|<2DntUqzVCOhLH4d zc7bXWO%LI@4gmHh?{fNLSVAZuR@-#RTO)R`v>P?E{|m`4)l)^@t-w4MY`0N8r>O?Y zQP^>0%|lal+eX7JqJT&Vu{{ka8PKBk0|b`KP7xY5$dY9iqfx^yz2ye9Y!hLFT`y^H=R%uJlj^7z;g1!gZF6)QYB>u-;!bN z9_xowvii#nl~0%ZH%zc6dBs@OI7UU^-h>aI{6Vm8zFdOAjBGDmhej!yTqW+>EOnuc zx?`BJcA0?W|9-3MoafwHnY7#S)^hfcvz_?adSGhku~OAN#a1PwiiD6K+x;Gq)*?sC zk*X#BWkh~JG9wAeHR+DlRe*ku$^Ia38<&(2dA%ZC;KDN|ar6?>4=(Zd$U6)sPH}-G zr#v0hW=FZq)K2y205H>l3W#Lrjm8{QyDBYTga_`c%(P!6 
zTY7e624ZZMpXI7YQB0nI?7UjevgwEZH?vD6q)sc~GFr``hXm=hT_N`6|3z!K$IMLb zYDAh!$UXA7LD+S9>W&w8jqfW`1S*0ob*-F=bv%ZvAQ?oTdU|})wjG%&F+Y=oM+A>7 zMA#=K%%dO{;kH<%8i)YGp9sEhu;~N~1pXF@WQO7IT`cQ7n_TK$SQCsil=wx3?cQ7eb&)nsn#DghqT%Q;ASL1t6XOC3U2KSi z#cV1d7!3K)Qi7pU5(}kFHI%nkve=HV?yw|mUWo>_Tt(__YkTL}^kUX?Uw`_Cq&r8> z)}|&h-ts52^N7_$Ct*=<^M*re=LcVi)R7`R*vO%(IVt55Ak4}1VSr8CA&i)ut->GeHiAc zbVizA0D0qEo&Mo3hDRv$E`&IC9fMi-b(v-xCKqp$3LmZ|mmHD|6p4qa>@;{?dS9{( z*RL&EpJ(L|*v=2S>-($xQ#NN$AQdg}g-#qHC?>O2prJ(8e;|$&SVKU_eI{E#216D> z?iB^%LjKe8*TL7wFmCVhFLGjVLv5aBIZb3Fok8lYfg6ZgF_$M$*)4=C5s1iKjoB26 z{Vlyxt*7RQI&!>mOc@5>u6gT^Hm3|3*Whb+zXj+Pc*MSCR%Bh0!Ff9gC-1gOA{sg? z^9&zOi8nqSOgq$9nY*~E=|BzaV*o6G)DnSDGecs-8*mI9RmjFxPmILT=5{$lSE*6P z+^EDlQ)i!l8SLe}hD_EgpkT#KZ~XsvUo?;qGL-A4 zATQQzk3h-leG+1OTM3z%$STO4O+u>n5FT(%zk$WLT;yWAI(J!%k->s5+>}@5Ge!^H zqk}nLur8_~6wvKuw9d9POn~+5Kvpz(ar|JOyYF)9ci&F%Pdp0>;8MZ^CqANu&CrOq zJu-u+;1)G!8H!zQde_?0CJ~;vfKSKIk2tyLC(ObA*yBavyiVNQvf>~5@A*#e{>%`Yti0LxKH{CMIS)zj{FV6EbGh*mx{oFI*v8D{*@(>f6i(isD0^N zG{0m{{%6bhe?;7(w*9yKfmNZyY5gEFtjo=ww%i}nub<8#bKvo3*?$pcj~4?DPO<#E zcXNU@{o>Ga7Mw-x{=Gc4HKqxx`vlO6WszZTL-H%~#8MRDNI3giV3&pKETd-(iJIKN z<6G0e`Bc*KQeqk|!E^XYTjp%)YB%L_HSH)Cr|nVb=*mu7u}+NCZudn4eBxHgp%s4A zgQdvBF~gdiQWmi8I2CjzAn_NwdoSI3HV5VWy$)KB;T74EUBwOrG@2qH4w>Bx7i_Ud zzp%0xfhT;wTh1gVZu{P%f+Ua26(N=b24yf#J>T=rT z1B!u66?>Ji)eg!!)(ZkqC=DK$%GC#5IJlIoh#k`;dZyn`IJFykQJ*En3C}^T2&I1~ zV&_{M+|KGA$Cy38Af&(>3c6hD`v(JY=qKT@S-)zC!HB^t(K+kkV7aPMWc}!0x33Y; z&XLCjv$7zgJe^ahrQW-qjZ_xc=;ug{d=@RAO5|-oYsZAG*$Ug z`^c%0uhgXS@&Gw`F|Oy5J%l7AECp#pTf7gPYEQj|HssMpitN9xCAy6Y_!kS2gV9Z4mHs0O1 zPwb}6Lhr|$Pw@`G?gv%>a2fKTR1z2q?Hkl$_^ z%6o?S+rDHO(O%xtOTXm&rC|Tg$P0Z*X2c#DUh^l^TXlG<7(S#GW2_wqu3^{|C399A z5z$uSk_iJ@sWI9e`LkHIvu|3s_K4h9`#L_zL|~}M_?Hj3;c=`3pR3g#NNx?gWPY$; ziMY4jcCBcVL7DPzHx?*kgr?IJW)%B6t$Twg9_z$@1D$v%^vZ7%w3Z^}@C_=9_aN7& z(L-&=E|p#_4^?H8ez2j_gAufk-YaUq=4-eJ>7I8JePDLYtrjohsjt|m(?ZM%k3k)g z@uJ$9Y4e#Fu|tB$kdfT#hzBj&<6wHOJ|HBK$O6gO9n1_^MF7(~A6V$*W39<)D^mJ6 zlEPnZ6iF?dbFAOijJc%nDungZG$0Wc$qxrU< 
zx3QiZpzk6UN(&6IyZzP*OQzx(WTlf1&msawN#Ag(o#SQidd&rS(TKSVzFM^NWbu-s zzsvNllo>?B4BP!>yXbp2gXk%@YQ^v3ylEy)V)rq0QaetQvOu=3W;W=zxWNy>i#{(J z;m7AkyL*F+4JCpi8XcTUqV!ji!x{(Cec#%htWvoi8%)jbaPuC5;#KSVhwE z*3B}qU4sZ8ZqxU;$ha_=@Paltp{!7~GP&4^w*kzF{`gzgj-7sp=b-aqjs6B9UEQ<$ z&pWmppR4%BT-SfY6RyWIWz)f71_Y`cl&QNkA-<8Ecr|-7DG~ZjEupIL<8uXMA{*EW~ zE}ZI`0K;=k>%=K8I@nz5IY8F_Q{5>xYAy^RhmEiIK5a&F+Zp5ncTGxgU+)2!-?xz- zUiI_0U7AQ%wisNrh7Ty(l9QiV10YYw_y5lS9P-PvFxSL~$YoF!X3(=Bvau^?V04h# zI<(I7nkvt0iMvYxeJ%?JRkaRssCp&QxEwUbjx??dJ*LC8gG=AAWe)*{(o>bxoX5bd z_QhI8lyg54xB(T(29H*vkaL2ZbaUN^&<>#Rqu5uTnh?ki2lMTso~1s>)9z6gUlCFw zNlB5UJNu;%7O~=Jryv-5EPYL=Kq}VW1GL)pC6I7jVv6^PaECckP*A7jNt4TvMoB2J z%xF>BY=p+M-W27^vaXiQk1JOaYA2uq_hRNp5HIpBEsfc;v|w{juLbuakxS&%h?xyD zn9#?;jV?>9pvIlB$_cDbTgt#7M*l{jbC5EHaHo`x*g-Ge*5!Xr*(o?kgIoQ?#j>D< zK(3?C%FFHkX+p~1B+I)TQ2F2IDUfv)aItmU!%qb&tc(i7*Vc5Xa7je+<$>v#T#vBP ztg7P*KpTfs*_6JVbYpt8DfOY}NBM^AJh&<&Zcr+~0k)Tm+jv=6R3muJ`g_Rw71EOC*cySoIe!J0uH@#eu@TV#la(Qe&G!QyPt8Kw&HA@2BWSRVWik zw1uiWR0B%g$e>xV#{fx$qHoouA2IPEbF7zxo(v>8XV6y89JS$U82%kFx+&|KuCwu_ zX;j2Xqj6?;fb#EIN;n6rSN+;Yiz(|nT^ggHX`1`!&HBCWa%L^?b$-Lqbkz|yb-gxk z(gqlp@1o1wpjb2kx3QfFwE2jPl!FTN_$)~L8TJzo@tl+ap)P87!+|yTa|D!f#}g|^ zxyx7nV9As2!vl2|Zh>qszjew+XsEc#t9L{%Q%&snL&mhcuVxhHm>|OaL7Y0jRl70`{f8`ml`Ukk+KRGQnV-CGvT0jqL;Z8a>H&k#66+qc}d*B^FR< z?Rf8h?h!7CW(_HMsfA@5IES(tVbh|uD{6g73|v7}kx*2tWtPvSUF}7|ePh(*^<21h zu`3rX^8AJbE=3@f(-mx(cb>^LC&6>e+rP}pjcy*P5%I?$fF>4cZ$(T$dJ7BHyi$P> zlD4mT`GjSN_NFd zA>eLU)(i2GPD5Wq-X4@mHvfItgG8rCRN6!j$UbKWA{2i;gvIT2u>etNFLQ7I293wT z+>XIx?0D)G{PcJKG2iKWo;XmVqN}qB2m-| zHx4H~PbTd3BFC++VR83Qw##7oMoq70=J4z!386|2q$pYq-0(D9pZ41uz!* z)TBQ|)3)Z6Jv&~NKQZ2ETc~Lt4xGTKFby&HgVieozFYASu225zR=j z1W64d5(Nw2Kx?A&l-`l_(zkd8$bdCN_87EM#jMjR_ea03oL zpvAS7F+kT(hvtsDKJ)qqsQLBVkE`Z}HjMT!C!m2O`*2W_XJ?Vk4?FQKZKNke2em(U z9vq3(|1qZ!H4;MMSPrTaA;Thbhmz_7QQ;+;X;A`jNi`2QrO zId~opQx)Arm;R{}LKV5_sZ40fo+!I%zY{%@e)KSZEuByWhD03nhOk&yGlkd%P|DfK#2;O zH64(+_50^z8ssnW{li-NcR;S53@PhVS4md3MQ*9r5w~MT!TRMRHc6NkEjdlqu5J!l 
zA8qtKWw$3K`2#H24|YmGxxbW~nA0^v+gC95C&_rrr8}~9Q;@vp<7z>)g?G46sAeL8 z>}T3ocFs~&EQmBW{E%s^Jw%*kLtHH{JI8F#Z_1yM$b6-oELzjvgx>J?Y~ZaUh7YO? z2ZNVT{Z!t#l9bn^(IO!(qL`C@ZMr#6TX&fm!I8iz`paLCOn)59%1Hy#JT_(@X>(nR`PaPC72uc7*BOY4!`AI@_$fgVo-Qynr^Bk2edoy3APpVnElA;8E&H`> zLCJ@CA$>2E`8(uB`&i8{mdjs}*q$5&#eRK}fv}oNL?1#x3#&iS0{OrN2)rTdos&P< zdhWbEm@>s?22$;#jB%pR&1r+OxQ%145d9guWV8fs%$I2@fkxwbhjlJZpq52|NXUuM z9+-q+GVTq?J_^0TUaRD{%`}yNTx>$gXl>2N>L&~GHd2O+7zAq)VtOZ(GDC{*jdaofX|1g~pLOJu`&HWhtYMP$M3lk0$1+U>( z%@}14+FwhN+q$cN&`jhgey=8|aFpP;rf^jU>{jQCO!4mmzJVj_J;#d?STa(20V-{h z$Y2hc3(bVI*%V^9#1NkeMf~+;Cuf;{1B+c2#Q@LU8of;46BwC}>F+C^_&h=epW_kQ z{IPKFJQvPK%9nGWGh2JuW(sR>#2D57W-Al_mJoPsUrCUt;bfFWU4QI%Z%r_hJCznZ z5E#hMHZi%49BSKVt8i|J(9=SaAE|Pv$#eRAI-ZqY)@8UbfMQiaajMsxO}e?$Bj$MT z1YWdLQ8Y@j%7?SuBI3^P!`7H$SH+ZGs7~a~Ye7uUuU&1xj>d>fF-a=8tI>wThMxQa zxsB76B!~H2Z|?RLbnnDz_C0quy3-zk&Ku4##isakA3zph<-w<(3rAomn2JtS(W}J) zu&Jsx&=lGkxO}1OEq1iT%(EY~dE6z;6q%V)d)&!l>*G;~IxZX+q zs@6tor=k;)L~^|8;u@=zMAScW6QxZ$Vl)JB6Xp%gJ?QzCv{Tkhq?fHHWxQ^DaX0Gw zX0Wh~QN|}g93CpgtEU97PejW&daqf9EUCWN3Jh|d{cK=*CR3V-EzkZ6Mz{#_ZC3^H z#z^^KMoDZtT^wUCYW$eH9VgYT0E72vJ9U<34nxe`)<%%Yw60|QPwejIXf$aSt*xgk zB`dX!9v@9h72&nb@&rT!PfPioTX(L*Afr7-LRbXLfz{-?W2en)b5!~<`+eUUoSXZz z$)VTQT8ZgnETa}iiB&VhT=+L2yJcfcp#37=3%MyKLPcSoZWC-fhj!%u&i{tzmNwS$ zi8BFf;OVHUL58T9Tlnsj7O;2*Ib_jT^CD37CaLy#J*VMClUd9mMi3c-i!|Z67;*NE zbnS^u#(IbiOrvOhCX?eE_bU?XmR1* z4Zu0rguxRm)obFD^6|lJ5JZga9}`l;808yfwa2sFf#D!p4sR@VoJJLt;W-`qM#}dD z<-s)1F%3Fqsq+bMx;cwJ&4csK0vu@`<@Dv~4PeI%Vi5wkYjdDJvg36PuErRFT+Ro1 zU~Fqn21W9y4@sf7HP=t}wdGFwtgeLfBO9&+={RbgM*MgyBs6;1VngtOGPMHPt!fEH zM#IG@ystJA`qkdlvVV)^XFzmxZTdC*;7`s#1*U3(oCP78bnJs3`yQeyxL(gs2!OB* zL@KEE)z;fZarzw|8>-vV!bMJinLnVn*k4mA2o}Reqabx@!}UA~VXlxbp9k^Bo3ehw>4v>4h1J7jQ_0PG|JHyVLssAN)9 zO0Lp@gECV2!8mTXT-nUIc~g7@U#+6XPJcfZC49umgd? 
zDWsfmm(`}G(QQz6AYb7FOBg9BmDu!jynbiG6p=G;&Lshw(^4eIi+S*G4@>E!ykp*| zTd3KwLBF{SL?ep{B+x5~De;1LCR8^%dBNHs$oD+n6j{9tKf&`}kp6~9;`3qfrbeCI z1a(b-`EYAPgsNGEw#=2fnn)6PDJO0zSD#2fRj1Z|QsjoG`l*Gq z_bk*8|7yc4?3OO|NbT85rms0y!+fQ_9+r6RvHjkqex6*4%I_g$Flcg|tK*iHo zPqN0E$lXSYfT-G(@=E>20l6xsgr^doEN;g9Uj`Vp+k#UbJ<1!x89?@Ul6KhofKUf) z_oP|uA#`yyY)hDP5+UVdqJQd$cj0-SzrXMW(1&+7vCZY^>a=AQO(gnza8}yNFMofA z+~-DV;eRTxp+Vo8Pnz1D-Jp3xB&jfStso~~)4>c_Ma#S<;_rGF7WU~p6;#$C@w$0# z%+>$O95i9Fwy!2LBdj!00&;1jIMp@fUGwO73EoyrevO-UNfe#(mN4kKguBbL#SA2@ z?U#O1#nj4FhGe+YH!;*s)J7%4xuk1Sn%{svZm+ya1;JV)8Kyn%GF7F|E5KBK5W&C~ zgtZIu^D{DDN0FeEzAzZB?BURd;xo(w-jFE~qAW=Sl64eaHci_KcIf$!fQ+>xTD1s> z9w`3|tbGcLKj4^Tjh8f13f*|-QY$N&%f6Ul`cd=D<|TPc2L&embQ7k?-9RHGEXx95M;NRad z{5}Jg9cl91#sdDRER>bN+&*1eZLw*Y-{6L4)!!@5EpG&G-n#+iVU8t$+!X=b zX4oHsgxh>{eXi*r?n5271N_-N&iVn)iH@|Z!;jKZ-va|0p{zZnRmP-G1a6j|s*S4# zOZBESG$|HKYHpbVDuBT-fnf|VttE;apHxoEKV-Mf({m?**|Xq{xF}gb-OX121GUer zlk1T3%TvsXwdxd9`xr^+sY3z>(9Hc#yKiP!j_wrtP(PxJQ+Z&vHp%K9@5i8k_u#9S zNIM?fb!)z-aGK}J%9U2e>VrP8Hw>xE?Gg#%3!SZ9GmTm`gA(kH2J=@Ix$s6$NB!k& zV_-d%M_aZE4zvV~AW$H&mq1bkE*MG#CX=pi#Bm+>GD z%W^s|)}``|E5qhRI|a5onZnx3@+{u4V*3@s0IHls##YfJuREh2?21hc6^<-To{ZyR zVy!9$BH=YGi-S~2cZogI3b|b!!cpInmeNTD0!=BSOmuG`nd^E&Rpxcy$bHldkqvH+ z)s*k4q6xE}G%_N_gXF|^=HOfAYk~GycOg%qs%(9xA#7OTZ+okO$ZSl|`fWoJYRG%hAn^K|u($xsWHA^W|qB&)mOxhj9>~qWFzqg-*9*Z9%OXkH? 
zGO8AE8;|=lLq?%c-Bcqm0w(CkQl~ZCpW+WE39)tX5SQHgpt2X0XH#XvTD2xlY@_iV zEY=h02nacXmu6G9HMy|Xhg&3d!Mz^wOht2=M^U&ppyZgV^JC_P?Ln}YQ=b=Hv{fW; z4`^JncU3$!r<|xuyfXu7&m1my*ejzlLATJ7Q(eDqATD~kn6=8AASy&_OE9(8pl|Wc zu&(SE8HA;?!|rvb0(PNHG>!kg*wtlem#CkpugUq82q~4-w{q}I^IW(O7uWLj8lZHQ z2ZD4>rS*vsYi|Nx{{aq9!lG6gRkYrqUN>I4dZ~^}!nfWZUXy!p(kcyY-}n-^7oRy0 z+jb=R?x)CwUKFU2PDGeSBMQ+ibql26gY9(6OHx&fQ@8Nre`8q`$GlLiX!t7y-%v_m zy>|EhD2YDV3a1D-{*4?g?BkQjjHnp6eoqzel=^ELtgncqF<7}jh{q5?J5zfci|C7~) z;K=qv`ZrszR;D@?bZkf^wA=LYqSt%0OFT^i+;#sWFjwa=&yiK=RX2|Ui#}QLSfCue zm@1avSaliy@A-dr;`*h>rrH)D8(vr|uB3jSV(U=G0u9m4xbz_#m_{n9e#EYqt6+BR z5`n%#<_Im*7&DAOZ025v*IZaOZlB^KD-qb}rKwLMFhD4l&3oP3FW5gC%oVNEH+`X% z=Y}ZQq&97j9ZBywMs*X{s99cH`t?*9NrWo^G<*>)irCguH<7|CowyqT>kkcWTausD z5vYv2bTR=$Y&7Jzy$_n&^2ML3dn-`Eeuk5~x4!}5&;?}waAoD7DnEDJrZbBjt!N2Ur;de$fxO`^*wr4*m0Ii#=40iSZ?L*Dq>mfd_6{0L<9Spm zyyn~@90;%e7>sNs#Y0SY4rJKfIu6!c2yF>*ZHgtVF+v>C!#i0pt+AorBvxyW&i;fA zm`|86dfJ9fmQmJCs0MrbQo-9XDyPW?IyjHh%CPo@g-R$K+H%iSu7!r+J-rZ{-vC~| zdf>ny`N0^e{Nx=22E4VJO{ff&%2&~woO4N-1oISUJ;b1t5A^S=>mW$rk7r3BX zi#Id!6to7}w?+FD6Yxw7ew5>mEM zCyOabfx|uwIuB=mo6<+Qj|i|D#&t^r!*s*CdLu7%QFG&9^VR29MDmp7^^iLauC06O zeAaqI{}6urECugK1S-%XF6AS@qNc?6tZ|CFXZ6HyNK=RLqK-0@%;2KBwyuvtoduR^!v)Ymzvqmc(r^%kuu}vS-MPfK<+iM*C64Aw{rkZ& z%sF3TfmfL9Um5;2VnG1*01;hwBT#?cZo*@*+@Uf&mbj!{h?l}>h?aAOlPOiq@cy;- z6+Ga)#0e%`qtR*SQGxZR#vFq&{b3h_zlh!5e-{`q{0mT$X$sfsfP?Ha0b)Cf?4UmXz+91WxF`Bb4FNNp11yo2D%Ba2QQl$fKibYgNfaf4p<~;&ZQHhO z+qP}ncWm3XZQGuGKe4~iRb7=%(m8%x2Z43B+!1m*BGVz{4mWF+*{H##UQtaV^YnIY zPJo2|R`Fd$tdG<(g~o?gdby5WEd400@uogAx|6q(o(;7QZW_HA4WxaO$=|WWjj7Z^ zSgNA7R$__o$g`{-9{e^UmeWEr{<-FGAsLlLu-IAd7{s8DBCMKyck)feM-JX89rJUG zrS=EUv8JQGmEhqa zZyRO1yj&06w(|x^1NRW7g$B1HMmoxBQ}M-n()DGf@?mF*OKFQTv<|#H=sYjQ`u3VS zIZW(3xG1Sh?%Q&GrMMl~laSFHI2=D*jqsm61~(Exm<=VCUpX7Os>nf8HU%0N>nhOy zAxv^(5m#r=LnU*&AOn*CNm}yZ1ntIMY!8|lxqaK9fs}x{m5ym3$?c2K)MiZM3%X_9 z%`I1=`%{*o)Pln$YuGl)dJ}U1{j0{^fb7U5ls36SAn*8^8#XP(ONln+55)10k&P|x zK7k))-J2X;dRPil-!2Y)n=!&u@Bd)X4kA40ClbcQ0KGSM!@Q-TBwjEy0X0M9ga{%v 
zK&hkWZc#Kv*+0uHI`H?F&hj1SJ6Hb-z#n6 zu75U2p;jK&0V?w|a!rit$7jbugwn{ddiRlh94W>U24Xr(@H;i5VqzyniH++1CT zj$Dmp=>+t6A?yxRRhqP7p=(>Yp+PwgCK(~+fW{mU`2ctWtqx_NApN8^n7&pAC{?13 z4h5Yl74yCo^35)Zf0gCVAwJZx!aBYahw7uQ%kKLMbG}@L-pAMb)A#JHN%4~s1d!Dc zCvN3fCiCn93gUB}bjVG{zn<>_aK~pwMDDvn)i!*}}Qa`^pW=!qM{ro#QP7 z_$ttK`GdtrFPT$Eyb_E|%s6gR#E5d0)OJc~5(p7%iLL6v+3VbjWJLfT<3RR}ioo`B+VpfVh56L%QQ1LF&e+|ocC?}^M z@5L+FO4MHl2$j6BoctxHBGQu>=kI*hX4K{c4NDcBW(9PfYn1Y^KQ`&RDf-o+3rJr} z)%n05FLWPeKzPxxhySAy4Q&jHw3viGY3OU-ca(C3d+Ve9A#~>kH+!?9RSII}l<5N5 zRX8EISbvqQ{@d&G$Cvz4*P!_}=Uk5R+aprrs84peCKGH(5gdo4o-Y!-F&d#w{G7Fp zm|p0h#r$w>B5lwS3DhgzhRIkK%g9wUa9i8@k*VJC_#%j~yPZkm0Lxjdg^tzer^o?e z)@K^t!q8J3Oz5eX;TCQvY!_xgu$bs?ffBd|6)1@JctSnM1J1-z54wRSOJ13czs0Z( zM)0i5)_N_m8itG}^_i&z|7HsvWJ~{S-nT|i zg^?P~=FUFc5ohB@*ZB$CfQWf~vQd&=7whODyB}J$vmdb~u!uHGsCJO+z0T0RId`$d z+TJnzo74A~t(0)@eI))ySE!5xr|SkSj{!VRpC)Yll>4aB)Zs7)WR2$F#OWALZ&JGs z6oEfG_WQJ`{+4FDr;xd$Jg8?OVZ2TwN_U5R;3+d=*iJj*c2Na!lKdy`VaZrHJvva@ z;7-Y<^QW7+2P@?_h;U@geCV$4GF6=U#ALr`x(0cW8AT5>d0(IdgxA(rHdLCQD-T5j z9efgk)l)5#SUb@DTe7a##6Z_K7a~TTFy(6eO-kXm3=NX25Xw`kQ8B`@juI@vXof7o zSeUyU+(XEy#Fd$$G+gfYSHz3b;2AjNAuR&7=(PMtM2Kt~U=ebDv@r!8&v-(giE~I- zZX2A(<050JUKvC4k92R|fWJX{+3pre=2x$P(yA%AYhTz(n2(Th%_U2h zjbjZjZix6?kDnjx^BkF}6cYK2wb|>~ACJXteL~Hoj&!fn)(*{#pMvN)r;|fk5r{ zGgtv4-+g--RqX$*|Lyq~Gw^}TNGH=LS&;Z5zAUj{HcG|!S;Wfj-f=e+pyf62(;KI{ z25~09A&>9b1SO_f&tR{v%0I`E54L+3%Bj+H_`$Pe`{||L_)k;^QgdDIc}q=slIB@! 
z$&uby5W?+luf21`R?sI(n7acELA@$%3JE6z-#PuKPlFC{iHO758C$Sc5e(8`ls35C zz64RNVP{a3P|k>f)@c9UHY|S|V2(6r^Uv}r`99t4=;l1?R+L_0xHJhv`9_5PTQL`R z+bi5l1P9+Vi*d8)V|;qPnReHHlmyjD)184q2PHSpTgFz}V0Qy72ty*3K$W-xut^qa z-qh8B*-y2mLFfb{mEnUi6J~lC)k7ouYD{#i&cq@bo^IK7E7eGE1Z|Uysc~Znm(o`k zbDYgUjm#+Y6QT!M8o*2Ex7d`m!@%45nD|gI!n1A^HR7h#Sf+igba7fsLb$d zxJXLBdqCJ4w5{(ZqJcY?zIojdss&|np?eJ=Fkc|_&`$T)Dg^JV5r0J5fQhdiZr$$3 zwB#WjMcLtNFbXFEe#g{&GQ*8C%8hA3Cdccn_=HNMWKZr^u3`uj9y&7}xG25(n1wb4 z`WafN&UTuGQ(*yS=(@Qd-dHMqA+$^yc$23q#{MFBqPgM*kGD6;R2?=;Zw7cgmNbLK zuwdJ|xvX6W4vL1TquERiD*J^D*U1uICBEl{`JF4cip7GO=kW%X)s9GjucRElxp>r!iyWJ&wQT{J059q1w{H zNxi5W==$NwvTm}DJ~d@=nAvp-(2GfdwZN9!?o|2F#yXZ*ezqCLtH!%hjL*>O5ki(rt8`sj9O)TLJnx~B?5J$BGKS$F z;G>{01M54Xh01|)FvO73vA>mC&^N^^e!O{P{)U*nl4L>}KuLcdBwP8Wn65Nd;VFq4bAs>COb<=feU4D3!1@b&8&O@AJ)vC@FMXN6xZ zr{9o`inpWMe?K2FkhB+^8*EO5`94cq5=CQ4fk>$={>7+D$J9m?1ztD*HN-5)kM~8K zV=efoyQ~!i*B6h}{lgs)%toX>WT7;2?-sGiHTHQ*5i7WCA7N6TJy>ZymrTzd0$(xK zCRJ63o6%95YF> zb$bR>Clq|_O3pT|c#ok4iEpr1HHX+q`_e63_1A3w`n(wJlK54i&vwfGWB0e#f2W~- zjbopni<#*dE>Be9k@BzlFD+jVBR*)!R_S6Q@XUJX2d5~T3apC*c&rNdty!M=FH2!? 
z^2(+1R*bG^18YJ9O}O4984_o>N_JeDj}cHEMO`|ahK?t)$qn0IhEjqV~SiKm*Yx&k@< zDs3QtJrL#O@l%~4l@dHq{NY>fRv`d(sM}6fz z!}o+^P;0pdv?nXtqsni$)PkIL>%K(O;UZ!xjh+)?`A(0(P%9%$u}Hqj@w#~MAS;V0~WT|Y_>AgFnyWoPNO|oW2n)Y5_7{jhfT+7k z7&Y^hjHI=+%$~ci6$jw`w8iR_4k4Y{t8Ggyhpm4 z9RAAKYwD%YdL_p$@FIgnAQ%krxWth3e3kQ@mIUc0&l@25ad{@$F$_p`5TOJ$m#B94 zVNamMC>P2`&)Iude{^4j&(VY6(D)y<=LVHGIPyQ1=#SqevTsVYKh<;@6kaL3a#8gZ z4fu^vbFtHv!_?LMK@lCmRz#XbJ9vhk1dS*LoIW4VMYj`WBLeiU4rWu<1h9g0()X#F84pSZ%F|Hhi%&8mtGW(`ABWm>UqhN}4(2=Q4UE4R|+} zG0CTU=~RIr{{wZc@X)twn5jC5RsU2Gf)w~>xkMgnL0QN^VSx^^Zpj%m)V70Hu5b~b zbdc_w`-R)j!RkJh@EK+D$P~FGYEbp+@w~xpwB#a2u8=<`YFN&&9Gjm+6f|}q6b)3s zO|8*mBNgrbI#cVal-XbUHtXZEi6=_{UZbaS^hUCyQw&PugZQpZraDusx<)hV$9Ggv zc(~?%p4KI8a1p@$zttp{8JimER!gCK_(}I9b}xWR8mh3A_q(J3 z5;yOK{=1t~(`#nApc4)%aQ@GvM`pBtiB_`3{=&@)v;XO4dV2r$v$M1+{c6OA+WEeh z_nU7sz|fUXQHO-#CXWk}b0-cq2b4bITL&pvs>P}Ezrl>Jjpyj)9#5X($WYji+h{j5 zSZIxbNkk5EaTb3p!CdZ3o5BQYY>L zLnpa;vpe`%h1i8Np`><~L_>SGM4OyAR?`ih<7tTj-nq~cT0;8ATq?1$S$mCrhC7^D z7yxD*P&the0VwAtoyg&xZ!&T6WivyF`!*Hxm7!VHD#*W;2sw;t`PGsO+?Li(wH}27 zl0YK6<4IfvH}d3z;z&m+swCAaiA!8#|}37YCdc#MQnrP?5oX3ZUX^><64)>g&d2pd5zKCY7=}0qy>hrhDBqd7RTn+^jnIKp^F1%YdI%OPwZk zwLYD+X;6UcEfhj9f@GRryooDyeN^U7kIyyVIPW!Gwu|Z8Wbiy31{+80#Yno^)$fwp z(SveyMwfKXNvAO|d@EuHWE;J;DWS%FwR{;a2>I4DUdCi2XAeS#A4p3ngq;@rEA!}k z*76QZB;j!_Zq2{GoF7COMb5Q&<2e5>skSG*{S@B8s?)e;3w^RMJIS%MUF<;g1<7mH zR_JO&3}uhg;*os&y-5O3HALmFaBr%MAsB~vuIyrzSMqG)wr?LV8e%J*0GdEYWNYoM>YY(yj z2tN#%2G#PN$0{t}jtF$P>sm0{*SRP81}}jb;bx*GfSNHge)c%QGf6|8?lBLma`n*P z*WfPrbIX}UC1mr_EFoN-%k~L{WwscO2;PInsX9s;j}VF^Q_E;j*N)7~W1NATPqzVQ zi4YsEYI;4UscfLeQ)pB<)}ha!0~PjEx~Ap7;}y0;WCRZ4lL@~*xfD#ql>&{R;Ck87 ziW1IHA4U3vhmJ>6_sTUQn7*?~%3z_Gpc8KgD( zGKRnhA=n@b9l)e-K+v?HHcI3 zFYbf)X|yreVudcl&C>R=WnpTf`;XKQlQ^Frj-t$0ZNH)$xqODzjIqZHaUaSBm+4N7 zS&DGW)yBU;L||7@n!AlXwsT{8`e|BQ8{};s7Gkn!T6WF?>|@fC$hW@yA1K@O+3{5~ z#+8HcfO!N7Cj)2ipDWfBUg+_7iR~0;FTXw^OBVj|EfAqSxliq0Rmig8w*iv9M}G31@z0ICi74*M9UCW&!j3Fe6sWfkBCt5aOJDrrs`oQ 
zXuuVHDuG?KP9donp3N8>F5{@{<7i;E0Jv3Tmj^TW$Kg_Eza1GRSYZey6ktTTqHb%!x?#b0^uHpH1j9e)Sk`h#M7SN=?IrG>E=up1fKXxKsbQiHT0JOFc1`mbC<%PtMR5~u56k* zWbNSbXEb&ZJEJuk$6cZvX?YuFgM6h&_r}HCC6<_=?V>OHw}p8Y9TqRr*{>2C&_BHH zd=?9EBi8?l*5GuOUB5yNgyN=I)|GPz1L%^mFvZ^5)}Y6Gkqo^e&)~+jr%=jm_#FD$ z!k?oU&4l&a40LBd=bB{O*rkzC-@8@}Wq11>edQ`+2K|ns5<(9cah5y| zTpr>)%U0w65!Bul(xg5b=nIJZx#+|@_P+1Quz7(KkL0|o23*%t+jO}0{x;EjUsHq6 zXl80_V>VlqA-lNX>Ris^Li3&U?Yt&yo93pm5bKWFr@qbRHJQFcs+h(}IkU0sO6tD;X^-a?OK>iUYTpF_yns>R7=!wo7OM*phxbA3iV)ox^I=^snR zZ{)b^!Y0f5;*ef@nb0_vyO553@;4k#=j`GXt%dYIV-?G)072W66Ahte*<*q~!osh8KWeK+;;@_R9~WNYZ?j&6S$JiMCp_X$ z@}A2n=Or-V1S4lgRpRKKNJJhx=dMazH%5JFGgKn4504F%Zhs$R?K<{I+X*Q4Br#3@ zwud9752iZFOC86_yOpw--eAUlzMh(X%)(dL%|1EmTfW8hCiP4M@LqvPf z@#oW4H!HLpk5C$NiEin225D_H;UJzCtpWEy*a_M?=H8lMst)A?$K2a%2Jqb2b*@($ z88wzww`{|4OgrJ}^-gFUQ64g8wn~H!P$KH3JkpKPeN#D!c>x`}`$vT3%?FK@>>UpxTYhr#}p=J1!L$@^nZP59gSb=xv z{_`E34U{mcCpCYaOylQ@GI%ZdZ|%%2?{ZU6YX!j;GWS;N;hM%v@&=g0ycR;G4Prxwm}j0DJ@y*v47w+K|= zG$s;e)C}vX5wVY0z7%??d`wzvN{BV^#Ewq2J`K9VvLw7d2hbd5?b9r|L<4Cj^7$bS z@Js?a)^cjvC{d%b@l?WGFKyMXZzAtvv&Mn$vMi91Yb@)R)5BGx4Bj#HlTSe*jN#Z4 zQnjTgFbD%UbJs=vc)4s+p;g-v#xU_l3bQ!pvY4@Gi4T9F7=~#+e#o^TfY8$xrD3J= z@|RjdkM6<>e4U|@p!U&P#MAA7a9;9pSP+wIs(fZloXe>u0bI}+&?r*CkK1#kH!z3N zS20OLCU(;@#B7ieT*_tghOUu!6oN=QV?fSF%!;cAF88Um1Jb*g0-?(365vJufI>)Y zKI})!z*iaBirTa(kzuU2Pf=p=!a|0ED7#1~Je3}2ovYj>AK5T848z3VVA}Rn7RGWo z{ghkfQKX>wMTIJ->YNfMje5m+L+I2@Q4`GnGL}LF^mF?lZa2Pg7EU#b7}%m1lh%f!(CkrrREAM;l7a7+!Ac;2W5@2g1^bNs*c|E%&S zLe{UBE@8qmzJj*kVG@hds4rO7k*>cgxvP=}OKv_~a#*aj>$(yw2&U0qglN5yGIXn* zK`#$SUQD3pLh_=;zJxkqSuE--ebM%pb36~ml<)p;P6p3@=@oTf|3;?N0}CW^SW^E$ zy4~mp&XS&{eP`GKpoSF1AukT)5~*GuHsj$guyX!1YEARSWA30sx;bj6Jbmr30-L#S zAL=|4X$M|H*siLJoX1a^@C4#}cl>2HJv()3Eox05Ct^vhj6$ce>b5VX=HDBbt)16b zV>N&biJitC>+(<~>Boup>an}IO_a_;*@!uqJV^L4jihWa0qwv_s&VT0RJz z59dXlopf0b=8<3}WGg-g2!Ln|bl3Ls@n=@MjO@swwW7Nc+h=slnfTf5kqX%hIrv6K zYB1=d8WQ^y`TSs_zBbQMaeg4+2*=v4TxCKCiD`ldxyXdqZuHWd3vV`D8XPloQnBw!r zxw@H|o+vpsDLSxg>(_|<1NdkKr>-8hCls#Uf 
zI2bgXQH#Ay8Vpr%Z*1#gHbvg?gMsxYT%oN-q~_C^`J@QsPvtmBw^p{=J&DRzyPe7i z4C#XJpnHpX&E5xG{NNNxhJ}DGb~~lBoXOPy9f)=0DZ4)$UtcT60OB8YO&R}(!1lP` z0W*bVhHJp-*n>Cef$%&oqfiLxX_6)D^5W|b=;W@22X<;fs^PyTLxS&7!#?m$7$jsc zTHUg&YGN$?Rs#7P*$PCndU8p+u!65?8@mJ?SQ%nTE0*dYnDW}KO6ajChlC2kUNr(< zn9R<(_^&T~oJqAl{OErc*fWfFoPVJ|YTW%n!Xg@zi|8MGzEHqI!=Im4LX72&~r@y6iY;q6;EOdB@V}KDS_S9bgfP`X}WEG=3eg-9D zsV4Uub(F_g9GG%jhL=|N!!^VeS;4zRQtSO5Q`jt%UW8VAN#y zz@vOqk>3VzTXU2PRJPWWud!zZ$cpw6Mlm_LIS7D#K}PEKiB12s+ zZ4kb0O{>M{jnl-5a6oR75e{7(gSU@wdxenYnLql1YL9x&JGGDHryzNRM=qr3x zu*srCkbj#jv~)4YDHYu9h3UqS2J)>2w#MrZ_UJ`c!g0y#C>L=37wT1$uLP-r1e|+f z51pV*?&gW6@qih*V3nIzFu85$evAC;HOxucu=gE!I{9pR3FOwjsnG{o$_jCf$vQu`*9EPSETkZSYocVR4s8l+(gtP6?FFV&bbe+aFK zqbGgMK<9z>;LCpaYk;F1-(4JUEddbWMt*VvkBuWb`{hdCvZVKmN10UIwCEAv>ysVT zZivXs%M&gOA*q{+?(Iysm@M3)*FVM>xVfpNmJ_MmNh5UeYvr3neU8fDMLd%RRHej+ zFO6ul{(uIWDOXYTwCFrin!C|PL8l1b%RhsA3^;yc&wMHD{G4xaq_ft89!P$1ZA|{% zI$Y8}pRf0rm?eY^mp7=sST4)VEd=F_A_|vPd-Y+!MPE$XzP`b}7o!Rfp+DwROsGd6g(qNIxt1ohuN#qwlz2>?F5;sConpXiL4ak*+lL&eSFJbPW zQb#VykynW`PyP-w8}l|gh@M7$1TL&jQ7a|Q znjw67_zOCa$;o)8Gt>^?mN0P_;7|B@YvHCkft5Mp&$UC}E?AYaYQqKpFrI(#OHO1} zzD>|)Pi|fmoxejG2L5KwCHhA~sCKZrt{3a0Hdmh-jjdOA#7~G>S`9i<-5@mxGe{3|gzhq=urLDSx3@VK;X7#n{Pch7)3mJ1M@8-BQ@vP*% z&0qUPeb|f=GI9^HGByr}4QWmNenB($`ipKOWB0SCt_s}Y<#9;?j4AHK>H*vwNgWr7 zal@cwxzUYHcd@UzW{`c!2^#Djd-c}r+n2xwrRvVoqH7kGwETU=D_2;gv-VyyVAwA_ zKM_&Knud4=&LdXeh$F7jR)eu0G1J4QM2v-(!Svh3{-sKeL*BMtJ7|!)ay83Qe4+Y4 z0bFcN-$i|lPg)Z3nXY{Xddd=MdbVcpNVRZQkDcpuHIUFJd6&EogfElosK3Mf^OSpw zjKh*}H9lse7UC82DI%QZ_L3!PE?|T6zc*#7c66QgUbAW_XN>7tUfq>E+YJB$XICK_ z*TOY#4;KsTYn1}3>m>VThxb3FoSzc^==JeG1}lxBPTeAAB=ZBe$toz)N9$73*owf1 z!Ard0NoX6rRKg}SvWc^|E3bT~cMg$KuzYr-MYFM59=C9`M$q7SPM;>+grbw3uKc5d z(NZXnH**%2i@%>4hS!T;!}jOFE;gV3O6JdMx~FFu6@%HC&9D7SQ(^WuDx~F8^xZ{$ z4X3Z8y5BS|l)UN~e@G;vtd^R76_Ad&X(;x;Gded>gtZ<-mxCg8mlC7RN zd@=Riz3AQ(FI)Kx zGXqd48IgM|i4dVvbwPev0nCKn&A1X&xdL1877~HWY za3ran<7#5s$S3zfliM<=_dNGv$tW5f%`7r3`f)g838;+|ogyhaS0x2#)AAi~GmG$k 
zccC#Pkz#Wyn{%rtXUYfVHRqUcvf7_fsE^QuI%^c&e5y2pf0x-*On=d zeE;UCOJq`&2TMEMG^^a~nMVJ&g)*Bm&K{ryMlQJo!ah?#*}2YooF|Vn0Eu#`ZC+b> z{go`SOHWC}%&;paEV5!J3aK6CjcViLN$P23;0z`+!0hup|8LUq$lTb0Y!!q5C3&DQB7brE6H3>;0z9v%+?{m1JGh#XOc4B7D@{?K)c@L?!3h2^eZAlagX7c*x4dC*gT>N@P7o zE`^d8BxHryQS&^Z7!$mr;AlHmNvhP3`AEW!n`3SL#CZ5JI;9GT+P^pjno}`P?s`Jv z-wQHLAp^FBfq+iv2Irg@!d%}FsGrKMXWR*8#PpOrBWRI>3AHV%Br?v8liag=5=rcBQ1@=w*Z$xd>= z?A1Hj7(r%K*V>tpA7!qx+aby&Zu`7N-jhUvqVj7MnQdozv^Kf5B@tLkvq{9#JQ*P) zpEpSvM|@R(W|kaF;ZGmBvg8e-VeABswM~)KX@o8wJu(?oc6zX@X^QCmM{dV@PwT{l zCayiw{fHhSQzY5>3Wd2&&$hOKu1mU6oIxw*W?lu5e*5Bkaq(1IX%(D4AF%Q>>Yw$t z01{~%=`ys&MLk%PS}u7{4jg$;(8>E{3lh%W=HpJXlH}@43fKjyh!46X8ivD_FHE zFfx(Rv|g607R!4nb_ z+ci)($kQ^MW}qw2{IU#}edF9Ej&wKi^du{(0al2sy&LUNLNNEl(8T3oqEjH^?wAa5uvNQ~-bq8|_WpqOp#MDE zID?WceU$_bs>yX8&29?BDhfH)BAp9;DqA@3a*OwveTJPIa5SCTJFPvda|`G@A)Myc zKKQ;_pu89(2YBE)hPtUo8Ec#S!+}!Fsn#A?jx1<6oSDl;33@7E80UYO`-Q@#or@qv zXJa1{mP%W`vG}CVLcSjzkCFT!L03&Go2je9{$fZgpdwO*35 zSt$YL2Uc^UW*%jkDe7O{;MEh89NTHNzyR$ysLW~wHk7AW(DptAwc~@mwTzkzt!{sh znG3o)h474*FBC08IAD(lq0OWayS^g{9r!&aQJPtQ#jLFv)ukS7;+fPIusB+XT@r~J z=vIi2nSmziRS=beLk6{vB#rNK3TRZW7k7v19JqOJc9dkrk;nJp?veTk`DsT{ACuhT z6AA=Ca^x-yd^`3lT3H4VYC`zXeC$$x*zJe*)#u00D}9Ox*;NOXNLjc$<>UFRzj!;sI6+BWU@#vz&Aaq3-k6k2mzkKp4-!SsDU3&V5H$q;qgiq8 zJC?=o>elP){jF)6YOh#j4eNyR47viMIiY7&k!{`R-%6cPY>LJU?Tq+qxkEfW)8=7a zwYh!NbJ1~lgbbN_NyNV@G~eLn*p2JLfe{gYZNXj_fC7QDR5SZBAuwcTN-#Ic($7g5 zD55@hz?!ixap-1^iVb;^L~txMRhj^c99x~Wq!rA#>6lAA&TqI95@{e`8-w& zT_+=TQI&(tW6*+Ng-lqa(h+NWWuV{v_rgeT2FB=4R>Rk>!kEQ8o&5t(MQX zTJqU$;u@isgmxx4h9GKfRjE3Okun>nFlTiQR0Com8ed6)j6jgjORy>#)Mh+ zc$7lW@EgQ#!2#)s zZtp0~l6|}%*lWb=@E59d!Gb-$O)sJya~BDblquYLFRBy zV=B`wmxjYmidKHSbpL|o@{%XKCr-=^SKdGO5;I>uBL~{-Y|iK3_ka^|%02jm`&JHK zoba0@xl3T`{L2#wo{}#&<-aH)-Ei{@7RVp_a}46&EQ7<$j-RG1^;iAbIrVthLMBtW zXkcq6cJ=xSw-T^mydtnFtpY~D88k@=IWsx#2&QmA@*_nk(_F+@UIVC=vWc358`=BN zGfki`*KDy9=`rv1i6i9P%FPKhthI5O`Y7*sI3qX92t>5~*YQ>Tl%Q;xQ{Hbe>Es($ z0O8dJSbCg_nht|*HtDOs!_-fkV{=Yu^$-?!6@&U<#blxU=Lu9w{h&!r_lqxT8x*!B 
zrnOdm%8Vx{k$r%ZsRTzd0;3;hgc$wds9~}+Qu~c3&0Ur8I4=vD^(b+_4>%4u-J}vXRjc#Jkl0m z6>)YO3+PjP92U-yOz>H#BUn;=I6iAI9Xvf?@2rzg3UeRh@$p+zM!7QusB@zoM8F3u z6&$XS%#V~y+<-g~HuyDf8Q}*RHcL{hRW%He#L{|e?Z`n`OVpGpzX!cl(3@Je5T)e| z57bD1og^|XE{8^_GY>f{N<)>Aaq|+TAn!RXPK*F3FZy)DR8n`}9gv%Dys!PtY?DN3 zj)bXkgs9zSj0EGjoq=sv%)qDEn3=)pq_2@Z4k4!XzOpARd8FJ|ly1#e_~tMIT3Ml} z&G0Y?nNC1s-?kZn(WGuAKvlVR-RN54qFfAs6%c_f-Y<$1D|%3mMZ*MkWT0MFZ&mXE zmE|$pq49hW%ju#&y3dsVr-Y`&_4T*%@$mWo*8h!y1x~vID#cLD3zV9VyyrdC%DI^c zX6B0MfJ$@EgKc_8ny>OpqL$v=4295(=teczuB4qi07bTHOMnSZQswr|L6!O<7!26k zv)|Rlr$hON%FGVuR_EfOem`BI0XQC)+s#MD)(otc#>hzo2-253yU~#rU2N5q#pb%E zjLEapNu6ZBV<^p-puLoqzH)~OvS>ZIL$D(`<_!f5!^@pPVV@WgZ)52-P^~p~oWj#4 zG=FO$=&pRvR$;vqo=cSziCPwUW%ucf1E7G5Ai%hFC^KzbT<*H6v8p?9?ft8~r;F(e zNLNiW8{PRKaZ)IidTF7Q!eGMoT3BJpyrSJ5j)G{f6g^NM-(8$&9_+FZY+Uh?$@s35 z06T5#clguT^wizH1RZaD=f#c|KUUDFoh+(yNs>;P*Lx3Ftdy|f_H8M8eJna01Txf5 z^F&T@(=kp1QDab3voJ*MqTZeD&2z_oqi(Gv;WLeQpVxN-&@AS>-4`u_s*@d!0yj)1 zd0)TdON#6P;1*4YOM#gzwWQv_ zXxvmU0%4{ODYz+|5Y>yxr-yoD#O!%O_N|IgAg0P(TENiM8vbRrXix|Lt_e&>WM~Nd znnaJ%qP$Xr+Ap!OpjtN_&oWIp>xpCBcW69FKogz3WMf;LNxqfM2aL9v^H=pmvDU=c+K?)Nr1M>Tv5qJC02Sf`c`NDZBFqNAXyXwz}-mWK|zzx zWz}1AP*lGOa*Yup&ph|YA`6pn4z2&%Geamh3Mls3w4iR)+Att2ZFBrcrHH33UxTE^ zmss#G1|Zq-u(5y;p*;ILR!m-|7taSsXAW7SVYNgDsM#dm&=v6Ls05FwRt-rZlKl{9 z1V_s-{r#1d%rRkB1HVF#2dUSwlqN8q`%R^q3`K1F;b?X zVch-0$qVF|UEK@*V6e#(hR%?gW}JFi<_axsr^f3%(lUH{8mWpuJb1Tog8Lk8_W zkvY;P)#Ht1(3_a&&iR?> zu_b#z3ztPf4< zm2|NMuDJv%Pq~Vqu%fTeUlHeZUtc5b>j!owr8VptS_}a#bZRFvz~po+?H19cW0hMc zcMi?}Al;Ax7%i*GYwI+7td!uVs;VL0;3AZmc6_qUKIDF$2qgyVU^Lh zAEs~u{7>($k3OGp{d2s)$5%z`YImz3%N$Cn{OqdaC^Xp0FYUNs@JlJe@*@1prx}@u zn3&|`spaMpcT<}H@efsQ3h^b8W7>gP+TG2hAjyd`_qpemZ;ep~wN0&Wq)~V|WXWv; z-cghSJ;+QhHIOasYY)3Xm32+vE=9m_lF^6B+Op`|4AVHdA>3N4-Fz8soNLX=I{d7_YPHao(BgVQ!}H=JYCKDVZbi%HDr8${re`i% zun9*VG8c97#QD%2ZEQwPu&|ayy{O`523x79YmmDT)IF2Or z34-=L-<_thuZO5_sf;F z*4N73q2^Q70;n<)pJRwwtB%HaVGI8u+=lcv(Te&W0Gs51DfuEnm6K}aUoi$-qs}S> z_dN@PJL)QzLs2PF-GC+nYmKY38Eguej(qK#y35zOfFIk{)|Nax|1kGqzw62du^BH{ 
z+XZH~-xIc1j8mn)xd!aV;c4zE_{OlnK{EP#8(l{R|6r?rcxX)R!M>z04%d&F9oACb zA<72|c=w&A0`)5qiv?@Lf4m_8^2IG;NMyN+E354FoStT{MeWXs{sX-+vYij5+fMN{ z0FsWz!M*fx-6Saj8pOAR6JhNNQ0yr^i?dYRF$Qws1yphS@3N$#otq2`_Lti;mvRq4 zp?4MTh13B()nLVY7)*$Vm)__N-RZ%Bt2H;y5(ND&B{%+K&I0j+JFH&K5Uf+G2`L>d zEs#2$8Id$OJ)HFme;W?J$>_51R6T$NFk^6>skfxe=Ny}gc)%)-Fl5kakI<&YNEf)G z_?-I(1v{RH0{EYeMJ=Fd8<#Iu=a2G?Uyb;T(H_K+xUO4tqTgWu%6Y8S9RFL2{83Zi zV=W~BF~&P0RaYWQaBvO;S{}_Pc_N+N6s+NvrJE9s>4xQmGnIKL>6s;d^OZK7O?iKn zcFO>RwzK4>x83$Hfstagt-HYPjDO@WWSq?ug1>Q63K#q(Na}!qM{c=`A~tQkM%KZw zCL%6{8n*e}X!nU8g;J^_$*Bhc6MSdFkyV>^2ub_eiYLX$DB^+{3U3O`HG9}5B6W9Z z@HIU9L+n;Zl0Kjdr$hL5o(ul#vSx(E?Zl z>SIs%K^gjWs)w6aA6Q$7^%$ zUWfv_Cx#Hm1c^g66UyQ7h0V3kkW)VpKMU3_9GhDk?XN0uA`aB0J2wm@mU^rRnV1t_ zK60#L7rxtl%#nUommPus=q*PG_2)AF;#=r6a~smSKGx?$jj&_|YqH!WecJW1v2I}k zWo$hD*zC+rB$tpL#xcx{Qu*d>F$@VB(IAZ1;-rodgo~$zTRZd*7h8%Y41>V$22Jf6 zU?cxW+c{u~vLsNjZM?Q^+qP}nwr$(CZQHhO+nT?L*+f>Htc8 zN0h?um%T0YTxWfsqP!y~0BgCyH>FJXG(sqBx&KLFkp0Y_J6PfcP~y_7cE74Tceno9Jrv6RR`#V#**;$04=`v^FqzBp8xF~W z3pK=y>;X*&3Wo{}D*>aQ8fRBV8Kx#-t%o|o6Xc&j5ci4+=mznp4}hxxlREDz zx8VZyPsBfGk);q)phO-kpz%-^2EQRp!_cV;w#iNVSFh}tvQ;(PtZUy3>Rd3gK+iDQ zD`|4na6>saQri%E_&}!5GTxJL@EdK1y!NBMQ3NjM2$dIWT>Kqc$VhmgeKPF^>TjrV zOc$kT2PYL(8^5UuH9!R%N2e-LCvZxo!;76!I^TR+ysj0tjm(=_a@*;2sSE;!;kX*J zmaj!K9gMu~CO>;-r2G{;aXfv?7VEo!n5?AVf(jzUs&fTxwmIjmBoCXo0|@f8wvhm6 zM8yB%Uqi~%Rzat~Q$?fCI@8k$pvmfn=DwOpvrkL9=3*HKc}UX;Vh;Uo!y}j7Hz&KJj_4Y@syfsF^N$&*gbNsBHHNpB1=}7xbfy0tF$f&)LL0)e``&EsN9u zg!sPZ>ikS9RiJ~u=16nA4WY0(Hg#U#WoX|`GYQzN1{q0eHcx)g9ftTsV@8NF*G z?+hduaN@Y}1}{OgJH=fO#Xjg>OqAm<^UC0m%HS}7&(1Gc^Yp+Md*Ku*3bp#gV6Ls%~3@rgzt8y9U+kf0ZFVJWZ& ztODF82iauZUf0FuxsF~wZ)oO#b@`gN*iL8n;qVdU^a{i4*xSOwrYcggJ;isu-wKc+q&-C<`B zefgl<BCxE21KFLdtH!O`jp;WRz?(A1tuy;IwAHI`g&V;AfNe>s)0Sk zwI$eOI21nbE!U3W927|%8SsR#M}epftG`ZRZ;_;*@92_PsWgw7Lj*2Kz+2kU@e|!5 zXia^wg9vGcI&mF?)sU}&ou-aLK$~Ev!X^c7nQOrewnXnfbbK7Vd(9oK-+&Z7RR!#? 
zcC~ZQ5QoOxPgPhp3cCFky`~((;xASd`DLkfh{}uppyP%)Q%)goOQyAd^CNiy!Vf0d zD%WdY;?*2h)VTMTv70ycE`vK3-JbjIl5j=M{FMQcTk4l?e?CRR&)v|~fWjzcHPib% zB8gI34GrLd=Uq~_WmIyqIs>!t#N9A?US2#s@&y4^+qT=K?DupF8EQMFSXqR>=l(RF z)pF+F1(Rx>CGx7m^{_mxG0^1{gCGw@ItZ9T;5|))SJ{TLI1wR&AQ((qMR5>Jog~6I ztpG7}lqax}vtw<`%WMcp^?l-ZIW@TriM5A*C{`|+C!1$I)jSSv&=%VCfIakK#hO9- z#%lTUx!*4NMub_`bbzPVF<9lAKicB;xVx2KD@T{@_>wK5wQ0BBO;3iN!oU|{R24+J z;DX~d_E$zSP+v~+`rCG_BkWu05Lq2pN-y|&fs&GqCWsS9B-zUJ!FX{%@5b; zIWJexwJ`}(*$6U&z37G%=y!%*J zw2J~qFnoV5*Ct`8;5qEZhj-AEhbn4)afcGgc(1aq<{LOLlA!TAZS3ZJgvt$O+2FGS z7*CHmP=^fbp&m5A;fja5d?eF}9TexVLb^ZsfQq!1D(F9FF+uX*g0c)E+lpdH08HCl z27?ui}!phqjW=yg?!6& z)V@jX%%Txf7e}9ucQv&rDlg#64U+@+OX<|nzr_mz$vb`DUQKnLqx4X=U9H|m(A($Y zl;ZfbG8Eavb${>DxRD$g*mOhscw8uIO>lz`zBVbxxx#IF&QyxG1hpei$`odtXToxn z0U+&*NdSPxqHK7qAD^rDDGP{z>atGWy!Idi~+CH*+g zF54{cJ%t(bJ|AWjLWenhgU7PFd|LTOQE+IeN9Lb)_wB{#D|cFWr}HfJ*2^j=y%v2vT5qCcd$y`H$7_JGLAzu9UD225vA$jNribvx#2bsT7Rzofe68FqG zQdNL&x{}zDowC9&AJ2gr534vY)ov+FuO;&zeMIfS!Sj9Kn;f(dRk}zil{;Z=FUZ6S zu{7^+2}CdFFiKcuUZW1v&7p)TPP#Q9PpzD|&uw!KmZJn)gTr z<`IQv;#*Z5R2PeCwb^TOYW0%F3_f!TxNpzAyM zreQRcGQ6EJm8i%~pN6qnE`st%CRtfHfd@&g#rCDcB96sWA357?ZAEs`XHK_+87gz`j9~T?6=X_{Eb9Ki0*(S_z4N}#CRG219rALona%>BA1~pSiY+7@1>Y4S>(ZyO6G3hyC7?8~l#95wD|V4# zVwCJ(u%>_U%lTd=NHb&RkB{?%>k^GPmv!r9f~&OY!)J}Tvimnq*GkA9!nyuLe&{5U zs>K}CjlBNG!|Z$ap+MA4eCfdUIaQi*ae%%=r$020=D6m&g^CbsYvhhrVS@=7R5vOL zY}`O;7$Hji5hjFik_cd&KJCH#?CzNgL5V4g9WN)aOm^?UTzX$x)unCFx5m6rCpz0p z&pkX0`IFN`y^)g#UqGsC2`-H~-XI4LR2mbv4CU$pbwW?Zf;k?X?_$HHJ;;Ki7*?O@ zJ+C>oo)W>>)2(mfe1tx6SwlvlC}$Seh!4z;2j06{1&5PK+z7jbKUo%~+5emWi4wU| zqU@hgpSBN%N@GN8c|BqRHEomjmtaAJNR%n41;d{U*$O8Au|CQ0}L$&IK#@o zw{;+i1_A!iI`v^gdCdI;DVM_V$TTu8a~S0FKP{>oXFKK>I}C^DKW^${h3Mohe_2J^L^&T`uI?+=tD*>S z%*lw@rmddn(W7nE7`zxpOxwD%O4V@gwwI5Z=q0=pmivw?@m2)>s^x~!1RCV$ig7(n zV*R7QRbcv)$_FY(;IU<<=jSq}N{I^ilEnYv&{c2)Y3LhEFcYLdksdLr?_j%+Oi01f zzPRYd+ywjS1r?ni;OUvP3Q5>fNGqUEKdrko*LGehZT7nUVHer*G>1Ym>pY@xQ!ByU<-BWORe@loMb!&kwO01B5OnxV-QNDW#cZ1n=nFH1}d~44mbt&ObK?yZfrzw 
z7Em-vVDfr!Pk{p?f{)~h-}jcu>JMgZ$5|r!%h-QOl8K=tLYOxEmud1TK;b#nf?&St|fr@`vBm5X*9|Q@w1XN)!@>LG{uq9@!^{dck%dZ4O#w@lP2E@oe9ljdg9irSMfEMH*$;sA{z z?Q?Y5-w($GfG92qcxO~5ryq+2Q-irICKCxhSE#7`+uGt~A?uVy2?TmqecUKYf3^}| z%E6so=C8H>2$WRoizRx>~P| zuh?ggy5@U~x`KJ^CcLIzyG>UrdzKBB)*pCfmyr3mui$`w zo0n}&0(B-jN6DN1!tM?(nXcv^N>-C-jCaCklU`Y95h>66fh>mpx}_m> zewa0q;P(G3y-;Q>dmF8z#CGzyRSAo>PAW)VW*gl9(XJ_*DHOAE&TK~1t;@Qhh*m2u zjgi6D5=bR^&1AZRUEL<>xtY4*j3~TELgmG``X>GaT0#cdzdlZX z{!vkxKxj*E-g}+)N9uHc*7p{VkP6pqH^?t(@*0B`al##Nv;^GY6&nfIk=h~LxnBc< zglef7u?%T{*K_VqEVpP!4qEqSP5pTL_#M1fs3Pt;d3l8*#Jd~E9UUEgsB3f@E13Xdts{9t1*c0E8ywlI66e{CsI zwD|$wyp#Ibp^DUaL3)BWEJCcqdTdL3h_&c60jw>TbetfK8sElt7azsNsfuPxvEbQI zOCbu$8z?Pfd1YUf`V$yukK($GtnYbV;GWDu=n8W0__I8n`t(7$qRZ244s2p5a-jtE z2wyrmO4!zgeZ_T%s%w0SNl>u?s`~81$<{j9lY!WS5fY*sY(2aB4|@et4GX3TG?hy@SPQX{n(;58qZ_^|7F|WC+b_p; zUjB$C3Sf|2wbx4#jSixMTWsGyGfKrzvb1{H)k)tA*TTbhcl;jOmP<(L#2V4%I-5~$ z^tcQK1q+&SNK`wCZOzj!47G`h`H(;Uc4lfNBz(V58nOqS%)3 z!g)|wu3I7Rjdup(s{0^b`(EQC_bCz=OjB9-3PnC{_Iu|<&dKG-YTK7C8U(aV(RHy| z7?7aOaDS>~`CM}I$f7N&TXk4|b3|yn zK2Z}4^VGG3{#z8SbCdTGPa33Gz!5YSI)a*`NV|RT8!wIenfHaW-5*TyjGb`AF+L1Z82`W$<#I-qA zlf>oP`;9A?_VPK5AThlvNa7-jj548hymOdx>vLh)1BEqz7V5!`O?AmLwOElC`Ij=wvIX#JKL~Co6 zdmJP#qD_}k=-RW$hgmj18%sSp3M{M_gTbrK@sR=>#8ekGO8GGf^e2XlU&;E&3{X~> znAbwdnaWn%aa9G`Q4WVMxopex83w@TOC7|%xdtHnPYww|U%nhH;RUz5Ve-u zqs$-0CqTsCp-nAj`*Mtq5#0oT6@B%tIAlt`LpX2exTx(wq3fPF=Z8N3sTXsfvrX^o zyLU#{tYUq3w>oZixpOk;q5<3y=Am!Qm#fvKTTkENCLDO&GwmM?;068BY7D~v#Fk@` zC8C~G!A+o5B+2cQepdj)U)E7tQx@xI38gXa%3w{KyxKln^*sj;eYoYfq~YLRf)X}+ z>bA}Yw0>7Rf?BABptnnG1~YyLw~md=pEa|5W5}@ifDJtx`zH20vp72GM%#pXpzb>g zqsvd4r^q?9`1zB2Ig>^HhCb!x!^H0W6_13_wuBSKr)hw+Vl?Hu1oF)!R3lMxx<%1V z6Gyt<3Z0zvJM;PAEl&?NQ&XB>QD?mHhjU&>S9;H6=ECvGD(Is-Jm8KM5ia%lM&x^- zyM~!$>gvN1e&X_ekKfX`VF^oB0ll*XtcB5FEv3~n=Oiu~EBbTdE**h$N?!i(T8S|O z`XYnI;@j=~o=X4U{10Bguz>BNo$r2(1jI#P4R{+j@oaslnmtz5bk^}6tS^c>z56Oy zpWmJT^o7Z7Ckgp6p+GIIm@Lj_!^vOM^z`y^;B8Cef&R%1^Swsb9h-y-jtrL2g~3F5Tx3f9Wk5ro2dD9*F6j;BHs7>@)(1RavCC9H}zjqYI~Xr 
zip=BKHlisQliI45OFR$G+H#Uwt$pHIcCNyY9V7DRbgC1L z8`dtNWZ3gRG7~{yf-Q?+S4T~)m7u){|vmS z>LHMsuK}SI0yKw=ynd4OxI{Rhpe{(z#h!-0h9ag(>k7qWx>Yz6N7&SV747;E?U|2 z$~dZuiGg$`g(}6Q4U|cYWU`BL>__cb&CnPV#fTOHn@dA@SA(~0wv}_aawck8rlI;c zE6=-7(FnkduPtgwe&==m?W+Y>R-dW;D6sb|fpKFQWm6h(&k?&1|5v!BCST=ugx|}CHnBZc~cb# z;c{r7Kl)ECZ5b7>cYG2@@0vFNx_j^C!W_Gm_Rp#fr&M-WXamUgg1q>5)BczV07Z?M zSF_dEpof1kphx_2#nUbqR9v1V>}zC0cspcF+CG^C3x0Ht@6;p%*-m@_nOluV^7*Pn zu9*U=>Q?05Q~XE}Jy^L(+=j6J|c=0-+jBY$59O`eM_ zpiLIcxmtzZZTY~~@AJZypii3t1&!LL_>3yfouH%%bB>QpvmwPYIr0(Ec(XDpLN@A| zHp*~1(Zq&A)DCWElBg`%-aEtc@=_Y%>$#)a+-!qRj@xg`21!=33UFP0$s8>9Rb5?# zPX(wmYelp%F2EOGh!lI2mhY zQwvR$oinBVr-*md|JM>>HY$w0(+eP{JG0!6bh^yzeV`uX4@4n{YbjK)^MMviO1?ha zH?UEh-hu#mX3k?}`T@b0uqOF4uU}&hvx0Z3rZ}Y%nyopCNEE5w42c(LF{A#H`!W8s zM{6#*j#1q>kDUmrS(nybr*mNm;)$-)sUuNfLV);{oAR8<>2icq(?^kGKX5LudE{m# zu$9OPHBjIU%^BNKM$RGsVLxg4_J)OJVa?RGl}}`tHFI%rbz4H{7%18U70H$T#M`nW43s<7<~sTXXe1Xh z9=nSWLh(mS!xsq0=&)3h4m>o@I`Cc><9WVh(GkeyGfn^Hy~y)^B*^F>vbDeyV%r8b0JfRRyNT!r$Vre zMU3b8ezOUw=EiNtH zGDmHHmA#@DXWQ^j1&k877&J~1GQ#Bg9u1}q!_Gs)b5-&kdLGZQ2fvyyuq>1LHzEGBp$3%mGXl)Tld z@s9igm65+*`3zAnIAMu}ybnYMiSbI@K+TTOen<#Rr^dnJTt<$^?o|B5*%9Ml{uDrv zP`ZTgr_(y{_zgbUgG#OTVf(8z9?c*pw;`WFk200uzx2VU5lgba<9??C-;N$j&Cav* zApa5Bl4zq76?TEZZS_E8*L$^##T}Lpt^u6H)tHaq8JmZ20BO#TAfBL|%ftr?oc~rc zCrG&q)}VG3ckG>a z(PnHQQuh%MS8Jh<{-Jts=LPU3KX_w0;@$!%UTdbghKil|vQ*%!IRp}c7tUJYGof?Y zPy)=Lzf}XziNf1}(&BrF``_+mBIh>c1hqo09_yKMsiQ*F+)wThopta_$9Xvz`=|ox3na(=_1;n;BHTU_j}{xrQ%s-y+eZTC z;|q=;+~q?s=qRHf?^>-*tmA%*Up=ra5#m353y@r4S0fe)V_LGkNv)E>1xu>BQ3q{T zvXM}*aI*pERjtBz#xB?7z*?9_?tP}y<81cUJtP;VcNzIj92TtDTYbTK@4Bsx5vSsx z_#=mA?tUS3X~# zO#_M=o^n9>`bAu**TP|(6voEU*r?T|lG(b&eQWF(&}{-30gN-3T_FJG7wrRTj~&_E z{VFa!ViEX}&&LWoMh(al)&cFWGYZ#w->Bo#l_>nF5Y4vL1a~9f7P7pUoSJ)3DUp3) z3ooHWmK&fo1!^FRvVPc}Ll5{Yy!MTsgKFjNa@`M890PVQd^ZJt3Ko^%kzsYRWetreSIKqoS@O*H8`m|*o zD%+0Juh{d_j#+M!oo^@lR+S9O%vz^3@-k=JMO+6N~Wn1lX}R*-2~G zh19*hx>35VBrvs25EoUiQw{k-##6s#@Br%fzOx&VG0|@=AbHVHL@pY=Rq3;-C*1(- 
zd(}d$dIsfB?F0^lKgRRmx;5}qn}5`2N@6zdT=ZtU<1RLzcHw$+3=XOI)|5&0UA6WJ zk`r1s`>H>}^jhT^WmjjBDj*XKPM?ifz^$U%W=dc!hxn#%=!8BjoNNncB`S4A+*0s^ z_OcH80&emu!RQvqJfqe0QCuLuDP<9J9mfaOt8~?*ey>56nFvh z{un^6;No7LGm!0lTWvz?$X>?;K@xymFkK%Bk z+@k^0gq%T)c~6GJ{r!%B;ze)lAt=QHGYDc&k0>}YnG;1~ShrpS1A$Wi@-TA5p;yOJ zK2h~}?iUswFTWTk{Krzk9_-lU4-LWMu+`i}nPMH6IGU_mLd)kn9aN#%|Fa5<<;6<@ znFED#ybzdHvK`C5=G>|D)s|%t*UdB$96)=&i$E0;=SF?6eNV?7PUP2;wURWy*?Q{h z$*w8xqX^KtasPt<9QcKMvfcg89R#E{&zor`se>dzv+=iE|0oWho9#dUcKWqwm;gd*7amTR2$SK9v6`@hN0 zEm4P#Usv}l{k9_jC&^w)A2v^8aPCw&?V;qKWfH!N=ABrI2V?F;&ivQ9>i|I6pUUIy z=Awh_8R?@(l>0N~+0{f)BP4g4olMU_=GZJk%G`{sn>UPdb zy0=p2m=M}P6gJWLR-$grdnL9lX8~QqDEUUYxh4zDQFrqP!=J9!tgh!{hFie8jA<4i=C=A8szbP|@3?GR}5$lf#p^r96AU!KT&@C%cr2nz< zn`J4z#OrT|LxCW4+5@fq3Nl@)d1WZc@p2gXEI(9|q*k2sRkr6~+a?Y%VyfMUDyiBk zrk}4ykZ^_dNToL&nCtQJ^|jTOFYkI}78JYqpoPC$9GW9>9tMOUn8_5veegH`7~{rYUlgJtE{9zK-vsY-;d@Am2n435wV z45~qh@ZQ_atS)J9Irp;nE(I_si<<=zy^R$re0Ur;h89i{Rkyn4Q!jmcE%xQfbfgom z5NW2+(8YPo7(M=|TUyJKxz)L7QqstD+h5c#3}9b`@?t9wZ$79^tN(ztSHWg(9cuHy z#5C&#%ihy5=PRl*iq!T{5YoH>%M|?t9C;ZssX~qI>kBr@pQ*_@ul}0(m5O=y*LUsv za>Vs`v@|Xc)tRIfUVwf#!E3n-u)tQpfA>~0ci}43iVz)Q-_YgP#jx4%G>#^bb{kV7 z$^zApHm>MZ!*fsT8`gpC1KPM>+|PWkg1+XzKw>Me&$hwH5NharP_?i#x@dav?ezDR z!WBwa9alcfp^oQ3o?pE89kgh70(?Jh3?+7B!0CvVQH`!%`|B?aulTha)nZ zfyb5Z9>mAflsJfru-U`y`%KXe=dcuk3%)A-@~5@ zWcW0O-*#DDlO|_Ums5S%YDoe~K*A5b*)+;Q&CQ6SDIhM7rBUf!X)kpNAFl>DCk*j~ z+NTtkqQxLq8Ii-OrDBFz^c>^*bf}dnmjn<;!Y^S*zE~q*O`F)g@9krFC^~5T3`@aA zPkMV1x^Bz5oCMfa*i|8M@k6P}k+ zJ%jvel1uH{`<~c(d2W~i)WR0I$KH7`kEg2_@HL0=)^%N8@=9F@kZ6rC$q~B0KF$Zb z?yf5FVV70~TUQvxoWB^bu0RJ1>ao?GU6&l0tvtwv|4_<+h$KECv!>`G0jJu;LK9dj z{v*UwB}3BEJR%qax7hj&*0neheF++T=F;r0B|q5VoeE6Q-A^Uym@m0wG0AO#-DhRh1soeFr2_mppfEOrFHp!zdz;iux z4GdL81jM;1RwEc(15MApP;#wCDwOO+#*ov^@5^SfL?he~pGmZ6>4q_azgVGTf84U6 zH-M))ReIxuO0$2N7x+tjpKV|!L1P3Q}8jQN)69ti4b>*Ri*OcQ$tQ(7CURM~8x6eo)|qhUQ3n$k%x z?&YW$)Nj9DdH7PH_tW5y-1rviGhJw%%(zB@RAbbzU+Nj@IMUqbO7?;wbB`OCc3hDe zz32(HI&pu_DVNt#ieEGqr=uo@^PMv=pe~r<$BC(ii)eihW|?QhWuWGhR!iubZVGFv 
zEAxz5K$a+VYK6bd%m{Un0z5&s_q-*6pY>A_Y(V*!m^1wkF)gTzlB=tVG#G*5#5BFo z?z4Ev6^g+$rMRi*Oyyre>ulN*Y!5%sKi}nXYrUB78c9RfzkFdp%;v4OORYV6c#PgP z%czyyG+x}+SI9o(CR&WyVZIu&I!03-u8FmobN>g0`v~eJhI=0pf>;06sV}^nMxHxn zz|$z3NIxl%BMb7O&BzqU6v*b7wf;Weig0m&$h~;@h5=EPM9Z%o*-EGE;=@2PqO!#1 z_@peVuvrgX%?)D`4?WcODe%YgLlae^(%-OuhGq}Dm=QT%g}P+V8>Q)Zl%|dLq#6r) z1S9bjD-i%gI%&*g@0bm(YdM*oadgF~JMz-i#6zTJUZnCUUzovuKaF#U^u{1`pcq)i zQ=w~ZPD*-lUy!+u*-Zvs(wn5DdIBZjpb^ZH?RTpt9meO6)S+%qjWelLumH*2ulK*P zn2fFj&)dIsdE$#-y!F2V)!#VB3)bZq9wvMbbWVS536$Kw-w^kKIi%vV$JpU^Ae~O4OfnQFfhUWxet1+CI+7 zHX`P?bO`)+zlgxvCL~kv-TuXD=GltgH4ItGz*|)OOgh_hzWgG6IzL(|KlodC*98GV zu}%AO+h3az5{WP7U}4-(i)Ov%=&sYP?IPlr-rf;q6(5gDQTI+}BYgSD1E9plMq*FW z!7OQ7JpBwz(}To$FV9(aY>|ShPP)AI_VYos&)nQzr4J0>euL=WhaR3`lR)p zM!OM#thRx-s8{6!A4ZFzzzXeaj=XWVVPj?|Sw4nE=G_AW0RAe`rPe<|o~7csQ!JiW z+H3dK{}?LnrDuEKLtsX-ITsoO?O@+#H5TnG2E)BXXnQBHyZb@#q?*)WsgyJ^^2B-+ zwe_1YCr|TS_Xmh0kBuxlJ#{uS42yR|M`KV%Do`P(Co9Uh^Qz@%A?@U(QthiXl?Da6 zz5FQ>c)F6*;B6rU%&3@8H{r1t=}*t?ABtp!rgK$Su7IttqT-%dS1_aMpVHZ3kmcYwhf4G+i|H2FM~!EezNY=$IWWrfA0jHo{}{mN`ir7gI+`# zRWqq%`M3^uXGx@Lb|Jks9Pv!sTQNIBc2>n`r3dgc%@T*tYknF1-5(sk0X5&7ozr)R zPcj7di84A=0o?C+R-B;{W!O>t5KJLDZ-)RWD|2m-+NbOnB88Fsc#+RjgpB>7UV=&A zUW-zOku?wwf@TJ4Ob0A-SWBs?fpgUWzG(mZ;Y_cg7F3K}a&*rAXWcaKQ`Z}BJum-=t4fV_Z zpp9iJe-(y?+RLh5D6ttS-2b@qL<%uoDZ>)Ks?O-Nm%iMMql>B_GJ<;d*WroB>l_y- z+j6>u^@`!}9bRT+gN9GSXhH&%&QMFS5&xa@5F`Sb;oW`4tNDLdq>0A&0d z5-*y4cWP%;HwqO|66{vIYbF$atjR^$Py}DR*MhFf*yqdP#~wV(u7k*@;DY**cI)F! 
zlhxy<1+wgSHhJ53C)J?ueX^X2u31Vrx1Y69mP1xRl`12$K zrjT@ShI#;70v6f@+{hu)b*?dkhrk>;4LQZGR!&^`nh$kIGrf6y)1G_VK^B;`AjlqV z0bY2(!SxprVw*4N(=m39Q87fuXCAI@I*HL>seS z*AmQA3|{^`m`=zFqEaxgOo^pT9B0AyQWSHgZtxxW<@!)|4qxGBd8RfxQD(%nJ;o;N zB-!1Uv}Jn`epPS8%C0Pu{vfHd^vN(o(R#L~I?U&8sm8$oZHZux(vf{xiLWJFs1+#F zZxB|_9FfQD=qLBB(dSY-oauLW585aG5uyn1NR#9`>oBbHtRmCpLM1Q$ zp@+)_0#Qz*Z*J^RauXSWt(9Fioy~8l2WHY{(W>fzl`o zoi_RSy0&Lb9+)8tPTI-LS~uxI`I^bu7WC=!k&TuyZIA!j_%#W&cR6_K5JR-`t|Cyo zcwDm7r-$k?)6F3gyLh4eFEBmahtgJc3Ox=3QJ7ca$QS7ZR<62(6e;Dv;ZN#;*y^Fs zvtLIXSSTRr1T(gqgJzru@z8FXbgSZJfM46_OBOhxj$3Jtk(t{78q-DT_R`O2@q7&< z`oj1}k*ou1YGSbU91MKz7*C0U@QvQtOW?&5jvmfRG>dABbko>9UMHu$UpfzhX&^F% zX*s*Wv-fp7kXD+5rS6sKh<68HioBt^?Mm2kGw;~kJC_D4pM#b1yy^fy2~xC={9=!aU$UktTz+OqtcJ^ z-ku0kKN0}KD=4Gnut$& zWGa!>_E0QPGo0v0Z41kFx0UHAInK~KyLtZs}5de#A4Am*PO zO!x*g^)(z`v}n*&%ra*NOr+P9eDw&eHYwzRED(XSZ!*`>K5qt;NeDsFNh{lW^qvZDTwif6tVt73)F~Raans)z0{0XK{+0 zEon+(WLTPK_JC81niPNemVDAIw@p6r+ON0UoJax{6ixI?2jFh!r^3)FxOir%jyAJ& zW9sq1STfg-<4yB{`b{_VJttgpFv+T8%_t=5f*BDuweL7YUV>J0a>Do1i&9dFSJfuq zp8!x{HKt`t&@Q{rBqQRqgeAetLn&u`=c6enWP$-kV;P@9zC`aMCL`NyoE*+!kc0)Y z(SO1bFQFRri`==Yts=**7)1f?!C}?YaHoj;=Keg#wBCR{bwOb+$^18nA%2?=)|F`c zGcwlV_5lH45;k871f)MFw@>>%6db+>>=1Kz_h3`2ITy@DK z%Dv#!kOGY zkv>epB)MS%-hy(9+l1!5I#-HHvyvNW_iF_U+8FH@^^PZ>_#+*umSkNyBg#&H0-D*8 zQn4UJvQ1Y;?-1sXV1+1dDiZK@o3Z0>QInbc5ad_Bky6$E7K1Oxbxa62jD8@%giFB{ z;-`vqQlq8H)z=tKICgg=5O1aXTq2rk4HGqv4&;_-#{%8e%dmm-ELdC4HFKR5~8-NJ{I>Rl_Z#=8)PL#^(K zLY6R+zpMbpDE3l#WiYM|cs7SYtC+T^j{lM9#NZkm z#cVq}O2vk9jGK)^)-va?xnAzoDHD$TzO8Zvrz_*qYR#eNr?YAAOmvL#+nru@s38K-o9LtkYP zL#;3KV8u>QxZWx3l1H?kO@k0&dF(YUPw^V)2k5>8Wlxw(KcZU!A`w&>#=VOmvA}0j z#WffzQ$;6bxmK6&YD_2|G(BV}P~&11;7o7x7&M>&6?p$bQJE^4pnh~!!8ZEwV}x~ssdfRzd=a(WqA+tP~!r+7zz-+ z9SX;OAhg?DjI=+vPvJ^^f-^+0xx~PXCJd*i;UDRUO{*<^>5W?I8nVBxiGDY~0MY-3o+tB&9k1VGO#=@14 z{>S*rlf?@fJoM(6IucboAPEPKX5hHPb|s}Gn>X1;dl!>a9CWI|ab5?6x{rL0E4@em z3sq3yf)@Gh+}b==;b-KUU6PY2?&wH^5n97|9cZr=}-E5(WEOl(9G?lFss38WL2 
z%F`lE`^-jgSV7b9AIQ!_w>oFTG8pq!F7({irgy)|&Z;To^I)Uw{v}^(z{0@Qp!X4? zcBhS-`Dq5yUL!d7MNHEq5%s}% z*yI#U>?m-mP#X-WyNJvAkmxB;phY$lvBkM*uQwfsBwx(@23mlqX2R6f>RHkm^WH4P<)p%mSH7ga*xUP!fvjyj4N5?loT5kg?)0?E?X zsOBdU%gOsFRGNH*H#6y)i75&Gtvu%UD$voc)hw(?$a7T!+RGCaBa0F|A^-UKc5Et$ z=?6DC9k5lMFB?*Twukr3G+KmdF}+Fhp|Fn&irqx%Ao^C#a3JvWD~cHtT9ldk(mIaw z2i@%ByC|BbSq=vI)z#$?nc9!ggmbhpRyE=Yt?L~KO@^uf8i)+8HlU-K7yvsEVd4~2 z$kRK=yF|>EB88TEpGFL{6o*SYRT+I1sBGO_UFcXLj?N5;-0`8#7-KxLJ? zyWnmD!y>n>hM-fY(hbYnQ!Z)cW1Quym`p4jvbqF_908Dv_Hd!>Ddy?4TGf1@>+PRd>#^RA(?D8 zSaN%)MOroEr_3MXS8#UGL~SSFli>2SZftL1WU9_@1O1g$&7_S$pF7YRD{b_}5*K!NO0%Qi*P_AL3l*Y)XPQz#HYZ3^9X4w`bE%5QjO;q z&MEZ``*2A8`I8uC006zuc~q@oLj~HNNEVjg#b3gS--NpS{!OwbfF!r49KYstPq;Dg zVL7}Y`SbZ86&;qs)?ilU@znD^B5r%Ei zRPa}EW?waR2eYBylEH59`VTCyVMolkGcu(XWs+5&#e5NXxRiMFZN}HoWQSX05z-33 z4Od$7 zO~-{R%h`5Ln%dO|1jjlLsc2XdV;@^aO$qT6&fZBI(7cbyJ|wUGfcynx*5=tAr(Flx z^0jOY%KqW8d(h~?lUL&EuiEH|`yVyD3mBd(6k+KTpOO&3IJ z|C1spop4i(#i0cv62p>!s6aY!%*SjVz$?$qNWIav$RQ~P<`zT2NOjEx9|$T$N`*>e z)IyGpgmm5k{tx>+(105C3lVX~$gJ_l)pvSp|+p zsU?{Or}Y3&xG_g2XtPmObPJT++=e@p5VN^dbcV1}!$J~D;^gLvwl!?vy1DzL%E1~` zO8qZ!^QD2yWV50zV3kz>6_eul!PJZNl-NIzdd}uHg`xT3u(+zy=;Y79+PpHa_&6gz zC!b{2yS<@uXo0V5+flxfU|M;YGs$;?7d>u2(YgzRp!Gc5{(F}TA2Ss!e`VG^{cc(` z7$0|Itw^Vhtr+tfd`&>zjP`Ct-=%)Ce%k+N669zk;WwZ0NiI)zL&AS+vpn3O~ie{06aO|YrmP>~-t@w{k@zq54&o3!Z*IN6u#`{W z;W-|lh&rh}l(4FdQOX-n`-NEZey(DYT?MIVTMXLoxKX1lT+X+d>58uEVgWtO`5>+a zC-HvoSWcd=EeMBGmSzCllSbD8s z1&_7ZU_kIVvKQeo-E<#!h`jp6Z)OU0edbreRb#M%UoXb%)~KpKXFkI@EbyO*g6m=cr4HwLkc*TVN|aGz7O z67bKLGW~bW{j~6hmWk(udE$mZ*&G&K{){|m{xrA4YxxX)l#yzP&0d*!5GgQ%ag)7dX=|*LVivR zgG|8-DWKa;>|IjI?Y$EIY!1|Pt1wK5Hkd`9Z4kPsZgtGv1>!D^WGiFdH(syG&g~AB zu9iG?`8UW&JIDOurH0XgQ?2DN)Wt5!P2MIoK{R5;NePd z0D)U;n4FR>QEw*5S~a_DCP}UZclPv=Ladi57K~~Iuc^j#Ogk7wDcnsi(wJt2K7k*J z!ImB!a2nDuVq~YgDJvGLx>rI6?aYTe+=_sDQzG_CTXF7h9RjziCFED=^z9jNkIm&q*Bo_jFID@;22T|o4FmAlLbb4RC z-@XvG96(p+!7shgFHP!dw3Z|fZrdIgL)`i?svUK$`0I@hJj38pVtqgfEl~DDcOLQE 
ze~s+`+R2>x?$gq1sAE-yG8w|~?;$!%+ZZ$TPZ3@idtJx)h31-bq$3$PBXDuz1O}BG$)wUqStjj% z=kI_J#17?S5kWsx%7}7S*07x`-d7o{Z{8Fv++8-1SE+de}XvUw|uaPu_lUWtnYJKcp%?&v++W^i`gVlGy|7U&o!sw*n?LG~ZnyB;GiKJNNX*F;gvtV|iGOl@eaowagzd#eQ2NKey;Irs z2UH7UFTh$Yy#udPN{SyHhu(FihZ%VQU@$F$SWdbbWerv#0*5%SYxs2(5ablLeqjVS zAnre7O>XOcBW9b|-M>l!HmO(C>{RWw`Wr%xPJjQqq<}?Hx;eVl*y^RN6mTCe4bdH( zoV-Y82(!*Ip(Uxha|51zNCYM8k5(-a20BR4m7%;Mg(0~v<e2tsyC6zQO%x^PL@QkBj#F-~iBp8pd`A zNbAX(bNY!gKsb&~XL`nMyrM7hBnua;9koq38*Ae&-#^#$XmrMuu9 zven!v(s)V%ne-U`)@a*S2PNr zXWNX2HzbGQwBVU<`lA%M7vEj+3IOVEa@JbPV;WVfA1@05HmW|zxeQfyxxi~!jj3LE z3cmDC;C2r?q;sV^a|l}Mj@uyq)fQ@Wee)+3=>Ycb-TbPBvA#X7J}=*5l5VtyXlyji zn~BQbwJ9(@vCht5P2R#c$puwN9b?YF8`3HI?4l>Q_!rT&MIXtYYM&sLGp_J6qOWMU zk>79C3=sdF^pyS%TvNx-Q$4#{{*lIFmn$4nVWU;PhWXs#*wr+>Y(fZaZ3C`~K@aLd z{hmDxPd=O>wZ32x$DtX_KOzW#AZn~~Y`q}Z#O~i4rtpunuJ~WogUC33AbhLyZAG+w zV$e>HdAGqUdz3fV@1Ci8d}z#ehDXcCqO;YInREMv7cs{ADywFYc*`%r7vHg-O4a)B z!i>@)(GBP>oa@*M!R-e@fcy?+9uzzL8QSZ3Uq4&neU9*{zz}DV^4P z7L|@sD~*A5ykT?!i^ph``)!_H()Fiqk@T9h z{47fV5*8s~HA-?tI-#gi>^Z+Np4zEv3ui{D#ZL6Bg+1)DhANGZP=^o8p6$~KpHHe^ zrLHqcd~Z_Mi%tY?|2?CFno~!dubYh(wy%Tq4VFt5HLZGvZ1maQJpy+{{Tbmp>=2Ap zT^nEmKFI$UYw{Q5S9usGgFtsz1vnja@yXko=rJJ|!QBl&3AZRPkjDmCMxiADTnP=m zB#0N2O)Hb^a-gFv8@GV^yzomEJ)@lRR<5O`I4f6`?dfq*x>9EnwrxC<1Ay;}!$6<@ z9h3%|893~M-MhBcbtBzZzcoH7D!bmf>zgZ%d0~Y3f+@H5zv&FLx&#I%BpJz`YsY#L zz&V8)523nTHp~VVPYPJ>+h|(!H9cVM&6=9~u;q1Y_*ZT#LT$tLtD zh^hYFrbBkyD@hkOu8&s1IV^hp=CPm@I-OHB{od(FzC1L$&+W5%;AY-8f(1)H09X2a zX|R-KV)~@zC0U;TLc0@Hl5l0{Nk_M_+})C zf^7Q}JMbj^)9|Gdo*CV~YIqQNu@wb?^*OqcB z>8%!NK}8G-=3X6GW2Ofr>Y#8DkzV`W9n5fP9;;(LIT;pnC>&J%P@_q_Ypz8dgNj`j zXNIpedhT%HnkEVG>BmpGED9EVx?^gWL~$bu5q_-mpMSZN5{ML`lB(#-%|RDHr)hO6 zTGM}X-`1;QtDcDgW7Yt1Z>v|S0l7DW4}3PW?<5O@OcY1P%08&M+#WH&Y_I@+y@RBI zUVj(Fi$+ z55(-nLfltN2WrTz<1P;ezu>Q&edK_8-%*VIf~9jVy51|)Mc)WeDTx-*ddAeuf>%0s z^2x1hblb__)9NARNxY&b_|e>C(Z{EA3@Yl&YlnFaEL zZ+%T6Cr`}pcg$N# z&9g0}%aQ+R^ZW@%Z}TTX3RLn!esd=Sei`& zYm7tyyN?!!9LK}Keif$d-NpBUxub<3Wz7sYw-fy*vj2Oyw6Cau 
zrn&rG)*2z}Z|z$~b_WkvZh$q6b2u$*194~kJ^DcO(uJKWck+&X-F$Rm_TJ0S0m@I9 zuk(054$(ABPw1DKCtWM`ti&un?y}Pf-N9y92O*AQJ2){~n9mRX4aHL0@bPW&!L`T3 z@yiI}RQlYM-*xHn!E@IkFjMHrtu#LA^0E?a+gBa!K+<|K2GO-7`G@`B$!_>YyXsB1 zz7|YKpX9NyMBKbkFYu!Xdfn;Za@}|Ln)DgOGb!IE_`#e* z@vA82)t6aY9w4_o2||h^^SQYqQ3n<77NKFybt ziNR?Zvg`t+EPP1({ic=y5<~d#BalBkQbSsY7Z#x~djT^L8p9K!z_D&dm($ zw#^(@<)RT^x|nw*&S{RG%?6rv*$z=v773NiWm1rMR2fISQk-J+R0G2~ROXHi2=z4EH(jEaE;54leY*^Cc%J@&MlVT7? zr%EvU`tu1{4G!UR{OQ`}q5L);d0p#X#XyA8tld_2xfo`?{dj0NdnvMZYQNGT+kVVX zNrwdFJ}ck!PpHC=&24-&=m)ZHmfLfWuAUP!qtA%}${g^KOX={dlEPsa8^ckQ^N|@Y z|6x|^07w7Zw7E7Q3#30#z?9je12y6gJ--vD%4L;eBvw25fCz5S*59_Cl7y8?wES+x zaU1w42>hHWT4%&a8T*kT$^)3S$p0Io=agVZ4HJCITne#af~CU+4c;%}&AE z-{;1G1dQ`alEMq+yUhNh2%N~c?&M~8>a6jF0)*iH=$`-RAOTvbpNkzC_1%uW^ z={Rv}%PLf@BmO?hdGpc7{rDRdsVG5>g*BUax7_tH4vNTd4!k@MkObt=v0I|}J0bACOa_d_?J zNPsW5vULGJYddMTol1+1!><#2N0e{S@6`wcc^pT2)ut#!@_l8N6yWA?R;ho$U3x*G z3`8OGmZ`7p41ZXv0m}@e)Ksp1PUF6McOCJaA^GN4(a`ags1g96-ge;h-Fv3&3gpQR zVP(OdG#?~yLHGMW9-1FCAR_;6=lxqMF9)M09Z2D@IV}>6)#Xu|nk9eFF7%Zx?86#zfO0Vnd}yma@|UQYn0%Vqbb2VU72#}zvdX&J(B#_cbyO&_*Vprx z-B@9G00_X`B$^m!;|wZ|o`Pwu)Tey~BR_njii;zDtu&jX=rDHGm8lK@VO?^#b#>ck z=x^v*rdVPXY_Aq}7p3Ec!=-;mM;RKIYet<3x@&3CA)@oYb4y24gxXn!wGc^ zp5W`2hO~L3raLtHV5*y?;>d_=ZEXhsJ;-ECz#*B7 zGBOYG7i{Lhy`qjeS}Zm%>mOeRTc4xyhIURa-T8M{qa1H!Tp3n%iEX-kiT(hzZpvhlG@*N@DntHkYSaGEF)j_;2j+cE;JbLi?M_&`Exb*+TWC`cOttU^VN} zo$7+m{2;~Q6D@tG;EL?Cp6y>wUkNNrZwijwX{|$Q-);jzoNdLk{h!^Aa+_CLNqg7D zDRiX71-r@Q1^ul{GxZO&F68eOGZZg^GMxT60Q-XjAOt26KQb$NhoDlX` zcn3=XH-X7UP_YZU5$wF6^=p&au&r6q3K2_#I<|X_-_^$k{vwQPD9uFRp|c>YM2@^8 z?a~!%in2%*7>+Y9#p3fP@VWylVtVpsfDM~7&H~UjGwd3wE27htaDLQi9O&F`+@vVQ zKAP2Hp;-jNB}yvd>{Wp%(xc!6-4}%_It~DCfDf910I@Zx#nmY`vR+bp(VTxKp?sD( z5a6#fr^#0*f}rFd8l6m?b1E%*_%F$~;BX-vD#t;1&}6dp^PV<^6T{7)HvFN#7Jkn| z1y;QsD3aMU@S48l6+%vs2osBKNUCq5alG{N+%QEh)M##mTeH;E9X%Jf=XJoPiv#c9 z9=p$8r78HjC)_PmWrNW1*x-3xX#KQHVn$aAZQ-E-$?Z=nLHKw|Q0A8TqthPeXX~ZO ziIvFx+W?#IuN_>`AE`w7mv`c22#%&D2#SN=G9>m^O;z8VK5aR=5Q@LhzQgJo-Kj}d 
zc%-D?Pno@g8p!9=#zd;tyyAZs5gBv|vVj)(27`MgxWxy!(mbl z`@5#Bi{W2LDV;<)F^Ww1q4%;&0vB$v|HKBUgi=nHaW?!n?wrBgXu{_dx?$(T2Ep{K z!jk+6y9D|!6})LPcu&0G3m6a2ZUjd&UDVPf)UD@049Bl=v+r@Sf1eDvn&(lR|Y_$p-MawxF25Sd7NXv zqBybU+=gkhOvp55?+q~F2~5F#=1(2sl!uK4YFf4j#IC!n)D%SdYYkaHc(v}V)g@}F zl5Bex%5Ol+XRmIi2nPbBg_z5YVnp@o%~f#iH1_%(zMfIps=IA8waf* z$RiqiXpo zTjn?S+6T^Difn_H=85{KiHB(Y8CF&M(2_&<@vy)my1<7-`{QdNT+0uHl0Mqr+ntus zFI)<}&6WZz5vtvQf4gxu&b#`J+1Z%oPIO$h0T)=xE=0mD}_bld=PMs zWd+Hp1a`84ay(|m-T6>$;0!JbbJ0_zY*;*wL+AN{sIfMF!V!AkJD10TG_={MAzj)! zX;oGbh)53+YXOAYdGZtoXy@3PeOeOO;hw_#FQ{6zjvkhmi=nR-dYOJ!o$z4KpnflW zmmlR15B%4aPQ<5Fx7jVTi{&qs^B0dfPslxb<*CwQ=f{$MyUnEx_<^zfcT+Tst&(TSYOXPXCf!&Na@azc^e_U+Nu-04jKib=z`)!YpVFJHy%k7bHlBk8|8qPPY-j!5`(*_B%v44Gc@ z&gksi8TZ(>%=ZUOV88v42-H>_aC+Oc4R=$iei{4imT2_B;+8B5F|3^arv8x`Ektls zz_a9lnRnKcYr3fsQP57_*8rwofnx*fX+y%l?HhYD*00xmr^2E+v1EPn1vrQ9RG_wc zwHVI^DhuTBtYgUEX2?bBk*P>W*_neps+3tadG2tD zT)NYk-D%(=EJvhg3%`x*&V8Z)W~0G(uDay+7HNgq*l=eP?-LIgpv6`VK!vcB{KBOH zUSO&%z3KIhEK@r__|AoT9yff{f_DEiZ zZ#u`%*^x?0;&=sIqgrVoT~Jlc40AtE4kBV;bja#Q#@}%v8FhrOPZr~VzRGxrCq>os zLr{u^nBgCD`CsS%W|WH-_Frv>F=PS=Y?}sm40sC>@T%g3YcQ4Wb|OK}oHon?F_ZP?u4n#T&H>J z_<6V?tJ)M$cxxdDIN0cpauuwcAC%dTJiJ-T3IKS|!(#=|p z@^ZRXW&x3bx$>d(+^Ad7zYOI61(0I*^rhL=5if`27`(t`&c1}A_Sw>;)NHI^2O^g9 zkS&?DYWi# z`@~2Ex(oa#9PM8!_e{il++Z;1AgcO8=HjXJ*|p!T(WL3*?^cc??2Hc`hj#$C<- z$`=yt7SPQ{7~hwvYWNV?HYyhn(=gJsIGh}T+8~t)bHl1yje??`pU?=HtXi_`CKv;% zqK;&2>kLlP?{X*>!CT%UHwp7~_6#gK%1VjK%2;7XD&Mm#{C(?H@#nFtKcLnj(h#~P zz|zuh-rSi(&U)!K%ATaaypTC1tOS~W4`8dfjU4*^7PwIGjUjAb@9-p-vsEK#sgbf; z!a-$!x|1g=tx{yX}FuHw}O=FB49TdwKp>>n3CEaMIJyuI@6|A-* zr624=3l&KB@HUN2+KdNqzwM8^Lu*!`0&St)PQu_dX?Kb~-@?!jILAIXCCkrb*a#;O z`Jxt_BZ#4koE{5n{4hrVh&$gW8LK_3MX&A?3niDCVrY`R3!Az*SYAKb2Jwl(#i`VO zvkZgM!8pTTU!)OcDz%khjqbjIQ+h>_zPXuP7VQ>W;DR;FvNAn=yFDP5mz=z%JP1n- z3YVn}OV$rVlHR(BR0()Kx8KD76?mi<@=-DwOo|E8+qt^uiGye#Pc5y&K(aw*+- zHc&Yslc?pATxT~izirvSnLjTqXZ`FVeK;NXEZ_Btt4Afyp(?|n7 
zq*ye8`X@#Um1&1f5$b$R*vHw?9SGXp8blk&5S~FT%!a<&bE>7AevhDYSu-&${N{7`q4j5#$=L%Ku9>RvkMd`CDvRueW%UP7B?n|=tGf_0u_Xc%k{7)DBtUx8 z#PcRbR+)9X8`pv+96KbDEMg{vy&zy9{lmmi_1^PVUmKurRTg4q$-&iEmbi2F|MHY+ zgOc#P@A!hN-o(Ft2bwac7P^=fkr|TkivNfHcO|)SBk%`{gJCuc-#EGdqxbT{ePX~2 z9h@g=o!qAyEjNVx7&S%6p~9MR=DlCN9EGF?+Wt8E5Khl!GG*RUSn9Y9C@co4wy z%O&!r#*>hKM7AWjtRNhmF{S=1vInQdR_!qO!46$Nb;`O1L#+gu&$K+$P~6ad27hSq zW!aU^GLTqdMzW9&$_ns|h&~m4GzA`hp@J31;>(Y;Oshk*Wd{ z$1y|>=OBk2wqg9JawcnnW`4cz4V22pH>lElBS!%|^ZSSo_5JJncJTp)nJ(2=VDggt zX~|^7HT2|_a{qJDt8uWTYeS}ej9IX|9~!_tJJ1MSje(`f25`f zAYK1qaWG&gIoBi)y{VGK->{Yo>zB6y)Kw8Kqw5ljLi6d6{Q8o44l|*33&CzKW{AN<(XwlPD%MP zMXoIQ6lhkoFnuR{2SDK`2RvvOEG~R6{{u8|LdfZ$qTog!i{2vHP5WfzCn>1rOImH4 zvRbX!gJ&SfNEvj7XBSNK(0}#5`tDIUo_rj)ZuMu)g%y!lTz6nc>U#N^eJhPhEriqo z8wCNORa0R-7Hyrf0C?ReM~XgckF%(FgyiiPpQFpv8G-_Ejl$pJcaCrUY=lM;xU0dd zJ0U1up!nTij~k{pY`Muq`Q5djQ^u&{nSXCCJ~Ju;ldNFyzqe|nF1$POt108Q;2vnZ z7Bp& z)x$miIs#RZj02!Eq-!bYgiX@*ex4{cbj5p{$_C-aqK2W3))1uQB=KrQQJsHN&LCjN z=R4M{sS}{2Di(g|+FqU_kXlU+^?Br)0>h6LDkQ^m%`_t^zPbrfvr6xqZaia=KGhp! z;48nSJZP3Ef42xsr25m8qdXx(z+FgnudW2IUe}Kt$mV;1^S;fFwgBHFN0rv*GGM)X z3O-(74?%--TlTMmL8TdWnF=TAMBUJ1XH2SB979IX;!9AKx|$W_h}{cp&6GR&JA`2? zQxnk^)0Gm(fkCpkXYWf7n|9xQ>q!`=!JNnn_1ogpi{qC!F8%mEkWIQaT8oRS;VjAHZcS0JmAv%Kfn>9by4C=SW>f71(rzGKiGtVcksi*4dRY^qoW_&65z z$xp?noa|Qy^qRWc_&VZ*Kb66WV>^^G1dANeu9XjKjt%Z!Z+(0DgCn03h9TeGPKC^l ztEm--j=?9wjP3Al=8ld=pQTE){pMeQY}Wrl^p0J)ztaN}*WblElKjd5Q0J5VVMfD? 
zrn>9yL}{#)AN5?sSnT-srfZ9=yVO>n39d=yW4}nl-GkmJnT7FgIQrq=R$~tRk=l+Y zpDDjMPt=EG;i!78n}s_rg?J^FM}N6&xUO)6{3!c>V&tU{gwd%(MW4jL+`(S8X4t7)g%g*MRPgBkD)QvbH#Z zuT_?ub9BW?5wP?ay>APD(&@f<+j`Y`5LajPO~UdUkqHL>@ackx*hN zVON|OKP+I{C8F;~qMQquEdsaxGW{<2VkrpHpjlpakhvcVGD=k{bDMZcDtya$8wlp3rd4Aj z(u1^TV%E{YyZL0OFnyvRglL09C~s_e$@w9%jGguWX&OM4Ge!Q#Jk7n`;`gr8lJ*2N zjw#+pphGv=7joW+9V9K=Z{W^(BqOpxZQ+)^m7YK0A=U9LCK(ber9jY)yIX;{DRe>* z`!WwjKQy|3ppQ1o0^klhf@B_REs&wlhN_nqg`KIt7g`Q`I&raok>4@o0!6cuTLkpb zTvV&a7gj<3QYWz}oQg8`8w=l3d|hc5`gJ5;>%|M%#$}>o8(sqI5$?II7&`>wDb!C3S6?*W%5O z7LK1AI5+X@RV72hKw+J!1kY%w_{<3eI*qd|()LkdiJGgzv|}(( z#sM@f(m28LEPHLeqta$^M>bD~sOrLm0zjL=1o5ss`jTys2?Aa~0^WCP@8Zy-a~#uk zhHf&TJSEHL8mxV6U4dtT;x*_a!P_XGIh+%~t0{sLtz~i5d(}mzl*%1;nY+z^a3JRxE@K>vl)T_PD+Nlt|k2NW?!8VgoyHsZ{yT%+kEjQ9UsZ5yDm({w zl##<17IWDrEB3q1?CE(3RDzQ}r)XwF z*iV|&)ETXRm{L#vT>ot;ypbsE?K=B8z~kTy1hv)@+^$qYjl-Wc^yPZKlB1MDDH$zA zB_3QM$I`PkQgg2?R*FHl5!!Wlxvd;YKHEuZMWsMeKXX&@hgt)Wai8yMl~&*!c3Vdd zaL>P-ai+x5;>t0KqeRgIYLf=Ghz2XwLVOa}3c9c_3LXhN1&dYfbmjIA;Md7qklmO( z#JV{G3J$=%#g|9_XLOOSCGO5COy9F7iA?n5^zhEOD~*IkDf`itWz$8N{D~_$VNi4| z!7w0yMOLQpDx43g#qe&9$w<~=)IXMa#d4VbNcgeO2HG}<0g6>RrWu=wj5NLlkPZ20 z$*z=vE=MK-g$9?hp4?%(^@RwHD2HpeC6Y~37jJ*SLezNd-c?*C#a#oV_H{cq6x4>P z!AT_hvHSOlO}L%PZ4CI& zRlnK1&UK^n%D=A$$82v@WN#=(U)koCJqxr6phfgzQj)~45q0Jw-c})W(Z@irGrmhs zuX`mdt_dkN`V_YWCzJezD{aeSynegXcetBa6n7}KytXZ6`$)-pr6RY~;o?pJrf`@5 za+EW;mXzS-{R;jw^GDlH!OLwBx%u~s0_x>~qDq;j*qI}@JxmxFQkW{rOT|g|E55?l zju@nCuh6{#Pc>kLE zJRRa0_b@}I-(0ORbID8xA#U6lJAp3g_$RR@WDvt?)gN?l_Mgtxk}bdKTit+~Bu*hB zPL9yn=bA1oF|V0?$;~uP!kHk^)HQrK069KYT4-XCiN_?<)ad5&#H&=g-s$FPAP)J} zPN3vBy7`t3wjb=p6=G{MTUGP)I**S97_v#i#Rw~q`Hg{ir3NGbcPf4!6}fN& z`UQkx*;UBcV9>YcuMCC}(J`D;P6%~7 z1%Cz3ZM0T$&x@p`Ef6jTSSj(Vh6 z7KW8P+}4Nlo%;*6Z~{(O+|DH)C4mJ>AXJ0Sj-}KminhuuKj|YX32+gZB~AK6n+a6h z)C|Pyh!o`V@=K+-xy0YRNdat}KsEyF!_n8Z^m~cT~0#^6PHI ztuq;60$}uPa+%7)ess3G8Tnh0^Xx%%J=PQP*)ehhM zi%ij+C$3JoS?h1Ph6HKWhy};VmlP;qse2I`Q>E5A<^J4b?5CW%n#bfT!ONGNBpMGU 
z7K&r7u|rL<+1gF^6NPHDt_&c6UAfC*5D9eVWt~TCp+h}-R1^)zjaqC z832w8jq_97*twW+V$sXcStY;D{X9~a@uPgi;cqBMi3g5W8PpIU@O1zFf*`-g4!A3( z@l*%=&ZaiM``7UI&Gop@<8U8C&WAGE?%|>CH#G#pwEaUV^$DpCw~8|@*kB4VRVOqL zNXP5hz`ReLrY>xDwUI%T)uip+_9zO}_ep=oWuTz~46(4)q<)7CZj#{giNDF_AgocG zLb_>wHmwM?#y8VwH31eS?##3U5OZ}?5 zr`MQlpfPM1njbkUghKRbcYD6$>IZ*s0wE1Rh-S`vP)Y*;ovBiWo2Xv7youg`^M&gG ztX(2%#bU5@D`vwv5nR$$rXCiu--_gBR}AmZK#J3#Z8&nuRF?uQo(%D&n^F#^Rqn6y z-Cn7>qAY$=t{=VEI&%z8zlDFA#Guej$Sb}6gj+VC$zcOZEFd6}B*f3vR$nnSj!56n zR(2y@1j2%YxI*$;Hjiy^`?k z)|hWh?1o16iT|OL-zXv7>o+;RqBtX}7x+hjJmX3op8wG9GhW2_;Zh9Qm{aA|Us5yj zhK_Uav^pg4S(4w$LrdK)&&k!dUSu$_>8F`WYr$`pqq&*eMcIA-D*%0ia-tvyf4pP? z1VVN12HtW4b5}csH@|AY|wN2I%lwr1XjJX}_6V#If82*G*k+hcMywso`3l1kHDqB?S_@V&@=6&pCvmdis>OI7wrAgcg)e!pQy{=gW)K#ZdS^j zYwCt5_fOH*8+%QSy2sZh` z17nwr%EtlQJkIz|^fmxX1Sy~)qnk*;hjKA1fD*giJH@C`G+s>&q>bZ!J?HS_Lz?Wr zT9rSq5xd5nf;$1gu{&mVd@ovrK|M(SzSxYvZyC*GV-TtB01%zQ)4O8N3^1y;ov?~q zakR@f#d0Bo=oO~ME5{=qT*_aS^ev2utj1bQ9Gt4R13rRu@yp>9<6^R(#aBuEoWHpi zP^20+sZD>`08hmYvmD!K?>gfA@$)$^*#Bp;zBVNre3Z3>{9F$daGlx2hw7^ifP zRXiE`<0~6xnu_8QlW3tiT)bmHuoBPC9*=bwVG)XAE|@uHP9minMC4Px($bCfCM8Qm zkswZ6KR^UkIA?6k$9b-hi=TO$aI@$?h49J)JoL+?BO?27+Xusv7%5<9po_^+*!qsN z7^MAD%!vSH2fl%Sl|Jm>Sk+<)3@&~^@Dkdd?w3f$s8OStt)RDs^ zDQ3_Y18}|rnTr!bgm-~sJYuDx8z2SxI_%gQ)`1~7`~o0wX8gwPeCEgJ>Z%BLS-DWr z2-CVM+@N-t=Q2tmdO)_R;aDL1xH?`}eI*^fFaD*dn}!sbl(Q_@vu^+{oV|DUFbD@QOF^{rhyaL}Of9XfqU zNJ=5Pkg$rz9ZfmmvNSy`gSyQHS~sYMxsh+6$oo-Oo9T`Gk{^xKzea3Hx=+v^1Z}v5 zUSEc`LsIS=LGt1^(lYpehj-}RF(Ay`6xeMw_4@ib?ZnwnPO;Y34g9vJsAU$Y00xa8 zhxocp&i0`R{O1w|jEuY+VjzTyE_&p9VfoMbeJfO;S#=I@wn^0?A zoV^5n&n;SCPVzX@4IAJAw-VI?3hAWhzBdX2=vc85UE{)YIrHGr$=7l9wO$(a)Aq^P zAnXa03+BG$Bw*Et@12(>dM|*W>%?D^0R^#Uz&or4?qPGeew6}S*7XL>B{`ztOy{M= zn8{bzD6*kSXT8-~F`WwcptW!(%Hw2umVr{Jh*?NDO~E*2hrR*nJHeFOO!XUB3_!hF z)9@;lkqQSQ9NZABIW;CsIbwWH{DF~BTd+vy-B|YHLbyd5XhI*=yIvap zH0yf_aJ~R@S(SFB#t?5kXxh3&fe;1XN~Ql$!)wohu9rkoHzhBinTt$C2+-pZH zoB=<72}~GLzpOu|*6W8)5U|0ku5YVWQf2~999D51&yH`qF!)S1pF|7ELdKE-sZDcI 
zXq*_4Xjp7q!@t-cSn42+ad+;Z(Gv!1J;RtNpvO=g7G!@Sb)j z<~X4EId;F*49g8cumKZz6>T7;iewLb`PK&(5haHI1U+?ot2!KIc6BEjw~@SpX8vK7 zY_PA`3+6#_cis%@5jF5r`;a2HS{2|D@kngKL;IY2gIWFZSFPc%C*4vDWm(zQrUJl+ zV+<{gzMLc<@w*+Tn+F_oUD`R+d2z*B5Ucik8QL)fAc|=wMeI$GKRlDhM0OUpi(v;*6 zJn%c*kA)}neBuMIe?f8RzfDl{tp-vYUOQ6l9#3dh+QELkV1E*|!I32VOiR2`?N1Xy zjTO`%A{X#)hs!-Kf?7#~q9A0vSQrMHPnT#lvB(N>Zu%G;NKr*Gr>KveeZpbOCX`Za zh*5SU{gtCV2R#2$NJd=gR1)?bXmrd^rTM4vUC4dDAeoun!b-t<2hPqu0CP$eW$Zj4 z&zc_LM(`VOG1#2o=kqv6x*1LHP5cHe(fSk&W0&Ts!C$s+z*`G?VSt8*5E68%Q1KIZf)V5ld@NqzvgbA zlU4BeGBORGJua)Wb`u)(J|`%?+^?tXU-c~hI>=#T_yrbW$@+|K0So$PWllLy&04-% zy5s2G){-UDK(`D+f!fG$5GQ4%BoF%w(=@#JwUVjAkNaLyFJT}z^++bs$ z&K@o}K+dlw>a80UYWI$MO3~%0LKokIDJ^4CWMUbiM}3RLy@MPHbyW;f(wkat0|Y3O z8Z=&1^1@BXBjL1Nmr)z=WJ^tp$>8fLE@Z`B2e*XG|9)Afs=E_8SHd^AA-mwi;Gz=Z zB+~p~8NlFd%NO>b`kL4<2qHrOH@u4#u5k^+>-=LbglU=EH`5IUj(MkO6^nzMzTj~*Tpd|^36k4k(^W_|JZX{K}8P$hnRa!F_69KMgYL( zhZ;FFRlxmmhI!7s=DTB!%sjedK$CQwP-DxO=@^O6Jb1RE(rfB39FzNJ)beyFeNz)y zxkK9)u|_3#Bu%W?NN&%BItHKYex+|HTNX-`!E5%UDvnbPpOb}FOOP2G?@hNd^^`*b z;_+~yOfWBiqHXVhUws(mPLe)%=tp#X`s5mDtEGTEtvHHvdTiwp%o^X2meC|R!om+? 
zmsq7wTMYd1>#FRmQz_#DrdnZrGwB2>>I&yX0eK-E;j5?vUbGdSiwdt^wO~S6%0pWa3K6yL7)mxA8ZY{L*Ey=lP3GzCNYaj_08Oe=H8xX4=eHTDI5>>>I(h!CWL6w$ueO=;$Nfb6wO zGDvM6y5{gpd3(X*$9={P3k-vbI9uFzgVB1Hp4~3B@l8lJq)2wVP^4bZ)-#KH=PI(e zapjVP^~9~(^q^`IIxCfui3I6b3y^FF^Ta<(>&wad0bCv^;dOF6JvIumTQ*Dp*GY^h zQg1ri#V@#fV8XRZtZs(??>VC%8^P|N#ZX#!ClDt$5#Z#9Wy7!Xr%0Cmz5Kv?yJl=f zn{QeJM7_JSe!OSYQXr^&MViY8wHgQ0I}e-Eu7G7C2!x5=!iEUHf=e^2V^n;__+Exw*aD%m;ZCp|MNG!l_0MRDrI7VVVP;H!!@Z!~%gLT_VT(xe2 zN^|NhBI^|!ZVS^9{Z~`d{E+j!0#lfR1nBhe(MipR+5l>qQtwBF#cDzU;$ufUcfW3< z@(3Uj7xuj&w4Wg`Ihsh8Qf;`dF7W|1Hbm7vq=ZMYx>8nK6&)p<6c&}(Wr z?HAkE4LJ?)10S;4sw;!Av%KY*JL9(XC1czvl!8vAN(P}$J7MF(A zcds0M1O_^|w)g3lH2F#jj!K*?&oT8wANxLB%o78*#gZva5ImX7JOI{e6w`BLI$Ckm z*`V|zRolq!C!!VUfYZeV+mIMUV_)lp#K(XK;1sip^#8z2G5y_8<;8~bG(I*POi@2AgJ)YdJ%q`aA*_bzP1qaI}DT@ z56-po-Q(4AS)QYD&Nr_~gd@91ku@k!sd^dy$BA`N?GA*#l&||W1+h0({}3Pg z?x=j$E6Wgc_XDIku|@EH2jqjWuiaT_#XID;qtk@{bkN3yBaYsG0yG%(KZQdwZc*bD1jXg1r z62_*ChHG1;e7TnGHfSe;$AiE#-QNvEx-`m}biq&`VavW|8B+pWsH**NMf*Vjp$txB zyiuVq_B~qSd{KKMfmfb21p?F@7j&ubn(g~l)><&+A@;UqGUVCSK)gg~!b6G~SA~}j zql)_iwj$sYg^+;Pn=)HbK8qhO1ef0DL38>t;5(f%w}| z@-GqYSR^zvjgog7Cx|#PK!SndPGK5ws=Yf9BChPAF<_!fNe37NQy_(!-V`r|PxXbA z*~wLG`4F?4z5yKgBQWWzNS(rY2GK(!g-D za?QL?;@cUC8R!j3Tmdh)CY}|UuEuCf?3+dM*0YTGb{`_n20ix;s?0H=c;YB^-WpzF(WVM_oA zxp`cY#oUzSG>`u~|I@yWxAPF7c*Ms7f3eH*da?@^kro_p*p8n4v6t^NJqEHhQE@pl zWa+y-Ywpu!6d%%+Hw7s{(7TE(i;)WISmURqnXV-S>f4Lxc!(y$OyI_jP~QC3Q9$ZD{qKOs&qwD-Kox&nVz_s}O{t)9^cH6F zdhtt8YX7%FZ3G-U2w+V8ZLItPf3AD}%W72LoDWJ$2_0-~G?>uwINC~Nm8uSIso+Mf zavYp~d=I2FCERPN@$$#+dsw2ow!IbL(dl>}j5;dHFaQ=DGyh`XtrOa!fo5SUd1~zB zt4VIRJA_>n@R5=F?EsDRq(NDe!kidaJ>=s}q3@ZF7Tyj|8}%LyJXw|9P}YiSu6EKH zhxeAs#*^M#)@0b0Fz{On_!PK55H2UqFWE@tWnnfW56N1ZAYRJ);Qm%oPtt`FUc{LhD{8RcK+6`P2Lv7e1Gj~%?|P9?Q0E1~ z>>|_qo$s6wykHBMURu&3(fP)7?&m5h>BZH5^5rY$=AbxUtEhhuHG9H`xz+AZ#S&-wUeiVfqL-1L$8u{ zVFBUlYE}tyLu=q=eE!ic0@Dz7gL3wlqii)2dt*(g9y3|Qf$zQK;d&*oUMMsVXW>7k z^l*hb2Vd9Yi8eJWnBmQCRyKPuM{p7{1b~A~Xt%*OPu@NOOISGI(JW66g>Ie-wMx48 
zf*EsRNkE(v;Q_G1*bm!ov<@ZAY!wAGqO+JEa1PKeNnHS_L(l8AyLHw(XJvm5%^z-v~6&w~@ovtHs7p!5@Ifh~D$y zT_m3wuYE~&i+0uGHlN|jbJleEaR^$caCN|;c2Mh&3)HPNkoKLowQkmapVF%FO4PYB zr(5;^T2UFq(MT`KfKrt_1W=d8lkRm_R9 zA#O2pkR7XXvF`5(*_K-srbVU69xdYi_Ii z0}bJHAcLaR#e}o&TW3czQy0Z58pA((__?7KbmqHD26|7Za(ckahqSNnb8sm&65gcy ze1A9)(?q>SEOICm`>Qs_V-i9&?z7UtISyTC4;fq69~M=ab=Ud?=c-5_AE_DACx)PU zN=fT1+Y=H8E-?r3O2PJqH$PKqj<9|?NwAH0-bDNxz7&*7@Qt-$hh|Go04s-xRAHL^ zI1YO4jxaxa*VD5IY`=D&KB%e%?Q*U&$Jtgmq&ny!cutF%>9$uyno2g(w8k*Tg%;nX z8ZWA(?m{$DReK_BXBly@`>Qfuc?A(Yl<>;_Xf?TXnnL)^@5Vs#9Vad%(O_iIv!0wt}Qv^f+8wf@&}f zoQ_cN4tW|4Nz`xJ^*3WPDjKVd#=E<9aQ@G~YLZ@BHikSxavtG45xc~fAnvfrCgBt! zah|mADBQRjN&k^(UY3Bq_R$Q?qAe3M{W z(!|s8M59_2yO+YM2t?|_5r#Kt^WO71V&!DPkBN4mg zOt2*kWDuvpv;LZ#DKoFFM-kM`?d67F?iqy@^abz?XoJ#YAiDQfe_D?4l>_(|p14JF zsK`eYM%9p5>+U1D>rtoAUSt*c4K)tegQ_6VElM3}q>d0)4WTAtBC{fq%=e~UAjCQi z8WWe=Ygb3SOMSKB-~9l{ZdtK&I1m*e&*V77Pu1J3iRJ1FIUKN1rDQ#td=xJwGDG>9 zv2C8f6!srZQmYk#^9)3X?KVFu z0u}Ze8`NJH%OvF6)2dM9+IsaD|E=Z8-G_Hg(>kJdU^wYZ%Q}QKo1c_~qHQO}YR!&R z*wy&FHc#8{^-lwoHE;Bme(FP*8p+SSp=&S6$?hB%)6~1{`zQ}nDAck;qIeMuq{uYC z9EQ@M&7~VQsaZ$sBzkTs)~CzC^cY?4mK%4ml|VZV5NKXFVXh5>zdw667Qk~f4Dd;a zkt7rq++2~l8IYYp*;LUQV%rN5Jt&Qh4u8SSx7G#orn~@wnQ_i~g2~W>Z~Mc*Y4kiv z$r*r(@Z!g3`j0P;g%Jyw1j=l0vd-3Md0?CYF}mIv4EDkmzEy=>lpe$ibdT6)N~vQ_ z{ci{3&+G2sdGcd|s5uH7-oPxzo<&nW8{rPk!*)H#s688Km z56q44jOAFzvyomIf)Rp}`@mH3(FZug>e!r|CRZA`qm3XeGD{%IMw4|G_@-pTI-L(J z99zobL9t4S*RtbrToAc`xKAcOY=^~ycAQgyeY2D{q?v9qSZ)ssN3@8`U}MgH>`Rad zByZzw4z9zyXTAWrQvT)>JP)})4mSMpu|_n?dM7D%fb+H+61N)LcCs9$IZMoav+?mY zK>>-5rRisv2|vw{_l<0u?N_qC)W;D=QJ9<*5Y_^|HowNb#n5xfBTd;t!aL@pd|c%e zF{D7B3q()fo#e85WZoJBBVQ%YO z&L3!%pYwoOro8AyH9uRznblG&z1s381Cu~(DmwGZ>|W7*px|M3S_2~!pF$KlkBKw4 z`qgBc!ClD3KeckIo=V;?i_VS2v@+k0ZeF@{Y3PA+#PVr@j*^rwl}AW@;fD36p{nUR zl7c<|WPrM>cc|vrt{>`S|I(?J_qN+R%*t#!-b6!SH9?4=wtvi8_@U_wjn+0$9sdB^ zH)s{N2`K{$mIgHFd2=WdMqsjV)T+`?QzeHiTZrnJ(E%>Z-u!~9E}lnDbb@r^0JiIA zVLfX3I5h_$k~(}nI#Ez+qQ)uObCO*Vd3;xra@8XT@BtbxCsDn#mg*U_B;E}gdPDE1 
zQUa5e+KI0YY_5^uO{$!8NM{ZgCLNUcwTm}wc>8GSO7(@qyy z0qMs=O43bI7J{RiaixglkN*g;mj#dJK4s(06mWL$jpwo6dU_#m~BQGC+H7ojhamCGOg4 z$g<$ES72`vtH)0{ef=#HmBi;Stmfug*$^$$`C-`9u-#g?aO}tpQafLOV!1_6DNO0p0R}<`OiZOFjD4#{q9;Y-=r_8f!^ zL>t(EP!P?vyr$-USs8i0ovSf0>Y=TeF|KQR`}oq27{Q&g{K}un#MizwXZzi&?8dTM z329BdS>m}GFVdfR7o8QqlDC4Ljen!U3k-dkTKH;NVr?aof{mxWKz??}V;v)Y^ko-3 zM#_y~9N@(g`>JcWV=#3)YVDVjH@-b<_U5|7z>1iAH}g|WNOhxkhYq{Yx*smruBe<# zjSBe0FJb7VSd2t(fOKGr!A@FMGCz+}ekEqx!Jm`3FtycTD!`J_Q)Xoc_)%)^l`gVf znx!#-kndxihgHla!+1Ua0e)jMIIAPHny-f!+`gsirYo|(e)(H8_!b$Wfb?NJ!ld~F zqRk^5HX{6}LZ#%7m9t8(ccY=r7kfMWa;XprUQ1^_LU0(-B`4*7+-P*38)4aUTJ;t9T693QedWYhA`btM;g3Vl&N5_ldS!&twjkn z(HD`v|6O}W3^I(!XJ<>9?a+%kUi!Ko*Kn8|95u$LSCn1(;m$MnzgB-Ui$YR`Jfs&7 z2Rq-}NJOJ$9L%0TjtHnKrC2B-f;l`Of^&vzI1?`M@7y|m!cU2V&%dl z>%CAGJUSQo<&xQWM07GzOq>+%i)C`Cz+aL6Bu<%93y8m3RT(eGE?^*o?H#g+hj+o>S50Nj77JP5xay!4K;r8Ai|;F`ccUyp|gh z*Vdehe`x<*81jDSISa>N_6B>}i?pRc-r9m0jp~aAb98b>`RsH?CuNg*Oxx&zVCXh) z@s*WAj@E`uskUZ4&+~a^dX4XGmF)6hho&bB^L0$Jtq<$GGDuKAEYE`(b-%dvv3&4| zn304knb3VKF-mU8YI5+)$3kXQRC;U>TOfX2MdE^Cqy?P(ne4J7iK-Um_+>geY>nHcY*D85L)p;l6y<4Nlm{wx*8XwpG;MA;2W{fYJCAcPK<6ytknp?BJ%bj>3eS&XW?u@$l!v@Z)&Qn| z5bAvZa+;XYR?S3NLmyihHX$aKloO3^wT6N`JULz!aoy%!V=~)S55s0aZe!%!v}G8X zxteGg8qxf_HLr3O6C=U+7b-3D0oO+clXcG}=rrnacPurm*(z9e@R&zr>th)m z$u3)h<@`o5L9_^GTMn!^#YuAnVFM`Ppre14 zqso;Q01$Eotio8{lHo`=d}8%PoQfw#h$EJ6iPU=C;ArGak(92c@2D}pZ1EwDQQqee zqu(@6p6pVn(&uk}^VE5z(V1vCgVGPCciJ*2DRbPc+0wty1+;|>4oUDv7q9x&gO!b7 z{ysxm2NMAe7sy`36cvD?X=zS+p+gRlZ!8atFm#+3s)3Sy$9(~Tc)4C`n8>nEqnR{S z*fdTI(|=taZv$OzA+M1k>pm^)lPBX}w$vCcy!J|W8$&GBj*k3wjm#r?wZf=k2 zAF$G_{WR|Pd0vCB&mJX|X}NBFeL8EInJ#ZXT?+gRvpt!tak1O$*pS+KepaoGZ zMz>va9L`GOX=A^xYvZ;!470(th&)Bp(_utFL)df=f&frbWjGJ$kbvXQGr49=)>|B{ z!so(p0>3i9@fn{TKOFFKQN6iG*iwaKE=YIdQ5y#b`12srD*YG)jY;c*{_pueF*=GF zLBF8{o$F~;??r$fUpB_JN2aoS8TzsHwzipATGtArECrAPMgF|)2tZ1MHNIuTner!Z zJIeqp$yARI$fsFE@}O1<pr{**)sj~KeVy*}OFs8a?P0bVZU71PWfXSP$y(tdG@rgQe& zT?K`*&n7vS<4EQr$1E`9xDO1Zx?L6sg?cKbkwdE8*a`xI?hJ+86WH{Sn^s(`-eMx` 
zW6sN(#HisG5vq%89r~V|i*$z}5I$x6=+r9sHR#m2`WT?vkE0B8G{}$&w<+y3Wz@H0 zhGp#Lv4`0)wW_zCL#n`MKuh|I-p$Y6(|#ztusj9*^ZF;ps?e#!z^ER=n+qzcm>sU+ zUvyFdTzB(;RQsDYGBAU`3)p7->?#n$ow0cuKoV_JjAte>kdmHaq-ezVC`0%o%KXfy ziuVtou#7=A>xDtY=|b2Em$O8yXVk1FQ5>+&^kz zR0pua%Te=2Hw$aaeWSQLC`g{p8k8t{1qsa5r-&QfaPp$;&{-{s12DWn*ocQ3spPaV zcZMV13~PXuZc|M7g2`x#LBNTb4us#{TRM1~dyY*w{PI18-ZaQ%sEe`--7f4Xg`p0s z636>6kiuW&)ZBrab1m2BSN5L$BuA6i<5~8j&O`x;0axWp*>;Pfum}^_2w#>1dWu*; ztX_M0hHwxng1UeLjMRfrU)DfEpPfXB%83QoQ9W|e%}gPc|+yV zTkYoYPG1?a$o3VM#4wz%jRVaD zIgf=>DBvXH&{S68X$hu?@9|jFD8K5Jh^zb65f<)V#4$!frmMq< zeb(ShGo@kb)bKVQVQJ}SmSs1xF<(#6zZe%$)7{Z=*G4Mg;unnwa!bGr(C>E?lE>XA z#?8TP3uF`5(dm)fD`o-<)?Y5p($dMx!Z&f)qF@FJPT-%}-$RaVGUK{0n5j&5Z-esm zesSSOLtWnpnLy_irm;qS7qq5AYr4FJZ%B#^RxgFru@T{`@!wXKg%9o4e3hs+7%4~$ zxP)QhO?Fk8MRlobtnl!*b3bgd0_7m0don3^=o>R#mTigJ0U{t|_2Oi7pHYV19)9$i zw#0;0+QA@#=4N-1XM$BqM|+kGDUnHPz^Q0;wbz9ysQOm{n&^>qEarOSv6$MSQ01CV z7pWyrwk|!Ci+1J&R|N|-YRFNARY(@^VG?TFJ7O-s6<*1szcuDU{WT{zsh9nY=E~E7+zfDo zWk2jjsM~)7q>9Ty2Fk2N!5qsa5ncBYFlkLFmC>_x z_H{hiPe4L9pr|LayDyjQN*fUL^D$vV$VKiIW=L|oI<;uH3~$JK`-p)(8y?ds6{8V* z9+rH&u2O4Cu>j4Z6r3WPv59bk`1vqh2iigw=-mF37Z%!GQ(cpgd<&<z~6@>gvuc zZ$PRTy0*!K*;TK-@gjSg^HP7f#~4-in-G%qFEIEb%JaA_F@4A2X!_yCkmqznK!av! 
zJ+9rdL90Eh@{Sg7dAHyJ3Q5THQosId#1HiP5+5;XwZX_vPR`WC>1)YivR|z($anGmmlsLY$w`Dk zcZlpr>XWab%L2+3c%vJBpd@rWOD2!4w8dnt?zq?H`Ujk32CW* zYT&tJgxs1;zB}ThWMoU)m}Pn@I9cxGOi7>OPuvIL!q0Zr;ah1}Q-CfX5Ze(W9-zjo zXZ^6x+_HM?7;Iv~{4HGTjk3U_oK5B&Can#~>ddm=GSj!n@+(Qc z?&_}r!&B?jfuJEjq#=0;Hz?$Ii@uH0<4b(wa>rrloUrm=ZOeMpZ&S+L%)Xy`1E6mn z6IW2~ExeXUC?aP!mg4;BrQwVkXyR1`mJO7C<6esB8S7?BiUad^C>><<_Ws4I=F{i{ z&~~Hj!Q?Zd;i_6Y2ScOIQgG7Auie5f?I%oRg}*Dhg&b)ZkVxQ}W)WsY(K{-PWkvhy z1FCvEHZVqmDa`#~w4JBVcd)x$@ zX#$o>ODte!aT~GG@*$Vhn{*P5@>0NO*;fc6HJ@nStR+~&zwBE8b8eo_RJP4LvFAh& z2`mLW_J8BN^4n)GMQ{w--KhS_KP*Tn8la~G%W)~w={l^`|63_cHXit3KdSf z?NMO?)`++vfI~mjErhue0@yj&x4b(*a=MA#08xL86xspC22=xuX^0;y2)?vVMZO^v zSD#Rg4cER-IX_3mNbk(lGpMSdDU$>vH#mF+?xX_+D(GkbY)X(A4^ z9J3(cXNT(s82(`ua3wrx(crVizdF_SZS*!7Z8Pac<*`EQ|ghmsO_K`k>P z4ff`*3~1+)MiIauf1g8k@#(#Kk8PT7{z9o=4xw?MT8AY!4@LK!BP+4s%j+p&4BF729K3A*Kegbog|x`fQSmp@22}V1w2rFKD?YY zuZqG-rUbBT4p(sUDs%y+wWLG3#*w20GdFY;C97_O*B z-gS9U_C@n)$2~MtFW5jQAAOH)sKKY9V%b5o-BivJ$j0JdSyRwJpK@U~pf2;;fny_6 z=x>oL_OT^V#4JU407U@2i|3>pA(gzS45etL@R-e=@%}!N=PbQyCoFwYhm>BuN_NcD zf@d#4SvWMb4U9Z`szpX`nS3N)|3!=z)i;lpJFh*RgFMIk6jW(RYG+*FO;d2kPc%l) z0sWg(U|vN&x9__zdNn|QwT;J;E7miAgYCy$C&Jhzn3;)7BVAK;L0ntzTYZRPX|39b%A zo)Drde^dV3sa09YcZVqC&uqybIkKh3-aM~c8`k;!@vbGQ^nrU$Eh6`_jxhXc_FB^$ zkdDoD(sfxjqZ5u!VKDFH*;V(SYiok#=W!Mcy?#`{x%iyXHAMl4-GtW`o8Z!+{%MRX zX9Zye{`*H6r{y-3MN|}Jzq`fhZovu_^E=J!v<}L-^W56ti@6eZM&@|m_9{3ilOYnC z`3lsMP^}gx6k~5DE-0eIihJ|c9FX@Y-|;-Z2i?I&9AD8~8t_mA7)98-qD(bVkum@f zVr~_|FiNgcYEM-3XqA;7l=OizWNV9fwl!v+LH)`k$7b=)5(H2NUa$L zf&+?%$OT?DFG`95x0Nw&(ZA=v$tJUH6>?u$9Wx{eNJNYH>A`|o0RmXcu10D0d|sZ^ zhsT*_-`rMOFERL;Ln34V^8J^De}ERc54JJmNKFNKva|n9);2e{!r0tJ!Pk;N7<-v! 
zq($Ah9eShdGbjJrWbX{ztr)QNQ?i5dV;*}c&#~hT@O>ZV1txhNy|5O*vHralw|9*BQ@@C(fx_jlbCr`S9iPTi?8B$VL^GeBy0CqBwYMiMX7dL zVB)z6o&W84`B`r$19ZTbt{9B!mUlKRlEtV<{}|VZ7%?2|8NHd(jYB{VJ^3nj@`Pt1 zAB(T<#@v*9^|fm~bb5MNQw*|r)t4;*_62cgsp2!Mi#8{hgC_w|GQ~B`P53L;nV>Da zLH_e^6Yrulw~|nM22^B>@iSbjR909mwV1dtR(Sf`CUoC^3v#JpD)CK2vR5$(#{gGu zo=YFMZ^1R>S2v=|U^e*&dZ@|(7voQ^Cc^Yd z0Crc0yL_EafuYG%xgC7)yR%aWyO6V+6^rP16Bwuud}qyfbE0%;b^Q+YKMVTc#dKLS z@(^?YF-#leCRQJR^ettEGjw+If3%&0b|u;tM5B(~v2EM7ZQHi3j&0kvZQHhW;=KD4 z?<;nVy=Kk&q4Q7iq!x@q;CHsj5C8E_gv^|ZNJK0)#^++E-gEdgi>|FkNJkdmOedSi zTu0ZQMa44DbSZIkg|b|fjRb=%4ox$kV&NHxoIhT{qz(A&yoOXF(%4;fu4E-3 zl-~;bF;YDyL4If)q4g*pFIP2%8jA5wiBdw*Y^axDvcixtn9|@;!_xzM$ZClWU@AN_ zO1_|deM3o<7)=E zRiP15zmI|NdU^&fc`-F^yWdGo=6&vs{|*<816!(Y%UAPr64BT>^1vYk$Y7Ac!irYd zX#{~0CYo%d1<)fjFSSp-apbCni~TU$;bI6Ech=r=?Fqpr(+K- z9iN)X8c&(y>pqZ6hc zb<5kP93xh+>(1c#hkA4Abg;p9VI`+u%P84StGQ-3SG_J8KZJ@xvozKlYrxmfMHobg zXmRx(=(`^AV*wU&MU*}>-3L-YeQZ(K54nR z$-AoJPJie+6h65>>b?0tVzU`bZKy6U%^a(^o5%gXcixN3Q#Hp0qt3AlvDgeNJq6S0HkD-?6WN(#YjoDYyn>M4U$$B=mBVt-TncTC^S5otWH5< zj;R}jO-W&uy`TXzN;$Puevi-;W-Tfp(DCw7@AL2K%HB0CX!Ld7+8+-t( zJCu(+6c4hhH(B&K1|1xIX#Ud7daV1*STMBn%9f;QB^ll4_1ig-(|tAPopg-eEuN0* zc@F2ncBgzuw^o3(>24^9{{zf9bwET+26G?rx)yI@ia_hD#Cb^lmSd(Ko-c;N#V9Tg z7Meg&*Q1NZpRzM-epvTnToDX&rPje^iER7&qN-gCd&swh*@8I_xMt=ZBDo)Y(?+TX zrwi(r+F3JhN)HnL{7<*wFBYyGPt$Lj7z~~X<65fjlsA!zIxzfQ4_V^%e`sL++4fkO zI-M3-E_P)>|5~;Zqu>2EHV1Zi#k(c(r z)ty%)JQ$xK`wHOks#FcNr(=WS6{bVQ+}+h1I(lU#*aGOD*mbiIO7iKgthm|W4TD6) z37vPrODKw^-WHk{o$?+qS)R^a7gsjgO6Df5cj+KOX%u(!#X_)~(^* zUFMsBrWHvOaw=?mRAc_%`F|eT6607$N(<|S{AWJ`FwcjJ?o@l@N{BW1wXw?-k-|@K^%ygV8fIUx(l1Q*@?onO?*-WNu;paDVgQD)0R&PccQf_Mm1O1U1Cxy$)G4&wBDo7eQEyv%Zz} z_Ur8iAoruznZ)6coG#tPyLW4Z8R?QX+NQ1Mqc4@&<;~}eQ2%&Q0jsUb07ye#aZ^*G zWC`fvMAr+i@k&s|wX72`C0u$ys##+Cv^2kLAmU~dKyNN&b|;pqz7DwND=Tj@ z8p46DC1Qe>b!(N$5PG(mfWUVw-ZlT*<}}}DzO;rdp#q+cn4K1j3pS(ox9ytBy8fd- zVbV6~Bd!S6nl>K_-5BJ(`8gOfc|lSP@#6{%Hw;TsF8>8cfrouxL5JBE!?TOB)1y!! 
z%j$;0HK}L)D33ZF{Cc;$UICKUrdr5cw^T(cILLx@s{Y+I`;+}}!{YF}oaf~_6GN#+qlg)=$QiBbb@~@~sNK3Lk>W;*Dmxbz4x2OBf>>gufgEoKzoBP&=j|{1+Qu zz@`9*+E&8Ypc74vW3X`U8%{fQgoP-1yMR$+Oe-x_73F zpB#~%D5D$AOBWw|6=YaHHBW6T+Y@K><`FBB=yRZu*lj#}FZH zRlAu}l0u}Fj|m~N>PE|dS60^2p^^>XW6!WxnnUzJ@xD@^Vb_$GL-%y{pLpS~>OC4TvD`wjH`*E6+Da2j~pAT9{bDQ{gS2}(QIYsps%O!KT% zfevAsGa;D#EEe?FX}=GX@#p$f(r)(3zy8bK5P-L0kq-xupRvX-KML>N?OkxlGYp8= zekaEu9!&NU3V-ic)f#d83dii9;ov$la7iBxR|8t+c)ld;UGY;HUyFw$D9!D%5yoY) z-Xr;5h;)5WtYM1Hw+G~JP$3$E)B=TK+K0ca>2$St5%fdz;iN`@SSB^IJc2k-eUCW_ zcgsSDOQROca81>C3Zt?m{O8Z|KB4q{Y=IAT1{nslG3$|q(A9R0p=tcMhFQFxNfv_f z8|il^7N8oWy6TG2Fo+se=u3Mgdwh+GcYs03G>ky9iSw{{Jvdub94^uK7^m^48oP4w z0HSMpEI1z2$IVS^)q9q!_=?1dPGqY4uo5bIeab{2I;)aWMWTJ3ZlSDA*K$Q$<7}#mApQC4s8M6>AlcAqS&JA+bz!RwVbYm&Ul$5G>a$q#}{(~3rxN7i6a}r z?q$-k3w}7Cy$TIR$~<1|Xh{WB8)!c?V~9=TnPBn=Abv-wb;yszSV`Lg_!Pa?IYs=| zA28zgqa3pEAM9QOt6~C5ok^-3kV(z-mp=MNK$4-2s|A0s!S#jE7?0e9A+=AIG==eB7a^qt>A&z#$v~YO^ ze-bXJ*~6g|Y~^+{k@1?Z6r7ehSP*3ZtKYH*@Lm_k;G!+A@Hz7A=T~}Z7s8I6WSwSQ zI*Z__&C(_0E3ky!uv^IXrIjeT1wjPmIEdUEheHPh?Hyk^TD4xJ0cQyLN0V?amvNp zjp?vevJ0*sZkvao4Esa7ZXS-tk2+*lWx8McD#sC^ryef9XYm zV=e$Y9&1iAdsjKLqCu72&~Ln!kAMV@FZl-+HW2&e9-9ZNL$b|Ve&2spc|uLfx`02k ztGz&yh!b=V+A2mdU|Z*_#cbWxGAjGNd?}Mqork_YolQ))OvwT6mj8R5FVvH*(e6q^ z`M4IKM-OZ&Hjwe{ce~9FVM=UEN-9}rGW~QN<`eP-)_tj8#Z791`K|^PMNwg z+8t~dqOjinPG@%<%X6!+t{y#{U%MgglH0ZMXyueSm-7UP2>JcanEIj-v0Kh*^MRDM z{Ls_m7}PSu{N0_3h=?A(22z)R3Z6>9_{q5g!PE)}#?%WIP(yd*JwKy4qr_eqG7kov<=fAK?+>W;O&)l^RV_a^+n2@&i zYm`V152oByno9Qe%BO$gFr|D9^vUG3(4y?c6UddBQHYjC!4!O3=P#-`I&wy<7SZ`1a81grPqX*+iIy$c^)r>us! 
zhekCCa|pjCs!1Zo%@d_NDKijz3grBWiKBG>gbJUh(4P^bz!S*y-?PpQ`pN-NUx)tk zZ4`G$c_m@X2304Ae2~KZMoFP6a}Vf?5j$H`2>|H-JO4ArTEt+r(0(jOWaryhY7^n3 zYZ~CzMdT$xQF-WH>f$!!e(swd(JY-fJ$;DUE09&CzHN|8BvjD*erxFa2GINch7xJnZiwlSD!F1URTsi4?kam#a3G{*NCw4C9 zRZYjrgEStUHI`3UR89R@B};w3;*!8uISFx6N0ZzJ>_zd^TxP9u74M^vMZr% z_iJ0G_Dyx$iolTn^H(Ud(?DDU7#4QyBBCl4_3f<<(-$WY;ot=gar8mEA;G`GAB(NI zbZi?M$Pbdo8iCOgaCWgs- zyeOup-2Uckw3&qs$Ja>rYkxW@ z*D)07aJ!Ic$;e>WsS8jhp(gRv2gEJ^WKmq*5T4;2##;JCmCoon=~-dQ3XUbf9BywO za!ZtMTYljf9QY=r%U4&TUrVcHipMPAx}1j<5?9Ae5s%(88=|$OLLVCLZZq_^Wo>>l zqjQlL$-OaF)1MEu)~!jpL@u%VUb2@Xa1IopGC}S~&)Qm9;Ku(tI|LhbHb|9J^|Dlk z4 zi`5*UW>bkKGNO&$Ew2lOfqeUshTd2ZSapc{nUt$hf_%6Qf}S$bj! zvWAWxqv>5r4Ksv2x1B854)T?tGlMThC7CPuOV+kXE6?1w2)BxCFU)+k%v<}J?N7mn z8EK|E#_Wt>Kt#VN7nRPX-tMC$;v%}7+ULEQ z96%c-!I4L2BxbZ!-QYpDfG4kISLBFr4XQE!9(voUw*q6DAp!iAo#ntkI6a`l?2fC2 zbI>+!Js6@*ZFi|rm=rh}0Wo84JX!S_2i7OQU2m_UPlM5mzbj9Gn20_J3^i)Z7pd%HHj8x6tVn3^alx+yO!+USiT1OAI-FKqXLJ~FBQ?kFrv^!(oNOSG9>2*->GQ!hZXLWqPl-KK|RyGJ05S`79oviK|uBt21~ zzs+R3ra<3XWu;DaH2-U0@@!$XbQIw!vY=hd%#uyke8;vd#&;;!hXK*&asYh(plR7C zy1j`&Z-9&Y7iI(=s9xv4#qI`p=3Ms(^}X+R<1nhL1CV+iU)1Ge(id;aIOB)CA{M4# zghIvs3Q_CwSeG<=WASuH6_nrUnX5pTQhN<1T~7LNlY#CTtJ|N++4g`3=8C72K7Ksz z=*YoIg6ntZ&G1`nP%FDk{TvYYaEtw`)T!>5Q==ALwIn@Jo}jsL2MSb2k+~m?&Ft2rBE$iO5f!8 zZi6bu!gU#S# zrT7_@3YEtYOJPQZ6EIBLTgnIVLkhoUec%oXe%_we;mV^yFzJZvNN8%Wa}wOCnc>~C zAz$4uE=4lwoRn*2_etwzs~z$G+3yT{>p^_mYx{^HqyM{~rVgNo;C9wp0Q}w+_sN(U zaa>v_v#vTTXy>N%27Y)}T%h4Q$T!$Io(p1{V<95Yd5~&au1>g0(1<86gi>#at}CG< z1=DdTCjLqbgTzh)wE^?h!d%DtZ!I1*i!B3GTG65P?J@6(%WF3IC;2I_ILBXtfO5lD zL%v>h6F-Afq<+O6tmy<9?mF5}{={WupZy@f&f$II9;{IM=!SYm0s&xcH zk+bjQIKvym*)1r`Fp|+0*#P08q-9G^&g8t8eS}oR@H$F#XJCf2KyZ^d9>KZ$q?1A` zFLiJnwI>NoO}a(Nh6XH3#Ru(=CQ{Mns3qLqKMfY zp2Jj86w`mb%X9&O|NhkLUp7|gJi0xPuv*+-`&UsstzbGqMnZf{Xp8uZ0f8#MehPOf23LkjND>fi3N1aaq@}G)x3D<0$lm9Bo7ts&5q- z$ol(!m@h!RkNM{dO*XTLewepHbT17intH3}8|^Qs7A-C1zhL%OF&f0O$757@Ldg%s zyMG+XRDGay=y3Xl^5h66h35~PoOxrq924YeBTZ>cja&f98}Z&L!hmd5RnRQ0e&&zrgeC^W6abWPOC 
zC6Y_XFZ${9tnC~vV9&)?Le3S-E0iY!#}T=544Xqk1fJ!dn+*aVnpy5AnOL#6bJ+%~ z9L=vC!DwyC{vt9`3Qm#`_!-u*or2g{TJpfHCgD33WMEVIcUqn52=^U0TSN-rwV?*x zOQy<$Q0LFxz&L-xy#Dty>eo=>Wt8QjtibAR?&hhEJ$Z&c>n4euMjY6|xS0wAd69}= zX);}<5+pj|awL|=M^5L)9Yl$9>g|);beG%Mrb~X;t<$#* z-w+(E7^~R)vL-O@ny14s)hwSfOfPMkKEdbC_{)S@=V%RD&ZXtprYKArQ4=Nm$MpKs zO%+%+bd)YoNl34#CdO%?svcF0xPshL+|E7d#$F(=s7`x{56>*G$}^Rv!`T8EM7ECY7}y*Ym0m<1TFvM%IcgL>L+I}JCq`9&sD7n^9pooNsjN}9OCh9^v*jk^F?Ez0mJ)x`C`o}7}t;aDK`?#(8}J_5uDT3UNYq*60^lB^IOQG z;l%=B)&Sf52cQ-%%am4*YGDpQ<`6M((y~at&?CD?HMODlreUeCX=WiNhPuFr(Zb^+ zp4m||k=OTW9O2Ev74qj&Hc9OlcKxLs|2JIO=Oimr(9{0_-3vMfFHYa=x0D!*hcJd9 z7$=y%Qw8Z}$iZ6W5`APN1Qfk92a#R#kY0f}XUvK(;_T4Bh5RCraeL2vC-}P7h9M551o?GAGJCq(12jqxZlCc z;zR!eE+?d(4e1LDw_S=741sV2_;XmYGvOFr*CT`x*ZTMB)SxnA7lNSlm=(7_i5<9U zquM%RXKhD&XM7svO&`U_nr1M%TVj~5Ah#@&x5!wX{YP47!BaBJcCKt(b@1heklbnU zlR=ix9J#4qz%jsxGg+Bd;!aG;7XmFH4ppwuI|*~a9kZPLY`!#O;^Jw9gO8X1-isb#WEo<_#AuMwtiJs3Tw^`sw?OLV*hl$SP|1^}Q&|&2Tyy=u0 zFaC^8My>brC;X=v>v+gUAzkq$>j3AQoPwl;Po&GBT(PrVobCK8B3YG^oi!pTJxBZW zu{`rJgw1x>+6`>-3+`?S=)#X7`}#nhS=_FWXtV6N6wLOAXX!*2H1XZ;6Ch{BU z-ycE|50>D4qS1;mpPZoNEumklpNMsK?#Ds<5S=y?l&H__+q}PG%WVg0C0|ZI?ZTGR zGlZ){2kK|YSn2~#^~O22qX-P%4ZL3klk9w)rvMG#;bU>VEI3MrtAU(SP6fQ+jBr;i zQu60RQq#9_fIZr|;B;uEM%+FHM}ojfTUM{xWJK3CL-F_98)8oB1^I434XVlyGhH)huI@9yab{uB-1aU=$k^Rql=P}hSJl1$9%+wZB z{w{&fC}SovM@1)O2Eu9sD1&z~9p)d)qzq9;ADTI)|Jsh`-Ap{(uEP2`Q828^YL=kvIvzUM}GY zZ2~cQmlZ(jC`z5LZ5?3INBsldizE|;P`gXCbAP=ZRm0ekL*aAm)JPGahA03LzRXsy zc#!U>rlz`Bo?R8Q||3JZ>+-z!HRVX@(n9*8~BE^BnDw!YsCy(oqMEF!MDIv_1Nq=5(7H^+R_dn#bp^TemOvH|> z!p^=H=ib6+wNp(lR3gPRK)d)FyrP$^Z6YMGu+gmiNY1RL(p5?*dTZP(D=IW)|CCxI z@VA8MyS`kMo|bduAr6D1=IvNHq|gyPJe*s7N$*{W+L#<=C~arhJnXTeqZt$OG?<{w z`j55j1nnv>!yVfsOnTcQ{lO_o&RMOe-h-jruQz~X?U2+2sBT~dTvBE~@VB9GbQkvH z`UtGgG=g@Q>wIxp&MV`C*1f3KuU?iS*}9@y=z``tbx|=c3RKif^R8rFEN?eiT7f{C zpsDVt1lJ?l7PMNzSbplADp#l@WC+H;wDY;6mR>HE*rt(h2r#7aGv2b58d*}U33rvs zWHx%|Ey*0kg63NF@&6>mBd)_IP8<{c!TnPFr5V46;%kNCB2(kV){$k*5F%4L;#Y>t 
zEsZrTk>#4!7oExVY8{giid>b$&|hFa4F1Lx1{M)2^eS9LgG!Rwbbqk-Gb$Z_A%#=o z0&8<~$rEnm69!hB$}lipcA5lP827fiO(-W1=XW|&(!iD96TyHh=~cnuk2sTce!c!2 zaG=gtoj>I~A!99(Tkd5R_{y|sbt{;H3By2GM_y;rgeeu(HFB!`@sI+L{RrGymD;=l zm0;3JvZ|=BY!I5a+*^GC7-g@3xFd0-)VLJ^8d;3HWod2^u(9+QKe_9;HwUDadqj1^ zBmHmO&j&b$omRKe4ihK0Xk!Si#9Q{8!85)$0eycHYz)ZG^@^;#TK}7z!+X5>DUs+! zUDB=RqIozBsjhsL2H+U)TA{C~j<7?yu?wri;1zvtK6j2s-?E%a$U@Y;n!}i_ow8lj zmZbmW#h?1}KQbt>U|`;H_K2oV9Z*a_M8xm|YB@nj{(?FemVle)@Q%d3C^}ZX=dA?v z(}blhJqH2ULeFoVHDYjWXnhdsm>GL^PL_H#hC-t_q;6r&->4*_s>Vg)Rm#QAB127q zVpa0VIuq({A# zw{*)8Tg2*j*R5Lz!Xmujv#Z1^A~q7N3i}!UB{;pZJJ*wPmsjw}Lof?U>f(PBw6G|7 z?an4`!(lIJ0I_K-LZ>6fyw2#AK8^+|=dBnk?a@7?Nu)lBa0`rqR|_xL8nTKj zTPq>NP8R{SF87MVEie_n2xZ|b4)cT+QHtm!A-xzWR}5?hjgWMwZyMO1VYf$r|5=sk z14%rj!i!b_p6+rp`O!W2`0kOJvO^3SKY5RZ(3y5$vPzI1iRw0pPltD7~D(2wt53uION1m>fZs z6L^DVV3P|sP!K&|22Q1pD0%|(9p+Zoo~ZVqoep(TAS3y%FNk&GSqSBJM<##ToHPom zXHO1D_2r!-q=tbRu zkraBAoKt&1K&GzCu}X*05Nq8?JY^_C^w`V3v7M{wuti5}1{`{Z&v)y?&h7LEzVV+R zJe=WNOGt_N?Dt6Ct08MiV3$hX>q5Ey^MD{PBZ|?4Fnn~N0Fyk0Tik*?W!)V6@Rm9D z#+=*F8LJVSMeFTMHB)>s=cKIG2QA9^!cyO;tI;Ea!quW`SXf9wj^?EP63SO7!5{z1aj^$GSw`bKN^`1r!9N$XyF|P)n3u&K+ zb6b!FluIW_UoF)a3nKT4Lxn8d5Zv}Sb$uV5p4R)Ko_bRzA}$!9=(IlBW8RxbzxyYM zT_o1YKX|Hc_yLH{!lr+Ai&#Y#X-Kr-kxxv?%OxE*AKKf|w}BPvD+PakS-x92m8HfY zrJS!klb=7fZd=$BV0U1Djwt2sF!!C;_i$=tJs}#dk^AHnHT>Dx(4lHs!8vPZQl>f; zBX|OICGTNGokfyF=9@z=mV+VVnI=Z|&r*)nSoL>xHxF{*RBk5mh~`bO~2mjb0Mpul+Q5Pn_tC}dLdi#my|?=~7y z7~Ht5rSOb>bDwN$IHejx2w62OGVMfzJQqT-YtSqu3}o2bV3*&yPjiwqr%&l_D;bC{ z_DH=`l^#@se^bGz8CSufyHul#%?A=G>?b88S(#EG5nolHUzX)u#=%NOaUJSzg$`#M zhRvX9yLZZBceqf?AO|gW>Xnp>ci|m=0O7{^e#f|<+eWCr4I%;J7W8wMKWn_z!eO!B zF+(Z-^?`zg{Wxg7B&qp0szWpB^~(c|Hg6&FXIXM%eG$xl;2S8hC#9XF_sRB?xq@Tw z-GUHXjcRdS@hz2yZdAL|fp;%9b+{QZNwrBr76% z?Z3>x2lQkXhH-M?$Ed{pd)$7ElWT=r1VjCc2a9SrA`Qba0lDCawa%SQaR&^0CNMRf zevgyKUD-&hyzG4tnm@N+IlsJ4(0*TYfSu<)%|T&%t3v$=@210io|OgQcx~n;@)LkpzPLY_qe*!L(wFb+nt1uJGThmxlfz2G7#~0gch&J~&(kGl^7&kB+)B}o 
z;?OwDISxxYettQGAD_0Ejcu|Vy4K6h@mVe?dV$GX8){~rs(h?-za}Gx0k9Tct>t+Up^rq4h7UVmhmhTJ?j>UDj!(k>djyO-Z3iq?my) zo&t?H-)mQYuvThMC749Nr$OtyaC2o~3Ndb(Ht2nyX248-tHul+;c5w9AM;EP7XUK*>Ay~3y zZ}y={F>2foZJY#1Xy48iljYQ%L*(!gv57_ul&p`?`1Xq{=N7RCcgDAX=JACL0^`0K z#!N{J%Ch6(vmI|hLegkj$e^ z23qNab2S1>dd}MWXL!yB(H_PqOg%NBm*-=jgf+|*KgWSSbncLH_CEpUD7xTiSKnU4)Zfi* zo%FGw(>}c&d{o16ImVk5nv^i~UmPMrH?B~cEI@@wvM#frKDXe*b`+UO)3Tx`KW;*V zF%JHu)+Gn|W_s=RKv7t3Nm{KAm5({{!{>(^3GzzSxPe%c@i`4^@A-nu_3sPueKm0vPLs`#1%F)qMpfln-?<>FHa{ z?FQB*@bpcl$B78z`dx*HANQFb2VHo6vfrWQvRSP4h}SuH9n}AGbXUwf`#W)|cP{#A zU|1>89Lx#ZfJE{3?L(3}T+OLrLU9o^l}>~$f6W@Gu+guSCc9sAs{yH8-DR6LVy0GF z_X1%(N)I{OBROI$hGkAH1dFf0Oj=c7|9(n!2m-3>^-yXm8|+t4u;p!MPDie<+s|v} z;dyYmm)U*cP0p}jR!_iV-%cY{Qhf!LQ^%t?Sx4sPK&m70ZholkyJB9+7d-2!5A#MW zOLi~HW#k^1J^6S*8KhF1EkxtNPJ73bg#&3x=7IQ}+mR_6Q;535g?a&c`-5|5J4Z`<7@ttv6GJm|K1x3b?jRIvi^ZpNKJZ@2-H= z`#sC354{uEj~5sYc76e-cMV&+2Xf@pM|M-WwGloQ51KsMAs$!$E!iN`(odwNFrF(; zfRh8=kcjkBs!n;ubPiT;2b~(cfzhhWy0``m7sc&GZQBEk35b=_4-9`dbiOAPU-NUD zIxR1!mW5eIO^xg2AK;nFT zzzGu>UAIwJ3xtctSIIaGA^wfg*@MIEE!9o}PnVOHG&$3?xo!pGX_ujuyGNC8@8@{V zWUox9beKWGig$p9@xMN3z0d9|R|CFXu;1GJOfus+IRiiQJ+P>~!eD#7zEXEVd`=yG ztti$jcdt3x@I6Q>;Hw%}e5lw@OGfF#^^#;yoX$JRii3Ayqf`8k`S~BmO@CED#@4lk-iWqP=m93qRgC{p{`@O=`aE5i|u))A)xgsmmJeEe3}o>{Oo?P`Z@f5LTYh2*pPt zF(X5o0u+ap(Th4(q$dqfr2h^^O|gMCT}^6fGun{K+}9Em{tuFfKxSQB7K2 zNEOnGma*OBQI<%>RJ-ltHt>oZeIND-eK$}HuKj=Kf4&yeYr8^Mx+Z#6hen3hAE{X1 zTlM)pWy$B{NH`?T4)b8;u2j>4=IqCyyB>6?vf9!vVIW~QEh)_IIX4%ATWO49P>#mS zF{2rOgh#(ogVFr*y$tnz!Fr`7u+u-jx-Xj6!dJjP6IBuzcV*S6xPwb&{sFTETP~2` zfp+hwmNmD2vH&PXZgd!jcQJ3zcg>!i z|Lro-(MYZm<4(NhFqS(HzSia8OF(Rcr>UEuo^|NR3V9jcgF_S5l>gJUwiSr(DMXnJ zEq+y*Qt3nW^4e(DpS5wo0W0&QYHW%g+tJF&4JMx%_63*W=?mVP*dyv`;}C>u58WJm zo-f>mK+OMS9t9U@TGKyq6PbgKC=WFwb{%XKDAF(g3#|BOM;DB|DZA0@%lDV4Z#g7q(tbD-Mfvh%rA(E|b;%4%7>kYz4#82+Wb2H{>HZX;;`=5JCZ%Z~Fa*mN^f5 ztPScQlmWSeZR#oVWWKSFJ>AMmX<`lELKaz0fLP#YrMc$^5)bfp?B%-~#pF2BGDUUq zKxlae$fwMtzAXE{A9O97y#h5-@M#CuPx?53o*19Irc4QrK~pU!-=zuUQky;tc)BjU 
z!72W6oKU}}7r3rj52n&qbi7lLbx^{_#6NO+H!ooFTE?3M3+`A2SaSbB`0a7Z ztvs+pZ>=RH@~I;jtdGI`Fn0YHQ;ol9J;Ze#y839{D|5q(jzFN_HGvCWyyPDU>iN%* ztQKFcScZ@~dBqK%-Eqi0C12UY@>HBnRensib~To@`rEU-_4~rvnsl(FWj=f{HP1i$ zw>N(HHb(bN^_BMQsYJgxCiE?@(75@-5^PsBOcld%mko-TCgwrM9}Olz9sSp*LxZkj_><8Mt7&>#N=O$mVM~v4pxGF(a$tlT%zuVB^rqwHnH(6~4V_9R#o9cfS9v6-$veZ}cmnTQEQ^_26z+lFTlncp%pd8NFCWJt$h@r; z{DqpCsQ6E3pCSOcb19iy)^MxxzV0ii$1glj*2REAQR+Y$tM~i5o?O#kq@-pdWe9`l zpve-NKpsgS2pwqQKLMT)%i8-)V5r9PE7I@!4}a>P7?y8+eUtk}rKGPBMH&0@omiMR z(k9pZXeS|@jnO)6I?ayEnlaI)HTHKu%6Alu%<1Hm5W8h@KU1zcHLI0?*R^|4zKha` zEj)>r5B_aM^WP<--j&cl#lSj?BSffL-_WU5QTcP*Nl~#+?o&l$={UA-Xj7FrA4%N4 zuuvg2!Js?`c>ZGqg#fp9W7^um_HqS% zbU#`dm18T`sur_XyQqS67pmi@VxdSklIf|%oH6l}#B`u?OVmxRwT@Fb8sHyI1J@wx z{61Pg^An*-|Mn;D{e%$|fcMOwKvCSp+LxzrK;YQw{lFBj626i^cOf>u`LdtBM&!ca zapSc>OUbdw8U-yvWg~V61K2L>;S$Ed{nR zYG`MdLQM%dYL$>=TSX#&a6$)V+u#FX{VhYDfO5U~nA%joViLMrPGU!`2HEV-<84iJ zY0Au(n0?sV&6#W2iYO|k^8OaTBXQc-TB1Mycd!?8Q5f%C8%DG_xU}#1Z8Oih#o*Z^ z$tb;CHB%?HoQcPKI*QD|nfdOXz#Y?aMSb7qlAc$KC(y0UhY(hjU>=Wk<-CFyiRz;} zrm>hqx^Nx&9s|CqqB;-64Cqwev|~zJ`Q&9tH3(s>v-vkU+L>Q$9G0xJN9wUNA$L`i z)IT1nsW5P*=wmL74@QD2L=g>RcsHT-V2m*0;YNxZl0H>!LNj6pq}G2NAegd+?oxMl ze_$UDsgo4JH80kwb`n=MoF9ThE^;lB(wh<-i!Y2%GYB^C^1j1I^&`U55I6;_(_Uy9 zQSvmCC6-LIW`^j0-w1`1WH3Rzs3E7C5#%Of8_F~d zol#w$6P35UGF%yz+q?WNfIhkTk2X>al2p^tX9BZE=UFsdr}LDAJonL~?A2N1ui){A z$74QuynLSH-8S{UPZtsmK>eV+!uRx?FJ~r-z~I5Eyqf0hJCXOG9$yM6Gp6>?un{c{ za2F?+;^Zq&8MVPldYn)d)jLzN?P*(2hbp2T*%E4Nh0GD1JYNJErD4uAY$~JDmvpcP zA!)`eNIy2a#-G)tJOXRf0(!vnj6lavMMMFL-=Es=2t1IaI_;~W!6$9S=04hUd)$*M zOdHH+A#*xMAoz97Ujo^>IC3H6>wc zB-IHXIg4?^^P1)q&sX(9YrgoY*7n*gW@l&D9zPRQ#iu400Gs-21VR8e!=9^NFN^KD zH+?ms-CrtYP@p6;6roaMl|TBO0)ec~gy{3&V`%A<|l7UG80)2jXUhSZc*a)Ts( z++O>eP1dL1w1Op2dYIp=m^#|H&b7`R@Ryy3RT{poh{tfUAzD9;8+<1>pOvM>MM`Nz z!wJKqbm+q4b7zv}i;6H7jqkPDb8-K*Z)Vx1gZM(A8T^vwpZ*!{N4Kz-YoI(IP+1^( z(b>fs`puk`!&`&sH_O|M%`t;Co`6SMdU$8bbM!BM)(F@3Fqm~189P6$KGSCW-VV)4 zm5hj4>KPZlw8YrzD^XAf#ZhJ=r%v&cle{vz)3RR2<@nOVs>z$i>TT&Gw8p{U?@hUM=C6W531qu>5g~4j 
z*(T7PRU_o<9$2qu*?$mmdJebv+i2GEqe|?frFfR?_p7BSL zb*-+9V)Em;T{HD`2X{Wq&7m{U>*SEG6x?QtW<)XtRu|r}pgtWftTPOyPlt84?phyu%K zut*6=LDe`Jc!}ky9-iYuN*Kq3sy7vT0NOi%0zJ8%+^dLG5yogxNFtj^zR)hl&tQ}A zo}E2Uk5$tz7Oc1DFOuSxvGPbjynf(`lE(P$_+Aqk0D_dA$7WAENg`pQCr{DfGx%9n zt^?ScxS#W(=W_(Se`-++yV3^9G$=iMC^2vxdJmFgA8a19jEIRt5V&fOX>BJyoXDR8 z(s9hqQfm5Ovh2ry@=v06aoyIDOyT!& zt0QmHZJJ;wG=d0|!vtokAVggODWO)#N0%M$Jdg?3yU;*HrfEeXI6q^jv{n>Kl9Fcv z0+)E)*O4j+raY+(n-zZn{6z>Yur@m30 zBEzDFbPgYxRo;%L~msi`UWK z6C8S%-yGg`(}VC9f?NXJl?TakLEBc3J*(rgbs@>BXtXx^^zGRWW1Vkq74g`MN9c>U zS2-zV9A}wL<6I;7xp*z(jZDJIRQgwG)()l0IRuz>jqp1;*ly*S74U_OcEgBKHecr; zSnh^phTb#|p@LAJyli}F*McXx2)$&-lH*JTW_1;E35;ND$V3J)vBZda&szTaEFEgst zq+BqyOB#4&Ycj?OGb^^T@6uK2h9Rv=W6&pJ`^LewyLr<5u`DE`^)qkHM|B2jzeGrh z;P(s^r~$xw<Yh~pV^%nE20 zzt(ANLHj}49rjw|r2Unv`WE0MZzPFh@@p5*0JTx8>$Ce%rAQ=Do-$y!0j9j1OGO*i zzKpUKwBm{i%ZKDk#FL&{$&;ol{+a0yD-ia0mB7gRa^WN@GC!E*SK;ZAMfcREZ5xz> z)@8HasOvr(L^TSirl{4$M~|TGRM%R*qVQ3h(dkjzFI{{onlwkp zAI!pfOXpC80Miz&wCcOFmsezub0_BArC4cPcgx{o$ca^DC>xK0jf$i-n0qDs=fW%Gf?ssOlKazIL}>GhCx_kYXp!Ir}C8 zNL(AwC7X%Vo~Uq(09ftATciu|$h(1Vxi~3PqOR%W&lXq6tSJcqBIznF)(8c0%)wZ?S{m&i0k710W`6D^1M;} zHu#^+?kbwP36WWcTECWWg^lcMV5j$FRya-Vx}Q@Hmt4G|QrzM2^tGW!|3s`%gGt|jX5I?Cv6kK2>2v1MYNEEipIY2+AoFgcJCq~-m?k0x25 zGxa1#z7h+l`$%N}?q2yPJUWGd>-z`aWjgTw!#D~0>!k;}y8tk<2Fbm1K{Kh2`T0uP zli5mhh;&3`Q=Ph0HU*uU)LA29nFPmq`EV6PS%u2HJe;}PSz-Gj7TLe`7zXlX1$?f4cf;EXx=UjsBXwhm)E+F5k2mmM_ZM$A6Gm#ykmuK^UUghPC4@<~u5aQe17% zH-GL?%w+GTBN@ZN^7BL^swF_VW~@op$0OrMkASBNYj6Y}SK%S*;ku@mb0Q(9q%o96lDmx-0 zY2BC}qze?1eNfIuy?fmGM2V=f%Kp^liH5+9 zfB^lrn|}G$ytP3Qj~p<80Jx-P)0P^${wCBUn0Uc{JUNSqR68h?H*a4ZAW!HX;cgo&^B9E-^Pn;m$t;lTiY7}tM(AA4O zvOjd1ovSwBYdY+wV%)Oe5*QIc$5Q$V<5NfPdr?ryEBlv+M)lM*6)-O7zUwtpa!r1H zVB88OnMn`;VWjp2rL-f*WQ&5I*J^XZZJot8Od||*7%ME2DzR+m8=(*~*67V^_uhmyI`7hn zAnWW*#%e$`-*ij^z zhL={;gS1)j-qEj8=hHB9M?@13jq=?EBtWYVQEV=WV*ivz7(m%(ALM^7T%yyq6z%TQ z=;X{x7$l_n{P>R4VP2rX41An&1WRcYihb-=JLF|@fFbRyW1zY5S%5g?PcO-wMP8)o zahSiIQ@S2evS!{~IwN+160=5~y(w06)HL+H$rygktfA~jPBZt+ao@&>Cz)tD`G4#G 
zaW9A`Xyc(^GP_=v(*!b9zQdf}qU!I0~NdZnh z$=$k})tbjmjh66@iSIZY&kvago#~1b;%ggOP&tIicaPQj0rHl-&hIk+MRvl$z8V|Y zbf!q7CC#lC@RSBAXC7hhrceMgP?iySVfd>8;0q}Cn3k4T-;fr@*wDl!!2_^EkX=S; z`;^lt@LUmb`pLezgZE!fwa3~km~s1ocJ|y9>{Y~z#h&J<_by@K#^s=4y$SfpgTiW| zdi<&>`#n=dnD3ca2VshA03Ed|e?DZRAN7TPDHBWWk{Jb$_HjFZt257|9lG&B z9e?QsPEnG%RtuFW+KVmHbH3#?ytuCjHWDEeHl_0XQL4T*+u>wKofbk$qqNV=$|`!a zDmLm?6Er`si(o6tw6_j--ryb|)0%unS z7xuxbYDOchu$kQArdFoZ%(;s-&{?BbN~AW4bi!lH@F83&tnsWJD0=5`CGq<`SoH`| z!kj|alQCh91iR309tDo1C%0EGnW#4;oTmIBbwGm#Lx0QT&u*2Q@C7$uoeu-_YAd_p zx0(+-!+2h&f#_v(^;j!;K6w4jc{PY{^kfbd+Q#1l;RGsSJ8__NSu>mOiHox&k62;O79ntJ{5L& z#$W=PB#B8uDX?BDmgYcsf5Z@6)xtfbsOD$}CiG~h6rhH|Qc-w^rfCfaTP(Y9g2!Y9aZ;we7 z_vz&9lMD*E)qO_t=A?L$6(1&g;|aiLGNcX~8YK6EMFDrTH(n69+sDhK$Wn*$KJ)6V z-AIhvp$Wyd3Z2gS<|E?}VE?6jpg|#(ofOi0ihNqBQswqe8p`XkoF9O6B8*14PRv%^ zIN%Vi$b8j14pF8zd<}npK6H8QCmX3*;a#fZ5R797VIsQ~-jb@_hw0VrF50yX+?~nQ zRHLZ{*uiXT%5ES<6Wp6Nu`g60x}n@_&v~g`JQlf?;-`MmMOLQQwVhP z3PVJ;vDuYPNf0C6>Iky>Fgn5HA7p8;DCKRD^(~^lSSF?VyVP4B33$eDt%^uBq1H-d zO)3AJ1j7vc3CLLU{x)Fg%F7nS%s-+2=h@3Z5z+Pg zF+2s%#6By$YKB)<;d);_UyF^U{E5uq7^@2)w4g*BkXIBD3Y*+0#KqnTHx#7Gzf-5U zpNOjYq3&&&RgH~ELqH`FlNkYd1TO3Ktxe&rrI)qsYOc#lH5k*qZlJtvmO+M_ zB*Z){0NyqX_C?!Nn32;;CbRDc-XBkTmvwz2#K_GrzTUC|=dHpWYR zFACl>f{y}b>L^E$R;_7UhD>7<;}#tm88LW|h8{e5x{CBNK9t>s7r7H)Vb$md5b>S- zxup@AA#0-Um=J`&F|b1hvme2opiJ%92KeH8YF?j~TSYoQrGJJNb_?$ z^)}PJV_)}zsU$t!bGB^z6yafDcLZ3~d=3X@Q_~pg2$$$h+sBZ0Il9go`>CWttA=Y} z=aej%_D^t%1}hwpUFuJ)3K?~T9&SjzhpJ$S+WNdC`kq&SP0gVKJ&D=9ULt7k3xR2w z(x26z3Hu>>nKLN>7Zm5Zg9fy{xL4N{e*)Z6G!!OTIr^01+0cZOHQ=9nrK>DnD;BOC zdZ5L49t!Yw;RO(^c#eW$5bM5%nsqJLtP#TS=T{TIV0*FTuKvG<{W!|TO9J1pe zy_<0Mbls(w06yt0nSI=jxD~qfh{?S|`9V3DpjynYgr6ppTbWsOs~Xp4p7?#I{^7lp z4kkW*vclnKzJk?2g8#GyvJ`te^*A5#OCrPY%x2UVLOYAHKZdXX(GA*XRNu#;T4ftYNkO?7lp9sYQT% z=W?c5kCWo3`r1=kDwNa9x495m!s0ZG##vacvRuj>{FWitue$OE8+6qb%Bbl<3oa8k zM!XQ7PXpur2lQ)gOJ%B5yLULD!|VYeJbYKH{Jvw_LJjjgDP{C@0e^ZB@C6cWavaFR zF-|hBu^CqXA(Y@+4F zF=?-~i@>b~SIWiapG3%&%$K9om`esZsoHtP4f)0ToavI)8%~N3*UEL4aCbxNTA)tX 
z3RNd$I&Ko_??uKjw!NQhH#!j4=APD)hXVg(28d6Rib#GBTP%CF5)40^td}Emm|nam z+A|~`nc5A50xjfSoX*S9%&045sN3!wREeT6SHO{OZ>LFqwC&zPz(_4uhB~f=eV0rr zD&X^W0aOt>e?sRLX&J#jfbW*0RpS)IWgD42_D@??K-BfQurVsL9b~Vyp?ehfG7d)V z^yqH&_O5{+`rv{wt(gLs#8%(T+~7Z}6-Oh`bzjo~0IdDhU81mFNdmkbk6^o|+S<+b zR4ApMU{=)1$I(bt=b2z$fBi)eX7ia%GCJLKw0=%V-FX2?^y(&2DQ{YI1VLrT< zQ^COqFZRk7QZr@(-+TZG*!x$BEgJ`vPd{qU5sp<(;#~3ZYq?+WjfFp_7>*@1ahbc3C?zStPtIuj#SOGN-oSD?VZc08-DTEb z^+Eq;C@<~H%$$aYFB@D4y`ZbF9|*DL-Il! z3Tq^FA3!hk_vAYPm(5m|~2lMf{N0JHyE7(z&|$ z74vw*(MR57E1wZ)G`&64_k8|j`>4CEU(zjfv1dVrt~7pkq_K%FIUiNW9V%rYsmx#8 z$3Ain4wF{%0Fx$m$#3YuFaHI)U13&;D2)AS6n@?19H15#_mBkpgc_ErNyFkR=TvG} z8gUT{*8ICqlRLMOj8gBG^nwkY&W2OCo>eLgaMc7`XqzSNWU&}09aZ?|p1CKD)H@P; z2NM|dXl87qp2DjzGNO-JBq(zo=7aaO>LM$vk^AdWJvbW`Jw6kEXwv~J`x(Mr_Cb~r zcl+53HXQ>jV<{Puq{T(r?+=_On<(XPi;(XmrG02dF^v%NkT}|=x~w@jY_+7{ud6gq zQQLv+4XP^xMzxtElWROqYn}i3v-TEi@@^T;XK}+pqa2B;*Acnn`~Yrwd8zA zWz~vD;2_c`+XkLxW-;JQjut%SA$yi`Aj>m9sbE~t1i z!`9{)tb!obCvD1dz1LhT6QRembZO72b>_Yb!ZuywD`AOHHzFH}BsdRU;@+;ZSjKX} zD0{y~n|`SP;X=40ebI8$11&yXvYwhGQ<3aXTBAg$!w7+lmSfe3tie#G5><1D_M*4= zK`J3XSGf+QSW2%|sUp%SBde%RhCvQ^r?A4sT!v62Me3wd+DTo}VFUvFtBbDABLQqe zRkQS<{jX~Y5f5=r^Z%{?!w3SF3SE-XJSWr5HUW-uw1sx*5_cm);>+J{FZuTsj@rwm zadaYKu{!?v%`2TB*#f^^ML_2Wg$6Uy<~}cN0%%g_Yki4_Kn(Z^S*llst)ozu8!=Bs zq1yE?TQEL96}iFaEB!+jOf1* zVFxwVGTIy-HhL>{!-lMfcj#jPW4~-R+FDivh0ixcl3;ygc{a}yDsrhll^YpXiUt@0 zRnUj@l;{Nmh#sO8cvFg7?@+-{immc<-#LEX%$3UF=v9($>4}sDE@qUR1tEdXvL&}^8WQc*ll7())WP-y zq7S7rOIKucGv221#LG1IiqXlM1=Mizgn;rvDTSd!WRmd-T1$gw(zOny<3?!ox=1>o z$jW@4m5Y_nKeLWRW-}|6KBT~3au!-`$djuU`hu{e5P5MQlCE-dmDf7-K)1#a|AIp?ryDYlp4(EwxM{nZoQKRW5VjgQo9MsSDmn=NPC(CT@oD15W^L8Z&nwgo@7+ znmJAT_LcDBg_s>hBHi9B{RtRgoweorAq z8mFf15?O^D0XBu1*wnh8vrlcQk)u%80kT<)P2sW>IdrxwTj~nbWOdm@VJe`}oe8`U zbBj$P5di0m%dC%pY644iIVosP8dFXc^6nyaP+K_a&DBxG#Dn8M?BqmqW#_nto#$Q# z6=}C=vV;C`v8!;a&Gr~2k+>+=59q1_?h0n5cET6c@jlKz@T|sM?WF(5`kV!-P#6gG zNpIP^iSk3OHrRSAPNPzXWdN)T(y2|$nSKGkj@{%XCwqX1_(b;#(SDFCSC4)n>u7{# zu0fj<0c^*#X+6d 
z+^5*#JeTIrztv{r1=<(K1Re90gJ?8+RlAvz4PmGFHl>M}bvqvDv?8c#HEf++D zOQCg(0Na%Cb9r2=NlW3$>juT(Y%LSOrZ`Awwj-tCRaC_+lw-D;Zc0$e{EymHc3IAA zuYC?XK7bxj;-AKBV>osj&nivqKn{86U(aKGjhV)R_Ug%oSbs6=DBo8Dq7^^`;3Cva z6f|Mg6p*VudWGOGGJ%OyiQe(xhX>qUDNIPrinCT!C*Sw0-FhxTf287D z!qa@tQRz*SP=uGIi$cP86}Gc z)D~|^iz=VQ&6Nk|BOK0F$}^(CJrY`tKkvDxY~X_RnL*D`oS7YN@cf~x#XtYp*0bG> zm2bV&BSy$14d1TtVUkMl3e4SSrL+MYrk$=@PBWcd^N)s?MK`|R*unA#1IF<5x_l6@ zXI9NAD4Bs4kKQFjVI9Zoja=6Q$VRHdzv$P-W?r&Tt>-gsYenxfM_c(|Os-4qud2Uz z2(Wof()(Pd`+Hz3p^Cw#kWv?0n*Z{ggK@85Nn&4?G=uJRs-fv!2sP}^-rA2Hi=ZX% zhJT`=GQX9~xF^g@-EE&wWbnr+y^7z@aaudtEu%^^i08b^L{uKlkHZK3#TiFtzf1xt z=-+FjWRr^oejfm1B4aaQj)_C&doqG-n;+>|2W~{5AY5)E(Ac6WbJA+sXH3;t{V;Af zD_}IR=_9fa(=a+zvn&eQ#Qx@=pp~K9Ca{_z&SAxLxN_)2Y_F-I4b7;523`^jFHVd| zP_FaN`BrAl!)#EP!t?C+ufmVvVX^_a<4J>v#B<2;$w*T54 zUZ0^aysuiB(vX%tomTb`ML_G}@9j5HsB1RMu24ZQX3G~`0t35yLf$cS(jhsQlkfFa zL!%IKm0KGd>XDKA`psybilN~kj0W7rdw3i7Hp_E>op&C{rP$TGL#i|?FXav+GY3E2 z&R-gF(APl@ z@hst`AnesDzzuJw%0SC|G}o@rie-?(UsP`Nc6pQ&Afy;+9kw{Mf%C?6@UfzyyzHZP(2r^V_a7r7)h{skJ1(yah4LSSvVU?N;a0LSA^^Z$iedE`?BmLD3E@@p}h`u zAoS)S(!3D3>t!0qPPo+uui(#Yhjtpp=qq;LhUjTAcu!;d1VQ7Nq1M7s*0u zu8N?{D^i4Wv4!qm({_2!AVUFlSXEG6*6IrcAS7WmZBN-TJ#5DV5bWG%>}DVHyCaSa zewt5}y~+YYH&e1qq!6#gU@k+rry2ot1P|b7x+sw1S`yrGl8%MyYPrQb`BeXmx~f3n zm(Q)-_b-GpB5brb`R)gvXxWH9eKh7i`4A8scxfzq;{E%&!GDlFsXg6$$O_AUN_YH1 z#$}I5*~{_YMo%LVEsMWe2HKgR%gZ0Dm$0*F$*HGPcIuM>^U204OEO&nIs6Y3{2)f0 z0K-e7yI4db6p;|9pf%;*XHCAxsMIp$5^4zngbgjaucM{f>`onbZY*j0!Qcbo(?${(?w{#8k2m>X_rH1kCHX;5fRq+=y|nJB^ugSPk{- zt|Kkh0gaQwMyuZ7f) zWNvge%>+?*4p4Vi;IItEAB=bW)Q5#zDw!n>Xutor{@-A35Lu}}X5K-lpw%4=*_v6A z)l0bh={Mgo9Z901imwK}@!KxGTEEy-KhVigURIq_cx;+iT_s9H49(3?7NibS0@G6R z)!u@QKE1Q}SBKMt^TI&cPlpHI`*_UdM=T#YpDb5JoHdY_4Pa)(D)w=cex{gvu{GxE z#vtjU!qEyJZKPWFN9;6|j#jidt(Gz?9;B}I%!y%0Bkyuux&O`aAe9RqLDheKOn2#M z+)n?|1!hHR3$7@`Q`=tS98W`yX%b8+8@qwZ<070w*BLyKfwP6A;gdViBdXXT`_bc0$59J_o08c`(D+p%knxc4 ztpT?JVfiZnxpOH*X8}pDRKRseLI^5Li3SjW+G}CUM0dnrP&+Gsih6gj*B|Lg+}_f^ 
z#q{pTZYF_Ze>nvPnMwA5*jG|ox{yN%5$*n{wgX~U_ZW#`xdLjw7@36wZpY}Ra(`Va z_0uDupX{9;8XOa8)$`aY-a+B6EzT-*>sm>q{Co2o2<) zb&?Q~hobWNKI+^2qD6(C5f-U_6oDG36qKWCRND|34W7&*Q8;+FUb_hc2)Nc7W=?Fi-#*6sGje=vk zaJW*rZ;7navkHdNZ_5$lUb|=I6c2f(foC%^V;7^bP6jZ6@zv_-x?nn>kwDesw?J=n z!z30{{n+a?e`LONp(s*pI-aR}$1>T!NV%kuhd)vTlUpf(Vd7tjb|NOlQ+DVnAO@6X zhloHg$es*^b=8>_AI-rH))WBA<$`!8w~h-*ZMBmUkG_9 z$1)*rebF1u5OTFs;PlOF^#QdEN|Ja?-4N|~!PL8igSDqbJnL5LV@3(Tbs_KDAgZ_$ zqR$UtS{~0Z<`~1(7EzNq#ZxI<;3g~~3VHSF?8cB>_6ct}H69i&h7gI~7-Gp@ZzHX~ zPZH!^8uRxgOw3(WA?s(=6n^Byr-kyydC}N5h{HidgszGZFw^r~PHmMZPTCK1#pCwF zHMV~Ix67+ne!7X4L43R8q-_5Xb?BfoSG$13PG_kHJ0}ke6W5E`B6^7-)U#n<=RvTh%v{-zf@8&h$ zWSvsDAoRCiou-#$0edWsYoBqW{I6B*vy9=^CZ(9(qpHMbz4_V;C(64!~<18*tpcP(YKLdO(FAjhGRBtoy2Y2 zK=Qz{Oyb(H(Hzq>@vg0>hoK$8BJ7x9DtW>1Y#euEQ-Ww28GGb62@2_lQx&>Xk0Syo zT63@p$jg*(d)l;7{OoG5%BreJJSwz5Mlj@9Y?E!gFWV*Fqc96ix)bt$ByJtRq6BjCRvCpKBdKP@G5GwRsGvf-wk0iokX zo^H-2mFH%%Ad3Iqkd(Co@OAy|qR!Y!jiBg%)3sQ0F0-Yr7OH1+_HHjtZHnC7B|3GH z3SWs=w<91Ab_ImGRAY26*eIk zIR$&9_Y}3N{xko~I$SSH;bDqVa%nhLGhPQK+=ZEt1c4SY)s7rreFf=r@|tA4-xA04-opN{_v|sffCw# z#Df7X#Y6Fwd_pefvf0A)ac3U39-7>lpw-NTF2%8^`lW6uE6Q8bt10^(^z^ny;ebxD zDG56{dh2%uI(v+kE~X;1lpR4uLq=GcJb8CHF$TF&JTa?G!Acy{;R-(wjo(_)9uWk} zu_h>o#@Y39#4={kxmN*^0y-M*re2Rhh{6o&M#6sTd))02rEu53`%uGHE3S=+L_<`e zIu=mK7I_QT^;0u};C<{ri{C0@<69T_fN7neu=?$tJE8HCzgA{S8`US*{m)#bNu>+$ z-a-cA?4JKtgfX6XFkqWq==PEWqK7G{7Wdm|GuH?3Sr z$b&Lu8N<|{guI67@xGpv^hBWgZdQy}$aKgWlTb$H53WqoadG8xcG^pmDVCA?u_D(i zc50X!>Li|$py^G5s6kUG!~e#$u4n?U7ST%a4MJmQIg-FJdD%V3Uk2i_P}8H8BSB7k z>{7k|{13FErF4=vQa|=g<82eF3`p$OVdeDVG8z7w&c866>_b?m)|jdCMzEj=8jV;O zqzIMSi9rxVP|_W|f&;V}f}x!EeJ{Fr}Uj}D$dKFIO;%VFs*?U#;y zRcnZX{!tuXUvhqYoRCl+sE4AACJaS5w(BE6R8>XQKwvT(f){VVfIQP{k~E`eh7#Z? 
z_B4HqDllAU-@;ES5}1vL<}F)RB`$vHij04&2%B3(7!NL#{Z^i%7#RM$+6t(j+G0`v z7%NdtldhmcW(}?QqvL^uY*)_iA|BxS8)4!Q`t#Jy!{QlQ+6yUTpcsOP7oiK!>pTKyK$W;&s zfP+(Aw(T=*Ovc8VV-c}hN$5WghsJG_val28>7%v4Y{7lt2M3Fs9W04yZoT5f@NUtt zZ9}F~z8?oa8REi)5w|t#mCvQPa2qQU>Xu5x;W%@3~tJ%gl z975=euJr|p%%+EPpB0Xyo2k|CvvQA=t*wmq5?ZRxgW!$fWRIR`6PEf6I_$TvGtd#l z!-q@13lv$n@B~}(8)dF_474$PYE?c@fofgct(gs1jXv&YN5`~OPX!S}pyB_%|Ens8 zx2ya|$3J|%bS#|VVhxMA3me&hPzMQOT^Q4m@GP>?P#6dfVJ0|oo;K=3u33p;5AZtD z_j3@n^Qn!dEBvfyqRv648*3aynqT_U%?M1YyVMTF{1VbQId*S}+?JI(g7U&)`QS(- zoqW$jx4Rl|0G&z>BnZKgV`Esx)BnEij!^aQePAy7hq1g@ddn zI+~ZY-V6uJStjyZZ=;2Lsac$MP!kAgL10>LwTVq5qIHqA1^k%wMeR(BMP!4?zO0%b%>KZo{GL+*JxDCaRh=^1>XLRKSU)g4Z|Aqy}{94EJ&pKF{%hrS$)K{?axd^ zGK1clH~vpq)kXf`wl{o!)Z>3_#s&QrBn?q0hSF%xXfy13Ol^8Ssib8%CAdJhk>6-m7~uS0bj={FhTm&oTouGX6Pm?n6`~gv z=9Zl-yKE59y;pih|D?H^LM*W)#Zt&%tOZVnlO7LI z^>VjN8c>l$i(#!zVJ_RUG`)!eu&|Lzz~Mv5&{3rb#!z&POcmevYU2cU zYuRTF9tnQ^SmW^A@yFFk6YA3vBuQPeon^Mu44n^FerkJA=O1ftKBRFDo0+Hen@qMV z4J42P_#ZCRTit(yx8q4eA5GcztyhVDs^sQ%@9l{h3F{Gce=91NK?>rJBD1)EeYbVK z!md=J62z#`+ngM6-Cyp#G?2w}j|GibusIq!6%9p}wuC+MPsqP6h6+}->uC_F=Ex1w zT&aN0g#WfawxPrXCGZb{`=qgyT2=irT;M_-4r&+^jZihdlUzdiQ2ZQHqv}%PLk%n? 
zlzi8|X(ZIdMRA7={BuRyLE^V{NBFM5yn4Nn02MFX1i#gPHg~phi;nvlr`EeJ>6at8 z&XX;!UFh#{1(yHgKxwt!|JonMvZ&Aq#Va3N7k~YtF|dywCq+q-J0)d-R4*x62Mg!+#8zLF~)ZTu~;zW zn7N@vIf5Y*_rzEK$b}M8H6G+a5ST$;{2gx`Qn1hk%d~$xa{T&_Q@{ZX67r|NGDUzv z;EG&Q?683cDiYF+`I~jiN{rh2-WSz>^Yn9!Qy) z%Kur!D8EZ2Fju3(bFk76?CI|G-T(4!gdz8hg1s>ws4M%NMH>!#m=KGgC*GI9ZUfle zUy_XR7=ZU8CxDC@TgE{|JnH}y9p9XG=>4|`X8`-^K~EmC7DrI`Tz*C)ugW%>aDdP> zuk)iAvJOU`4W@1p(nnciUrDikea5q1@NhAo80kZ*4!!jLUnix#R_3MArWY*%Y6=BQ zd=jOW#~G*{^>4-Bv}`jh=6w=0rasYgoN2pVGZ|yi&+g|+BzR3x+8kHUdrm?h>j za*I;67&yMhal>y&hApA(#vofFpUZ1q)?Nu>zh0v@-F96?#Jo*hy}40QKz}ff4`e#K zPdG64e%qAUNxvw4PI>O5dNSJOH^yX-t~SOu-@K7mG|J-+5p8Ayvc8Q!kV6}43UlAy zOMfMIvkAH*5aIGG;rwD#EA}k{_f%;bxdnBNJiu>t+Qo@C?cYu`eLsZC%RF<~P`>b} zU}`yfRY#Z(L82|tLH_1*!YVr*E&kNSJi^eROEj-a}(qtwSUL{?K~DARISOZ4Yd|yX6?=3DHvDcC?Qs+{w6m|$?0Qz34s%!$T@#Th zQyU;r_e<>{1OX+Y5>34BO95h7EkmYrm?2y#SsmQRvwpK zJO9}p+l&1YtMjQB-1&WKiN~NgT6IySXT@;kE17xq``f%zgOa%^3I1>NG#-RGkF$Mg zKiI>0OP(i~HLA$xNz(m!@FJRax!sjAJFaU8-iWCV>!V1rsb>82HxiNpI&Ps}FA+cg z_Q}!}Hv8LZD4Xvl?r=0zHwiogG=S;A;4;Kodx2{wQxzl}eG{_ecUG9SXXK@r80u7ZbRVSTq>2~Ub4IRs2 zVH2tpoE(Cvi!Y;is|sTxex{{C3#_gyEc$Rag{~|h3}>0x=}KcmXlQi)V`$Cb|69a^ z0SEoNlDC$;uo$&NyAvRpvo3%sLzoz#Lua%^a`#+PxN}DYtcKa|0@=u}5M9Q;Z~~nn zw=zVl=s?3kRSW@d2IkxUUa6FKJB)BCM4_Yv+^35X$I*|ztn;T8bySQCSxrL&)#Uvc zp)7^^FNl|g*1+pz%*Lo2s~gbize6v3rSb^swvzhe7VIxU|I2{N7LwKoicc=QEx#TLiH7yFtn z7#s%KSAcEjGK>|3eP%)=3T8W2lu?{bpd-265;hp|H(|?urvL{n-5oPQ>u%v=Zi z&mVsA-e(?PGOe3CJN4|6Xnr#H6Dw3zhPNAkV^i=Nn=EQmVCon@N5@;J70Y~(Qxcb~ zy=-Q$qu+^u+TSa8FgkDy;Nq9dFMaowqA#2!m4dcnr1a}&2nw}-)36Tp!a?0T4@lgh z4(7_&Kzuip*2H2OUv042CLlhE+>{F+*<3H^xE+v+tu0JfkVo52E||y?^@A`40fu@X zdCueK?7ufk3utnNaB4O_H|J(mZwE@$84?TAGVOWtHYz2j(FnjW;qM&A4-3BpdTtJ~UXU%-{ zOw(LyT}r)XYRDP{EbBXk60rLLRP>c0$=Cv}V-Csk!C5zIw>ym)i#Ry04=LAWt6X%xhA$eNgnkXJ zI|cIz^PmW`eTeG5#&+PfTIaQaGGz6AI{X)lv-T1A%!!!JOnyo-?3?j!=&x9mnGVZb z^A$0P@N6gN!JVYssrpwhl+ipM5sStY>VFc}o)VrZ{;pw^ zWxQ`=z7qv}@K7N>dh$>YckYd(JX#7IEo2Wt63DfK#@2YSE%Nrd$4i>~K`WMlaG&bd 
z_i_1`o79v5vuoMAne;^Vy@=3y>xaC$=M3S$8Z%s2STNYi)QZw~2(-`#EP$<~7TJ(O z^QtwPlpyG-4Hfk$$=bQB1VTZ;jgnO&_XWSV~E&>8V(`YBd^$ zPh{iFyK`26B5PIzL`-nE7o`|&FL?qb!I;dpX+YJd(HV<2EK+vUTVn(fa)*-LyoaYn zNhu%IPD1`7Wz!|c>MY(U^B_IQqm_qRO%0i}ZxIvc6fV4Di5TNWi;l4(oZB{M@G=CfU@^5u#5<|# z%9xjqrunb3Ai6g=4mZ3!?lP}DbVkdg)&GIghyrB&PBSd6ph14-az%V0l$cbcty|U%_?tFLb_XDTjM}b~Ef_SUKf&C4;>D_XW za;uELcIeW!P;hdR8R`t`Mk1O#An#Y5{H?_ zqhCkqKSb3Ubn&#~pJi1)f|)z7{tgWv}9K_BYFPgKV{)l+Gu z`uIP?ZLnP^5lIz#(@Kn%QqLLOa(W2WbV5@q?rlU8-9Y|m`gxw2zeb}l+4Fv-=E_XIPTjRW+yo+^3^w7(? zvy)J;;7!;tJfo@n9kytPZc_(hbz`HNY4w ziE?2lYevJl_;Z5eK0;K~ipd{$R~qB6On#9WM4I-=m}4!m_j+Zqr86bbn6FJ{Hy0c0Svit30t*71M42`dr#h zj;**smnzMfgAq0qQvajv91ufMkRTY_wr$(CZQHhO+sTV<+qP}n+<%F^MNdyn4NU)$ z(+T>!erKybmF^)?y&DNCh!L`bBX=$5DxAPYS`$|A!nJv3Fv34BLFNE`tL4v?-RdK^Kq0fDy7v>eD+BXhcHqLmAXX#uE{wZEBY>G(p?- zZw^KUracCw=em2Ez~_Xrlr<9mP2#;qC>31wzz1137?=1}3!Jz7j3(B9Ge*B@8Q3NA z`ujkjzu~(qMf5t9<6G`=;XhAnI5yg-MR)i)47)#45*u^bRIcfn(`sIK;Sq&U=?yl_ zEQQtwLp<$z%i5L$9ZocBA41?6QyMOae^$YD=2QMr!U8r~S7Gy?`o%|F zx#S-z7N<+WFbj}AEK=|YQ*9{&sW~;&$Kx^@clrYwm|-(z+az)rebqt%DeX^Ut6`59 zmrfb*#SEyHvnt`KC{e}r$o@4BMcEh)s?pUW&TeFG-UR~$@k{lfC)ij2HyYdIMF(&Q z;xpfJB6axhByS|gZ6Hm_M=C7_4BoadC}9y)Fq7>H+L7MYjzTd(!BQwHgeZF(NN*{E z4${RCU7C#<+vz+?RM!$RM=XimQl=9LYF6!8pB7Y}a2}wJ11hLLHBsL9gV^aVdR|SBQ7AWbc)@3ul|pxC?k#cBweeL|4As7%>72wId+1b>gtjCq`kh zU5^Mit`c}O%#sX&qDB)+SNy7K6viFW%bJ)l26b^|u#d<5KNu?^r74?yu~d(om%;Q@$=u+lXO?}$gwQQPsT|kyk0uokk3Yd zV;gN@kiW~%k^5xFRj(*V`BlD($w987r-M|Q)~OF_#3aNtb~FdG#M@A&vDoNO zp0pqtxm_AvQZKnf%%7$_yM%9FZFclQ+yGZm2o=3kLPQ>x>p+^rv7zCwlo_ zO37gBp{+At8-JO)G|RJ%6{>^^0iMx#dc#{a%AmxN^0PA**hR2|u351~#_UMmir%OD znw367Rra|qBP+BdjVoE5maca3Tonmc--mQiTENkKqz)rD+Ra$P|ePRO64 zvAKbb?bQ-U8}H9mRzH6vHaBYiXx~q^b^mC@=tcWq1BI%|z>-BbI2!u~n(egp3V7B} z6l4A=MRlvO(^=cE)c>9TsjSBwjx~1zM&IKfSZ7rD7)nWPE9tJinseo@gANTp8HL27 zsSXxYo&gE!|AE%#cb`r_1e+V&3~`W{AKehw8+@%#c!qp=1t%IET;=)Z56AdpLcAjE zvY)0oB`!cCqySGtBlNQp7+z0TeJAFezm)(7`lAzA-qOf^q7otZ0QriEb$dK=k5AV+ 
z9+$j&welyC%nYi}J;^r%6EAa|eQ_mk7oTN0MgvGRzDt!nr}q@cOOMto?x?l$;hpjv zYs2OE1BehZ8`T0 zgTKe}z&rEP^)!X6=8|hDwb3ay;KH$i;|^S{cD}2{CvkXN1KqnJj+Z7~#Jyy!n=>iE zciA|XC>KwqQclVJECdSNyj-*1uDZ7J5HX;iKI(Y{W|Br|*Go$$Z)yNQ`7YsNU16kv$cXPnOfr>Jlb~g+s^$7VytxDBNSJGa4Cfdjk1GTt5nHsIMSF`@u*7-L!+VTk69WD z(E0lo(@72RVaQ}vw?ymLmBmS)6@Cg&ZSA-j98;0-Wd_X(2B#^@X=(4D|gbw7zlY zP{&X-N=UiCss>eyn2)I)YqiNdyBi)f5xFj_z{0DsYi~jP4rt&CyzTs*YTusqAVmL= zJPHQFkh4x+64mx&0V|VIEZKPxLc|h0&|t}m)A0<5)z9zFK!{x6>-*4-p*No42>;~} ze3Vh1xjc@vc|%(sZPjep9MA;t6Lg2h~l{07Jl3xk|#9%3ogTgLW2dS$etILMw;#*%Pnw0jqd>z zz0tz83sl)^leKOA7pH(+uMs1B5<(ZN#%)7z>{-F;Jr?cN(4X7}Z9sgZt`tBbnA%fe z-P!>2{Zr0PyoE;30-gXFf#@Q5pd`K)b4zJ6A@%igGc9hwJ|LDV3^6O9f38c6Wj4Bq za<6)nK^tk7eSz6kvh!*h6-a6Y@-9K~*4pSC%q_k4Yq%o&Sz!~&KSj5@Se20_O(h3s zz=Y;9zXEh-slPQAPQACU9KfxG-?=CJEGd0>fXj8K$8eZ%_3a$LR65Qduf<&XFEa@5 zi>fJIa0VoFEiO^jeA?3s&IyM*;xKArY#<^bt>@cCIxPl^E%hBu_R|p7?Z#&64kChY zda4YCpS4)djOOP>5p>CvD`9?eAJGyE_##&zOxy9}kTV~ojbEZ#1ZC8|`0T{F>MH8* zmR~fM(pm%C4{e8>5N6eTUqxs8@^JC+1)z4u9O_|lLJZ^=crorMH5@Vo+>W92z$c1D z*I0#Kt@Qy|^4y3+s=VDQ!DIZT5yfyU-k*Y%K#8-O)Clc}iy@heX!&!870J_#1Zeb5 zWax4z=0^o>79{kgOA7pDK@pB5TBSfFHM*^Suv%@8nP$nnE#Na$66MC^?}j6X7}-45 zGajZd6RJrH8J>vp+oZL!g+cI&z3(&wZdz{fkTC}KE!&ykuVMqxUF&wig9T}_#+m2n zP)+6DPml(CC5I?&4F2Jd)C*U9+N>HSnl3?J@CvOyemsgQQ zF}XrI@Ct-_d{2ipDvib?0y!T%zF+6Q%XMg~)yG|=JOV(wb$nsOecoG3)jwV_xFQR( z(PV1+0%8^_XMvpanL zh46CQxFle3X!wz-W>Tk*==XA`~h3gC_qcw=KJ za*Q+EU(6JP3n*NcyYwk8$}f@W&ECGNA@o?jO=}YoLh=UQ^3rUN7jKJV~jWVVV$VgXyM(Y7bYfbbT#w+ zI%sKoTs607)2gR1CuX9-1r$??uJzW(y!E!9j_N6YA6RX=-S^4(CJ-6vvGJ&nK?xRZ zln_((_uIuak?c4ratcy5!>PIZQAv^SN%~|NiIbU8Y!uD6b4sd%f*n4IR1zKw-nT8< z*T&`Dh#TEdAi5@B%Aa12&SK$Ufz;}jqNcDi;h&$Y076~jyO10*LBiopI|KBf4eqv^7!Y?mHHMbMpWZ zk5>2dKrvs@;CV@1qxv_(jagt8Kx(1nvhlQN(}A`4XTvfl%Rn##YCIJobS>+*?++K` z@@9|0&L&N8Mt_1%^wwk8`jHof6o?xvDu?_&I*(?d8Z0(#CEE4-jp3SSOK>q(k>$v1 zGuxqcfavVy#T80M`i?fuqP&|_n{&Xa$8+IIJV5DeS=F*rw~)WWElY>rv5=-1W>{uG z@u;MyC5}Fsj<^Y{7TU3Capm5WD)on>!S$*t|5hHO@tL`;&H`6Q+X4%)Z16Sl3BWXR 
zF~eSWtkICmtcGFE<_mKiHb}23<(_C@B_XC+#9@uD5U3&Q$ID@l*U%gKH+a4ooiJ=2 zvYk_?L4G~@5{tEVG{)z>t3 zKY*A|>|{BCg8Y*IaFd)Gji864Mi3-BxdXi#E)!N=ZlRJo+6W!S;*`9Wfr>Ij+U70&HP z&0**fdp>uzY3knP#eRp>bgM2an|yk+1uP@LZT-Ld|6?-!5S_)6)xTU|>{*o)$Ns%t z9A`~}6nceKU^NaonDKL+gAsYGj5dZzuZQ}DP~p5T_cuS_$M6HM^Jfr>k)z`$N1mdL z9BBpb=g4TDvPxzqAQk?j4vT!65A^TuM_XoYZ|+XuPTi3S3dj?mlSTu0`QI4pkF=R- zd%rb^4zGpWGv7N1CyDtXKIYT}XOCZF#LY#moIzBnnb0sQIgSoEamkd-YI=9y+b{CE zPsp0K`6{5}uL^)IbvSuh2NOB!1FR#yi)VCNGmoQosvuKE%gi1T(ohvVI%&9fZ^PtV zClqM-C-`NHpoNydCSxwlYE=aT7E9Uv_lgRgyv8X#h=Hr#so_h3C+#-iO(PTGkSMVa zZkR_h`0?iQy-=wE{?ed+gkcnDobEgh&9cNs8SP|U4~W?dWaR^@@8O46*}hLPMwvz5 zarKgtqAtlRT=(^#P!>(lYzJWv>U1DKK0r|1agYz%4*VV4mZ@!B%KSYfG2A(FxVdHN z!TusTA?|m-5M^Ov?HSl)&bYawCB(@5(8}ntf)~ll9DN14(&v41@ZO zFkh^$^e~h9K$+DIa-TT6?3z=M&Q7x)&)%-6p!)e^cMw>W>1qkr+P>bLF#D4Am*DAA z4r8pZ%;r;mHKlg$4*B_+^b4*oFyCX}=w-xvaGY0<-Q&ZA*uz$yrBz!T=LDeH7(d3# zVmksMoR7we?Tx*Yg^TVfs~L$2dNBxOcm($Ir!00v)A3fAI`f;j!1sq7ok=U{+3-DqL9*VX^vvFvGV+OL zBMtRqB;2&AjzdQ_sgEa}pVF4B;XH}g0u;6EyG@~Hpuzfy+km20uMsTWZ~AZvRze7; z45F_*lLMrx3IaDz4azahtb#t!Ygwqo7CxOJ&QXf+X4arVB+^!MqY0W1xTuW~P0+|X zUq_9`ov5zfO8en&qfm>U{&m%ZPeE#Ns>^Gm9eXoVP z)`z^?VJ%zBkbM>%`wXaliia-(FHg|#CmeHNFfG)_x&s8m2v29Wgi%+$kzeA)bRA(J zwMVUOkPN7exJddiDl8VCekJ7zYR8pe4%CL$!eS$PpV76p=rK0kf{a$E{Ogh?BA2+v z>ozn|{6Ka*k^<9UW!QRe*UL82M2Z@g+YOz7eb!c&I{!)f1l5xLmvm(}o;B)B1YW%b zP4mAuzpEWn%4ivh1DmHq`H!L5ifd%pWvR88Zdcu!)gWT(1an6{0rV7mtDj5ed^IgN z0nNe|+IM>GQ%YboA&#rDOgc8M{aXv!C`G1MDm64|K3I*klkkXFgC2xXyrr7#V|dN* z(qRV%=(oifRfyV!z{ZpX>|~YbB*h;kr5=4zTmLq3beDgBD`Zs5G0KBe~LL~q_3B; zcIBmbbcQ64*ym@{wU3!TI7poU7jH}sc5K&M|A-Pe-Ja9>ov=C_8k-{M6xjbR0-2q2 zRMuG0-U!xwORuT_6S4clO{WE&Xu zNQEoutJm2$gX0hwg=Ex6Yi(7h!umd8*!=1L`*fH z=p9{Eu9d6mC6?k|MZx)(>M_dkkT{QG0ytyXv?H8W)<{2YR1>WEnRs)H9QgwbYQ`99 z#yVC3ut-IeU2(|wo(nWzU)X74-TbnvJb_*u`Ee#r;WYj_hT3Ve-gI=|Fq_^nr98f) zK|RPLu!EKu{#{voVJT zK{^(T*}|@SI?ZeE=DQ-x?{#wU7zaoK&@Pny*32@fdh2|m?#X*q2!I{WY3oZW)Q?kn zhuvn{(70skIIU*$h%ZmCUB!VVcWhbcw@Zify1B@wS?iF51YgkK@e 
z|7jSIfgB2`$5ArnLyVa;U&2w`>Sdcelxn>N7e5->hfQb5B}N#PAqWPpuesl1ZJZd_ zdhb?mfB5GIsKghaMQP26y4P{dcF8aVH`dGZGYbsjUu>iEE5{%1hI1sZ7srQJs2#3< zV4JDZ%S$ZT;CpyJ6ez5mO~};;$3&WZ7i_XZ117qJVvd8;pSqQ{P`2%Pe%4R-MfacO zHmWSx3PR0;`8!kWpqb`I`3qWoldzVqEQuvp{zqk|5KuY4eXkO*3(zA*Ymo*|K|PQw zi0f_mZ00HcP&1FbFq*!dMY8R~hj#;)>#+D=A}&nnJx`y&_gbB^*$J z4L!qRcitXq18XQp&55d=zsL?x72?LAL3 z644OXZuC@)Z>VK~M?Kc#kxIg)PmdGPgu7a1ddE9ie2jD-u35&CmObD1iZ z1rCeFC$sB5t-5d&oN0ubv;^Kq4K7u8H6LDc1(49H@w{}p-8f6bbMwRE+M8T;V*iYb z`F&#x+xuv7yY`}P?YyS65=|Z*K`CuZN6cSn+QNcsy6_DdQN(EdI)rdqCqgU{;dIG? zTV?VgdK)iN+DLc%(|gL7fqeGr;MU<4P^@jR)KP>C3+Qa8R7<)ppy?cpvI9Vp1O_eI z;&>kDF!Q%e4!Y!h=dS{6I^-utKJNA-ad3BAma0B>ES5}6NrTgvro@};BYw!F>LCZB z22GG_SWL&hv1zKp#VaKopc+1@j#Mt=QkI)}ohm=pfjF36gW z^24wbz)(h7DmVwhX$oH3dFW9D)X9c2KlI*C=uJm7UyzP;y>{U+_><^Sa99R~Bu~nu zTh!F4ky!qAGN)Hxnm)sN4m;he5T8Q7Fu%ljvHow2<3ZnVZnpRY$cLFV0L@JSM&S=1sNTb{N4y)VV!heMdN^bz1wEG$#g5+aVNW zC#cZwNdT0hnmn{yl{X1Hsm9(19knW^L2JcELor68c`8i_`(?7~K19zzu4{9i*8S*R zj#Us>PR}zQ%RezhcNY?0ELn)D+)iQC>Dr%|7W>3|_pSa869a%qkH_JBCuX%(Ywy;p zJSkVM#1+&3t@~MQyGPbM+pOm8dx93p&f8v5-+f(U zk=s8Bb!_G8M!LZ4;|PgVSeq6m*P-k?nprc*R1#rYXn{Du+L*(z`&0HoP8Klbt^!6~ zW@NxCaV1=&RcFMQ{zqH<5~o4(qEo-f91mDbrNqk1YI~qc{GVE2b3@$?Emindau?TV zQ9<5DHH1fk>L}xo)Nh^8CGaX7#+B5-qA->fYQs+Q>Ay)Zns9UGgPs^Ntn>9GJQ$!`elPOO@nM;Eb8Yo4E>@j{Yg%XpdlX+ z*n{9kNAFd8yIzj^TmohK67HwnttB0)b7O&cCZ8q`*D>G@rj0xj=xV*k2x!kwLO{#| zy^)r(O`Idag8E=R0NWbiyoZh*kZoc@f8q z)(i<-FkcO=dG%-fNhdhQ-zIRuKES;EM3N?I?iA=p*DS9NHBUty$I@7yXU_CDja*DZ zhe47(PpvxEue5?8;ujnvtVCSI*%717O<|fMw9^J6R*3J@UKF}7^Ylnqr9PR(C`8TJ zckXh6P6{Y`C!;J;(tjdN%=<+<8X#oE?UKyc2f_zyL*R4Sc^wG4hkQ@41p7VQQwdb& ztQV4b>=v43mz3ON#RTM%0>mx2vu#lF2~V;ROcswr9>hhouuFSRk4QF|$jkq(>at1& zDdP+#vhwkMhv^gk&V&W`nGH0NhxMNB?d^vX&vb;H6LKS^kz*fa8N%Fk7QB%whTX@m z7HO0n??rQq!)CaJx+Cal56+&A8KI>EGf7)vcuL{R z!T}@dkwt@^_c7HUi4yv9i4Oy)z7$tB;xhAKQzUh1l5GSRgF)Y%wI9c4BM@i6c)6z{ z!i%)Ff(x!w3U{4^_K5uCxi%zo0Id3sLt^u!`1m$rc_YEpAY*ASGgrBGVDUx*=6ST6 z%FSw|4aBCtnT3q9-C#2aQ~`md~ozmZBwqB9CQQXZh68vm#-U&k%`qlry!68m5T0^-?`{ 
z+V=Uj0|Dy2j+mN+8X~xj4H#7;xGc6O+}X^{m-3S1WLzQ>kP)g81j$oSzJj7hxT)QQ z)L!oLw;WAFsosUtEK;fqhbCog7ji5Q0k@4leeAu%dHctuX{2AQxY>f{7SH|r63`0! zSvt0g?1K^h3lxl2RZKAX_)h;xJlorGumRH_RcM#e*-FKzfd|c6vyTw_ zDpsm2$9aL}q-M3~#Z0IR$3oRxTM8!(L$%Q#fo1?S$qEw%rv|Y3nI1yhzL4ufG@f~i zJIL06Y%yqle$*?8y3up`2$~I#xKM^q{l_xz|H|H>Y#B$uhcOiWFs62cNszZIRwvQd zrT9XZUblP4r#Jo1aT`%*P0K0b75n9jK@RxMP02JZ9(H1W7U?FDcvdc~fhfWy@Q0Ux z?`MvrARQoxQ+74$s{hG-DIsMMv@50g(Tveekdzd8YJ(?0Uljn=gr4Eiw(>vZzx zSO97Rp1zpjhrU@RbUmDvSw$DyzfXg0()uHAWe$rRfTo0lMrq*EhQUp9u96 z?_qWBWIQainfs*5C2n}u9MQatU@a5}WO@Qm|pwpa415N^JC zu*%Hi$v>Cqh}WRU^J`9g1)lO1|MWR~^r}|2z0tLnQEyI0V}HcN<)XJ}@KPtbUm>NN z%Aymb!Jgmkzv~FXPUcyKzO6^_{DxUKZk7Yb_`$`AWhVSkHw}ztlken(X#CFXyIIb9 zYui{4tD#J?EwT}?_$4rfPkkphuf%ufg@rU8VTrh1`D6EJ7TuDg@ZR&XW){&Q|E`{z z6pMeE)xALApEmYkLA7|8&o)CO{YIV^*odYv0W>m(RAOOAVg`f0+b9EUz}7&FBOn&*B)ut-k#As+q5mm%Bl70;1oz<8 zWosB3ie@{nYnD$FkqTqIzYN;&&OaG$*2{UrQd5bX+!d>o{;r50-6c$5}Pk1PE~qhx?aoJ`0R0nq!2Q2mZ;t%IBm zH-o)l`Bmm#F#RqLIr<1bC6sj83)J>}>3yQ|WRgtw(m`xZhLlj$8HXm+=IO!p>HLk> z#RbZVqMwY*S&vqo6G!@M6xrJ>gMIAwNQP12_yppWlePVu5p1_q#CodJZMntxHTE#K zmD=LasJHp{GFUptpy;!s=xK#*lcZNxmt!NklHi=cZ>;W%%5{M((L533oYX)!hutS?zF z*=o;3g|6bM>c%ySK-M6CrcP2~(sa2Dj-DOC<8o{Yg11%Du-j8Eu|2PIzp{hy74Uoy=PP({d=S zk7N%*ZUahuZfkG5%+=qp9dLn1B8RHIz(tjU>6kX<*BG?-bKx=yZc@XH6UZzii+B5k z;AaSYlba)3gi>d(0W+$8uL z0Q(-ZiPQ<`mQxLlp5i;Ko7uJ2Fn@Fr62KAo+_W!O66R<~PTCo}x(v3rt_W#R&8Dr6yhv!-rY0i$UCgXIjZm?&JnIR`WUe3C=j z;e1XV8`awSq=A;kd)9L9pAF)8`mVaZQTv6Y4@un$b|A*)7Rf;}I7audJTW0xZ2Apq zEgD$ahOJ^d!?BCIc|g46_c@p)X-Wbs(rHNxe}RnQ6M8G;8E(f=^gJBxdIp48V0{c5 z(i*ibI`W|?ZHH)%cHd*7cqAzGWhz-6R2Oz)2QWk8q zXP@%z&=!T3*w5WS)JBWUG3AxK7ZKdMy5DicD>ZKlNY4CXtdMw?XB*e}XXS-;tm^ju z>qq#^uu%Vhg2Tue>X78OFAE3y@hzhkaa%tNB~i57TXq>QX@;sXO&xnazl<&BxDKJ}bpraUQrIOgJYde4*f0!qzYnoGwbIp|=q zMX;X%EwBJmsZ*v?wC17;@`?Vl9tF;r>|FDnC&f%v3v5%DBG$nH?+vZE{gXb?11Xo* ze~=;&kVgK{pTT+sP$+cQ*%IIEN0UcJC7>FFID~_5`F1Bp%Be8Tk3?VZP7N@}FY+A`?V1Wb*I>u!FK2_segi$KEi%nqE$*H!&|qD`l={mM;k#{q`s-+YfDv(_1M_kJ@bs 
zcMq@$foerT1;UhI-ofUu?~kr_(#<6H3P<4B#EbJ;K@2{9UgXJ>?k73dlCBLF7`cUO z!{?-`f2;qwDM@gCW1G-kH7PUMWM;Uk%4n-2ejhhkB~2pX{?6Up9Q4o0d{o+J%XV=p z^HK6cbdmC$46raxaa=JzKWc}e?g8M;LU2UePmhMZ_^8dmMO#PZ5ZlxpzSyBN8fw9% zh|xp=;?_MkXFnJ_cy==E418zd0VrCQ{EO)QRGr7_*isIbcw1D*OrQ^%LKMthj91i5 z$%0P|==>O{e%{`pk>e>$)F zM24}}CVLOtMJkdC)R>tjW{W5Kr+=9S((?4{xUNx+;%Ek1ga|_)Of9JUzT+VU-FcnQ z9ZIHOv{M|F;A7y)Bfuc6B5# zD`1WZaBEWy7FwA`^{$-u(_pNe4jz#~?X79|H)>V;-DZ1=vlDqL zIX8dwmAp*w0V@Efdtr6%wm0%)JK6u4lf2Hk9yx`8?rGg15}T_Z@d{aOdmJp3@YF0} z{C4vp(gf752|l%ZZ|z-^45RQ1(aG8&L*a2Kt@Q^V$WhVLDW8t}n=C8nN}e^}LJ|6v z3N3UaxTE|aCkcUh*YE7Kmr96^+V2~eCdns|wr_xX-bQ5h{TNI4m@#M53x3)t;*XB= z{;t6KfyDjCE06IdqOXt50s3D4NRb_$!rOc%BAFT&-;OCK!EZbwhQyzsz}4Z8He*T_ zWW=F>&-lXdE=PCIirjrP&js6Cw9awySO&jiz#+5n*$~s#F6M3SRq2e$l*+zPI{{9E zeemLh5yi)jrkPt{&8)E`*WK+yus0_A5PyUiD*7MCO)E?hdT8RG1x8EegC~|8F!S?_ zW9N?A!Z6%M2D`JI8KjeY+_Y0)^+WCJM(eo26@FwxUJSg$=v<8b<2yZtVjyI%T-#d@ zS>x)b(*lz&4M8>!ms{&SyCBTgev5p!G2liuGOePb8WnYxnKIB6k%uFXfY*=sg49%` z@bm(EJLWr9qr>0oMoreVfeskMY2J9|Sdxf8l{E(0VE&idG4rjatIu@-7zNBQTV0<2fRZ zxihgXl^GyLe`zTUj>`fjU7lIoTFfP?)hsTk^CY)HivZsptXec;7B$$Ah4XPBh#npR zSHZFD*;P8uEUFJG_ZedK-sRauVIb2E2Vo8q{bbp-#(Px`BuNa8O$DKK`F&r4^nJ61 zC*9q=+K^O3N8{ph>E(M_QrZ8nZ{#8ud*BYpfn($Gmi;p{oCkB zr0s)8iy?!Z*U&E^Z*d{1>}3n-4T-B+YRMxwz6Fop!9L|%FPst=2?cb*e+(=@i(OBC z-ucs|N*$WK^UPAF6YWlPe}T%h(xC@jS>tR#2f_Gyn!tEhGE{R7nQy$EkkjQSb#$f3 zNKdWQRUp%9eXB>53uZHg84#<%BL6pO%-;N80&8vgS$}(@7w98xc1b=UZsdP*3S`l5 zyWqkoFp24vT1`Hh( z9y#iy6GDh|g=L0y0Wtr~gL(%mW()z&zqP5(yd~1{UjUGFN!KE;1lcuW{R-C~`Mzz{ z5_A;i4mV5h(x^x4VGm{q;ix{?#MQQiG9G4ys02@rz`P)anVus82ijtn32f@suIn16 zC#=GxeXJs!D+GjKa-Qd&GP20aO2QO|>qM&b{SbIz)F^=^|L6)ysa`i{#x0POlbiQ7 zyW@QbGrlCbe|e+mtu8F*o(P%#jaS#VLacdCJ&fPwE^DyI-oPe20Od=OHFV>uHHKYs(7f`7Df9L;9P;zXnb5LAB#pP@w=I!>TvF;Lm*ueP@p>XR>R&{RiNWx6iT~ zvZi*N^G#A|sP+&uY5QydKV zToVjQd^sfL*pds-@W{CHQrJLRRT-qnxwq1Ye|68ts^t_XKqGW@8aYs338c)l+eVjZ zDV4s0bre$YuF+om&x-rO-PB%SJ^ZEDR8_~eo1mfr9^No0s`Qw@+O`l#+{-8J;qzOm&YkQ0XDZ8@080 
zf0DHfE3t+tf*kFG#tzsSY+0&vO|W6x5W}E@1N`2L0U(>fs9at5_6+mOmX(DOFxP&0 zLt2B?66)BAD}8xnJk=b0UtscymN8-VC4=>)V-CoXCl=Ea#s;C$RhAlQO zGtDKkvjdRaFGsK2f8=datu!8pOIjuRAYebuf4u2UEiD<9OAGOIa`X5M>Y!FR1k_45 zf`C~0n6vFAf%57DNWf*?QRn+WW~HV#cUM`PBweAW(zBG^*+oGWl$s=k3_1SSWgIK8 z_y;jGXyrK(=eS=cCSsy%{8Z#Cw{tDcyqG+Ym0wj$GT3iUK>^j28Vq} z_z<6tY0kYe)wqNyN{7Bn6aLt+k97c1LKzfvRR0Uw(Qe&J4xx3A;d$GM?#5CB{bR^5 zHu;xPBLkbC7If&$(wL(m2Uh0KBR^>>c3!flqD1S7VP-9UwaKl~ojlt=p<8uajoZLL zpEv$;V&X;IX3*(dlh3Sj11SxmrQihM;M{2TvC;B10l>h1x|96^-uPK`)5y8$ahHLcMPQ7rfG4x+tuakWXL4kGvc8vs~w< zs!fV53X?Nqxh%m}i1YT)&2MRX>bU6yo=)%qq6axaA|Z2@Y2+n{_@V0=?cINeIFoO8 zULUJ31%AOICY~cE!WmR__&!HY6#_7}Sn74|TQ%M{E!9|h1Yo;|NR)B%GpC%_iB=*N zp{ygX9iy*9cUn;_uAB{06h8L_u$ti7MYt^5LIMl$eRBB2iHH=OzZHry(;P56*CbZ8 z3$64rGQhT!I3HxU)^fPc2{(>zvP&WQh~XX(Gi90p0JSfwb=4 zt>A!9ObpfTKP22I_<{yZkmdIYdw<>#((RdwSxL^EHo>TgUrIQGGETm++ytP{_LPxmxH9=tUk7)CD!f3_S{XrV}Jzz!o& zI{YXedo)y63&N2aZyL4nU+F+E^W1{ z&{Su%a=d8bgwwq7^z4@%l5x4uS?LwuHnWAb;>4H#eEAP<8XbkI%R|vHhP^;vmlitG zw33J>(b?DLrZ!_%&{o}Fzq(kAobU{h=X=K$ZA@R5IF(&sbHP<1qC4l=7sO|Zd^UeK z&DsFEkv@c#*P8TtV<&RTJOj+NZ$xBj{m#i|-Ag?^Wx^wPv9FNh2aK2fQ13(_`IEEx z$=KNN@$O;&PEbjy?|ZhP7zIUTZw7S0H279&+zECxf;IlUt|IohIiNN^lL-9goM;Ck zSjYKG2>V(g>ea)nOZW4t@086&P-0qM@L*Roq(!K@K^`&*!B9VyNgjq>kq8!U3yu`V-}fTZo#K?P{BF51L;4L;G6(X_(qdg*6p;>m`-8fQh4 z)pZag8hlbiZ^9s~?&Id3p+7Hq9$>qqes?e(Is}Xy*OWJ1SxM?cNRu{6R8OVA77#fjUdi;kVP_lT%Sb13ujW?*>|AOQ>ZdjuU=XOd|a~YF+pWSVz zk7-6Fot1NheZ5tqxMShPW40jv>TySjPQL5iU|UgOuFFdR**6Q`$Le6cl-d1G z#PKi^Mb)kX?2@7I2V$FN4W}~I7f|%rBj(UJ4wmlB*yj6*%zw-0vVop5sD{e2@@qJ*RcyY54)=wScZd0Y}lZfv z&3xEA=TxOdW=Qup_whzDI85t4GDIbx&SAV5r)MW(OQveP9AM9y5iPZ7_8jZ0dxSB*QGM~joF1L2iBN7s}+6K6R|4_^)-FkbFW=>MJnjRjWm z_wGIQuQ3h+mvWaDwQ4NMlQNH=2^|3}+7Es3HeQM7H_wr$(CZTD&0wr$(CZQHgz z_b28D>Mf&oWaM5eceEh}(AK=`nN6XZ)(*6PooiNvL1uqzrSe<`yGuVq9VNj*tmS=6 z9o>^(o7*CnX4NY@GC#u0cY+_f*fd-u8LyuEyJ`Z8#Lxv#$iOW>?Phm_b*K~yg3rJS zUc=$Ak=xxS*)znUma<~3TKvr99}-n1So(7- ztf?)!yXV)1uRWhMTTeql3$QxuwzwlJo+^$Nio)D7=R=Wi(5_Ag1kLj9uV7~qO-VJY 
zEHJq38jgQhN+T8QnilzSVH1w`(40zGj3+?=gWM8^R1JE;tG0DG{v zw#tbeulsRJ#iZVpN?`oLf_=jc-|Nf3S^uKgQLIWd zWEK$5Qp2el>31ZUfaas(A^{ML;cKxzv!4B+g1h?};JZq4B#04U%o80=CB*7g$l1BL z#AbO0{Ay~lZ6C?-x-uad1K6!T{4QFSol=GmcJ7db-hu5crU{V)d*R-rG2VPDRf^qj zIyH=j$1;2V0w-f&;YE2E&fxvy^k`?HbgUP*%E5~XT+qehzAIz(laT0He($^;(3W;{ z>=aR6IdL>8=aPu@dJuj1qVZR*L}~nRI54Y6VEB)cloMA-o`yP*8#E){EYflK98T)2s158`vx%@#!bIOh`=31J_k6zc?xqWXuEhY@p6p0QoQy;u zW7DCK%F}{-<*GDp&An?p6*1Y;u9MquUZxh8E!2YizsE*`JxJ{vpQ&N=UvgW{t1C@* zL>jOgV}FY|1rOgZr$dBj7~$Pef=|3nkRa*79IpCK z%tijgmTWOP%q(iGvO}g-d$}CkopOHDCQU82JbfuuxfOFU5)(e)EKFHtYXfP!y9urw zx3|C(^ow-aA8mCcgmf>hC^FvYh|@?n~Pfyyl=T<|Z{ zSdWqr(_Eu-i^P-q_O70*jfZ>vh|aaD*2n*mDxZm(c!w@jPwx4*3#U5n5 z;5VJ*!e9PGPC3HAMrDnBOqf=kqm{?P)YMmwZv+1ls|Zb99e^2i1))J2N4@sJi}rBB zzW){m|KZfc>q&)aIN=DggR;RR6Rw;&;fX;}%OqyZcN7NT@1LlL#kZly*N1kiHo=se zgoeykD?Q#Om=ERL!V-8pzEpk*N%n|9{<8*xgk8x*tjIS#Eb{~lH&6KI*KCwcVELFO zim8AW{OW2KwW5u5%|Zf;&-B5zTE7n(E5il!N=$ue$|yGw49I+e*-IGE8kDKq$RCE4 zeecY^AT!@lr%pShxQU`LzkLS+3C&9PXNi!XPvp}>u+GnI1Aq08kETtyjj@E!B;<@M z7Ukb>A;HplVqe5>JB6*jJ@3mU|u`lks~aH7LQIx5;SG zf^dVbq8mVWxgp)gn>Uyd@X~o$QZj`oi7h;Q3A^ooq=VBguCrj5>R_a%Mapj5UBcJ3 z!MvWsrD{Ym*LzO!elFme_nrHGXR0+t{G$vENsU<8}ezxhKjZ$jVk6sgDI(Vm@BaBzzV1M1Hh?npII2u+ikO( z#|cOYbWo`7uAt5Ad&9!YY@y!sh<(F^CsU5P=HG$UY)I|{aWKU=G*ZiCDXX^JHRHjw zO@(=AA#bx?KPo3<9f1cPkf3HS8<}sn<|2G+N**~7Qmcr&@Ul@9LZcqW6sn4 z))JWB*)PP()f;#~Rml+=-n1v<Fj9Uq>bqJ7g^tYn(%RP=&8z@@AfuzT4KB^&JSx|Y zZz52)S>aer0SdQjFwbVKIv)yiOTmfUXL{jZ`Sq~|GL@TB&&4!OSObTVgvA{B?Sl10 zy!!W&GI^3&!E|+Bf8H@v6~7lS(0d0Z9b47fVm1ic=3JkA;>zA7SoD-o(#15v$M8kZ-X9m!or zi|Ful&!@Tj=}J>>zR!VEYHwx92BSJ5i5p{nV#yh>=3k3tTHM9JTd5%{EUq2%!8o`A zj)lETGbekadOfr_=E(|qLBW_9-YcDl*1RXC)0Lvoud*?6KODG`?}-0^ZM@x~qn zUXJsSLC#drNSNac@*X}F!bI?;;!+%Qms+?j$`Htrs=1xz&1pukeLffHcbaO-D&a z3vvJ9Jjq$})V`C@DB)R0Ho!wbN9c|{)1-B9P$`Eg^6xRLa zt(SHdT;jZx&oE@xk1m*0{9Sl1e5uz@5gxP?22WWYTW6X!}i?sWgp z*{zGdHCH06Ezh>ldTkwklarw0F*G`qs>(yeNfUzKhzsE{opwA70F`)$zs{sMdfqj95Rxe?wbiG)v{optgNYp>7+n 
zYLEZQ788aRvinAx7uC@a$PTa9R!|>vqqQ63vdThm4gp{lkPf8Yar^!yz_jND4HJ7!-NibvZMoIgKaw8CI9ZNt9h@fyA_Q~ zwLlJTU}}ScC3(yNfR$I(Xr3PAzN>{#l^x8-(0?KoUBgXSce?pmyN(L28ern&PPG4b z{)bvg+R1~5LaDB53)=n55EptY03r7NbC)a9##D?`>u+}V<2DF#yil&Z`*^_r$)*DW z>l#@kxv;G({0#jJ%YgSivg1EPlHSGD();{j!$32c$stX|_FETmF_*=rnX011*~DDP zBZpaLpau#D>(NCJ*(~;XSwG+>qVSXsXrQCD-xpk@p99<70|ep%$dLBE`Ke>H3QJaS zq9=d<#DlWpl7v`AS3CobE)Vy=-cGYfxKvs?xKyW!vO}QQj!xkv%(j(K0ZO)`pHkbZ zut-pW2k6w$_-f4qVA<$FMP}Hg9)^mIY|sD(e^^MG`|(X8VsP1N=xv4_ob;T`%_5*i zI$lO0LiL*Nu6%SpU2=4puQh=mNk*5qKFtI*U&G_MPyy?Dhx9%Ss0r2pslP(D18*I> zBJ=Cl(p7HB?>V)sk|08E@BSo&$D#8HUJ_Vi8K*E6^L*qF+|MjCw`uprl?g@M2?0cg zpv?=mhrG|%i`)TzX>PRaTZn+Tb(CIM-A1xz7RXL$KGOZ`eucpg&W-_LA5!o$#3h|S zmGM|Y55Ro<1rR>e{P`#~<)fA=9$P$B8uajt!^)DsnZREKK^ClE|S<=swU z8zp{a5T%qonX=Zfq;OubN_Wm1lF@X?JE(}krf!0|_a}>ab^fR{ZKHfox=1ea==Py# zN-@eqg_@t7mCy`}Yv3<5M7wRrvTYL7nSgsfn^e0}^cEoC?N$(SIS+DDDvLDB#aBa6 z3qaC@k56@~Y=_BzwCWq!s_IvRqE!*9xX4Wz_i8KqC;s#}i+S%C1eL#t@Fa{Nk`30N zx2?0Q!kVUpjLA_RW(BVQ3j~P{%PsTO;$*LM|dKR zV?z29S1%`?RsO_Tc+Y=*uB~N5MdF#_0r%o!12a$dg?suLoWEZ5#Fhe^l^MaB?$Y}0 zKOIWw)K9!( z_NH} zq0+jRIq%K=?Hqi8Hm!;(2Rp!WX2WElFCBGyH|i|#f+F)|!j-ZuQk`wFNg7$Bq3kOT zld)T7`&6QF0buyhI$_{U9Y5VF^6-!i-eXes+?;h7>erWz{}|s0q1{n?6x3eXyaWI82hrm6Jy^G7PSwH)q&X@W%RIuL@&=HC#CGPVq7S3qR2R@6ry^RoUMo~ z#Ln)T7oMO+*6^0f=IW;=ur^^4EdR&JX{m3yz^cxOUMn1sN$u=w@c=t=EW~2=VFHdZ z6i@7x%(K3TS{rTyC=guuxOT8yFvt>sUVHxzL+ik4CZRS^E?NPO9iqL`pQ`RlLPHTk zJD_Ex2Bi~HI&B7FhlB(Bgt9$xNTl4lPqfB=5d2mNUP=aPb>S%~2z(0sjZfOoD5JX2 zq(DS3qnKVdsbzqj0lxdOOcMxL?wAAt^HY(nptrUnB)p~DdG!Q&@h2+RMI*;LBhWoM zB;KxWX}`BE_c|3Y{gbqwt~k4)PLO)^1Gn115k9e=*Dm8`UqFNzE>-BjpPw*9rEf}$ zEC^skLg)SyK;N1({+vE!7xYTr3V;KT8X^0Rd%6uu$NY!NmjFo;{=~aBkngsJ8H1EE z-H>HN-I^-hs|JdBe%9KD*+p@Qi(F0>+`;kzf8Z{qmE1U{YHH74wT9wRWT#X>-!APN zKy+p*05)fZDESz)V~*@#!V&(ForedXgeq;tzQqAiA4{w@*Qy*VwW+buAA)Bb;K-mp zVGfWepvaO4+RPb<3L?y@C#6o}FfZ!ty%t_6b_0YLAw`C~9)NVmtYl5g_6f92Z)w@P zi5)8A1_EjbT5_2`R;s0}=~v>6B?gEbozuarFk7DV3j2e9fwS^LZE)v~#HN8tLnFX0 
zZA^?6A>+)(3~S|DDNdMcXpw>Miay2<5~_Ea|6v`r5u3x&tTd00GKBU*12gSJ(Xm9GsHwfMD*(GczHJP69uwhIol$+!k)egwIx;4 zGZc>?ZD6aipLV}92_x*@vt^EcPDdSs|B)e43aBIe22@8_2=RBctIs5bVDWS+TD z114TWt3L%S-;bU*c^xluhWH`LuT5IWGOMeRmqDb>2)k~T2zCkNy;9C)_e(KcB~}Dl zLl(f%Z|sX(Ezo$YS%;I|mu@q&beOzD)jX|ux1TaNQLx&(Ux4FN5N6v>ZdY03<$${% zbS1|FKB<6Y6Ct*hs`NlGF)C5=t!@cdp%@|WXitG7mB+pl(`+Kv1o^`Ga>|2hlk;#2 z;8&%HSN8|Y(e!*$|8h1NVNACqH+myfjqqYdLliC?vBJy>G}RZm zWvOKjQ9IKyN02n%hKT@@BfX)%I1H+FjUcmq0!>W5hV3R0X4j`f)gT$F(T_#=oubR7 zjPSVWNh)|jufs%H_y0SUQpvc$bg=$o2D{YW+OYtyZI2$q3cw2Xri+jiGIPGdyc;`; zK$*!5Xjzb26y2?dAOA@B*A-Msr~~3t{N~(Cs$2>q&zTLEHxB_JA44`u_;tou3CThH zMfuvk2yN#^h{~< z1Sym8yac@PaFNhKTwL(;Y*Mho;S7zp@0ln>iGqsw#nk*~%EEyY_T8f#%vSyH-j#zT+ zor?VDR`qOJN}uRYtFt8Z_)42Lo5WI)q4gRzP0dbC?4LUlEo9e@+(~BKR)CCuM@ITI zlD0i_^EB&hebD0|&Qs4{1q%!IT#nC7yb06CZ8u&-pv zxd|i5o^g9uo3rh*_T;A$3A4<*qCDwHWJ>^Df+fpptWuwW2M~@>(XCYM`Gr1+K0qp} zX(5zS_w@I*FB7-B%nuR4=|a*A(qB2M0C~}Rs4J!Y8|z$kZ`J&vDYE5P;6JCE0{m2D zLNAy^YS~kI$4%3xEFkFNwI7SSH1x!X>ewJ*>BwUY4Q9GC`AA8>2nNS zxCH`uRSHcG*D5ydI4O8{`(n+o)kn?I=yD%!exNZ*(AX)Dv_Q%yr(`%zj54G6#R8)L zeC$G9<*yhAh0zjzM5HZ8D2JlLK$f_$iDqra8Y%#;S;i;Ey}8+=MavFP$Q!|>_1Coh zM5#Bs@Xi?sM_@4fyiZSnM=}UTOsD zWq_G#9{u`5sF*~j=WY~^UR{!4K<{L-lttmoVk9w(3Lxr-4(xiM3AK!b3j=z zqwm8x$5{lV)P|)W(!j>nRR37X@d6Y7Pci8W6XcR=4-mA+`=ZY+`>tFSbj61b%Khvj zS6cX0raMwGAE~+y1u&K8wS&tAWWxNx@UEKTly= zjL*en1C&-;K|?v|E=Kxm$qI(i$ZaD0F_p)j{6;LD^<3P*how1Qylco0&)S+61{{17 zK~ZaO{-P&&Z$_g?N=5z#BBT6|GQMyOK z_Ykk=7#+*X$ulkyX4Pf5impHL9#+2V4rJYlH;^6JH_byN-OMBJ11th#25?S&;Sj6* zo1!hLM2U+e|7Tly&Le<1+3&or-@J$#NcKq}uzojp_Asn_sjLbrm}h;MFu3v)6A%kr z=!0aD`?tlif=oFZf~;GGi>XwXRkoHV(4gE1HH=`@!aTNYGwj;S17;^OgF_<{m{Kd` z5zuNkG|NT#fYH5GaYs%HQNdndB26I_3tKPcvpY;sNL9KNZ;9#lC6x>u9BWIof`g7zy@!hn z%0S&;^*~U2FSSuX)E$G?^@SRqWaBDp8X;s!z;DwjZzLat`?0SK{H{mRoq#&`YUcxg z*3?cHMS#;l+}bz9V!ey}N1`@#icMIr%+GQXzwQ-vFNCpxmxsgxQc8&7J|>c$&54HAqaIzwh#5-zZXrV!)-C_NB^m@h)3a^Y^W0GKGOJZJn>&`t0=hAmR z3^KdQLWj)uY+VN9XL$06u@wwGoYRgu8#&yY@(zug#{)BV1ORmG&r2`K-B2qWZEa#B 
z-}HSYJ8n)>M3SZ}c(L1pV{`etd#&R>utl;x&nJDJ9~N@7$Y7j6+B?OQk+2NhS|hyFiW> za#up3(GWq0GSTpb6G^MwI3G7KjL~?m#Tb$2^}O37>S20+P*!hnfl`Ll{GbDkK!ea3ks)9P8DL`~(QO$JP(8&K?n>FxytH4iF5^^h#ZH3TloIed z^-}@$<*LnML`5{@WQeLW*ni&t-I0$_I1e+#*4`7H;c-4c!My^eNjgt0+4at&Ij|N3U834dIr#k3R3NT-1kead&qi9<0n z*hXEiQwI5*jlX>UN|kwu3xQDl_IzVhw{8yoK76yr2int=deYAwC40z*-C!&PjN1~6 z2#)4^9R{>}K6g&(ln20)wE4M*SC)JnnR*Hbsby0ts0b-AE1UcbpiMBk+ilFBvI`c3 z2XMJA#E*En@jU&g^T@J zyCe+1$IXQer?%;s<|3gg(|-Bdh~+Em_w-{X!G}R#r|6W^A!p_W=EMB?%(C@BsS9Q` zIEs{d5Xn5?3%fk(+{`K?Yp9Wfp4!J@nPB+(C^0C%0qt5yk=>In8Se)kym=#t(@!5l+ngGr?=j|8520p>{~)wOuF&6T+we|IYv6ZDI4tw95xp=y07_L|1O(UutoopQT4hQfEMSpyF%}x;sW#6MA^%v;0_GfGF z5)v3%f}-kOAiyPDCRH77UsP9D>^mqp1?|8Dfva8{t!Q~u$8gJww=1PO$6TBy!LI&5 z=ZF{4)Z(mw^@HBpa(5uE2yjQzwmaphN-UQBre2Kh0*7+GIe`hyK4N7`H7TyZ%=RE} z47BfhcJ^M`CQhnhPIlz%PiR!ns{nB7UP3HUk`B{+OWiSj^bDpoN%TNu9!Qo0yGK^1 zq71og_AAchEg#RAzgZ6ZVQjXlL8n3-#%Z{M3JH=gNIdN`S9M7_eC; zo;Wec^7;U5%#p;I)sj!8ILg!OE#YO;p_v7&?@7y>xz7DeIxBN`Cr#L{2wfMgvQ*ON zBD9W91<&I6QX31`xC?q}w^TN^)MQwsA@f-D2*E-8LZ>1m9;~of2@pFG>I$05rl=V4 zLCr)8kwoKaCdD~?&{x^ACcK1LZB#IW6MW>enbOVZN!Zx-Rex+n_EKPI_pJ36VnD!IzeIP>B1~alT`KJ0N*{$ zA!_xbl);(StS6zeB3XU+*lgiJYhnz%z$g!!MHyck_YG|UAkSN(0=n{Ob6|Y4fal5-OLbs{c?0dBhJ`)*etmOd6 zs1+cY1Zvj6cI*qHa&~J0;B#$fs!)8rOUPBoMW_bIsRHVKaQ}(15t{mfe-9FRbz1b* z+cYIszmJBzvfc*8u!q}l%JG`>L%)Mu2}#3TK)$^ae0Wck0AuGR%RfR5($Q4Y96dRO zOMT8?eNpOH>0TixxpNEMW>?AncF)=>Qm&==Kt|js3(yAo5FNH^;urdwlT=f0DOYW2 zg{cLD;GW<*;HHadv3Bren(9vjbncnN<$E_>noCEE;sP;JVG0tdj*{WVH(3zY$`?7( zmg1bY#pele)P~4qb~BnYcQ0k#cI>#MjyB8bc^VPJ^^X+g*0T{w4XtmZ7}p<=I*t_k zGbFE)`~glO7m1mre{grge@DH-xU&MjOo=TCl|G${7HZT~C};los((=W_Cky~WWIs6 z@6PEQ@tL)2p-1KWTl2qi3_p*LNWR~xG%bXZ7jz|=4t6QH!~mIM#wDT6QYsr_Jr}^a zs69C9ylIS>l+rt4XH;fB{uYjy-PpxV{h-%+_PekA4YQj_gg>>6_P^~|88{h4GZX6_ zU0?N}ko(=ExABXZ@PlSGS1KK^@1%;fZT`OSu9K%3?Hf{$^d=~$nl2ao(vT>tjDc%? z9~@ICiI%BkPcLFjb!f^hk$Sdd`VvylPdZcdD7J!d9y(K1f1S+2ML@=extKLhpj|#w z0uk)~%I8Y(0}y0zJ*QScsR78ea4~NeE@m(Il$#Vw=^)c~hgz*|3t>GXq3hzg+=W;? 
zXPLyBnr8Y_STykhpS@B7?^q8mm@iHeirIC$@6}xMkpS2A95?9M^N=44g)}l|e^-(k ze|%BF{0~^Bj1R>U(iieQOr3}axz{%o;zc*necAyx#Ex~PyC zT^sE^FxT$3(qo8K*DR%Kee6>6kbz~U29hb<1^A6PAqEh$c;lyibaDN4x41h;0w{tA zJtZsW!*jU%Ij>JeBVhEZ#iCz1oX~b_gpOeUhex&-OqdKyK8{;md8k3tTEO}q0Vk+f zjGAFo#*CaEgMtFnnt;JBCKBJj)I=b=?wXc7eqFZnCQ3#X-D*;L88}~~K5p#^_NdP{ z>LYeGy>|#AFjOkAatt->GF26UZa+!UV%|gIZUZz4FVrk{!zKk?X4j7#YTR8RcT)mO z|GsV{=W2FIw(Y2_bP2yW(^#r&&cD9f&lkjlI6zz^MI(sqaW+(=gGWy*;aw;4QiE;M~vh--DPo&UwRWXxCS`DMLWg7wcSh%xi3yC%CpY1DXKc`@m zzYZ9XmU8nxOWC3%faAj^<4+uZ9%S8COr<5-Vv=*EUakF+|39zIK*R>!Fq^p*b8bc?f<3=zHPJ}#Axo>qZZQ78bul^dV!^gZ>S=xpef&f5V{7q zW#z0&-H9Ro&l?QF*2mY9(IG?KM?_I1~2<`_lcdDPv_JTEXFo?j_?w77$@H1j@ z@-}MgXIBuHwek;blK%QI73!MaPa9q(fp83lWqOkPlMp}4 zVht?X4?&sSFw#W}Gg}!dN2FD29B@feQp+?vt6duBR6G?ao77-P&+Hy-AM&{aRQAn<$|-R#C_f$YeA{~(*zX1VY~uTu>vy$0Zy zMLmV96Xfk$wxT86td4v1K-n@~=`y$jBhy1@AaVPxew*}$BLfm?Dm9Z!{f4gnX>gTy z{1FwML*s$1Tg`NPtr3>rr*7Tb0n=+4Hlsdug5dhz_Vp+wTFKUj|GZerDyE$}tLHL}a-Ir`7Ms%|Pk%X}-2BAKx}0Gzg$=$j<|wQ_^Sx zTb)c_LWIg;cFSQOFsAH0v%Iih#(7>}{81KwZS{s9X5zn|LEoZE=K#Bml(8G?*E@=7 zjfJZw%c{HBoZX+dF*jlS_Y8*1IJu*Mudz5u{1(`Jbh%0@wMnptlbZ3~CD&NBnh6_L zoqT2OCD$3wCDse9gj8(iN&iA78c(i0X-0YTLcgOYW*vHj*qRH%RRz}a%jdo)rGC~Y zkOSy@-rLb#i@3Taa|DwUchuX)zm%b~-I2MDC#ipNgjG1r4x~4Id1`m`q+%AV zye~q~(B}-Rn(53j2XPs<-U4ab8gl=hv%NueZ#zil{a$A*&JdjB%7(e1sr*AgaGaX$ zP}e~U)(?DpY@z}^r9})Fjy&4tBtl>32rSn3fft~Gsxf6K9AaKH1NV0kaNnSX?m7d$Y|9Ac; z6aNQnDau+{?XtLTn~Sy>p+^+ip!FEPiNHCYlW8Zt>6d#`rm0F0?dE~_A;dnJ621xW^4%ugPO2^6eNP)(l$`J?{#FvY9tcm{6 zT{EiQW_dr$-3T@mwL%hikdcd&f7_bfvURnScGps9s-ncwL1bdyAMFSC99r z;%e>t3FjhaGAUhw5QZQk)T?X(0GvY)1(fWErT+7){tCgtEXd}#o39OmClP3;~+9!Tz8y94wQ7qkneQOPJOKvCC|4Q-#Qq&w=FnxUW3X#StY42mZSidFRQqCk;h@K9(=+Z zvmM$_B6kiFOha4uQZE|Pxfq<((PAw@aKc^5v}IJbiU3BV_) zxMF1#N*-ecOmN%SuqM1K!$N@-8h^%lHx=>pu+W*8C9gui2ww2~?Jwp*PpeGSPy4zaqSvd!q7H`qu7+>re z*CT4u^;eVA(Qg4H8~`aKhKuBY*-xN{Xcx!=NKXh9eHRCX*lSen{>h+qUd7EhiK?md#O=uHgOtKhjJ!XFu50;Y6>2evUk`J~!EoEKmARKK>{q}&2A{*-kLoU0u 
z96a#^eKpbXJt5y=VoiZUWj~g!`MOxOLz2kLM?yxFWuBsBs8xHZW-+ef(JI!%Ky+#% z(mE{*bThyivS2YTch{Kij{$o)jklc;5WC)yK}OiC2Cn*+N7`}Qqc^EuO-r)L$GRn(|ZXe~VjTzFAUs{7WGOaUj{N<@Gz!BS{?6Y$?P=H*s%PjrrIvY0t_xpv9} z-_5-WF7`iSSxO2X`5q;SMopL#CWjgM7y&n^hJTkAF_`Kk&J?fwI!!Pa1|o0515#kn zre3$~$8oq2h$dP8s+FEk;P6tOm%>4gU&*KI-4+A5me2Ood!piG3gh*3?d_TKzx_ue ztzSM~DVS70el2>t+_4R|28uV#?5KPD3~I_MR9Lzf%ej~jn`Y#Fkiu3=1Bi)#WmHZ_ z3DvEi&wI^<256I$Oo?=%3tB{8?nHNcfh-`mdb~+3zadwT15vKt{OV9YLvZ<{*W((> zvW{zjM12_GW!4qa0@w7JD^)f|onxiCAzb3Vy6)t9`?nBH4x28)lnXFvX^Mw@`OQSk zdYMYB(C7|@zGT=-QenaXiOmxAu~rToO&^pWi*)U+q!`pvMaiB7p2U42_HG*~U%L+_ zSWMdrNWLTc*X#k0oP;KtGP;G=elQUWy69Sw_=0_2AS z{xY3zSQo2b$UJfPL(HnNrowd`e~yB+yq2VSnIr}e{`z+5|L7#2GE6cOv8sENf=Df` z!}?hxYuxm+{-VebZQ;1z8~N7oM2%?)(>gmgEC|iAn27NBHT}V;r^8nP1kVQH{$=^g zsCeg+blrT6H|t3hOed%S@TvHy!MzNLeKXLHnii`b@c~ zl!TYcQhM))d}InbN0=eMn;(NwNHxEZd9~EUBfG<=ng$|tO?F43A#J2;cux>WAY0+T zc1~&+lQp?EtB~wP)bvYdV{(i|Tya)chH+~!O;Zl{h`*BWqB1zA}(MY6=dFTmZ zuJiZxWg_q@yP*9C;UZp8QS<3$aXDtoY{O&Vq1qffJo*?cYx=~i;reb`65d0ePCD4~ zAKs^0nA^R%{v-+|!kq`rPqw7FeiD_()(TMj%aQay6>PPY(mO{QNF+O==25c38<|Q( z4iiZ}fwjV(0%r1Kd|J9O&(3z29k-~DMI9qzaA=Ee4bz@jHxdM3i$S4HY&`s)7YQi* zsJR2HyQP(iX?)-zwp8og`=dFJZQ#HX(|u#{!qp1<(httn!vTg7a4d3>!?Y#FOxfKJ#gYd_wCI~t4*$|#660ELc zz_Tuk@*Mlk=b?r4Q%?{kI%V_y9!5(6D-ux2d@Z1?rkjOBN0c2ty}!ErrH?abDqnh` zi>q?l`(XHdDDoYYhbI7EC5?iIv4Eqggux+Smdk$7AwKW zF?0-njpRT=W8>9N;{VS7iO-S2L4C^1jew$Cs2DK~O){a-NbHd!08QL)j7U%M0BlhE z<@I!eA@ehhk|8mBI@B(mR@q?PofZ74XF$4Dy3GS*!zTm&jKftl+Ys8U>ySVU+(Be| zJt#u9_pMX&T1}6bhKo<*EPyTTY%?i|Q|G}qgVOGEq^4}B3}#m5`H;JtuRH)nvvtxCfcJsBHOD!i}A07jurmGsIynx>GO4oe`%K}BlUbV z9u`~tZyLufIA%XgH@q2j0gAMZ_Y-N*jM!LyM*1IIQU-WE#wg!DZ+kMkKh#5A|U+EMh8FQZnuFD9Uq!_sViz z(gmaJNY!-NK9r?Axkow<(6XqH6mYjy0o850x_nKG4_&7(C4tCg#&D?7T&^X1spJ0x zlW139mXFL8_kpSJkTjp5IB#(;g>~khE?_efHq}vI;s~T!9NUke4qm5vaW@}wQZ^x; zFedr;8B!>5KEG>B;M(X1ke?wBTJLpH;rC|^Y1Tqsm8e;pGqxZ7jfTP(7NSbTfkPB! 
znxRMja}RgHCbW8fSR1mpKf@R5nZ_z~%A)Udk=cqZTn;y5g@=GOgwAE=q0g>hP8yUl z{Dab{eB?ST;p+T3Flg^CB~9jUHf1)bWEH!8BXf+YWrAW>6H#g`EYbqX?P^IKnp??# z|8(oC;vQ$bWVj`2iny_b4F-W`xKXVdno2}~{s%PxBQ&ysjxhgm7wl{77lKqtj#tUF zhT>RuDB#?2Q@{7Dr}0p26ySy1Y4@Fsx7BDzw>Z)p&P#?71LQPVTfN$Ktn6$d-GBTB zuXuzF%yJm-9*i6gGU`BQu8HW!;o`^Og;HvPl6oC-qomEOb}^~F4Bv{qxpVpXasQMo zYrXFN9pq?bGrEhxFxEm^oZ%cs%bcy!z~0@M2s0vRvjuw_^xyJ;%Q;tbxb=5s*JeKS zKKu#5<{4&33t03PJ^OtUd@R`V<6$LB2pBti>O;04r^ zb{7lfI!~X9Xycd_CX0_sBmuod4=)(|WH$B`{a-@ustvr3V2aC6-h0U2Y4*!ajJ)Ny zDF*q)FP{SBXszi=8lC2%I&~<4FKVA*+X8|YSNknnsdH}Ov(gck+!$WhslL1eU@gHt zoRfUNOX(@2)@^i*h;&9A0cAGSv2IS>4@2g@rEgW1$kkiPX_RXLRpfy7BMVY656H1( z@|~9zAm1~$rvWr7)j6=nBf24bplPGwt%vsqcjxX#kfg0S7sBc!cQ~Bw2F;)!4Z&u*fUHL8C#zo$wo_t2&w#H!ngr)~Ae@8YQb^3h2 zSL`Wi4PQ4dM1o~r6XzYCLu8|C#t71OB0yK9^Ru*kZDqHXu8Pzt_$0-a2Co1+RbNq* z+gUmAILuxyS4;TVK}^uAnn4F!YfZ`bxor5nW>^nz=5)?0*t|zrx74%spGS#mGSD9% z|D)|35Cl=OAlSBT+qP}nwr$&*wr$(CZQC~XUt(_&^|CV3dveun%|#dsxsJ7=5CXE; z`dym&Agx16=OX0AYhI{1ci_JXD6kQeHLj}HZfluG@UDaPPZCd_H)1x8wrY}B&m|O@ zWWn{)zS!D+pQH%gv6m))9?MOOPMFL;8fcemgJ?ZbxBCchv_JD$>4q>tEg+cd zn=016URp69q;G~45Wi8)L;`1*vqjv-ut@Jm7$rbBxK=denHLIuhw}2;-b6H4H%o08 zw*T~9GRy;%k?rGa$79IlNq5#CCy;Rij&=1>Tg}F`z7&70u+EiunZut8DghG#i7!9h zH7LRw&lsYYwbSQ zkKZaBb*_<#FE6EXJ}iQ6KE^bwgiI3GYeZ?e`0X40n(#tMD!h6)p*PPXXUA9r?c-8f zRyPQ$3H^R%8KaknamhScq^P=Ccvs~O<9iWjYOD}1%ORps++UUszQ0|+y^7*8eSKB; zE3Qq(VM1GAOvad+=t4RXHHIx(f9#+KTE|{rGT*-4)AvYBJSUwQ8c*x<~ZDV`qx+IZpy*#t9UbReaVzJ+ahkb|t?3 zJkY!&PdqP3F!4<|{XZGECXH0}g8m&o@tMd#J~NLdOE5j}L_LJQDmf=_ z`&Wgp=S9O=oafh6h`)-=+_xbotJBp+99V=&SNDiE#9Io-n=s}YximqE?d#V+#=BDJ z4_;m}(zXr~h#S&1_Ur*7+`V*#JZ1Kfp@G+)>vPaPGZV%Vwh7soXzJPnTEP*L>kS2p zK!VTZ@@)^oybM61eK4~v&ujI+M~k&dojFA+nt;q8c2fbVThP=hCkmteDp%-xfMCi3Y%iOKVk6{q0kZ=Dgw zQQm_{6LLzU0Q3N8ouP5Rt#`7+(w})@6hs2t%pZN9-)K<309o9*(_n-NVS@1q#h{4> z8x~Lw0sx5kb1Xwof(n3EMm5Q8uzWlsJ&+@hD-;{=o4~GcY@T=*9n0Vn(@>?L{gI zh=KIWQkt!YyN$)rtoN_lMaxVR!gEi#4Uq&!B*|Dfl-ZY`2fP|X2mR@0c}~eREi0~m z3YtK@$M+~>d)DD2T1{j|c@Y?Dxh@lDlN$Jq*_ 
zT}D6R{8C}ImJgUb#C*bA9vB+NyqNYpp$-*OTD|#E8-74b4%sxvrY-ix9xe!%-_f5KT@ZbJt2}jL za3D@d=jg7w%5s&{0qLaU`}@mjlo^o~CcJ(}C_*JeHG@!=7gc%I@RK8;J`u&INz=tyPHtLu}^Gy>PKh zKrmr673`)1pbLzw(|lNmk2lM?AlxtS^e28)rvCU2Gs}? z>>Iur=2b`#XrpY=CNm%huKh-CpzE*4BTXGs0kb^g`8KOz#%BlO&hCp$GoTa~p@PWM z@JJhLn+-04`CInf`T)>^*byp1{WM@`MD5F)iVOwBG@pT5~1-i7{ z%V~vKGbQ<4TK3u_G#&~;=;#}4aDRipgWB6-A?aKk%H|Vvne3>Y;x&c6g`z795p)Ys zi-ec+?KPD5qqS+DvaPWI$q;(%xA7d5JxY1hM$?0 z1u@ksnvgDZ-<`y(^?v3_LpxxB1%G3a_lw@IC7`Tn*X#JmB-_+2E@jK#-`N-3GmcY= zE}+^oC~S0`242Ip&WCa@C0TMM%R$Wk)MgS1Ni8@WaK5*}LXK56U{gssqYKWasGr$c z9|ioXExpeH3sibR*wiY4Q$Pe_Mp{5Y^oS_>lV9`{NvOyi zpqUxE4ekB78fJ9)>-18(lpva2Qkwi}E%C}h_G9&^?{?ii(N%PP7^{5iM%`%E0x;v? z5UfW;1(Bi7-lDQh@BwSsHLM%8asIC?(lSM{tX?s_#TqEtjY4<##4ZSMaXoJh7K+F| zbU4UzR|UPI&CFheUeBwj?^+`&LUCxkNMR@3eqBK=5WGx*`$Xc*YL-8H)Qy<_1-b`L zz1PxG<3gn;2h(m07alqOdr~&#e9jj&g9*A{PZ2|dD1j}B)>TRWtlKizzLS>UKvk>s z!5Uw%w|EeG&gu>%rApK z>}Ai-NjR*RxxE=hb9FS(J^47iF2rcUw#;Hd!otx!Cf5DYK{w=3lD`9Qr9rp_$VY=o zm;+AjjJodY@-xvQa`NS}74qzQbxvR}QtY9) zl~0Jf?OUsq-xVG@EUN|!`DNmxdt)}a4s)`=he6k^!xGviIUGX68G7mg9I^K85`lYv z4{ZgD5cM?*r(s24H>-CX{{}0cPcf_tNrk)@g#J6|Bu8o+i%J+^7l4&%xSsjT4&WFy z*Fy66IFcS>0nUS%KiIJue@n^v>>BP)zld!3URtcO#_g*SF+Ve@>T78@CP!OW+&%z> z)rHXCwzszIIC;ScnYQZE2BBo?a4ap1R5VH{<+ol`6Wq4}($UNRV3J0g^T)T_QzaTf z%zZ+IOGBi9pd&T&V9@V*)O9B*+NLYZq$PDgnd5uM-1JzyQkJG<2Ck-4;&+Rsd}zJe$XMT~BR7hA)>QhVV9JjxS+>A{|gU zZu`mINs8z8uvw+s)s827T=Pl-6uHTcBkkL|IYLp}7HCVA; zhLuS!XwT*oc)Vt@r=tY-#5*MQM3}7Y>~4Ux@}5{7dw8Ki<(wLCX9r=0n=fHFbH)O+ zq+!n->5ak#%1A>yOhy#=;4ZPR&^+`>xtmj!`o5_UYniN#W4ufX^+5(er;ysm>ZhFq)U?uCn()-Q1D3E zdIN2y#BuC0%sDkaOY#{WDFLJ5FECp2u=gQbzvA9B7lykKq?lrblbiiUGdpPD)--Az_79PLQIGb?4|VQ0a5wv9g`HqnU!0zbm0Zx zj&8hO%6@T@EaxlR<+YM|)sAI82JET}w5b7vY0VYR2fGaGffSvqj3?*HzgLo&kJ5>mqZV2R>l{l(avB(A^KG`x`rOw(O>}@ej7VE5kgtVRv`EfAxLc%afSLF9tdt zBlfVTi&JtWD-Yphi})4l3uA$_eaZP}l^$N!81Qd!{@*8El!8tveCr4vPIgS_0=L%sR4wpXcU2nb)jR(~{uEtV28gSv6#8&~t zNA;dX0@tBtw&Qo*cEN7Z3l3J4JPsCxQXL@uhvJ5^51lZ+cg!?ns7v1fl`dLqFVndm 
zJdc0}>w6kbEg#z^5yDfj6(&h9tTY+rfjgC7gByqUGQvDcz>&8$S17_RgM?I}F^LQ& z#kIB>3%_P_3$7HM-}dj~>cTjlpqubAwNb!EI&oh2t0WjwJfPJ~w=266G0 zFc}RvX9@uV%#M^|N^ojn9+-BcKatMBu=1u@_BrL)D&%c4f|avYzx^M6hZk$Aac-*J zAyd_utFJ=bFPY7l;gh^@8&@iq&kG;hQcl~n=ABfafr#4ZZMHOgFIvcQP?M*N5W`b1 z%Pjh{nC6g+Gq`)UKQeoQ@XI7{X+VE!w%u1|$YuX;{x1gsVcf38|E6f@UXn;;zWmum zGZ761xj$158*y;obFJ-nKt-2t!GTzH1#D28A|Kjv}V5sfsr5;4q56ycywwYGM3 z=7-_`>EnLs;HkZW@P{5IUhCZ9fkDrT%CJP~eC76Ie_di4hqU=(8G3-h85F7x_1}u~ z4V_@*^IG+5BR2+bpo2cx`nU$ zsx&?(m%sB~0z=dqy0@L(n!m3<`5NaTZAfXmx~^86;no)D?WyAvY=DxH!4s*s;Yjh52*li@!n_aA{a*>$@JzM+t``69~Vr` z@BCX2@D^jA8G8O9J>C5oX8+_sZ!-~DHV)}piCMo@J|g2t_4el%^M64L1kxT75!o;w*y4RJhW`NV45_@YRvW6$9jou7 zc;n{5I+?mVd~Ajx@|NnpUOF&Y$xtw0^3y$q{i3-ln){7y zz<%@1#c3s4TQFkwN(KC45+?;U@GY{F0b$+x`%82_a~-Y59lx@kkAYo)vu_1dD-#D9 zdpTB^(&&>Ruv&%e@qT;+b*v=AB7>&})~y9To&WyD3A{O;xf(`jwO%gA9ZrqL$ZV9a z>56&#u~g?oW=*m?Cr#$+fb7r-Ww4Oq`#ZUgVk){j*saw@NH^jA=Tjr!M^->3B zHA!By?1HNnH-YA70UJGC6~v2xnnFu23@H)(EU~{;0+aRkIb2I1{M4uK` zJs(W{e5&nHY3HAI8_thD`$Ur@+ie~AjnWa24P2Gwa6RPOm?a5SH$3MXi~90G^skdAM`Tg`hX2<%(C{YG{6!~>f?>ccB* zRvx5>;Y;HvL-oPv)TO8$u$jI{;&i%dO+YfD4KY4s+Cs%MM3J?XXMY-{X=4az0GrNC zH4IUWQ#OyfHKHq6aI!Rw$zcmyMuBMIJ__{_XlTDkd_ZeV#y0;KURMh#saMlsSKN7( zRnKHVQ)r+saLxq<^XvF5oH%sCipkcjz9O~DLW{IpbNRD2b)d{Kdjwm#G=1IEN^EL7 zKX=)q^yT$4{mSKX+p!F;otUni$Oz8{(z&caw{ttS9fk{p((iMp&tsUyfS!-3ggFq1 zMi#VNsdTRikrSfInuRvCls)6?5Oq<7RwHJ1zN< zuyZaThY+&BwzLF#qf$5xOj9+UJN;+l@}EcTMVf4sdWHR%zaMmgg(eo-iq6I*!#|TX;bjqLr6B8*92c!0J6><^5aJqpiOTy= zECF@?8zacT#UsjPELgbZb7HNa&|<0?EeK*>NwPGQR#v`IYU)FC7_NW(*Nbb4FjlG4 z&(mX)WB50mq;qOEL7Ca}y|zh6T}SL6$c<`VrXZ;rBR=}+Cp83zGV`i+F{4hmmH*-@ z+GgVRwdSgH*|My?s%d2;GmSNM1lbbft-t6qmSH6dUip?B@A#i8(|PvKZr4uAM&exe zAX=noeD?yoP--*Y>WblfI)b`Y7Bn~eIlC9*P36w8h=996oA!l;UZq&%z}Rl5jsNNC zWvs$1wUYk|^<;;~t)>HS_ia8Q2R8c$IdY_u6+!ur`c+W;uQ`hTp_RJ_YZnkQrW$`& znKDZa6a@az#rM}Vr&AYi)K5?R3$Tx!h&LMMk_PUQJ5IT*gkBxU$bi_jW)bN(upzx| z(D`VFk7k$7&CSR&D1o{i*r!WXjPoHEa_22l4Z-VU=(PAx{gfRL_0F~eG4SSHn|<^} 
z+j{R!cV_D-oT3D7zW*ko^GtN~_K(LqzTyIpW^YjK&QzDVJ5vR@5hkjUM!90Uoso>n z%^9D|xk1zw?9ye&)NY#Op54Fl(OyTW0m&>^oSngkRFwF?-{!9+-AE_VW`QtjR$AH$ ztD1$BJ!#tVz|PL*Bs_TP8?NtO(-(@?ds5iaxG1_#VUcC_{M*CQLX`hb61(m4n7u~gyW0>X8VJDWz1D84Epa6e;2$Tp@h|6+HK>;YUer5y6OTQKwo`j5%( zIS+Lb7%w0NmA0U)`Rpci{@V}8mA5Kb*17kXrlX5rX4cqv&&ZFQmc+`XZGYkrMwM{4 z+M5L)*9X&b1qmLOv?-4%bZFJLM9t1;1Xa4`=v_dyzF`$fVI?=M#zl9ymvko&gG+Goso>xk-65h9!AR9js~($Q!3_EL%+fi(v#O&yGL_EwrogFmz9_u%VA_oFD$a{FIeYj*m@hv5hq*^ zAYUBRsAYb=?YyN6`%;WyaD0{#r;vwpYfBA)^u0;?(I}573m9FBWgL#uTnP8xlwy8E zEe4VI=%=BN;y3$Bk@@sK>?{dWgV&iZ4rkgBgglBp{iCRQ=VIDl;Z=M9+BJxGb&GKy zTA=8&P>J+QdPU!KspY~2SU8eZh69HnnHEYyNK0L-F0G`bLkxx<{oo+Aq{e=7NS6Wr zC({n;#VZ!nId(HKnT>V*@yU>^0w^_m3{8y=LOyX;izkDL@9@uCd69;Wql(cC+Wpn7 z+fS{_*t@PQbov0JZFEVeR11ki1`=&@8BP&I+G;El-ik4DCivFy1c-Yeq~*ljjN2}>+Bp}cBz8+5*oR2AjLPOD5Cf+j}HSCsTgKEnR6)acF+fBYnpBNHg0dOT|Bfy@gWXo0oCjwCY?`}3u_0|T zpz?DEx{bGuA#P+k=Q|clRc<=p3H2oD9Druz$-&pic6G^c#+*8FsBp`j6qFo|8~KfL zRh7jM-hTXQ}AFc>9_TKG409P#9o4^!UkYdX_+SblmD>2g!ZEEmPw4|rb z$ou433Kr*k?q0Q4K!3ryTU!sbvgrR(Pd>>7?iAziojUuXz66!ysVT+U2QQw3qY7oid5HBC#RUtQ&f{ zk-{5}D!Z_qm!imDL|TvYuDX0_*Z9I}{{r;jn!a-^-O}kzoI;r4fx|i)h29Ru3!;kv zJ7MaA2d`P;xX)jw-u>muU-A$2L4-Oqe=}exrmDOuz|EnCtt2sJ{+)~P2^q;AXpOUj zw|4rA(;db}n{3P~Em<#xWJovzB-#@1&xVkXN@_WkjbIg=L+?LID9oS%=4QdhfZUvnhNn(Hc4kaxMxmhSXA8GR--4jdx@;G)VsVQ!zgV_cwvpdoT zpdPQa+DA8!k2VZ)$BYM9er=##NN7Ui=9iSaa)ru}{R#VZGvN2g@SiKtNCbg7BkrmVp>Yvy~Yv z30&sy47{Wf*uc)pE75{UM1*{y(!+hfn^>3mZ@~{08DL&Zm2M~|&wH72R?ZUf;q=JN zZ%4rs@a<-0RKXZN6=5nHM*I;Y7I_OY-W2&YfvCs0qw`kYQ@j=05pT=?p!WD%_w}L7 z5Jty$W)&#bP+LH3zJA+26r|dxZX}LgT81dpq#SBXGc&xU}l>|9UoG`{wHbrNY%SAh_<<6t2d)ung8#Ux6Jp-`v}^{;NaX&_yfCW(fl;m7A0^?^Z;Fk|5bhLxPl@>WO;|uc zZCx3wfSfa|1RURnkTA0Kxp;{q-X;x@0CM2Do$U1YmJM}bxy3zT$B_hgtGJ_Q*Qnn2 z3gW#3ie~hCjGFU5sLC>&cEiOL*`b@tm%LZ~?~39B6wP8eR_=m{Y1%!>iq!$$ z{k*#|#KPU-NGoNLa>vAZr)bP~YNEU4pz&wj$E~fg!X@??y2DBy`Y{fCXs+yY#AM#rk;RfrFYTyRly!My16zSGBHekxx{z5!FkN_9wc z9Tf*@FF-hqQ0K5J=8c>z81R4=Mrux4IxZbkY3ieJy>dzJK)tc?W%gYm_2o?x1^~Ls 
zakv9tJgvj#x&2PR!v&LB2aYaIJ#9|i#z+bsES9R=9P}I*oBd9pK?rmqobs0ktp0^^ zd!JSFQG|JYl}U4Ur4%5&rmfN(lMUSTtp1U2GP_bGr6O?15fyOR{U2Cg+69iRE=(IU zc2j!g<6RZGps)}>Q?7$+#5U5;RoO*r!YprLKXX+ z3Nww018w?lFvLVJ34^a_cGpa&(xpK9fce71%o4ZoI{A9;(+?0Lar4QungMSs{R9UA zYi&myMm>z=_@Ap!JEO5X!0{o!l}~RS)7YN6b>tLdA|KN>o>bfDD+u<(yr9&grel1S zZ6RbU%k>r&yYzbM>zULdxUxK|OcUIJFw+j(QkSi~BeyQ7;mADO?p*@|>QCQ=PLdc0 zcxOgs7?MC8XS>9YIT9w<<~fSFd``G0c#l^BdaA%|=xHg$JPNhF!a)n9i14 zGX_S1$Uo~%)pBliWQU^RlOelc%bTC%UhK{Gf#*HDhgR(^Heb^R_I8H94hJS8fP0w2 zId!RJBhXb!-i4~VBCc_Ia;;%yeHUyc(x3r@QqiYTuj0=ZJ8;O(Z-53UbRRvnovuyP zn{pb6I9O7J!fO+Jqd$N8i4{ZO`N&pcN-$=XPP0!jIOLPAVBB6d0h@ zV)n*}^x`)NMM5Fg207wW0ikowOLC3rJ;F6%>wldC%^s&rL6lDtPRP-@RAli{h)^6^ zShoHF`!=*Xh5Xog33S;PZ0*uMQn@fz!PT+1l)&o@Un|h9Zr*cM%WAR%3nkh*Mgkb;jFh_L=D2O7(s>2jMUw2 z@3{}nizXRtjabIfVZ0Ko0L>T_iyIoWtbx@RDyXzFXV z<>rT#lvqDn{uEx6|KI%2$bLOY6f)jDD7THUd|c$wu44cz<=#I5 zlG(Xs@egNaY?mWkq_f!-eK|XZY7yq4Ar=w_f^d|=L8Q% z6RP=|=2N@txB~g~gF(Zl3MH!?kAOda+kIH}RV8M8^94lBZgh@n+azh3h5G}R>EB}+ z!ybyxBa3Ee3`_0Zh0M!htgBKrPV$siu_5Im2A?;@f9_cqTg4KgNfM`U!6qZ@GAXE~ zrr4J{@$%T-n%K2Yx1a|VNXO66njdK&ik#^~_D5G5H+ol)DhfZMnEk}qo z$L1qP{?M}_PwhV=FK=0ahb_*FCnv2FSG+C0NS%S(Oj?xF%eq(9DQpi{?wA<&db1OE z+fz3DVjH^bXUzAIAYSUM`@!uLdE-hxa#Nn2N)5t_p&eqJK{co#$;9g&owjt~5qWfK zB)_FqbMgNzpItE;p2%t5R@`IpJZWyS8$*FiagWulnSMsfQl%2izmTju;*BDZF~WiJ zl=&uRe_frO(tikW95bJ%z2sg$3i@5Mq}}zT9gR}?ax@+$v=w0hBT-w-<(GwAMjD*? 
zgW9T~eMAn2f^PO$#UqA)eE*ZKO#jd^kMWC&aB-JWVbr)b?i45M3B92IDVp_cJ~v*V z5JeWszln+dPi+0AP%E$~?n+NF=KRNiDQ;!r>8|BZFINps2knPMRw924S4Pzx)D{?F zp7vkh5wdjBVU5sMv0RoCZ;{880}k!i_m#9DdYPPs)_=Z5V4hMr?eZ@^S%FJ&+1@AR zq!^5K0|K|5(`rpXm;p8!UGo;p1*iQ7BXgtZ7knydfJ?g!Xmct~2#dvF z(F2zD+P$mkZ4Mf{({?2h@X3EgqBySH6g>XIwXt=^9iW9ncYH1Q1<0ILE$Gagj^=C| zC0}CdH0TCwC7~y>BKl<+yh*Z$d-APT?HbgQVwy)U4P2)T;K-)zNlz5YBB{I@U|xV* z+ZSE!b*}ef6Cm%;0}^Ncwx6kCz~`Muon-_Ehj$>l28L%_o=6nsP(VtoBpRTFM4vHw z2L5$d9kQi>xZ&_?kJ^ew#STC{ z%`8d;pB-eIdJGutF0OIvYNxB9$(X%}dApWzPkHBCBE&jWcq(QTKiy~U2!i61@G0qL zOKBnT&cHaL|Ju% zXL>;+Fu)PE2Y4vyHASfJt>DdWKOb2I68f6Hv>p8VJq>Cn)&^50h?jjSMhFGrrievn zhxwQhh}{Z0LBwjZy!DSE?Msa~lhfBiy80J|>JEzl*842yW{Y+Q!Lz{!Bq6F&n zZ4vh;bEoY7rTW`)eFl-2&<|FebJB^}5(9EfxXeZlHV}6bM$cJ}kzy0^`7=teXn-}7 zv4u>i#;F}mR{^1sxZ{lLNXG9u*t(^*$xq^fYx^{*7}xZ}#}o(D&@^*~zbfujV%TK= zS7L7X!X8%qDaX82P4?c>xDBCF`xEa$%SO;$) zc>(?gubSAAijl!CBZ~!wr!K?fEq5GNRnUeyB=5W)8AZ>(tZXBz*-~MZTqNk`lIuf- z;X*+$HqsfHO*ilV?gV#xKlM2PNdl!|*tkW&i6@SB^BW%9<-HNM1y<%cNYk(4a42%%npUkdh2W6+BIo<*gIzni1%Q<4Hz}q(52cFfZ*rg z4WR1;4FJGWHuWtKM3S=pZ0iR1YmgkS&76nDUsK-4-h^|Dq)81_De1*LX{yciKPA!RsnW4i`NN+|XTBXf99V$+LY}t2=S`IHXdx1f^qLNN&|HG1@^b^W(;FBX{v_Yaqnx&;6q=KQyIvJJ6%BQwV z1kM)F%PZ79RYT;{@&DzOWvB5=Mg9%}Lya4f-qV)9`3jOg3342D*7RoJXLBBBn{hq+ z<<}u;C8|90Y3eoL*V*g;JrK~v+;t%+&mZ~v5Zz_(}Wam2$6ie5UVVuTt!SrSp!+b_y497?z-Xnyb+8oWef?4Q1P9 z--K8{?RP^*uECr$*~ILV3Gie&FE7K#*UnW;{r2Wxj$+ zFU;>Z&;hFMLqVqq0q{HhHX`kCXl1}%0NDmhVCBdJBAA5tMN(+iCYYutN*7`V+H3!a zUoh*DSYs0P#p9`Mt{a;crtW&dvA}3xA-QlLr^O)qFcgnE(3-|Cvez-~p_CK_Gs&9pS)zd*i-L-}yyTrsnr7zEkrC2YhR z@)Qb=OAgZZ$4pb^U=T+$_l!e2RCUd@25p|FjQ%J0jP&syZ-dU@Lf*^Y&f%5hUs8|Ozq~T z)B*M8dY!gjM5&k;lmG;ro(75Wo0KRiZV@N>SX9$R1nWqEaL?Qk<-@if>LDWEICT95mug$<{=8>#^P46h$Nn$wLG@wsm{HV_TsW2qdG^@XdepiEK@z$1Ju8?dq49Miz8n|&5q`x$K{FUP=oPai}bwS!p zP6a!m-MTv9DED34Nyvuo!s{0duzF;;5F;&x5#kN>2 z>LukuV^iO!UVPc!^F-*R{tePo!E1r}jdJ8Rh|U>>;n;eAvE(DaM0bs}a- z%F5SG_LyxX(T)r+%!zC6^7W)xW&CYI?yySG{w5;maZ}%N*rgM;m;8(km_4`y8Qu7O 
z1r!X)$Ww7MCA#<@_n!;^(hEO=DUW=spQ?p1VAQ#N(nSGDU!{D1FvO;aRzkYy3bJ5^ z$SOQWC+g{AzPY_M3x3^R_QzAv4#}RLv@=W8oE8!-w5sI8X3aV&(&GL$`yPB(9*ijr zh0fv|lp8C3?6mf#Y7c8^Jds`m+qG9*BMm2A5-K=-jNrq2s=%I|_`q#HJgRw)5hokG z9(gN(N+bzDBoHz7;KBl__uBGfxDD5kygft>WZcFAcBMKqL`Oyx7Bq&U7s+K=f*m_* z_LU`$aU8>NPGtspMjZpBh5fC}KpmQ<*IK|$(F$a4KMaN|Ub09~&%z=PxY^Gdp|Sgx zN+v*!DgKuYg+9BVmdMc}3n`4G?bn}CTFH_qCHE;l{-nx3;*WPia1Pfp^3|!k^=uln)jpxqXZwO@nYQ{zkRR5k zuDoXGZnl^tt)7$CQPG{`^=Clr^liX+MfVOi3L|Hme7;~mL?UWbMw-0x86d!3mf?{Q zg2C}mYSlU=>dE35>+JbUdzt?wFP{iECYnaLc-uMpdY4_a(t6l&@Fv^qnh5Lw?H zymkP?>F1M~aR2c%!;My){6M2zDR5=5a7^%Q%U+GoJx9sO@}0`NNO^|ss6|g6eAK%8 z+w*W#4V&!VCbA>w%|$3c&uI94X*@hOc^4l3kxnx>3|bte#-|~RQb?}0s#5xH1A5CO z;aB)g=y}PaOo*olI(*l;GO|6)JlEb(i7`qbJi1x>qQhGU^R*0GwPUkj5h;nWXp0^P zH*22tBz6gQRN&z^l=qD1{Z|j`C`Yn;naESgbf}|OS1quW$&y3+y2|uUVQc{pV{Bk&Fn*Th zvC)f=2u+IM>coRM<*;%yNB^sUomNA^EbtLm#el6GG1e<)D=hZR}4l3g9V4MMwW zs)?JRjvGx9J>0rwb=itGH)-Z12rp*kSV10*!KreuDwVU(W*FZ6!gc)j&(}K`J1$GN zX~goPHV;5;!i;#WJTQUEJd&4_Hy3jXQ5)5zk<{)EUr1Yc`Kqb?H?CmoUc~Peo3x+> zuQuF|ZP5e1mU#i&M3ytrHvF2(Aoo1Y6VbMB0TtZZG3a<6aTrbcW(f_uxQa>5T^kv+sQ`L@L?- z)3Xy}saxC3s?1vt4WCA51|%qh0lt*H=jAH;YMd-z7Efw!|E}w5`}s=l4sP|s%F^W; zGfF*Py4~iwj%7X@M$HVcP;%>C?Q}M0%%4PG@#E`y1~-iG$hMWo+i~4}fM}vSg@Z8O z(7)Nf`BTnBORS@GTfJ7>w6-Us5Ad{kfwU+3cc^Bt(Lp-fs$&XRM2V6PZhkaRr$qM0 ze|82{c|jbDjZU4ORN*)GltM_eObyb!m&;e?%V4hC=$z*iKD1X}M{s9s)bM-8m;b(Z ztN(`l$t4mSC{Lm&4w)?b997FNu4QteWhF%BBh{t(tdn%__e!yg=vD}JG@UIj%}gtr z-7(1O$Zh%<*a;06FWsxlL^_9=og`iq&y%;8S|>hJV%-w2Lvq)kNlxNxlet-Ct7`-% zD(Z;=VZ7hlR=&NKIu%l+j|Flw0@|}q;Pbm-nV8dG+-N?YDCUtx6n?FF=on#|js$foZn4r&{YhU=Ul^W3})O3l-jp_?b(9PAY8Ye<_Cz)ZiBrdk>hp z&d=(@@}S-V{Fq+Y;ju?EN_C}<2m*sf#Q6a;*#!Q?L#&+yY7FJ5yyl){Dr8~y=3%?p zTg^_Ztwzb@+MJy5bK50ein1KQx znBUPZgkyg}Vqj}&w#Un6$-<}?@3YZf4vFC|ULdXW6Q-$Y`B~{Dh{@Ne^eZ_nqSEOp z%0r;a%)K&5U45XLnG^8DOs9|=)-VUq4;r55M`;x`ym(JH&R@Bg^K*B^6Ji_ZUd<+g z<|x~p483c+(NU({iU#zr(#C}r+C)s^!>X4i69fUgz~iBws@i$-cGAp0x(*3;j))dAPr#B20Y2mxW)#hC(q)Bk7KPC~v!I=9q!IJiPX7&b)` 
zPb=D>VpfQSx^~AKQi@4CfT0!uQ1upW>wzlKXX5jz6C#@1twj8;mt37B zIwf~kSWj74>}4Y!`_=%N+2=sbr?>&_j5$>MsG~!eK0}ZKMgbRGe8Ca;fo#=2g!!%n zX$57ttffe2e*vZHQRf%bOs)6fx$30&VIATy%oyTiN7f0m3M$-Fh;!w@YCYnHd6hpW z;}WKa1YI4V>qr7y#~>2q5&xSpyQSq>b3Oo!{mk>l2&{5JAK=v$Za4LRt|l}$xX=oFi zi_)X$x#KTst;;8^aewxaBqqlD(pgZ9iu9)`N9g8l`akat_VZfG4bW)-Ep;+p+i)GN zDueNu8f#k<)-P{5WY-ix#Pj~Q15}A}Ki!7JUqk~7ognxblcmSG2rEMMv0MdwsK5=x zVD?ZK-~<=63%;#6`~5%K&H+0V1qq_DZQHhO+fH6=+qP}nwr$(C&H0;{UDT=Wy0<1G zpc}L|wA2@Rtg=s2y1Lqd#}f{`e^DH#nso@dR>HPnoZ*q)%8> zTwTeY^IQABbF_hJ_ko1eh;k}=aau><68~xOFoQI}kJI={@c^{PLHCK}ATdR5dQ}<| z;vlL_n(3(6{*v$e$|%t01ACYgwp<;&+{Y1(|3F9(8%60NHP@uovlG8hOq1})PE)+0 zO%gChy|jJGe<*&AO402XqE_WZ(5<>W60iN*`vH!WDl=A<-h}Sp?w^(pRe!4p&amtI zOD7Xd$JEX@6?36rTIDPvAO?{MtCnc$O-a2*=%6-yJXottRrL|90scS0%aw`JgvMfR z{IUag(&l0ciXJE>CDkD!=Z-A-Th(c~J`^kzm4;_Ti<2d&CE)g{50PL=4E6ARqbyoT z!E32OTBQ4WLeNHtDqqaT;0bV^&!Hh0>%#+MUOy^6uG|-?2fa(6&FNBe)i;PEA9AG+ z&%h>EHd-KxGT6qGL{fsYq6ubfvox>)U`$G3it{yBUcelv>y*)L`x{O)C$2vWL1DJD zQk7%_NB28dDA^9uleRJa4REY`2{@@%Fbh)c*ymlC0b6&oN9Y4f!EXU}!mQ>AJfKL6 z7+GD@f>&uQ-3`KDO-TVaIMFWNTI*8pSOG(zf&lPRNo6b^41fh>Oj%jWdFah7=DRw% zx|ltR25|j3ZC_y`erz{RLaekAU(TXApHf{f41D(?r0N3Dyg=udi_O-T|)Ihg<2K9B~-Q0_|FZiET@@EkjzQuUIkfNR?_A#47e^jl|Ya8x=p4a)z42rCEX@qwJgy~l~}(j1RH zqpnym;XDuRKE3_5KlrLC-+2ADq#)@x2ZoWmCIDAnsZ{0NgoQQsqJ@xzwwhGD_;hw@ z_}Jd8HZvHZw{Hm&pP1oR-@;;7rL87jx~azMEdXwEe5I4DA$7&XgUH!b(lca3bwGAl z&zZ8BZKU_;n$A3P*|k%QEWJI0N8zfBiAePO)ZulkxluV?+2fMtohXxn=A71{-$a_T z3mUr*?2t(gN$9~ZkC%M`{)H_h)=(bMYNv8z%f%I5i=pg&^0cEZL!K7*FgaW7jy)8e zr_F)uzc8(5MTl~!qbs5ZlnN65-L<1O+(vZrStl9b<*U#%0A8p5l0DJw9-9K+f6kQ8 z({qA%@{WKQ4BNuDZve#bmvWzHtppnxT;>R04tGwPuheJwh1GMO- zIL?|!jS(VGoTwFrCugKL#D=rbPasAss44|6B^N$fvt()$q%d6Sxo*6{iPPBM6#hc> z31olgpTD4aA|JzXI<|PE&25$rWJP3G9y3)wb> z`BhqvuN3YO)z9<}!#l3zWBj|J3AA&LoZxE$?VtqVgY)y?r4? 
zL27WltqRwbLM#Aufb~tj_@l59kQ0;3=))O1?2lvB5*8X*=`dbOBN)QB?m1Rl8@aKb zc>;u1<`d2uVK<+XK`7Y@Hn6_n8yCXiqpSAcJDn##^w+GJq88{yHbN%{j~QbB80!iw zk$B=wfn$in-L3jJ>jSk+9MvJNA@nVbdh!n&tx$M|02F$_AJA{LK!FU^{Q=8h-EGRP zXCn%vO?RLtH21w`YoU`YS)D{7czNyp#fxHt`uUow7v)H)ND$P4jC{LEc)9nvjiDJf z%sdx^(~D!Uh%i{1K$!Wah;m=nRU8LRbOX1R=+M!D7C*afIEB98kMD?VQX^Ifr>`#S zPO@sXMa@PA{9Mo*74%3Y^&H=dO19Bu6;=Q=ZXH4u@x?JN_P}TkZez&<%Zg+m>r3>; zKifazf%On~yUbU5N)I7=1!vh?1gKFeEB&Bb`lAP?<^Gi3ISV-EJtYya&hkx#xp8~8 zdXuXO;_eVGluSGFRA_5WfT{s?j$JF~Xa{YPG$>fR^#pVu(el|0* z%0bnm5FO+-b3$VyJ*$$aE!cV8L3nJ$v0!MyQi|fsgkxMSEQmDr-KA$T+dd4k(5k#I zbseJ*Ln|Uo0SpL4xnAF~!s)AvFzcNLInk#W9?G-NSTy7PGwH%~pAwyvl%dZVlx+Df z#1G+okNJUj`u5=-PkGVX<_O6td}rYS=Lhwm@}adT@Z7sYBx!Th_se%uQ+payzETH? zUrr`+bo)!Fh&E?P!)?D_{BkKaN!?)QDwSDvpp1Vo}xAi90TKy{#-T#Tk7w0EawQT#eAz;RypGrwTej zq5;J4u@v~Sey6(AxKc3xqvJnq_y821#0N6f#-lf(8oJ!M8DRTP|nsWN%0qM51`aApqx{}Lv#G-c|kno%t z|8;ME%Wc+6q>?~BMiUl|VpC_ls*Vh0mI&FdLqX=**QuyzGZENr2K9{YgR8{8-*Y)S zna9k8KymjK!}8>MgD}Hu@>(knf?eb9uZ^)z=K(k5tn$ci07QOjuzU?m`P{9A>HxRq z34fOQP@9)nBX8Q5+ry>{ESXbZSDt^-BjfP+mKke&_NyDt5oSX|*Ie#zG+paOWL7ik z)Kqo!J8;0ZyQnZCAl3q13{558pqdA#$ZQJLflrFT=Uu~zOsxO(Ar6QmZo%+?dN5!h z6G%~Gz~Z`GV2?@rXLICO4qYjb9kV?Yn|5Df8BK0S4C}wr%kFsp8^kmeTu%^%t97LP z_cY1Avk#b!w3Ema(6)%?Aj5L#XE}me0vxGo)8&hX$DdEy+oztOy$5)8C0nE#zznXoXjcoc2Zg|IYuB z*l3&^q`9{VHMfU_kN?5|kT(|95x*DVPp1CpeKpT)*>Ml#1CFGPL?8ZxvR2`7Vbt{Z zgBNi0al3evnWhB>EMHAR$^l})o>qTgGecwG9jUhnx>R59R>0 z;BRG|YYRyICg@%lerAnN)fqhGUto*1n7a6M1<(H!X80eUyC`{Vp{D;%;EQ2*dl@;+ zpctux6+-6GGK(?YtqNKU^&j`VzJxvf0N;rKu(YZJR*z=@3fs~X&?{C>7bN%TN1EP+ zV~3(L@HjxUHfBvIx-3h^lHMO8y|>t~I>%&WG)~y>TcYdY#G;8YTr<2KgE3$04p>w@ zDoXfjGf<}L@mBYIRehD#!R`{g z{XxbBq27bYXMkCz8G&js&+ypBRpY+2gTHWP--Y%6Gu5%QZPQXpx<%VSk?5C)M&Vs_ zpRiz#12RH`U;D1#iab0x^eLlp=tdgBo{0>aufy2xSp}<>)*sG28c3Lk9VMTR(DOE> zd2b6QCs8hyyXd#cQ(tCUsod6XLMo|A{FNnK9yfpb=Y9 z_F)+Pc@cTiGeo70;Cw8LwCvn}JPO7E(fIBnE0?ybz5F8zB%@4re$2xApz0S(csC#W z-1J6YB^QiH7@MKmBnFKc9&+EVt*CJTq*A8o=)!&6gm0|Y^YMVGSD!<3&H2DL#^sSz 
zcxcJc19$#XYb9bK>BIa3;FLVOA%!IK*QOhgu9@_ww(3c{5i;tb6u;_;xOsz{RjGNLRG{6J z_1E7EEbgc-sl)X4b-Yjf@4j8j`=|aof$si`@$;l`^W1TIPI-!nf^=7$$5lz_RIIe` z2zR~xH`mn@fZh!c*#7!Ouac5H^D+x$H9p=&r8Ddz>OR*H&_Xu;yOe~0V*xGqxb3MT zOZ2`q6YLfpLYWF*65)uNj$0rj{IRib^dDNMAa%u<4kqP$1L0}pEow(Z;^n)-K|>e+ zHg0@$n%{3jarS5z&Fcc%mSg@;Q`UoG;r`oEo+xVhB$7iq{jRtE)M1W`#D?N_dXMA{ z!Z2vgkDxGHM%BN2M5xLI^PJE%K-y4Uumqq%Cj`ww{{T|IZncsz+ zUXZWdI^w{HXsM+3T~t(&E%HPS*sD4Yz@EZTH-s5Zjf;m1fYPm&%uzo zdXyV;w9bXi^Vx~D&Sp#5@^gMW(>3}HquD6~3(`asJNmZ5@KSTi9SQwYovH z)+~;X(J&`wNzO03FM5#^->zi&b>+K*nqsq-hcOD8%xl)0mEdiI9erogmPGj0(1Y$} zh432MR%mtVxLh>kMzijd6~>;&VG2tQYi+a(#=e_WTGi~9J-SCAyUC)a08d+g5^sTA zy83CV4?Coy`oNDpR%Iu1!JAX?c@;@$yWW(RA+B56nE(q9v-6o-095gHIJG&s3Q$<& zd2ejKO0R{5g7A^%0(6eZq3Dau1u}N5f1OhF)V;{GLYT*ejBP%=^mJ<)l4_zepf7`U zI`%=RGs)nQXXk(6Mqt8H-myB!xWF(pg4paMAm%czP5|U%%BDN?12Y08Y_tF_(<4-HyD(* zbR{G_9ZE?(^4Ey%(-x>lY4dN&E@|4_niuH~KUW+1G|5o>4NwRF(lkn9{o312kbo*F z2a5EP_KM1*?3ZK@T1870*uMuP!~T?ridtd$1?HuShKHyw;q&Hp**uEU{u2G{n1J8| z!n5Tt1j(#7=o++mnP-L}_TbBJhSoVMRQjj;%x`ucv#Z&5@k)86K|Xw0-fhTeIN3#x zzNVDL0+rWec2F*rzY47$kwIh!l^gJ_kqq& z(Ak~GX}e+o%CX(We+-E4tCzYxA&5~fK&ko6FgQAF#@cWdih6jT9ByP12J2H94U5)3dAKE28){A8k!%wIdmG*jdI(@KZZYsJ^o9_xo#%S+0fklr@TAa1;x z71_=y5Lmp_B+o0p@qZH*cS|euQz~3Bg#uI8f{@Ab*}7ils1q-YK5WrgI2na$S?bZ& zhccwIHI|IHhrQ(y&_4)6j579}-F38vFhDF0EPz;WBzN<-o3U2u{#ZzNa~JJN!+B6f zur1u;2l(t*q%uh=n_PE64jzuxb2?C;hcwLz8uSW2P@hLPy8%<6=GC65iIG3>hJCJZ z>g(EhqCW>P>ukO3zSj-iPFz5zf3k$PgM|Mc5_m(HRgkt_q~Qu}mI^E|HMmPi%`q8? 
z&0b)euPRr4D4|fR*^9WTW=+h0e~}U1yf;8EwMLcw0p^;ZEF`MDjH8ykOGae~%-W;f zC1TIn@Ibgof6|QF+1?S*I;5E)3wU$bE6V)bAcS{~aDRSndc>p*TyNwUYX!t}r%!(9 z4w$Y5YVm0{dIZ}`&|*h-iRnTB2OCwR+MNu?-1L0d6`ml)8b{53{LqFkkOttTp{Sgc zc65<)pO@d%2AwMe&j*&}}vf`vYxVn0@{59~6WkzOv6ge#hvGL54qwJY)>KG@k43AE zQbywOAz^;wGSm+KmEKb?Q$}|UealQ=c_4g^KF`IUlt7btIRR8C%XP#(gi<)&||`qgBce+y2Sc7xL2NAVs87%2k840YE= zfAbssa-#x2^%+oTlAMSzSFH@>3Q{J*dr7nVCR&R*1Ku}@xS!JVL^twHY2RmcnI(fK z98J;gN`{fa`il{a6mxU5I`H78JX-**>_2|-uM|QiBEBO8kfs{byBsn?cjbLUgbmi> zUmAE=EJF#6lpm+m{=f78*@+MqVh8--KZ=EbC^Ieah{}E)%kFL@y!#IMs{dk41a>WA z1bS>Y4?z4NSebLJDF2?v^{Qls-_m%h(IG}>p9|5>q3y2u$^+)```>YefEIO0akfjh zO-B8{^(l_^E*`0;XpcoVKmGU)i~>7e27~^X{Dibsxaf4!N)eON_|z| zdwvCI%9xR6Vpux=U-S)nfp`ieGT28?2uz!nVDJ+w!d$}`#db$z$F3g)TXj}RKC^+x z6Y-t3UB3}0UBAp_50&3~yugmR2M3)+b6m?g$V2jEW4Xm@G)|trmiDrqAk{oyM7*@Q z@wbsPJzzhcwH35$Z*r?s$KjX&JiCBK@gHOn8y*gcI-^Gcc2G?DU>DG@SJnbWlq}B; zl)dJ(mr67njPL-JK?8>okzP&Q*2^AQ@y5r1njEeU=Bn=Nm*~(#M6>cBigO%23Y8H` z`pF8)z4|~}E^@Rp`j@!s*BA8q8ShK`jI)0((q?e{rS~`bR`jV5;M=|us=iQ|XAeLI zFr7aLm(-(v%zeBxBVBCcVpKlzLoIvq4tx3AyDDOV)bYeHuCDnE?0sKuNnLn;TgcO& zIm_TnSY%kWTxKyS3ZWZH+8+@6yK$;{0wRWEg+B3h(rUr;o*B!EVaHV3HUwkbuhKmv zj_SPMcg^fO@`=N&ud8YSww$=QLuUJg?LIKeoZMb%-T!J{ny-s}?QsKYhd%;NVEwG~ zb#O&a{MIEgsV7{3x-3B450>y16-*nlUoB_QdvBjD2FRyFnMeZNakV9VNk!AJk2sco z31^=|mxSGvQpZtSmLikG+UfloIPG`yQghlj^fNyTx1Imkpbf-yCx(E6K%~oJuUbUj z>|YF4fJj__xq%2LzM9s;Q54?GY&qPokw^5nc|ljGBCRw=g?neegfsiY(d0o1q^SIM z{ZP8P{5v=QsZz80JW2RMcvPVW>{RW7YI|Is zG{{ro@mk+S6rz!Z;?n27%KI6TP+aBz{gU+V*Oa?T(?fp2pHIMY#mB1gI z`o$7)>MhzY;@Da1{-?XglmT%Pt}%$WCY>%fG-mnWD(a+u)a(<*6u*z0>eQ864*caw zZiW#+HH&)BaY(4CN)KH-GW2u*V1W9k zKxP#*`migCqT4R&TvBSjQiw*dCU2;E#Qd_F6E3=+pzrJZF9G?h<4Nd1X!E2_P^TXp z_TeTOd7N}+?KV8I<2(QKs?AKo=XT_9Ly0jgY#&IChN}&XrZkeZ$&~>NlJnX#^;_k2 zk!55zSx+Kw^`Jq(3@u8}w0-hmre3Oj{tfXT zsX^hkRa4n^*fRY7GD{d-F0(siNwHG^2pt;#TOmD9c4`9uq9XbyObGN$JQ|Mi2)C0XZ zop_uCb1nn(mD-{UpSCqFOPUciemGfZv&p&(ZTeqb_S@emqv=_UHHHM`H!#GRV3+tL zC9_)j{mTD9!u_avB1iUnt>kdcvZq$T)_Ww9!c87+U1z57s5t0aMN(~OADjkr5i=Zi 
z7Z*+JO5%~GmK55abJT*jCs~hmMrdW)df;kBVof_zJJ{!&Y1pnHVRCbeNHOl>Oajn> zdj6A&07lZpfzFpL*0T0SFiEJ%!~`H}yP|^i4?I)}O)=(4b!*$l*3&1nGkB*l3_ExqlzE{a6Ift*qZ`aLZA*SXI)H;^T~ zey%u1B1%HPMr0LTV==85glBEVxd0f|^=ka%-vv00%&GpO5J+fh7{5;nY894nouGCA zjE+Jv2~Ngf4T_xlDHFGa7a@ufYOX40XoxjGyEGqv<(hJQQO1~!iz9AF^E7c& z5a$3_9r27ISi?=Wg63U$SKPRO zcdbmdrhyscX4faWBA21qp$7_5MHoIoM*qO{`z!&Xi7cCJIM05-_wjEvc{Z_{VR-*7 z7;1<$awsM9&5Lt~?rJYfpfKXBq*m!+! zDb60@rMCskuKw$OZ0619d~Gd2FJi+HjO4mvx#=IiQB<`|&Mrkwv&U2aLwA+Hf29vh0*0N}$%=lxn^tvVnBoL|E zhO^fSYvk8-_G(6?4w9Xuo@nc-c~uxIQG#Rbp3W@k&s?QZ1$D>|a8LoXBSQHxGcmI- zcd|p;?XtN!!r$jRM#Rqnl093ZI)H795Ck`^M`)~pQ2I;@zAZdlznY?x-#6=NFwD^B zmv`Kxg|lf8e-kjCKa#qMLT(K%oBM z0Xh*EjMIh~gqaQ&9zow{|^P zqNnP&(rVOWKe@ASgz{v%S8a;17lJBq+w53~e%5x#Gw8d1UY^HNh)A_!{HLx74(m=3 zLxWI49WB1yLzFjtT!ZY_l}yf){sAarVnfgFi5qXg3(LE42-eVC>8w;4?e6=1NncPE zeC75BQoZT0$=El4OwvKO!1*p$6h==;FMm_cUg#*ylto*9MAra9+ttVxvtaflf5QQI zS$fO3m?5>FC=Vu2>pf3TKG{fB?#{FwT_u1#?EC-D|B$I#_}6+=u71qgu9o`pzf!6H zNt#(!-~sKFR%4CD<{WX?Fr-S1x-Y|n<*DR1Btogi^3Pz0jqVn89kE8Z)>7jeEXCFo zxp-?9Xr(W`?%n<0>CA@VsC0$pOq-%L6JP#NTfppXZ&zYTkvVPmYOxVTzkT+&*vE-= z!WvjzU?g*Z)80XRslc#)GHb)k)*`lB6GE19K-{Znljb8~;s(=I5hJ`~vPv5YvZ#?n?On!uZ3-=-+CB~kVcWag1BmN z|B!|8wX|F7B})cIVFrVx$Ss3D&0-MyPXrN&Sr1(^hWpQ1BbCbui0Q zZELT4*BjS0xXMw-`s=5LlG-%URop(nzG!%H+*0zjABmRwifly;T;!)v^I7vYyjdSZ z!BYiK^06pmqg>-c;h{lO=%Jy>#*K>mpKaMW>U24a(z)SzsPB8`JHp$QAlvAVqIq1M zIL8duZAzf=i2z3|8-B?>yW&pbx|17_AHMQ}Ogt%{Fn|GuV!W@G3q3@bW5b%8`Hv8S zuuN#4imS!q-b7YqlJPt2KBQodQ5-1pN4kMGm5#q`+;-XbaOD}Od2KMxDOHm=( zH?`#~%ZvCZ+<&E$!xNWJ;V>E|ZodKU+Tu8R9)Ttj82g~Db#_gOmopPn&0rlunE17) zZj07C1$o_8H9!^a=TB?4yro4*;W`h{8yxy&f5{}NOAzu*_bsp^XT@+EVL4S?^{V

mc4G9GHa zN22GW5dwtg^%R!5Malv{K76r-k6Ec1b7WF3$Nh)hqpZ0N1@<<>qQ|%sA{f{C@6v-P zdNcCHJ0Z&qwI$jl)jARB4*SRf)(8Y5xPB@@qRFT@8JTqo3$O?=!UaH*CEv1iiWfLA z0g##DAcH6I&1yf~u$sNn@~-lX_sm;BLNPJ4<+}@2*_P1RS(Mn6M(yA`=EAcwn7s-< z5X(%&MENU5XcB+nak}RY>aZmaO4iBoE^ZQ3MttpcQoN;6e}@drbgXm+!t2exSjjhP zl3vjr&98d`$xyTW$9RDO!WlX%>-oF}Al6e)vg2oJ+Ywe-9$qPDFNa}W*3m{X;W%)!OzQ8~ zidODue)4y0JdD@{yNNJ7JD%{w3gY<_1Su?#6~n{wcQ&AXtvDf_5i+bl9;Gk<7S0kr zW}$7`#2ax<@M=+}=FC_V_>?Sr@pj3cyP2 zu>=Pwaqb+uuc}`nn5D^Zu6t2#n-XjGk14G}^FgAeVA>(kOaGy5{vm(}clqmIhK?6V zSi`!tGEm(sKcOAMr~ht4*lAd;BK(Y;JKSIh*49~GZR>yQoXUf+RadCaKTy8IV=*mP z_v*UBl&XuP(Y~B78KQj?T^@PQHC8(&GvH9+Cx7#CN&s|M1IV`9!y9dXm4kXq5MK&e zLw+ASQ?|Nyz|wuuTj)v#dhvoCsWF=F`V1Nb?{U^2*9UE*meAO;DW-P2D2OMpEe(`Y zQ|m06c&-FC4pcl5Q?ERKzds?IZB=4gyu69F)a_v=@!caYv781NLi}=9|K$Aq703N9 zS0{`R^{!4qUjal8Nj@f8F)r$(7xYYUe|+hni}qf`P>s z(OWWl6?A@j`wWvCl;TFl%=g)bi6kdOQ37z@s*4kaX}p2QYyz-ry@=G(|1AjFN&9P? zgd~hgNwW$t3xcSFcs_56UYw9J@D1}HeT|+8UmH1@Csqpz?dlEeL@c%AeW$J% za_3u2o0`c{*RMrWFPc3D_U}`-8+@PovU#h`*)Bq3aSLJ$;t0Ahq-7F2N|rtTC)%5z z6dasrWI;sB)zaqA=WsZQaUQLr=rAAX_NMv~%r2bb1Ak3nK{0^%1Oer+X-;QK!56>? z13+M1GV$t`nuch!)uW@giOqS))GhlQZ?vV=JQ<9`RIoFR73){c;dUF!qry5Zh(yQC zf*LY=96yHv@_B+?X6Gyi!j~ti36zrX;IsQ?Pc79!q>lyBMXoO8r=t!Z!wyVOYT7IT zbn2W$RR7+}3?$4*KLNg16wNIP;c%OK*4 zA&zx5E7`5eSD0-3*)RfH`IwA&Aes?VM)HI{9g)XV+~Y6QwN9iqMLrd>U-eL8erKh0TZu6=cr{2av#T-7LX zOiZTb(btRa;(Z1fI(-{2jFOd2TL4wAGvhq#YRIB!jEg7^0vHZ$ci#wME4No|Q&S0k z-bsNS$MEDccmT&uuwd-BvD*rXqk0VH8poaN$0mN2fb;J%Lfz{=^O+)L+B-|gc${K_ zue}8%e&7iqpH?Q#y$Y2{7&z4#%O=$mU)gA9c@;-ADJgCP{1b<~=L@Hw0A2VA`WY)F z$3YRqy87Eb!wvrL4s$`s_8AfMVTRj7f~>Vot62kITmL+EkK-tyzl03{&z*CZ^c5a~ z)_|4mO5@OUQ6*3L8}jaI*md~N!ThZ_*4QN{8{W%*c0YiLk-M}!h)~jl+$;V6o&T-N z;9>1kuufvL>15#|#>}P`oj(fU(MEUOEoI)ELG}@I#-S8#6Ke9ZURAT_n2~dpi=NE(P@h$uFM|D&Zgs(Yz?E_>lImpBNjn`|TrD*{=noyLkmD#u z^W~iem=?01O%@{7B)XUOtWODVHiQoo<_HQ|s7H#5!cPbbBYD?yi;{6~l$V@4*1G;c z$YdtVfq88NM*a7B?HH8L?JxhtsuZe9V#bUQk2pTR=Y}yzzJ-zV5`L*i89T<70+BN! 
zlkcp&(lT}9;ui>JV0skIerAZ(t{x)6Pjq7si0({zZo6*li5Uzpg;V{`ptVLqvB3k| zd$zV5s)xN*m9KY)+S+tri6Xdpae{Sq4XX}V!`Eh?4X(p!I=fiPK&3Qc3Hr>hv+a3xQw^;6JEKFy&tIa5iI zrry$XeK6kq!xCfZb6EpK$j_wQnA|GIstr8I7O5>Iw{2RiX`Z--y?J?E3X;Z+mp- zYa!4madYSGajLHp&_&!Wu+l7&=nQ(MqY{_7{Y+Qtik>V}KC+#^$u220F!-mbknA?+eu=ymR+Z?JSkITX~%Fpt@3b1I{HPk%7LYEJ+ zR1vHppuc}Y-F}UB^TkG-!eF?|p)qP5yl1#coRsO9lb4v1l_ENo0CF@HoFUcPbyxQ! zM}-Cm0kv7Ad7m6R?)`B{pU&SVr66k1Tr073RbymPj9tw;% z$I8tA%7tiilCc;$Y&_NKhsLr->%_h1!P)?ve`C#T zqxFBk?j(_M0uTP6uAd^fpf==$hYofId9)hb;YeLVA}Qa1vU&_j|!uxtJiqMOiTnG+U|YU8=|;!E^;aUSMkMVPeu(gb|gVMHr(_4(1bF#Q@)I z%U6x84064KT_+qx8&glx`G9CYA87RTO>d&|Cd{ z1u+4nyC!uT*z59brjeX9tpp=a<)N}UjI{|;SUpvKk{*{9PEAI5=zp^bycG%_@&Coh zxN7udyX`sERH{hE!N;wWA`Ei|?;ki919k zeNXV@A7#9$XQj70dD_U1sp3gA6ly5EoQk$l^e=O0X0O9Q5Ot{N#5 zec%Kw1&K~wT6#``sVP!|PU}bQNDOX(hx~Ot*@eU^)bt-9eD$oY8~g+*-kqTK zfa)Mr=wTQw_~@qywYuryxsdN2bIh>^CT?`Tp~K^y+zWuiwW^!3?bMrm<{2`{d>BNc zYn14&-~KW@?$Nde;8N(OCJ5a}?7H5~V3(*+_{$_m1ER@NOa8Dxn1vrug$-<5Q3Rka zZlNR`%HCaEHF-JS_zy+FpbsAY#T)IZKP*MGd4po%W}VB9xR!wyD@g7fy-t=;ji40h z;{n>`)~Lg{huGQ=miTIzL-igXM}+=@5?{udRC=!Py!*I;wRRC8j4%+M6NxXX27O4u zh=joBnp}1{bWUtMl8?<;(F;oxb1s%suzEJ*OR zE#_Kgz_1meWC0NxkLG!}KHLTo@KeUaz9Au4p6>bwEi$Hog3DPiOY4}!&t7jtf*7Pm zlTmTO%~l^4yrmqz028LD*N&A%x3?S0r#U|u2F(0a0)COkiE{cPWxE=kqn3!7%VYte zM`#s<`pA!(AfXe5I-O&HO+WHFM5GQ8OK~x<4AZ& zmIe7>tBx-D0TR&=1uEHs-G~IrQv+FOoH>qUHm$Czu7@vlgRX_mj!xGML;QqqWa%x0 z!Nx5GQc63Ag2CTD>b*EuYLR3GrPCb!V4KE_rhteaZT~H$UZ6sj4w7cTA>`L=rM#ZU zaS^35hxg}%wkt16dv0~{>1!f_{I08Nb#XRM)IKdQ#Scxjxz~!)zy?o8Bbk9qrWI>l zg!bVUTcvHwX;-6ABL(>16A%La2s0;@Q=j654kw88p4we~bI|L%{5UA)&i4dG^{=4d z0|;9^?_sra?2A|nP@MU%+_nC#jX(mO4S)0chA}!Bq4cqfpp&k_O36vH2@7r}SaI1| zeS?AZYLGZFC7=ZN*P-tYJ#gEW029W~!1&E%bsAM?mJUY!5(x&@(WdPZ&3pfjPXiib zf45iN@%d|rCgj8&#ZX5z#c|JQS;EH@6ZpDCmU!Wf%{0nA5ogQ;R`;)$BId6pUL{Bk{ifqWz|fKGb9oO(o9d-pd=NT==iN zpkTbKc7M@#QIhPZ1!C+u`AfjkIL^4XQmOIse8%ThjWV;MHi=?ye_G`NAkgA*{0TcO zrjAbYRLZw!^ks4DoWSaqyLNW(BmeZ@f(Zq7wd<52AuTH+K$mEPO|`;_POO>*^z9or 
z2(y%^5L=CI^T&7L|2zM;NsE6>e3xxfZI#qXp9S*F-Xy1wV0?U)QC3&nh{A}|(6e{j z$uIBm#~c+vc@#UbOPEza3oH_ECR!qQJJj%26?aMmNLY^5pOSUU`4Tx9sokB=-7uYX z8L7eOSEl}QGG!tA?2LugatF!E1*%7(0{rESAMb?wfZBbaU+es2yf(ccGp?7ahsw3Z5%FWp$Dm#s`UGNL@F zI2pDE`I<#K1KA*IN!&%mCvnm*+7sy+neX$_kFA33+Gj=|rHRP7C7iBlQMdX?b$8G0 z>NZO$_F+;c&@&OK?dv)b*BZHQLTfITo>8y=nlJxzEYP!H1(6+}`7R|8l!PC75P>&} z=QN~sS6{>jnSn8ffZ&x9soo*&lrbOXXj>Ny=SWr7H%25AXMesR>N@>{oOvbkN(p;P zl<+hFN~-5mdRcrbXC6~RUBq^#(@vU>ZQRE?V*k@8=)M@_Ibsn~CB5W~l{k=#v_wU( z!wqcTwS2%5(P$o%EtA(C%LN$ddH2>ep(EQr$s(_3Y1gSw(vQBsnm?nxKd!i|Y$3kC zcA3x8#RF^q!Y;;wFQN8H)eVh5diodPd5tXYfkosu)C>@UKEgdNfLirxd8H4LG!wQ> zf`Ib#y2Z(~WrxXUtUR-Y#y8idBvdN6C8y%ZEBRY$$Ih#dx5v6FFI0>EFIsaiUvo?{V;0 zc>2l=Y|LQAe{QQ!O)GzR z#|8p+FHMq1aT^Q#?^_jZM%!WF%A0yeV=-S2TXDBr-g?b%O-D}y6}gbmZGt4e+irK1 z~e1&r#>qtOpB8hEl4|P!WhiJ2ah&K=?7a5oAr?8C9lHr0u1YPmZ>(gfwPQ zM?zrXkq%uXY}B$CxWl?aRU`-zv0??OP6zs?#?+5@msNCkue)~~dyqwh3GDchH;0TQ zb3k(y$w^7L+u$Kj?u_4oA@yhvrpW4J+XgEC^|--lI~#WyJ}kD6f;*au4;Q`{vqqE_ zsNocrk6iS4?z>oJBE*Wo3if+s+3x;~j6XA}e8nzDw28SFDea0L{=Lw=3WdJu{T{!E z_mcEBR51;MY@>q{Fh=9+Pw8&D*90rvn;c+6S%)bH?EZ5Oh}S$o$d$2>r_)t>PKQFCV6 z(ZJK2=>gx4Jzn4s8wZ>EYz(;@UFivIlY2jKP@Lvgg5nAH$4{)$;O;(?*DBqM3n`&o zcp0*4K1s0H<0yD~NgDrjt6-lLGmk|+#E!^8`nicMTOOeC_&&T0t#9jH(9+r)J0YkQ z@YRw2RwjGC(sT}Y89h0Lz4rWzD51NggVtM~y zEMmb`5mU3##+JX&dI8vZMjud7dR zvOfn{UOTiLgw+D}+YcB^vRro)M-fF!`=hkot&m8AwNdHLV3{_?rSk#QW^A6af8Gv8 zpXcu zx%E{%KzmeebJ%}XE=oxB0Y70&wwyedP2fw^|3}+7DAu8EQ8a4QG}ei2+jete+qP}n zwrx9&ZQHi}?oYh0*mLcPL0VrMOihjC)A9xq{^vrBKaSS5=f?Z%-8LU&3*H78Mk2b$J4Ai#JbKWET(}e ze2;0K!~8+y;ZY0jN>#;s3Ba+mdN6!lsPr|z^k@>bzw2mq0&)euz&D3Bg+sKlH| zpw4`r>Ue+1Qmi5fR2wqtNtj86NVAGyn`sLplEX?=hkUadctuz-wM7mIqf&7e{FX&h z#@)IU13USWd2^wdgI*Ks<7XbL^Xp%xk*ZS*4C1k7Mi#4`cvN3?xx*-L?W;c-``mv0 zLQ_k72qsY*in)yOI2pqG*Y+(rH$sHty`vA{P^yHS_xsZwQ-KK0*_Z>C^zyelgF+}1 zD-T1VPa(Hs#}TW@+CX0HoUB6nzG&UxX*%HrNBKJXdojyCwWAAI;aW}SPovV;2(@KZew|$YVk&d-bC)T&M7cwT zOjiKgF~@w+|F|;S7R5mhTQd0_BK<3cH@PO#q2d&*vDU$0i`EoJu)e{}qA!MMbCj93 
zPbRb;;^s)ltc+5mGISkk#UZfwOW2890cL_X33ER{&cXF)fsn)Qd1`g|G}9x}Bm346 ztYE7#-XXi{I)rMyJ19fm#lTMQSXUaP>bR%0zgy`L5DYrhXpZw}ibujmGHT+9=igNsR0r#N{XbO$4ZT8@`Gx8_W8Tr^h!n&oG)mn0G4K13;Gi50P)}sNBW=MKp)3A zmn|e}H16thvMm1g9|!K%6kYL|hmDYljOFaK4*#FnYN8cQlKP9z=j`FTq}qqI~d&w`5A1KxAo^f66MTR?iA zfkh_WctcOtI*)& zFQMVT=aV1%P$2$%njxPK4mG6KMUBhhDmdTyh@`*>otXp}I>tHtY^%=-3vfoE?&)@3 zlb)5W&pFbyefbHKuDbC&=J_6cs&$=Hs~Q?wE|)G%DV3+}UKH3xeiqcu0+et;HDEp< zOfMw){;`>$p%4AK_)LU{>EP0*e{xBdm~vd2QH|D6^ixzk9%Mr`MRiOu}qhz$TfQU_Gy_}BFp^{QxMR)8R-?T)6a4<>GbIQ^tNqQlmMq-cauSw!6dLT-x=|o` zQ#D7kq?*O-4>2oHbGff@K6d&E?YGf=PVQl@XNt!D8)T4K27x#V^AHoU6%V+F+aL|_ zt>rf$+bAJjQdHXO5Qt{GAIp6W0+uZ+^I?I7M6hzyD$@5elmAvge98|=5WZPVjeL;^ ze~l4CrW37}bDl33unQXO7boV>ndUlS{^DHGdUzE?q@NA+PN+sbsG~DtI(AnHlUPJI z&5BR{TAswrAAP;qM9`jG#~n_#1gvJp{}6 z9mf2N(S4>=wDtdZ(C<=#bwT4*%1qGZ8(<9!+;%in=p8Xz)}-1x(!Bp6yQG3n zOlGowBf)niAe)dY(}lr5g37W!nZPU^;QOAJHA2pc!COo*zEZYmya43I;#Z(o*AX*s zFlv91h)nEIo2I~iFI{$Pvl12Ul(VTcxaViIeqBaPUw_%&pA=|p`N%0JH2*7UjJdUq zrP(bIOr$X^V$tX#9l{nG|0P9nAa!w%BT|X1hbm*HiUt2w#c)7ZOdNhd=`FSPI3OT~ z))ce2_>nS@QyYJxKzuz;?ij=8FY52Ju?Fqoo2oshsLRlz+_QO0Y%xrQdCjKp?X2P& z7Un?L9v0Z2QGKxx96Crpk?_|AkoYZBJojo~VGuhaU0d_sOE#v<@5@M3BOuI60DGF| z#<9EBTeju>S3HXy4(nX*^K1(UZ**o@g*(dN;h)k`-EotI(0P+`q=T(JLf!9xwb38- z`Uwbh{GsVUT9~$N$)=uJS)*4oU8yjTk6z&W!!?R($oA|*ITB&mS+}L-f&XlLnZ;;| zBvW&M3TJm$Z9f;7ORAFS*hM?GC46L*n{0CgKqM0d-!6HZ>Uf4wlK9?WWj#ik;0 zxg4XgEiDb0)hXdA7uTkJ%AbH}qEUhfeO_}y%Hn>QQv!ikaE_BBdTzTG8^JR-N_;JM z%>l&0O%dla9=Seaxn2(_N{Co2tb8*ZRz%)|dTR0v(eUp|QXZ#~2+Ov35ZW~~5^hDe z&-FiMpb2+8_S>r2VP@JQx-pu)=XzNLk$TWYGjEfymxxLyc))B+(xvGB64*-1kRMUl z_Nq2(K=yg&>09Ny33d>&2-l`I&gkfib4lJq0G2%$BLlruSio_4jauExkcalI?v(Sb z62NZWO+VU@(u)$n0qA&ru_!(8$QS3)IgL1gu^WM^kEa`YyIXZ_eaH)ILHcp>)NJst zYcF@PE+XYYH$dca`b-$mca7GfnhFg5??T${=KKc6v=izHdWmqw8yg-myz$HgJNxJ= z4=gbo@&+ETkMPzpG!<>=X+kI|pIo4|3LT`ldTH)ptKu`aSPvU)GNU2ii;xg`AGwOs z33q9h+LJRt9t?=j7P38?9ozTsY}yW8XEDFEv&9<1h*>ajdf%CE5ZY;BP0n5qbw#Wc zl4;6?!`h$)Sg`f1f)REcgxj_-Xf=sfu-63w+x2%7#$PimJ 
zmX(T~oWDA{f}p`x-c{0({t7Ep%KoDq*m_$st#IN|-G^1Dh+$t!#)3nB^zDxl)FCZR z5vrIei2--W4Z}R!kYePQtwbS*_UDmdka}F00F^s6yU|(-uS)RbP5k^m3ejWp9&&@g zs9qI$9?n9$XZ5jxyKaSHs&XYf%Tld?#$wZ=-c&tGW|o;9c0;Jq*F0Fzdk58*z`(`4_9eQMSkd01M@#0GRqEaJ@^FN>_ zxu`0xX!+U|e&rYkpTtf)a3^Nwj09;xiMHjM$m3OmDvS6|81_a#t=ls)pQE4zPp#0- zh%z{fxt5P$wRVbM?~Q`TB}GngHkol0J_KD$1dX`R)QlqZgQ}N90?wKi?Jt#Q8>)Hh z(cYdE7pk@|=5<(mR2)TUVfp&1u*g*dIe~M?SRaanC63z~H`82cj$BSZ&{xf(Uib_) z3HdZqm!rB$-TTPsstDD0T>h%^M7-^IlDaKv=^Szf{1fiEx1giSD^M1`L)2Npp|6Hx z!-Il{Hi?m96tH!IV2$hem~?!XCV}G9Hz5C8@Tv-))#bA)<88dB3}NQ&-Pm@Og`(Dk z!`33mAcorFi&wV0d|jCgrKA(h{@*_)jqVrP)IcIst$=qS#KjO66Ewfx{9m`FwT#wZ zLw8siI^b|cf*KW0N9i#RKCX-NlC>P8gyC3V+t1*(U2z^29Dm6KAR5~-hfz>;#=%zj zwS7N5(5IB!{(8;p)6@mFcc2-uwO4UVAg<@`@O)naedxxK`rlV z2Ch&`+i04;5cp3-otUa~L{$(jz*p=?t(Rf3n5=w>g-v8+Z!%vC72f+r{I`~|3n-S} zk*LioT+g}&!1;`WG#5>AY>YP+;{7zC$6G6)^f=9m(WP%8pucr&^(T*YprTf}jAnT2-x5?xg2u$~%f&$PF)pM&*?hYg#x9zj zaejfztJq+JU&FX+IC%>K@sgYor7^5DWJ|iYeZW;VDQ&nm zjAkC&``D+sexpTVnAuAa6Qj6e0&h_54so=A9~eUHbOlK-!D~dAZRV&BxX^=ySpYp( zvZb&mr|`*9oa7dAdpLXn79`4WVapPVD=jpQ31JVLWb1vAn~$ufnCzTa6#B)#drc!1MLR1CjX?Srx3K=-`rk>7qmOC+K#I!xZQ{7R zJJ90G;L?1=u>;Pb$iTNFWGVi!9}aaY)yRH`%F~V-H1gLmnCReW6hAH_z?XV?Lk!kwfs?C!>4FPgApW>N9j{jAIH(CxT7qUes5ytN=#(K@IEF5i)Sf* zM%=kgyT8d83Clv?orUX&RXa9WU8^ZH*Jz>8n&cdbSDV1>R&n@qs~@2c(FAG5+<#u<5;9olV-5+2nDV8&k1d@^sRB@%&CM{C>+{lZWobx%{G@JVizMkHluN5;w9;7$Ay zG@VOuHfSu^k}Jz_+f<*;1hrGs4r%6VEZ1-5zL(kt()x>xVUvDyn}Z`b06UFKu{x)q zw>7cL*~;Ad2D5?4_VvDclMT&cI*t(L$P33F|L(xOh+}=46KZ&4z(yzFcaT3zL2hv$ zo0m1_gxwr|%QVpWkc`LMwko(0Cq zU`U)4ES2SnP#i6AQL0QKmaTP%F1~I2-&Q*4>D1Gt)3&~bD4*Kne=Pulb4~WIj#}22 z#1G`T>9QaO8$g1=1i8q_sq2;|r^C;a{q-v5E*P!{bN?FZrClUyuxq7v%ca5{z{wd2y;gyLz<>_JX0y!~k;>w`Ds)%L> zGbc;fGoR@DzkO*6(6^M0QkSSEx1osU_9_jNK!<3?Fc09WZmB&7dmCUWdBfrK?xEZ&|d)^u9yAh+Pb&#c*{gFD7Y&T*7vIu9| z{8cU>ATKpwM%ldnZdht3<)hj!X6#L!8$ZNk39D3|%00arzWr#GbJXZ+-v2TqckQOayAu z^`t*@vHNM*Xr41ctGK5)eX%rFOKEZ+2>8}|L3IWAJ8#eg-kZHl2CwPp&a1H#N9SKk 
zSAl6nw|1#6&>>tD&dme+VBoCrgw>!v&4bH%FgPFWf;pWvS<@V`zuwA)w8l&VLoWfUq4v43pXzl0)V5*yPYrh1!3={p><40XSoq`vR40 zefklZ!P#)KbcD< zIh%Zi^2DH@1@ClXpG#|ZyX~mvSzFwi7UacRY|a?gD92`3c;)-9kwkXlc!K5y%UW^Z z>KO2z!K<~pi+NnKFeitJkdK>4v`3=NPe&J0R%dm>(zGBNWr(%Z(7zfj*d(uE8@Y5bS3CdiElUEA2uj)iWBl`LMSuN4Y^VwLQyqz6dbtUX;S^UFuy!p&0J>4Lp^daFmV!x*C? zLc;3*Mf)Sf8hm-={~wH^k+f+$EUF?*euw__hQ>`reluf6EuDu(((CYdhgQ_X#UneiUKl*?yunT(qdV@z9=~!eyGW2tBA1b6vVmV>f zMLuR8LLN#dy%qyaS|qW&Xp3Y$&t%&XsnY}g6cW>>{~E-ER`*mt5y1u!vLKJCK?o*R zt5i%Tr?a`-lL!E@zAW(=#YfeA%{gF0UCG*BE~z2?hEqY@#Roz1rB# z2^xtW>xHOT0VZt4Z;(e~ScwMo+4k*rULD{$Ff7N#+~Oqn^4aW1ZKn{_r7AjjeHDX8 z`Dil|U#oTb$Fq@MFGweX&8Dipr5G&aVt1Z$JtA`x+=33s3UOe^`bGNH-F(Sa3u-~% zpxOUzlg*viqh>S54d+I>s$G=Y>V zXhn4SotRfcMk>cWNAf#lo?iDu)%e_US ztsM$&y(Vcs7XxwYrOEqsGp+EiuC&(B6%uT+++xUV0C%6p;eNa7(2tN?*cG$I! zVR-4@c;LQTuK)=4DzOsg@xIV>i6}(fe~;b>c|m}O=DikKLTiz7eAf9qYkz4py6{$c z5MPj=GD8G2x{S2HV&iPlWTbKtC#ZwrR7!o1$9@7?v$$*#iR} z>^$wTDNeaRj?gU0RO960R=uoKZc`b)=IkhlJ^A&2^8?6jp;NgPXr&onsrtZBJ;Co_ zla&T-*AUezjq1C&j;$kjHfj?e==!$$Q;!@aPv>ib*1jsL~Y)8WnSHEKOA< z_0qb&F2(BDDDQ$P<|?AN7=Ei!^wn%XXdo;qA~k%rO6&RS>B=&{#7P$%#fy4^TbdHe zK-Z-3Bn(zGxPrU%1c15tbU{gV8K&l-H2k`U-9@FA1o1`z zHl&Vl4{$30Fo6RrE~?2mEgzCd+Gg89tM?oWn4NNVoQk+fN1z779#nw48y+`=<+pmqxzvL3@O&eu4RR}Xu^^g)V|6?L_5{x)RDAVapJM;t z`k(&WodM(iGTYkM-^*@@ZXm(3vF0?(XlWGqp*H85TREdg+uVX*ZvqgjILA%G*he0O zLeHR5cop8aATl0Idey|b6d^whF zG9jCdu845w$0*j@}A)Z&EGMuSV;E%FoP3xk}R)`3*nQK}k$tf5bEw(G> zJ`AU3)EPx~Z4z{e<1W{!Eq0F-Pc=T=gi=IlR&xcir+dd2mPvBM5hMxi3P!H3fz)Lxw z37|Z17h6C(CeE`FP%ucciA2%gChs+35#QXaZiAj&PfA?Gim#_36^XfLoBi{Tcka2; zn4{yCVreS0X!<+upKPtq3M!}|^bofqr2N!n`P2w52$GxBr)1Y+Mr--2spylrp)5w5 z$4&w4FC(;PsO<{X-Jfm{au#Lmw}&^*GV(oraY~%;bHAnnBqP{1b|~Zv5J_n=$4*2b zO8(u;#vjM(UieE#{HcWFPF0a`gn~~-byGm~{QJ+f93U@jCkqT>LC=Qfi#g{N3%LQP zMQOyw=R#ZFvEi~zZ+IqPIb4QRX#m|C^pGY-0_oxgFz=Fi(N=zyeq3L>EhJ}5I7#H9h3_TkSu&II*IwPX8)EedZK zDEWh=n-U+S;TKAuJh-rL{JRaptdW{}=CtbRd4U7aT9jSNj$r?sBe90~+OW!VPfNCY zjxQ^dPk3;R?DQ_5jZxjEhMFY@x({EjLTIX{=P;go&v{_7+fZ$XPYm!7@APZ?&mGqC 
zi32~%+o>k9dwYcWM&3HrS$o+#KoCJJyaQ?jf31lh^lR@+u$J6_A)vUGU4C5x6L z0&Tsth1@3Z8py~ln=QB1LW0BAmHF~2&*lIy%|3!TYb*W|!v!A{y%oh5cz!=Ja*eIu z1g*W>axK}qxf{eV#6hC%GL+_Ao)kzS9gLQ^m4)x68YVC`=$!lg$*Dx&1q-sKX<>^( zJV)^BWp#g?cB?9R5ChEaG#B;SZl@)3C^q3Hz?Vi8a-0{?cnWwB(*TaL3Ku^x!&=KR z0g3H9f!M^c@>T(T>>Hslf2(~kmT*#7;MNmpMEPjLvah;~L)D(ndy|P$+6_FD180Kj zJe@&tX+J+_PN!xDA^O=wqj?Wl>1Rk7S0@-ddjN3_mGeD92Mc)#WZR#&RJjcYBgU&o zms^|^21StmM=seZ_rkQ~PY5JfRM<2lak6`8wN-gkczMk&cb+)(@5LlwVZ1EbA4C3_ zZ$S}l;wSW|TW2~5s3KmCBy?#mGSm{0s%QGO7W;#mE=2Ijptq+5r&%u-1O=EyRe%0K zTvGipj6Q*~0-ey-dsoE;BlsmgD|Ge{r+?h)6#n!FM5gMhOT%g%(bQ`pSvRE!>meOF zusHHB4PdLw`+Tqs3nLYAC+ffV&;ANgo1m8!6n;?D#cZz=0Sb?7cVtw36{z=j>HRD} z*9m`YD)3QAy^*Kw8WY6&ep(jNz{zcC3n&oEXJ>Oo0I@LQ*Z*rZH13`xC;-{XFOhA3 z+&ZIVI?|0Nb*^BohA3{Cg8B7%_Mi$yqs+SNNDF%f*H?=pHe}dkfS|~*0&SB#-YMr3 z%#Ag?+%zV4>HXpBzdWAjnB}q0X_rrh^!zJQvFE?Yya*he63@q^jw5GUAe->OSuPVN zxxT;HZj8P}lupkd`pV^Pr2yD0Vw9;j1B~MUL3P4+c!_K_GQ$8AMCLDzYv12pVPaXu zcsU;ZfSMR=ZjXm657(pD*RrbZ&f#9n+5M(yCME7VQnv9nEc;Ob=k;{hM6pjmg|B0| zx@OQ^u|6>g%*z{@ffS=c(xtVB<##0yAYYG8rpc~gOPH1On1(}qnS(GQ2g{i)C>sln zH$+&ZexHsgKg~9XjKZENvKKT=PWebv+|THi!!v#=Yv4(8{}jl;c`W>suuX)1mkMP#1K9u{Z2lC43TA4#v(!FFBw>6onLu!jv_|Mq$e*kWNoYM+16E z{1Q^`IA+KXh&cOm2}njNNQ_XN9hEi$8tQdha$o4cm6Nptb&!9>FA<&s2r)_pw zP1+*1Wvv4fqH*f{E=HFQ>bDJkt$ppYsNYa0DlFs#^~1tAhkGuTy;EG^8zh+LMpgo2 zq3N7`z6Pf5Z5c| z_qfDmw*GEO4OB(KKFFVJ>rMg`Pd6K+iBGgEH3I@fLukhSfV_A&tVHPx&NW9~0*zre z9?7;~2vX%lp0iXAlMk$>Fh4u@%xoiSS(GJ~ZiSX-16wrd-=&*|X1LnOB$0+KR}GOC z4h5w(0kzvhba#;KRK+w6xqJjq?h2Y@8f1$TwlK0mqozy|SGUIb3Pa;NJ0M)Ye}?zY zXrH{${YK2&>*9s8a-J*y;0+2Sk#q8)1jlMBR#1SMun_I*-$W{e3y^ilNr`*U`-;usI2j0PuT zVxRPjbWw7h0}irVTV zp`^rG0E5zo2!0Ak0Sog_@A?TjF}@W>i71E z*8{b2#H|df?cx$WmXlf;IXu4RXojpRALU?AI zjtjwT?91Rje~;Z?K(6Ym{B~#TuvbnXX-TRa;)`VFk(@ULUE4#Ff&pUoc$h-jdRp&V zdQ)60{Cn9(0J#3P-@cu<#mTPR0>4Mc(d7lT7G9>=W~bOj3kK%gmuhMA6Pyz*T!-#sp>K&hTylh8Jm|rar@MXnEPjVqt|_W?_KMS&HF) zl;c!->}{(jKVyGu{u~dsD&^7gdAQYa9|$nC9q@CAORVgQQk0?~GDcEr!+idb?3s7e 
zD@j8H>m?ga?}j^Q{D05?d-l$cvH~~eA#qjxVYCqydR4g`CXJm%XqLBEQ(2?darCzL+Vq%;U<%)wM z9pi2xQ*Xj@k{M7Gs^E(HIt}_0zlgBE1w%nuqQXf-JdW1bo*%}{kqRQ%A6C}P&GI)- zQeVL>V~bMXE;1HL1ga$aO{LRt`4w$dL2qNtQP?zmOjq9&NS*LK(` zIap_W437q((2_V?;8Pr%A*Hak`j@BsJ$Ag^KpshlV_lDNBLAhW0q+S6H1UQ^Y;k7E z)mBn-Y1xq>NL1_fB;=ML`YRQQu3YEd+vt+fa-)C!!hNeyU2Ct8U?`0wvv_S=692sP z2CH<3K2bDOFIqCoKqCGr)c9a=&NuTSJs*Nc-yVAj(#2FVg>$9GelZX&wXY(4&<9A7 zp~3c2)G+6TIBq0@&@=hM3=b;|g&Oolf)%Ro!2Q+-Xt7_7Ev?JfT{NBC3Y#boLll}i ztoB=vtv-u@kg|V1X6Rns{_LG*)8y86{{5>_`)Hzlb1!W~z*L#YB@y za8u9LoMWqRCZKs%Hln3fSvKuU{8BH;d?JsCd~lKMeYLj9|1bn6X=#?Q9km{KlXMI1 z@LIiv!Sk3(`G>NL@VvWXZjTx>1O$HH=%Q_F!VGOLv-voWd$B&-P}VX-=mROUeKo46 zV#(tM0x%E}0cz~g9)_zfZV_>i(R2l7u7tcs&V@?tAIMvy{rpvssEBe6<_a$85DuOP z6#%!V@*kGGpuMZ!!M=Z)e}ox?HW6W^EaD$JW#M&s&dNHUP{?8HfS=IW+*lj0k5mGqZffc^!DgE3(&2VeG{6Gn70pTCV z?<~h9$>AH6){M*kU$33BJG0!DIy6HV+qEfsnk;&|s%EbQG`x+H>2d~k&2gn%8`Qpg zVJG&}VgArr>-Qb1m-Qf$4=J5a1BjGK zsDVxrzf=cN`1WkNSQp=qcRfjf%_GqRBjMz0kMhbERdY?Ft8n9t~%Fx82jWH{rsohCLrFM^ zwL!1)KoN|0K^yj#O`{o3K);{lG-sRt^VkJNk1ijbmmXM)^E?Q%w_b4kwnR6TFsx(9 zovHA>l3do6A6tI9dVxBQh>&RG>I@qgB7|u;KJEg?)5L`x*LJEeOHxNLMN9SgXXbz4 zZd<}HR{7k`Vm;BG2+~4c?>YO$t$`vxtG{JQdFR~RA?1}p2b45hci_E6iXQhQi~G%( z$w!3bFFr*8z+Q;|j*ST?bROoTymVggPca9ZxosS|_ZwK;sC(@=WKqVTI6{p3d%9<# ztL`Abux4#bc~fu_k-|kp(e8HEToAt9G&Q?`l}zonR!nZ#gQI`GV<}~_VeZZDV(>b3 zgOT>%#VB3nwyBIKRXpCbP%l&v(vVn&=_!;^+Skly0oJB0RJoxqIdTMr=btAF6XemSq2{^`1`qvfy2RX;;qI_J5nb{u+gM@^r8sHwz<=xCV$jm z)#4|j5&}RNm)uSZE~6kXgvf=>vX_lU5+Oh#h#}XkZ$VCHP;w;Sb)sYY&1*r^>Z=ru zfKWT$zh(JXQx`XmSYSxm&@{|B%5az*7@#QZ+-aQ=1+9Goy<|$2)I51##QgDl4T!KO~sYEXB#<*BZ$1aj$SeDyk}_*zLLkc+FQD|7Fg39(v5gtDf|hc^$8`^t20 zh61O_RF#bexvveJgiVSHWOww7-fkV``)_bKV)HsCGBb@VM&wL6GHcS$&vx3|Zx;@) z;yAyCZGF-NL3La8jYK*nCjB{Ig7MKe8(5>UYv`uw{Ei`9as>b_+4YR?QRC$_4mkc0 zr+L>IqMhRCxM>B*m{Q^J7N~1=HIP2U=!r0v`lGkj&WAvZ^+vx1PbpM^D%V3*i=O?^ zv(7}?0;GSc&~J@PJE@xn^=oPvl2|IJqXlq8jk|*LA;dmOzmm+}b@DZL7!nZYY}bQ#%C(<<4Y=@PI-?5X>kMA2W-$j)U})u5k?hwkETlW;mb*19S?z<|yX 
z)N46m#?sM4Js0U<`rB!Bj%RS}Y;ZHmU7>T$4Bq)`xlZ^aHIo|d5!k$2$7`7dM+VAU zTo8HNv9s;7?oFCm!LGRaG$lu-ywjLVL3?E7+**86C69Ti=fxiYm}HbUE-!icOVc;? zzZat3dKbTegMk|ZPmBt02jRY^=}ET1>|*J_J@uqI_~Tm%stglmHMY)UprBgxC1m6`_jLep5(b@Gj1D&jb=Zet@~zMx+4H4g z!LF12hcCYgD&47Cph_MMtaCD^_x{7jg5&89q4?QQ*(UN~X+JKzS_PxL{o5IvN&sbx z4bB$xN`*ZY7i9`AOtZrvg;OH{<}B4nkSBoyu`}&(0L2o!H}_H2G5_BxiP`V~2L=c< zh}KN9rD9ptZ>eDW$Iii#xUwWZCUT{TV6W+;af5>5pRwOW9-59e|1}Yn3P#D&S`{st zJAr?o|E&5_)e67YFnjSuoHmjNL-WJNnS04%SXy3&)OaMebDp3b6=-$RP`r2yipz)3 z_hggO$quNWa4)adB83O-^Rnj~od0_6|5GD4av!6yw+v!UIp!9r5&))vC|`n(EJ|O> zx3UYu6$}b%tpV~Glx6%JP){FIYox@ z)#C5{s-oUnbZ|g7dJuI{ZPjt-^55F`OqV+hc59|gi`t-;`L&)@++0DhsxMfGt`Ei- zD>X`LKTKsqMpYy^eVu?e zHyE(Nz8e~`+hHrXrx_Y0&N)Q(E%AWYm-GYG{wklXVGh{svl{%@E^lSh?L>2`{omcj ztNov;7!Yo#vTVsy{=Dw4$)e>D#I>Kz`fUc)8--Xce^WrA4Nl|%Zp!yMN$ie zp02vfUf2d3DLN>^2uC=ho`o^kdCjcKqyG`~@ZUneR_WGzM=X~n7Ie~Y84hOHGu(Dj zstR&}SbT2@ByfZNE^(;e=p+xEF&p!@K_m`m2RK>lxU?nO*QF@a+$mfv?q9Ch_w#`m zp}bS&*jw@?{GGhNQ*@MoI6Oz-r#$XX##OAc(;I;!-sTN;&pdngnpy`&`Ga+SLg1|Z z3w>+QFp?En7B6F*tv&_o+Q8#7^+RFJ$4NOJZ<4KCmR3t6> zMYi4_=Xla`V){cZ2De5g##|{KU2^j@chI}$@H%}{rykUyI)Au%g6Cg!f^AlP<#FRn zb@gcB76{*AfDwX}DpKs(D_Bmf7W;P2sa5wCRE2b`yl|mu$*=hMHn0N-0A1C~9@Ib4VbW@;3*O2<2&ELXcl-!V0RH?6qh!!}=xf!-5)r#} zxJwW%sQk@WW0hLCN&-m!@ulsb?Ub^SHkwRP;vJ~Q1}g9{{`I>YL|_U8GYaWTO9+vv zs7ET}68az6n#wyiy%G>^#M@-t$_wqgvL5hMU)qEoJ5jYof>kbI%S9f!vRyb{CzBfd zcqnONkdm+V9C?$hDXPoAAc4NsK1c(8zwtmwJBXkJmmE6X@HE}l6ji-ZU(L~0+uS}Xi~k{ z^3J&Xw7BjWEFlJw>Avtc(BZW^EPDFDb!-Dcr9qBopLbNo3g1J7e(mo3mTRYpKhxQg zW-O20ZxHJn}?3cNa*;&-!03G+#8a?*&(b7d1kfSso{a)$!{N z#lb_mOYM}rtSSYlBu90G-PM%3-tf52bE>mhKSw@qo-kuGwlBXMpJ2XU?HIwr-_oH- z`b{GM&;`R6G5gXev~uq8Ubvshufp4F^F;1Xaz*TYM`}Wt_45kYx+DXJ>RkarO)u7X zyyRKHb%LWKL7|EG7zGhv5)6)?Tq6LzFsc9z|Xsn}&5&Zg8QS247{L~n{;^82h0lpZBTT(!uw-frU}n0xRY zS2-;Me96BkHx>8nL&Y){1}$Vo)?TuCd^_nJ2fqH`8D66BLMX5Y7IPGUC1uG_jk}F7 zA{0Y6`QpDoq!uCs=jh7o=6rYV&RAIAi}`O;g`H<-y7X){woB8uP_w|Pu+rNy&fpGm zXd&Ofa6v($@pj$(hIjSpts2>_C_2}LuE>{%DWE0H!L+fCVRx_gKr31r!;La+yWs_g 
zysx&IAgldhSkvjKi5u@~(_}nZ>@ZttH9F0ehRTMIkdeW*F8AaN*ud6jJG5dSG+PFM98@ zB$u9(Q4gy)6P|Y`XcRqfGvd+&NmuC2rNk^DtVHvm*=K-BO7g6OJ#I&xyi>^%{z!kX z?EGytwkwa7)K*j{DfFt@*k&vUTI>9jZzmQUSB?;QN&+nV|9AdNGipA@ma@^p`|y7E5l4;M4CM8@G+ z8ej$50L)}k*tS8(PIGPE3W{ram)ix;FUsIIXXa$x(|+KBa#v?KaB-fsoJIaxjV?ke z(5GdIO*M~dF)i*sk7{c(dsuR{{}$%q_BIx$>($@c6&zfa>x)fm2lB7CeMmq-4pe!{ zqg$J``{oK6iBU0i)t4z2YdhsOj}k7tv}@uZV+>YDVyJ9{dy*9wf36qlQxE))sV@tJ zg7?POYtJD`IjKt>JAW8v2k#}#J4Kja!NFsX(1llE@OcS~r+yoHCIT^Y8iDPj`OdqY zjMc8pk3(c>?m`4ck@R@7jg+wK1VdQm{iTED7_Q>Ib>?&1RLlfY^$;CGh z+5TNnk5_{;#MC1PSlJf##jB)eaG2~H)Q18-QGLAWJ9~(p)0x+58lmAMPo#o5yTNm# z1{*U%VIbGf%!TcR;6i4)bSx(iGgo6LG8Cri7goX^?H$Z@Z_A3fol)%$n`Ffm&9XH8 z?{>Kku=Ar@(I6c^>iqim6(!D^FI>MAij$=W67AW*4Xcd;kdTgxU zF?(34*joHRA+NGb209<@?tsA)N|v<(9= zJf0T_kH?hf*5^UjQo(y;hNPh-CAw8T;kYzEl10k4FS$p4C(d7DzPN+;Mbmxfw44Dx zx!}t1$z0N#E3-^bKU#{KE*X%GDpyiLhu}iUjZ`TJOICw3Bz8=!F289OO9Ki0|JMKJ zr^QCK%&cL=myYKDp&~0DK4-<6b$`c}YS&4@!dm*Ens^SIm&H%rd)Zq3+@E<5pkB^F z<~#IVQ{)Rlru^j>JA;d*0k}B|CZW#ddeZG^JWisvc;(BQvEGWF`KhRyMTy4Bp4ml? 
zTEGd=YpJBpvkYFE^EFFlFm>Y-} z9r*{bRsiw{|rq2-dKA;_;EZX<0M&=g#UftqCrIm zdd~`f{oR%SKx_+mIMSRqo9+L(H;tnFoy-fTb+qo9@*nOwy%J3)4fZ+Enr>Uce7{tD zjPA8ulI|Z>!5h`>f3`&_KVsuf-R3KgLAo+>19Ej3sY+X`QiBLyWb%{$S1k6}($+;2 zQt*QEo`(MoY4IOyWpA~#?u&93SARj=6;(aF4(nC``&4weOgG}^rbx2CYJQRe z|6}ePnnY`ZEnT*4+qR8Uwr$(C>y)iiwr$(CZM*u5=(<0k2REWe?`S`Jtu@&hJKoIf zKC-OW6sG36((!C;n4XT4HrL!69mt-&;-f;sg!r!Zi^sI&RAO4suki#GRiaw#V)jSw z)&;|=0qc)k#v355eiDP)@Hkj0a~twgzTUJDw%8f&vtlkS@Iw2O#hm{Dk=g}MZ(v7+ z-`hI8$Ar8Tu_CLp+KHt}M^jF=x(1m-mybpbv)f&uP!w`VM|uAUJwWk}9mm?@ep8X2 z?^=Mc!(+AXz~3w0qR!DZ2TEIi`vfhg{~{ka|Kv|=ZMqgso(T_DhEiKsD`0Jz10@pu z;He~1RQc|@JY(EMJP(^d%4r)J^e?_h;J>cCHL8Ss#U{!eqxC%fZ8Ks0S-pXTqROr?#~2iZ3u(Db1qpGgMBR)S$GjUSWnv ze^m9|iaaM{J%<`7uR8E{YA0ixfSI{?L{L9N;o!lLcW_-rN{bwvzAAY&JNqYI%I)le zd&;BPYSVU~EW&yb!1MX`*FTL=H|}&gRU&9nWQzt&NaX@fCWB10eKtwfR>3v9Vr}F4 z*Ss8ZO9V`~WiJQwf|#gLs5_Jx#VjZ@c?5ct=9{PLA_Fr_Fl#K31AG+D@=oF-((^`E zL8e%bvY*P@6_Ux|V~1fnj)vVW6)ORFXo}vqiz^MXAf-GwBam1CpuDF9=MW-xI}eyj znm^)-R^z_^Rw(p@b?<9PncY7Qjym`Yk03Q|LsL$j2~oUm?dqLc;xeN11)O3xma!$+ z_UNi8rR`*Qbl*Neo*VT|8bN7U+BGqy$C_GX{jJV@C1lvsCQQ>)Df0wo=eGcoW`#fM z^0=-U^t3Jq>c@nXK10Pzr`k4UF>9a5OV&5lkf{T{pf8IWjSJV&u;YJ@FP7La+y^l~8Hk$uc>c$!w;w~oj8H+VV2R0|cuT`e)`~!<01Tjx5bc?ZB59z#8m<{EtUJ;BAlW>m1>yG|jlq(ZYaq7X`a_+m8*VIdDnPRb0!yR z@2+sST$clp;mna?R~G8F$g5E%HKO474MjTH6OW7=L-S~_)T>{IoFxKhI6l6?uCdg+ z908%APOpODDRG}-GRnpWoH^yJS`Ja_HcE$yS1YT{f8%Lvt5%;5x@(?0J`duArN$*u zn{btUgmnyTk4Zw&dd?G1a)KknUTY)FJTCj`7GRS(c(oQeic=g0oNw2!J((~TSdRen z#BN1sAP5Y@BW8}?6JeRa&aFEiihOOW*oReKv8!&PhHP*BrrkkkaAt^LX9!IVT=>NK zoX02WLof>nYzMU6TURtm(n+wqO}7tM-zt$0$t+%+)le~K~w8_-mKLnk?9 ze33AIZ9udX=eNg^iMmRy>(WGi{t#b@*z-+S^uF`lMiU_o*uPxVzUtA3SVB%uQ$OA3 zth@$_3q0Z6$&(=_J6PFI?dJV*Kutq^h+v@?OPDBJ#m5{G*O z;q@qMTlz{7m7>L2>atb~;#%*$@u_|^!64Dlvleq6<74iYA5G~5CKgv$I!3;yvl^SQ zxaS&k^1E?y883;CGf-N=rpL|)&!JDXAwCn?<*h%aKJ4rwD#jQ9b5NZoE2R*d(EJeN z-i_0V#nTw~JcaFo9ccUk%~|>_byG2?$MrC<+^F3on1l9TxU`4Mrqtsak<&AUOuZjY 
zul=~M!U(dE-;S!yM(HhiuTcAIe={5#1bNQxR{vZ)*uAP&;cn!p5>*_L79>VtMVpb^J(Rb-Xar9S~;9awKVMJ*9Y&Yb5x$k)zsuvz6vqMeAGv6hmLAUUi>Eh36^W8{*n#t4Ja)|?S7J>=qY zi!}^~^L4r1ompcx-%*Z|)Xn_Phcy$8wv1D(@7y*4tV{l=wb2dV^Swa$3(#eha&AHA z*HDj)zIMMdi-SV}qKpj#rQK-McOp1pldn63;z!G!EPwvEf|d%CD5A)8|qKLq8IKJ^?c%`8p-_EE3(CHQ5EL#a$tn{ zZKqpv`J7m&n06O{7_Ag@^5g}$ivD;0kc96xA+CuF&$i3S)z}*g8T^_=$+V-C;%5*M&?@D- z$SkY^wIJS(T%Icxb0+rxcKv@Kftj>(3biu@4Tu|m&5!gqbRKscoqapjEYsej6&GfB z3Q8a&IUI;`J@385%OUFh73+d4e`5A9?BFY)V^tIC~O9ocD8I^I@NG*KnY03Fa zuQoeJKiq6n&WFo|I%P}tWNDV+6AHq{Y&1_hbtb5i4x;0L4xAH1^lc7x=bOG~T zt{q*{VN4ih-&4K}`20tFgX+U0!b>P5m1S}??DME6cNeFEe=T}1R@Alt^3Ny=2}75E zl($;F(e{)!=|hL$kM*xFDkiOG0!>DT*=3@1`f`Ii!=&P>^gYULp!~Q45x&t8c@`9M zV_iKWB}{&86B-hT9Hp-o|B+bLEbGuD z>vJ4(UIr#CaC>rwJMnDwYt%$pTBB#HkHK-|5DqV)9-Jzp^~quG!J2qz51a8+*7pEF z6H&$TQr!w|lFn<$9qH@TAJPmV88Glq93+KbYOczA0Y|n_JcQ5+a9><+*$C{iV{`Ii zqX2=PT{tOT^d#(_5ZljPY0^`K2&;mq60fT47pnL&pu}}#M6>%#q;#pOuG+@7TyN8}R0i;rpu~fd;}AB22yDONaUS};@m|Al ze9qH$fH5LIyw02|IY<65#7;;l zrTYXtvZt|5MLay&nl)6l)mYko80!4Z3d=6~M$1%-8(o?%mu&G>A|S#UkycQ|E3O|e z6$(OsAUr-ww5X8u85?~&U>V#lSlvBpcc{IlKx%(o~9SMd&sj2aHPi1pbkazb20*aTOKoU7FDaJ$Ok~vW11B!MQ z=7HdB!4D?Oj;7@FRWV3K9=(@Zqw=)vme#=)E31(9hfm#W(<>5YbLJYhk0+{*0l^VU zJd)#xV%o7bcyDQOp6PY)(&m)4&bNlyZNp)3aSq=z280l6P&}KS?ee?yL|quvC5hEc z1!BCcjfQbZl}P^!yT6hdF}okwvTzl~C2%Ffpiji^_Yre$G~eXv_%yEZ;p;!=p73Y{ zqQm0axxoa^5+{x}$&7)uPUw5F+~$?la4Om#9#B-#c$47&^_r*Iw50{L3;ZQhBUY4I zvqR>a8wS+6!c;E7nw*Lgs6!#}zCQYz2`Tdj3*kT}o{l^Hkk;1pwqT><*&D9xFu1oq zsN$(ksq>;KarsyG2iV~N;<_D(O7xLX#oPz7=j3c1RJ)` ztR=7){cD*bU4o1#CvIVyEqT-(^}1w8h&kgk!QtxmP0GML%=F_T?CPZmFAL0IVO!Ch zGFCDH5XU&kTITp3#G#YB9J<9~i7 zmi0PTZ>#F8hxjD*JB(8S&c>LLaV4G&=)Rb(OV1my`1h=ajPht{4Pw!4Zb zA)_scC9*z93pk{_dK;CKwwQ);63I5lr?hQ9FD;wM z+os8Q|3G~AawL?()*2kusMM3U;I7{?PaKUmtd;8POzN))5KLX8z0c?R>61f zfw8G-qFuQ_uOE0d;(ThET5%JpDNeXHab80cVO84#7`Dm|#7`Gcw_ENL-X@3gqD9-V zGRWcTAyo39O!0P|X0A^Y&S_5VNe@F3iMg7z5byy6>{-6rRb&(59E#`(Nav8Puk`3$ z17{UqF910^nqRZVrbk2K`D5=)?X-CHVq*~(hV34z2JYXAQMKQopILIv9@>s~)gfOJ 
z*lArdPFAyH;IdkgL`{&MF5J8I_dLphuG?@~R+l(iAH|7p)%MH=x)$Go1^78;v2bK) zNi@c-xM<(oj#Yc~?ijX<0>7EpPBwH_QYv8WMHc3ybS!q5+WfvVa{Hj$y0|dnW)|LW zhbb5V_+3>(1Ne2<;J|MrWUPi;2*2_W4$#btQ$8iOu$MrJUOq>ORXOYmnoJ&vGw+6H zcb)op=*#*OUHB=%F|gKm8q?oc056?~_^X&L|}a*k!jomW@8P z8^c3}UQ{Tjny?b|@G$@kh?=G4Q(I%nU&CC9V;eE1P9-t-k}@ti;>t-?QAEW{Uo>gJ ztSPt5Ots^!exMl+KKa0PX@Z_-M*3I>Myv5WDz66ytMR`#Drdc&ubk^~6@PU~I@e$7o?q|NrYrx{+cQjnU$2W1DOstERvkPa+Tu;KYE+9x z0-jR}XSHWvr&`eg6y{IpCq-63ai(2b)Q#rR*Qu+ILe*J6nqX6WvZ`G#hL1NoJuJ~= z)B^>=*yuw=qI3s8xvl~iAItD(8s3E!&p%Z=jlu}$bC8Xme6!b3M$?@V{Vjw?&0v}* zdr^JP1L(co2k)L}rn3c-LN)8ojttx>H^oS+`KK08^d8|aq>hS9)lN&0kVkv$iMhl0 zOj^*ENurGh_B34od5YcEi$F7CSwA)kN#D|jb5*8K!^QcSk9hQoS*MJ zk$EGT8cK<|nyhfoYyQVee!1jGfDbWQ7h-NBNi^O0v zo87`z`B`V)1cbhAWS-uj`FidE6$Ab1q5aIcOIa%tRELu;odd0nGG-ay2Ot)xxc2AP zG(Dz-j&SLJ+5dFC*HE_a*;)|HAcU+Hk@3DcEYdk9rmA&TYR#PBg!8ZL_-r6L1;2yf zDq&@<-}tKq2j_5VP8iB{BW2lg?-V6TZeridQLQ5)eWUWsF&-0l5a(~l|Do(Iw0Yr6 z3(73h#J$(H%*A$j%`%hkX_-~d;^vojFf4T0O^vb6UFgo98q9;Yu5{ksFS7X z7X;{Cd3@}2;oUhX{7 zg|Z*CEQ&tHoA9A*qGgAW%#{a{GPbqw3Ol00bn{f}yZYFLl)*^eW1?JVtPPFVwsqG~ zePZk%|FGRcc+`-N=kCJ$hFQMq@*wVM8ZYk-Ayrsq z=5Bch*K&zW{d*8NyWZ|N9&WHX8bCDOvng<0$YVIz8DSl0Ez30r-sUa}|$R~|0t+n;LL}Sq>_V@2)UEhGsgJPRF{eTLFf(b46>7%&d&ui$-J_)*JnfcUmP~p z973x8p;v4BGYO+`=>{!jhN2`<(i`3T4xV7ASzjRyndtLwf;b~i3Ldx=JrN&SjGdqb zVC~ z$(;t*CddJd`^^#&wp~%I-6CiQLIZ0!B_*Z$(d7;8&D}2laurN=VbVE81Z!tsBCF9u zxO>$iY++rT06{FrR*tLdjSuQ&QT{?+eJe8rd}H!lM#QdpoCqcg6%WCM_Udnunuk>Q zZ#a1z^7BLcd}K$wbg_z<;8H6eN2ClXH)kw&3z_4flQBkO$N;ql#wf98`GEE?3Y|bt zSTcF+9voIu2AnYgRiuPWhkk&0nHN-zr${(B&@jM#ZG!T4#LcRHGwZq6-qh!FJnsr` zY4*XheP}<>vRyUA!}Fu3$|@7IY(b|kieTm?x$1#8KMV>>@kQp0K}bxjq!tv!%_u?GZLiQ zf}11L5U;-S$R7nm7{8}#v|e#hl0qXo5oIv4nga&CJhq~0+nNgvU#<><3d`>=-If#0nK}6elP{qc(LC$UZ%Pc?3zXbxAZ;ZBnRVe1e<^xw3qO0DI(#Tc8)nCVUa~ z;k?JsE{X?mKdKQcI(0U;NS5x6uYJgszvTYpWZu9I(ByglG-}nM>-;{lwz@(G0_rw8>4DX6KPZMH{a($hR0`JTO=L3Nip9!TgBy$1 zIxfFBMA3+5krcPcqfs?6J@Lhqqgn2FNHaHTFfA!_xt;NiQTth?ZS}-_zp&qz8)HZ6 
z2nUZJjUKdWh9BFad(5KCVN`Pjp2f=L9X{viTutkTwPKZlcgBMB(7gPiViEC+;mhni zm#F_xVUqB}s8g>9GOS856(=77|8T=wV5bHPuLfx_WOt?A%X|0JxT=@+=@WMAaVx);>xE~KJx``41(9cRPL;M@FaXDNW_g0TSiAGc9}v{lRc4Xa6<|ZMXOubDeHmVFX$AH`i{KVpT04 zdhzGqG>hVig~WLyPBgH!?60%Jv)xlXy=Yk@S5)@1hS8RMXFCt0zy!~8hk;jMCi7nC znB+oU{nJ&jl}O=A0~%et9f@-&qnSU>bQ5k_=Wc1oA|NVHt3|HPgCJNbtbeUb)p1UB-7m^nUtv zcsC4^DM#;~a;a6!uz!t#UO<>c{+pm^Cib%$&U&uT<4o?C{>n>Hm23<hZ5a@Z(Qf6JP!=)#>A*uJS3hmbUaT{rN z3gxfxUC0Q>vh!vK^jhh%e%|&U*r^G33>5sQz3uFl5HgkKVp}7aPh1YU2Z#w&H2vsn zK?HIJj$kd;bT|o2WuMV?8o%ID?xwH869y78H+M=hmOCiKK?`TzK+Bqg)0&x_;d4sA zPsfYdfmXpui3lg_V?rFFD~bRaUDOar0{h7x7C+ZxFW+TjTPU@+f+ijGZ-a)02Q1QF zM;y3Lf{97d_ok!{Gfn<>+4?(5n2Rv`Oii~{GwHy9rfoD=8$7#q;{0PhD`Q76}!ODo|C7A@PHwPu(h$<7{ z-$E~w4i@D+Kca)tY08iI;-Pbgkk7NKHrZt4KgNTtYE*$qUF>yjo!X_mb^OhFybRi& zWL*y0jA#GitT71d9251^OmRo_Sx|h#BBL%}%r!@sQ4I_$QIjrY%NT?Tl?7@PLZ>Yj1MRdiGnkVq|N>Kcx4 zzHA(h*atFS^V4xSlWCSYadw;bwMFEI9^Z`y2B0=%LMV6FF zwVwGa@#F0d)r3U7RD9Sb39QY`jWEd%>w#cQP8NZRb5~RRWH}kU!1^x8g9#JSdK+u1 zAxIo|<0g*If>*d#=KGCC+;)hKadbG()XqpHhdhphl)_!`DS|y8!KGN9GuMH0J_MAe z9H=wz0)-CIw0-Bl8z@0$D)kf$aD|alE9X8PU)6fi>ig71v8f+ z@mJ=bgcUwRUc?oxS>GjEDr$%qv-S`uJiOa`%GvO`hi-TJq~x<}fq}|@zUd^B(%w?U z@SN!b21Q%o?d5W|L4Zy>%1c{+(RF_(;i+A1)|%DFR3*^ZK~~bSKS;h$R^0*Yk)UvZ z)LF>?Z`c16yV3+`P(E}D7>BcT5V&%0=nZ_sTX(1{4m+9-^5nMzQ6blKJO?ybEl%rm zu73)G2JNk(N}h9&Qf5Fw(PwUqw}D)WpR{tFFzpNTi4f;Vyv_^y%Vz@j0!5)F&8n<* zw+|KMPV!U0v`mI^W)YO4K38M7=@P zvJ>~VjhWEWQd-b0wS!MAUZT?i@otKrrw&s@TLL;_2Jen0BrKjB=s!0cHg?)l=IM{O zf05A+e1YH(7Bkj@Gwz_FT4Jwwt5rg6dyYv{sWZDdr4*>OH*_Ay&}G@ZF&UX}t&B`lt;*3aZZF0wo)XEfNQL-}P7pAVd~FKWl*9rgwRMM*&rKDHI7qn~N} zpb(56KK9@1^oX5Ul}6FzGwBj=O)q3(?qy%mi5CeXV-aOFgF+;k&K$!S49a>mT_t3n z@C`-;>&?{h7X;S(5+-mY9T*wJw1^$ZVWXi+XGR?Feo_xtXAHwD~V5V-vRyN_6m zXwS9vowi*Sl3Ntd^WVSZBWFQD)_rwYEbZ{|49HqE^R4DKw>XEx9AqTzSkX#ZS-kWr zeUfJf84FgJUaGGlt$Nv>%5uIM-I1UC2`1lCAzH|EJ^FA@yAeq*IwVjKO6|`~-8oCjaryMC%>xh;TQ7$_x&sbJA}yLa}84KDNR%+(kn zsc^S6j(Q%VEJ_N1VUHIASGPmdfO4;`{yw{|$`#8j;YzDS9e;f1Uw|s!B@dnMIf%_S 
z^cmudotn`@ou;gM_QN0Lq~iTE-~EUc3RCV|u4|XJ164y>CHk!Y7Ly^m@Es#7?u};! zUxhvdD{bfn%DiioQOBFQ>}f6mth6`MjiPK0)uWZf+Ow6Y)fW2GK(q{*6C4o9qIk~# z2ndGeY~|d}BdVJWIu7Y&u1v{4dX3F}8?k0-Pv486Xs6-8C#R>LK#^T#>$(ca<(h={ zbJLm93Q`5a_`cE{o1CtkfLdp8@h)e)hK8EyIHq(nXha@{3b~vj`nw;A4q}g#J(O0* zYmw-6j~}iUG&iHPDBY$^lcMdbvXYpeb z5*wZH10fzB)sB1&uUIa=(6sg zsPlueh3k1E<@^zP!M`*Qc$aoQzzzZUu*!8>aDX^i>4$RWPB@>rV*e-t z!VqibF0c0(S#Q$a+9t8PG|^x^KCvH)QH*u-7UE!KTMt;sM6i|bHilIw>L3to9vs*% zzyPG6>=+hUfufXNM;rH@XN|uj>W;4h^~lC2u^#Ou?_`Li^iLdhjmeR3Tkxh$12^-s z6o769zR+=6Th<3Tniq^JwEXy+nF-e{^gkd8uAN9j(V!xBD)8N&emqO?3!rg(=Xu*F z^-zKcv=M0oxWM1Z?a(n7%33V1IfpY6XAEmFk*8?dBsgBOO&oYwm<+hdI4Tfu0C!)h z`ebHG($*1}>W=W>sFOZ$!3{m_x;+RXY~-W%?St?FA*vK>fk$s#7NUU9tK&^c6WF#b zmpKB1BL_KcC0i@+a}RZi9lE}=9xjPzxJx1k8k({QtWZ4|73bA;8H+HAegc8mZUI3x z?{?Q7wKE2I@Od*RM^+`)WA}`$kUyJlztw%$l(0`@?rUFE1e9Pi4g5Wkwp+z@j@|+q z3ikDf{;1BZkfhlD>}*>RM&5k|elGQLY=^NUT6s!}{~MnOtAE?!Gg;^mp>ghrHE;Nf zwjZ#(ymP~v9i323{I+4A9H)Cql8I>AMQ1dFdg!_a4**J8RLL5MR`Dm_7|q+;q6s0M zo(kLpS2tphWN<~momzJCtz52;!Yf?%omib+Q;e)$_rce;hB{IYGhOVz1tJ89GyfVy z=A<6Xn@1IEx0qaSq}49T?e=xdA@aMcn@N%mtcaP{tB58q%96V3mYQ~GCXcsmGQd#m z$k(d7TD(c9yoI)@1T0an*cwx&B|CuF7QVEK$&4_PhC)m1dG9*Wq_BcLemLdd*Pen zI=$##CCgjdpBR!?F<&17^#5&cS+-#=<`zCmwVAeglbTpN69X97g~uNNell;R#WY-k zK3)aKVW(230Fl{Q7WFb+-0Rb-I@FNeR_GZ7_lWJ60akCgB-p1^f$L23WsaowEc`on z9x*lC9lK^=)abnvr;?t$+(WT=JajcpTv$TPbYDrS40n}V_h?EZyfC&d#BUgV1|Jf_ z*qm_|X0c_~+ULL3TJ3VW;`W3#yt|orZUJ~ozvY*@G-d*4WMo;ogI}@=NuU17w(YQ? 
zQv<|86OVh>2a&94CGx`r5V2-Hgqyy~daprzy`%f{7UH^9SL>BLE)BnVU1wDyrS^rn zFyTFg=B=aH;5h43mbFgg<-k|U(G|Rr-o1L&8`H@tNt4JGe@?Ol_64yY(=}7bO*9~- z-AfTwH;hQ^qyQo%UIgr$C2@fbKYvfSA(m=!uWQ*&O8b*OL!Nu=O8{3G7A2W|;Pm+3 z0gW?{ZG2aZaMV9#S*2Q{?`Oz$?ib`E7)}AK-N;OE&-u&cBKrYoJ_&MRZ3UqkUq*u)RY8DN|x)R;_cI>DDSbp`^`0u~!G{QE+A)45#N zr0#Hme#e4PdmdH;@G5@5Hu)g|vPa!?$LmxYQ$BL^t{QZ6kxVs7k*BmZY#*9Ny-4l< z7FcGuTT*gj0RCSPue(hZe=pw2u;IHw+q+Tmk%WXX7x6&Bhej?zZ{<`_HDZCzeS+u zOf;eKjn&xuxlL$Y#kinM5?oSf{8e)?p0tRbRYz?ZyhvKMR81a9$xX(VxNAC?l-t52 zth^<^zK(iBuk)kT^@QS!5L{^3`=&Z7foQZN?b7Nai<-vKsrS>&K zE2kF(RhEXpJ~JNla3XMqr|jdsC4w522SEC%s*QJ=Sbu%N%Gk1eisSFE&83UoR{M2a zhiH|_zmO;jg#42}3cB~W2T#A-=RelOraL%(t|nAzu4t@{Mc;L5p7%f^3$}v{{N=ba z#9gMyS1fU$C$1jvtx*2+@I|iqMsgrVVbHaK;o&%}bh~R^2c98;oQ4ft59c}JK%OWqjz z>XiZjU0Ce*D$ZY|04YQ#>d4DwK+`Skn`5oteA>VgUaqkJTj%Wmpr(3eN*~?Lm`v>H zxSQm}kBT=ci!rVaN%#V|zc{6T!_pU1eRV3K$` z-6hh4;R1(V*K58OGxF2QUYlD|{mloPloWI<%1Ybob6bbX!8Jz@Yx=L~KjLKSoXe@i z`&lcM?Fq8TwGD@@TM~-dz@aAh;AKuv#??85{DSgKA8cCQUyk{-d%5!YTPP^VV{uc2 zVG~x)$|!cUM%1lju3Br+w^ke_*()e_WE2!1KxSn2{-V}CLTySAOR1qFK0`D7AkqIc zFyZ*4jYLxe$MckFTff;NE$!j_E?w?(~iezwy zxZ75INf1bU3aQ}!WieKr9x%Af5L^B0+GOOnEk?QU9lV-$(&M#5z`;Sc25RNJei)7;gO3Af98h*Y_tb z1%7cCQQSrRuO5vNUF5V*WlHq&Tqn*H^M{)YwM`=fJEaceG?_(7rJi}i8^g#IzcT20 z1tFCB>wf(b>hao!^Y-j^6g0`!6rGD-PR0hzI_+-nB~aN|p&XDlUaB?uZ_tl8b)eNdIw;Cswo$}Asmj+U>OXROFh*62@1i3JEoy>s8 z1OLiRHl;EYOy&fRibA0%xUx}f>eja1#bmmxM3627L)9%V%6d6aw^EqwU`oRF34Vr; zlwVFXZE%l6^08MP1*DSph%-AN$C-Y{wVya6b}PR(^qEL8vRg{gi#I~ET5z(OU|0a& zwqKU>+54dm4$*7k<>p%NudiElY>Iz%Cmz<-Pb5So(%}rvZ3=J22$RiIcB{hSbIYl{ z=$1WF0=k7Vk`5In-$vr&fyKacdpzI09g3@ zjFTdHR?BprT?maqCWBC|n(p~4S&sh3y(zYrM66H}Not+Ym8`0if#A0^KHpzoV-8e6 zAAnetFK6g~YtGXR20D6hShse__+2|W)OEW0s0i`2tIa&Kh6$BzIG~oMAvCZ4t&?G5 zm0jVx+gzw$L;UJ-rWIbGHP4~Z>A68_eKs>I7a8%A=G*b0B}#1_1^aYP$86ZGfc{vd zw?s@r5r|5j+YYXIpKRSLGr`7I*&NXd`{hV20g#8}nhJIYdzUJ~TWG; zkC&DblZngR8;8^7;{FXkyFosU(}oy}w8PYtx~vER)yOdU(F)J7C^i(QB6w>;($<-= zYH9V$F;N!J_h(^IUF)(GnT-DuOBhR`A>NlD^oJ5pA2p2kUDB0EwLmbGZiks~FG~U0 
zG3vQuCYCw&w#4OhKqX%{Z~XQ3i~IlPpcg0&h?BSGQrg@E_t)-$-CrNFnA85g64LvBwJ5i*O`sXDqThJnfvwC%{7ww>bS4SY=jXr z55kb67@nS8>B0bAF5VQwxfj^i7ic9t#xOntw*=5SaL9`6Xu-!DQ-?Bw58y_t&cnj) z`I|_N<&kTv=3nu^k)dU6@t3S5e4X~?%w@C76xI=)e<0_}7e{S3mn9n<{$R2uXncGu zTKUp%p7z2?Vq8%3DXi_Lzk145^HI_>#*^95QYuWw(x!IU;Lk_)WLnymr!ZQlK=yrB zXtjp{clyjQZ}oasy?)FCcOUdlSkVw$p_1kv;l0Fv^;CJRQW^LFh0&Z#5q2q}8z0Bs z!q*XQZddl$@x!O((LOrz`4^4yNHfzQIGWn{R8Ni?UXCBB2UHq5s}OQ;vwr;Qv07A} zPYg(|I~f<0@F%g;`0w7p$F5Lc3%l=HzOfA9ar<7s^>Y$Z*yKK>%$VQ3Yu{6Y^kth$ z=@o5y7;0q^OUy4v=!M`QdVse{@CzgZsI~kF7vKaKji$H6iGBTMXyF2zoHp!rvJIuV zvO^!Ew4=+nxc@r=*qc{w?h*-c+9yAw;+Lbf^ID{Cf2X;~Ym&E7)Ym8MpImd$R`Q7O zUrSED7FuOgMOY47yAcfTW*vRhWL>6>dYhUzAwszmXhLl8g?)_kZ$6{czQSFo8GCpk z-~#(xLw*dd8i}@by{A3H{H=NOFH?K4Q8n3Il#G$0&9oj@m&)nOj|)1P&f>OaG0)xP zPD4Gv9QL|`6Vqc6AJ!W7mU*_j@YBGw!tYtrsbTS!uUUt5+=H^jdP|A_Ca2YkSQ?!y z688gE2FQkrkT4~KMmzP|hAIB~s@{Z&Oao+Q^Hbfug60{ip60X9nQglU%PRPL+*KU9 z7yJDkWnYb9*<+pOxj`>R+e}8I)0Zr);KjK-sCtx5<@BpZCzvNOE4fX&%Yvon7JDZa z|BD|PUg&ZNZ#Tu!OHUAG?0k*+%g=H(w3sBgTqz;h@7=F_#LhHOkrc3|rEcHq>Q~Q; z2TILqDJb>gjLHI)KW&`FOT5A!Dcow%N;(s?L4YK~Ne#X$8paI6f z*W#*>Mgzfy-yUZ%{&Kj5&D!hOcioozuh-Un-;Ge#1YL3j-!shcMxRg;o-sZcPZ*@$ zT_i9HHSNm=_!5Oj+!1x{?m^*1GD$IFAC{1ReeIUnIUSMJx;7!R>=T@j)5!(sSq9ke zDEQ!#yP8W%%BK44YQIP!EzoQ&_jjbF@RafcDFK4x7zN>T_}*(EEqedzVXWI3LrP-k zJS=r&fn;HULq4uFCK<4hjjl}JtbNQaEX%5?8>o=OHucy!S-{)f3@je`*#2Qqn&zb_ z1$)x5wfogm9d9HljX2yqQBox{O&3ai75z*BYRs`yFR;`k6GL~WK4#9yqE@AGxKv{( zF%qs1hPffT1H0D5uodnz2M4dT{;P*@N;G?gj})H_ADRQop|`9*-^6&qNh>sK49{O*Kujh^7(!u2Sd0o%HzR0^p)WZtqj*X2y6F#Rxg}W!nTs}d z$_tu30hEvbDmh);mlpq<(I%ezvf+A|s}((5EYObL3fKgfE8V&`QfxBNB7 zxtF_KFVa=tuP+|hi*ko*d&2#6IfJLf3;pHtlz!ZCuk1_*AUuM5vE!OM_H5xSc z@Y%pUgK4OY6BjgK$r)G`E*}UKxRrxn&NOsbxa*_4dzX4^04R`m07o&YGm(u+~? 
z{5F>fkHbxe&7TEu`X_wSW|_RrCx$*)zQd3^qBSn>c=YxQ=m;|v-jAUXhzBTx#v7IQ z`!}BkN@2x98AjGXX`nH*@aN$MgyvygJsBNoG(K>qbBLm9Hz>$rL_B-?(v@38gpy?1 zLRX1YsG)=WuF_-#$F`K6U(PiKR}hZo1O{M9$ZGAIr1@Zc^ak}J|U+Sbi}IXWHu)+tJ#7;)R)ggf(bx`%x5 z6za?T;2L}^iX%2*GcIjYL2`#p9u0zx0E~5SeQw{5ED|z`Oy%YA6>^Mgny) z7L08yDFQU%emSuMsW-mbD|?`4Me6)*%HB)h!w?zQv9L|O+xxWx=TtN~W4Tw_8!45& z3Q+qpuH-~jsMzr6=^kWytJ4V%pPTHzzQP2ko(CK+Bzx)sw3t}JE(?)(j5Pp3Iy_Xz z&L17$=u(Tjsk_tE(UAAI*ptaYj`QsSF~{MZ4fQtFaKBxTV!HD<565Q z{m*y#bv=~Q%t|J*P8^6|XTa=dxnBbQ=w6ghOVNm}5I?CMY2zccMqKk7_-{2qE1IXl zETrd9?zQa~ca>gM_t%ef^sg`Q%%IcHWg2f0WPRecafEB=IA}c*jH_u;Au#g|sDHM4 z_LMzUHVmeKZ9d(MfZ1|ethh>0G^x0dlUEQt>ksf_zrG?Wt#PQq{x;H0ZfCHn!`uTp zpNc4J@fE1s zgA{@k>+acWA>pckPVT49x{Ln9e~jS$ znA899KlR4DTB0Y*Wq)Q3NF+CMhQxLY0Vwf0j$#Asg-uO-ip2`NrU?pH&0eK+Tf~%~ z1;ul71~|_@yb=LO4AQUEgw~Ud-+caS^$}16ftXdyUjw9~Wjk2^Kg^v|cco3YhGRSF z*tTukHah9pwrzH7qhob!r(@f;z27nR%76Hd>g*c#J!h?F6=qdAi3$`!_^vXUBMo35JTEt$d9G;FRUY*?c(Rzru#*olcB_ zG)h$?R}-&5jc59Cy2i!Ddtgb{wYDKxq2#eN@RaCV{$ODA`$T3cfsJ^cm$ls0C!Pjy z&>fo$GthxVj7D)pMF&NItu?RwVpIc5Vm^~ja$ZcXb;8;<*F-YVO{RHmx{q~}Hh@yS z$4f&`5N~>vPTwTZFaVt8+dq454{$f)d?MY35H25*|58YTTScW@G#z{8r2C8&8MyAI zT}kS~0yI;xsq1nTN&m{AOn~yT44*V^``wa{0P^0dTRw|9k*K55bHo@u zG^0i{J&^b%#3mB&6IeRf#+P1*s9fo0e=t{+(@-Vfg?3}} z03o|41qXQ6BF-R1;pnwx=D+Kjx=wqm}P71 zICO)%JG!b(44IDo%a2UX>H16AWBE9DF-iW<#~@~_#PB4!pd8oDu=)H~ADo?VY5u=m z!H3x{%}Cf@;qMHTeOE*PJz#}z9u5zatfyt;YOOs*bl$#|d$ku(uc`~K$S@l15!q>k zkLS`is54-dPE)_ZAoJ>6kM?&K5g*4nPn-jftiH!}je)1Mi5*GNK@&!C+N9XQhOCW{ z7#vHFkO{U@s=4kM@s)<%t6>>S%*+bc_7sg1P!?wuo4whqBAqQkzTy#}0RTPAb-mHy zK}IS889?m!MPmac+NNuzI#gY^d@vn$`$e)hd6%%wRC=%NaQ&h9NvLQ0Ip-?%M{oUf zhiNn#Oywz{0M62en+Y@e)$@(frw32;pRn_Dwq^oTAU!i&X2d{>ADH-UI>rJVSd!*v z&>2t*?3;-dqm4L)^O6nOs_hgH{gi=#IGeQF&>V?6jIA za{=)6zt&_{eeH|o+i!m%QdX#`Qau~^BiTnxEf#ox5i&Q$Y(ep2wi zJElN)Xv{FHNPh<*j+gedY2gng8(hSe!c^~2F*f$Ms{K|5aQYid11icKi8^^+DnDYz zQjt-7?#7nM$jCK(6O5@v(L2mYcSdr&hIS~Fd6nTwRha959mu5&?bH#mH;1uT?Lz^a zjrAjyE_Jw=P05Z^@*%acfkItP>cR1O=NL*i5K_BBKl87r4*Eo4y1F<0Nn%3DnJ#S4 
zid(LOT=8KJdqQ0GVE||5qauyhht5>i3yCPQ(oyzlZKJRD_oQ>DB?qAZLL0?fv~TDr z33gGn7qS>9_yM~|j`qI9T>9NDon(%C>UioAfHUW)4$J`MsLnrk$H1#AiVW2V2ENN? zMm0^*eB~dw^Sld%y+KU;7D6lRR#On%E06G6>M4p1M|WmKOfp@S?z<1@&sSsO+(VAd zHW6ggQQd|q|9u7H@xny*R1magRcPTxSdt2X(-u#Wd9Ic5wLCEGhth+D4e8Q)d*g(d zu~H?L+}r@1^E?HeydRH6gH1W$=?67UF0I1Inr((C+Zm7dm8wI$ofI~e=vBmEYPyGG zmQ1z?&~90c)S~? zLA8x)2SY|JsFnt+vi{N$>{dq3dj6|TMy!Svj-9{}PJB6<0KlOxhI)^nhQe0iF<>`g z3~b$m(~NK^DX-(pP4<1R7T43fKAhb+lgDB@|LJ`HA#v>+iKj}38oZ%kf_l>$m^`-* z;Or3CDGK&r*>k>j4RrX&xg}=Dy~)b-+l1w7(WtpWG=k2!JInp-Wtru@1a^eANlE3l z$vs-(g8lE-y;WN&%vowce86-KsQ7(tp8wls$_I^9rYp@9AQvs_)1kNGTv2V{r0Od) z@r?}oH=4Y}NUw^(a1JUq=JH{st;In50mP?j$utAwX{xIkN+HfBYTB&IkiMtuz#CEd zLYhi3<8bU9J{2jJVz&-qzrm$OE7&McRsX`rJ!S7}fkpZ3$9LQc{bsfmN{%p}ZNc!oVy?KN;EXF0wwJ$Vn-}Em)HY)Ye!BiWKdv>xn-y_Xw-e2DVX^Ex@`Ws)K5{Geed7bb zJH~aJLfg2v#tUBn99TNLX8a~9t=n{V)T8ArEAQP$AK zx0qs#$)Fm`H*<}MiMV&tyH=gE1$vn4Yi$6&>IH!kh*fbFeIjSMIoq@F*Y=tyP{7j1Kprh?2>sS>|ZT^nl2g1t{w%pgNR22FZ`5@J(d;d7Gieh5^h_47iaTAaDL@MqSX(Ni1YE(Nxo`!T zj|N0%)#H5x7|Ji(^M!o2+mt)z%#&aomdJ8LgFf$I9KIW_Kt-E@<#rBe|_qyA5@hjAg2LM3cKtUW+$hRGEF z0>x&dMvP~$%=&{D!>~a9Ho-;PhDGqJQ~QFk{We*XZj|0BS>w82wjaRx8u-jyp8$;> zp(0sOHa?Pto=M@lo=k*+Yr}NWi<}iLDM9~tb?)5B(8`O#MAkHBfGdBOt^?tSUS1BG z%ONFY0?1eDr5SwoY36)|*1u=FbXA^2iqHor&0NE}QuOXU+poC1i07OHZ6&(?5X%YQ znbq8RqZ!pGbl&n7NUC{(&Ma~_fS&5X%n8$h&Z|xsx?E;sBK3ru7WWQuPxZ5qakWna zyU&)Bd;;$OZiCM+;;xr_$xj@_Xbh3xf(XNl?RWIpefEC<=n+dMb~QG;Z1|*D{zO{K zypw%w*LK5LX)h5dqgvQOASVKq-6>ak%Twbd3=v)p*-d~`Q=6@i)RhkA*9TW954Zk~ zGmd`v4NTu|@ENnx8;x<2rfuJ)WkFA@OKo0nW<^B)OOo_gTvsSZ9|PmQ@jvMrUqW@w znB9!tOlUGyc|4CvUVxrCVU3~&acXy_=lzWJlmy#A$o# z5qI?{gGLs|vat2ytu$63i~0ozZ$zO0`m3_ezB~ zetmDhyrsyA8LWs*S~+M;`DKGRw~v`_XV-XE8vGJAir;i8Y!AhW3mHt`c`q@p8CPBM zPe4pX>&1L3(yb4!6LWE8NmD~6H_|a0Pv<_bl|1gAit6_I>N z+_7?n-ddlV-w5roT8_iBPL8|%0&rf5#j9mq*0cSR2f?WZz^aOxyXn>zRbYi}c*DKy z>Ig6^*AB!mqV`Pc!hm3&2@8|OCz%x}bfpQ_)u4J6$@l&Nd^HWGY7Lh4-k@WH(?fm{ 
za%*)IvF}b)wS9OA(qc(hJ0cBJ_G;j-!rT$8)USd4yXOS|Um{4PDE-;9=bzGjztx4N$Sm;sg+PLP%qH%GCx43xht-qWt`4SZh@RjomBKmXrU-mO{0sLoI?7NY_oE;w)nHp1~99rx2zNM zxnL~N-0=5)ib}-Bc`^pDjbY4;RoZK<2Jx3omXj6PjwFJX1}W3kibW~v>hd)BQ$ zExTZS<~8C`uI>&a3K7gmSx?Ed1tEZ*?Jhksgl$>5*X=wP{;l}GJf2j^t!i$`Jm#L= z0{_iKob(A~)ug)iY^j6xyt~^FZsj!4+Y>9)o)!OD9+^s6ol!ZxOP_DSviTKghTopLH#OKnIZZ3ujpMHW2AEX-;-BG6v1@eX9l1FH@YA7`vSU<# z18(gkosoA@l{QqfrUG0@T6cQdQ;yX35T6K1%IDVpih*KlCu4i0%fjPzoQVK9Gm5+6 zO1fXp=eP;$oB4<0*i?g(It5XE(T5-9)HoW5oP;S6CjvgWFN%_ul{U?SE^;;MTx*55 zc|0hf3WP>!-}^gDw0+w1?UVR+T|hty8LUTtLtU5)hLXRO*-?D`$fA#K$IdDSQ;sl` zA%~3{2ETluERW0x`FN~-bK7cId|?Bp0D4lXS3Qr#$dgcE8IKFjhKV8x(T2PBVv~hr ztQUXr$@-jjGKR#GItA^pw@MnCsnm|y{Bq&qtIzI~r$Q(B=5f_Y_ats8Hr*CtI;{PsMN+Jx}V5Z<%XsK%fGD`toP5DErm(i6(kUHn1HzXAvWIlBSUeLyNMBEqI&cY8eifY&MOcs-dn3d4 z0uM++H5S*tpX?<6qVN0eKd_lre)3#cY{c``z-)cTuGq-kCc2;Y++?XTR5N&~kICB&H$1^fG#u>%m}Z$kh*=uJ^r5?{Ae>v(2v=5HyOm;N;P zsz>=q6y#2&j_)~jvbPBMwiqwr+?b^VH9}X!(<4g~xP|iR+uR-}w+)=f-@j9rlSYB; z=7>boBD?d$k)5mf4A`*;5DQh!Q zfoJDasf1j9K&D@s(i-6P(Av^(6ZCNHU)haL;ld9tBt)@IvW9|i;=Ls_&PPoT9*dT} zPN}Wn_=nGlycDx6F;}N5_B?$t3BkVWcB*V=&D6P{xOVg6lKwjC?5u?-s72*&qxV_o zgIwa;AP~o@>fdFjO~S8M@xG_zXM)7+%DxPpPc4@a{P)U5TvRR~J}W$hz4dDM9sw;u zpDc1puH-3Kw~oX50@Mn{w#gFsbkvKxy`c3B64|ufr?3Udwyyrz?jSC!$RlBG2^qH& zC>sD@*c<;%z%m%!(?ASfSsYHqi|;>Vu7^bA zqhQXqzbYl{rI;diG^o&U76E$bMxP|+GxE;6@E+hYLRH^yzeEI(iicLXo5r|V7ZQUA#7091+ z{?rEfS!Gh>Q_R4h@yb=#4ecG1F@X%;P(iSdA;82f$3KF^9r&fIt0Q9U!7+%v6MUxV}Ul31H$v*x?xJfoq zq^kf!B-DB0`ZMnTw7=_#ATR(|!Wj>{_lI$SEjG|4OSg9Vy)1z&4AtR#AB`=Ybhhli z{YXl%`A@nhf#7s?G#AD*1~3q-qnyN)tXt3RLv34q8~B0DSyq= zcRdo~R%KzSFXOc;$)*&ZTnAaJVSo&DXDp5Bh%t9gGsivlD&!V}%#ap#sY)9Og-?KK z2Lx+_8gWn&~M|j7Bz{Vs)t13QN7> zH)>ZZh@i1n(?kO}a(7+~bC>KLu$ce6D^@q(*BR<{but}zSkRF7S2_pz#7p4GTvwD`B#Mfu-oX^CBsE`HrAz169N(M<{AfCXwR(S5hLvYBo#0vL3Z`;g+u=XuHuq96HVCMZ%#mj9oq+g6 z8e>Xl2E=(Eob|$CI?QCu#{q ze1XA2PTnTn8!A?=gM6%N2U1>DKbd zN1i#!zKBP9CH?%q_Wb7r8D~a*Wx#IiQgXr{KzsnW|CuH{V@_O&^P%a?W;hEmqg*pk 
zC-5#Oeq#zv(Bgi5O)9Qs_%rJheBmlUIZ-|Tw$!v$!H_yLM#7GL;K?wDet4V+;3N&d zeB7?Q<6j* zO3Cn?wMDHeqe~3fjKw;IK5ej}XM_lxX1~uVz`yK=5|=2Ffx<%iN2R|0Dkg8*YtpE? z>nKuq?tn~;l`xEbW{TA|`?H}K?>!;;*zcp$(LuLJP+5Kbd(jYj;=8VLip|M2#<=ZC zoP^iOIQgyFU?kuo-(^2f?L>3@W|Tp1VJqKdsFNVM?*i|`jD+b|8 zvAkHv2N0h}k+b&tc0D#nEyV_FYo+%T(Z~W0{X;QYa`61>t+g`oy5+t4Bxo zMT_-9IIFy(C>fa1C4oP(f=n$~fcP8=K?*qKEp2FKn-}fYIot5IrJ0#<7}|N%{r>A1 z2D*+LYr5m<;cKg;OvxWBO`yg+?RR((tZH9)IdL|*Wl(Yo&_g2hUZv8L&qDgK({70X zN(Q$w2joz*xWYVp^d~An03!7OSGga?IQ@e1^e=*(R{Le^f6G%(o;KBWi8c9^@>#9_?t2+EUzu}WBlM}b7bEA++ z>T`&ab6VeqEvBYeqOJ$vAS5&oIIb@Z(BE#+C9=&KJbF^L?o#cGgs5xgONd93rTqKJ zBgF8;q`;G;@#@I*xZJ1<$DQr0I>a;@JzFfN^nKnc$6tCJ;_3{sN&$ylR-~p3_Gm^+ z5wm6Qo)lMaADE%}Ujb+XpL|zjS>G-@Vt#Q#IgNzx*aED>lR=(a33ee`2S867B)?ct z?`B&;7$g1w{eSNuL)c0?!g{s3`6@R3G_u~UxOjDO$TK;M$qSbfcfs9^E59ZPVa+@- zL=XV)Gc2L{-cJe0KSMvSO_@)4yLZ)AlnzRS<5_!#OPPsVwCbMRhEnSHbjuZ^ms-x$ zR=||UuAogo4E3UsMg=Io{|bTMKw@u^PgoF zJgVCOJvFnE=UReD?GE{fNtgt<79k%i9Imn=(Un4B5Xv?OGv~kvsPE+%$eiFx1YSxu zD82>pxBmqHk?ENsoJ7LI!czh`m?%Thc)ye@4PYfj;WkI}#TymzW!}&URE6XJ4e*oN z5uK)Qpg{z)ACEA@_%wq~!e5>HQ&Zl5W=mU%e)%L{eAgrKdX3Yiq7*#}P(Ca{or%nO zb!Rek3Aaf)@G{Ql4T(E&BCyjR|B*2=Tj^T~iptp2)p_PrbDLv(q5&$|oTnnCsplWa}UB5?VF z34me_W=Q#O_G0BBjSTnE)t9bsU!Y5&m0%aw?;!wG!bJD2KJ;Mh-ch=Sh{G$5H|6z_!v&E-?UGx4y(DpND%YPmQB8d;5!)p-Lvm zS^`dB0)Bb^y^f@y4|lXv*u~J{&>4016Kli7e%7np1TAo#I?`K?SD9|9`as?%MmJw& z{q(b?6|7oeKmjfO0Ka$u3(KkPK9JH5pa<|X`=4o~0?TlT!l zX!$Owx3r!U&E<~D;}{Y-B`eX9fV^C0o|nK-(a zFxMi^dg8#C#ro^$5Fk9(sJK9#59`by$7wkqz%w&|{|$d4j>(DNyCnrdXhb&qCJQHa z=Wny>*V|?QB4Ubw`bf8K81*J;;H0}J+M!G5>4G}VHh$|C_ z&t0T>NBCW4$JD-S)S?-|tR*7vC~fR8QtwW*AyoKX4=KxR#Qf~IN!25{htT2Us&Wg~ zL9YX(?U%vhg;*M95$$j*>6`7#iHKw=rlZL!WHMM-FkP&t4eWLjWv@BGUjMyc*^SUZ zV~je0s!mf9{I;uvJ66vdzJ2wI$bOxQ^+TzIWbeo%i`Ieh8otI;ECqxaf-6L-o?%%h z8Bdb?r^HqYl@Y*K!*k0g=>5+Z^e?(F?npV($&JT|j*1Vh?y6Rw5gbz?zY+C5MLZK6 zjkO7iwTkDZ5tTb84x<{m96M_2aNv0sG5{w9@xqq!r(mZ5uC@0{c~a=ex@iy7Ng^nH zcEH5Y1wK6naSp`dkHTTPr$Z=A9jQ{(FC}|qP6c+9MXmM$-m}XpK)$SPjNFmnaz3DI 
z=F9#Z=xKtnq4g8R$4nRe_xP#WVO+y>L0G*do1{q8K5;i(c6~ko1w4?5gPm;=1CnbQ>VFq;(cjyXecvxp z4sSe75(UOsreln#KdsIId%PXZ&1s9TK2I`i^xq3vz#%ib`w`c5hP}6TceJP~D-dAP z1ua^`{AhzV5WWc*-}^-GF+Y*d>Ia`Q_l05Jk!*(ij_h_DgL_5qR_BVc*{_Iy0C&;C zg#9$N$r)o$wF>qx3!n34nyG1LfT9wlKr4~M1>{|i-L5B(aLv3q>Wv}507%>|q_=C& zJ3mh~YN|iR=VuH2G~?GKBc>=`vS-}n0)O!d9%TBc1oFpRW46)v`utA?byUN) zK|iKhPL$wOlH-o4<|CmSsla8`08_En9gfq%6vYmblyc$hH`lD+zmd7MF%!KjC`~NQ z!~>VzV}-xZe}$;y(->dL?&n7n!~5!+uJ}&9W;>V}LsWsQe*k6I3LD1i9IoRd!6ri! z?Z$)1jnR%~O$}LHDC+c|b>{Gv(0%}X$xw!T{K}>-wBCI!DrO0)<=S<*LV+2JWNCNP z=4@e$hr%w+NKLg9AHsS0sXnd85sf?k09?esGLW!{UI;hsI|Jy?bgG5hugl}nD-N~x z(`2E^^U0Q)aE zS`6gacY2;lKB#GZbTfzn?humsqlR>mLXuWs$c$V7j{8foZZxzP(@8TCEn(0H<^(s^ z4REDIZ!Y}TshrTAuTw(ot{ortRn;Se&9gzFwtFC1AI}t&&jXyLqQ`@Qt2FJZFRVa*BW;)3j=!$ zeO{ig%5;XPc+cnbqJ#lk9(jtw*2>UM)iZSZ1`}}o_mtRQ6q?-cbsIiSm2Pe36Wf#p zyhPP&Umo9{%ARB0{uHERI}@bhLZ~+6^N;|EPd9$VFnQGCz`f4aoANNXGkGnOxa{mZ zq7ZXH-rqPPg_jR2kX{lE7jxq@t^*7kurC!6|6U3m!`yWUwFjJeOaYY$JmGv`r{jo+ zpWXHx&^}(};(#^Oje#>^`cnZo|7(q?XEe~6-%aIz*Y0|GCLH}?sSV|5{ngfe^RjX+ zT5}&;ZH-!~Ya|iJuh-cDkHoM~k!f(PIm8!NzjnJwXQjOo3owK-&@NsK(N5B=k2;+m z>hJ?8=b~|d3q3AR^l?e}($AG}AojD?EYOq%_{1boRpFcl)ka0$D93E@Q4XW#%@MvB zz=^9rp@VEzpGDfa8aE!cqkIbrL#2MOb8=egkf#Z>v2)Hp4{6RDbZ;R#^=Q_f=w99C zkcy!g&jL#Vq7%MX%#{Lg(n_DHmLx3H=kX^3z{`_T=YJrF;2WqrRGiAB>sqq<=s;f z!{opZ12s_nzw$AmdQ|x;;7w%vW!T^ABrM(U?r8__^vE11_a7(e1Apvn;r$jj3kw0n*~dc6{cX``NTuJR zV#i;E*<``UV)-||ve}+y&6oZ3--4!o?JcF{%Bu=kmD{eKvjU1xHkvyb@bOyk0&d-A z7!80ggmc+ad=_g>Ag&2|ybvf#!edbXL=_7ddH45#+ETL;=T4qGy44GlG2kyM4zNp9 z(C9w-Xw<}ntf%dHn)d^~?{z;){C(oq(6(4m2hZ#pZ@WGO=%4!x9*BA}jZriBBhSTx zNL2h9w(|6bzjI--*(LUj;LW+=XS$5N#&~NHh~h(x0ls7sh*j)VmY;RsJ$kDacF9SN z3?~GzF^7kLOG-Gd+PjfA8Pt0VS5b!|5m=*Nzc>69acy3*xkhtmh|UQUe$hb!_yWTO zPx&HA$!qhw8Zu*P7|@-jr7F~p=G1pc+W1F{Q_cK)&b|x8@4`73eIZtTt$gC8zFJb9E^Zr_ULVooVVhrNauGmJB~(TQ&SqC$MBo5{iU~>KV2XUSHs3P^ zF;P@`n%4F_PN0&{c_i}w$v$tmrFAuC&HgWVTWT9~5VvJ>ieaFCG5|fuGO)5~aM95f z#=faQ*!|GSut>rXx9M0cQ`ZA1TWC)41$qY9xlOPsAM>CcA+gt_;BSir?6Bhk4tjS^ 
z`H2b501jdYzL*np_^@DZjWyS9f-~r^_@Ezo`bqzdfc4r%j8O2Cq^$^J?FU<3#!I5X zopYXJLfKR8u!;E8ThrtkFFd||VQpKK-d{f9N6BMgUtbbrAPKsX3o$o8$^X@|v)~op zx#K@ttnWd2-CFO8M3Dxyawg^Zu%vQ zUo8b#UFN4dHhjeQIH4Bku$J*xc1Hkw8Kw;JLzjYr#X^Dcpdlded*r?DvTh zFtMbYo%ZIJ>RoPv#T-JVmYR)C!N;;*nO9rJ|4=*{cBPUFF zcMW8R+)%(aFQqbCoO0FyiAM87fEAd?g$CB(#%jX|2zsU&ZVDLbe&Qa)AW}H~Z<~YN z=bv#p`sGR^gwQ_GVsJyiIux%H4MYFKn=4bvS19xKFg11A#Ba@26 zf!K%ROf?HjHxQ(}Y@u)XVPi$&_qxp-5ZWfMEy@{}1hQKgZipO#Wf_@?v$0x^e~QZZ zb3KrN|HHY}!oq2xlJR9+9yR7H1ktG6Im72=FTL4{MTwUOK+l=OzMYpRk@D)18stYl ze8F7ngM?MC1peCj0yq)vs`Cvjl~VxYCQhLN2$BX9Kz?ap(fSX7!B@n+UXX7!ny#coQk7$>0=4{HF2- znEipVgJm~%CQL;E1#^HN`l4L=TJmk4_{9S*n{ibx4D5d|vYXo5G%V_y?F4mZ2_?c1 z`O#~UR>_OUtD-b3u8f95%HUe8w0YWtuJ0_hSpZIAbR!8ZGwoVfuTxGVSl+9#g>SWE zu$~h+ZY?r_*m_%g3L8oWIc)s7|Lq;Q?d?2j-#+n}b%gFO^N+@<+ct*?07pcY$eDR$ zm?|SGj{>9v)04Q79=EN9v3u@ZN_sg3V~5GgA9`Z=Drc zDjnXlQqVh~AO5d3u+%uc;~FfpOeG$50`Qym%_uY|e-J&J(32(I5e-3^&4_JgLt#JD zr!TX47|8F~C+P}F4UrFX;|K|k>@d48$;l^0UR{i^NBJauS}dN zg(*7DPJH&~{u^SSJVy`mbme^;^s7m=5UAkL4eiIT=RVNZH=|d3iUZE0G9Meo+ZQpR za`x~2(?ol_9B8{5MRjo1&XF-})jlmFBTrR|7uioAZ=+ypxvfAB@T*DNXPcHbCgq;14U>0sZie zen@Wv-N|zOUN-Q|&5a{(P|Z>7L~75)SE~CjF0x6*>3^3htta5As!ol2V4Q~C#g7Qb z1S3M)b?gUlX4F))0rdR)%@~45p!_lD8OJLyCR_Axic!ET(Qt)0HGQzg8ZnP@W8T(*e*`B;&xo@CmALJ@K4Ds%TkIaiL;A;!H!g`QJs#2!RUFMG z6mb*CL4Z+3wto}plGVG^O&KP_Kc;t)8qT*Y)Z^5)kAfGqx|~i2Wz!pgFV9oT81eVe zD9zVprAW2g=)<74CGo*j+LT|nRV26Gn|^{vvaz5b?mKu^RsTH#o#Hfja;<@6oaWYf ztTqGwDv}TIRSxxrO-0UPr<)!fU!VOWSMwuAzdgaOaAx1yX&LvjjHp_3s-8MX%qZTL zNyzdNius)Z8+MHgVT=Tna|bQ?R2aaCAP;(1MF9RfM@t+el_a<%W@!2;z1|a<%388E z8<;mSA8i}>vDyggIT(qksm0sTpsO5_%B=E=5C2ahWdWgQ4#43~b??Bb?oo75d)Ll% zMP9XikKo$LTX)p%A|WuNL!h{>u8*2$&4#-NxsqJ-yXLr(8r+5e!TnMbUa_<6{h6T) z;3V>DyNhDQu(x2)z1$(xws9lZA6gjP9P8ROK7I)YEo@Tf|M~HFk!O|Ic$5aYf|A{a zW42Q+OPMJ!8_Rva`C}Fk=X5P026-&BrIlq$CG+h1B8@RpmR4)98K9s*FqfjkgA+_y z3u^`4EJzLf>0|imU{ijYj(FjfSSDM`b0zNGn(uWR{w&+nYP)ort#FjC(0*viPw z62IDif0WeIBp}57u3eKbH9-h3bE-;T2w853wB~r@2EB?+wE{0(D1uPE;sAQ|d^_79 
zi5Jb6E6hnU12J8rj_YN$vOAWq(3|X@#Khv(zuXybEHq${9^Q(vJ2y(F?b=WZVTT9a zSGS+t2@S8l&q;-7yofpxu^Gl6@W3x&C?-#;Ii1f5llES+A-bkoUAE6yT?{ccGFj3H^FV1X9THO=F zO4JKnQo(*A&YeVu?B+5(SL~~L1Y66xW+aZliIk;;d3=>vXaEhsY2o{0XCuV?FPrg? zQzz)MR>f!!9G(e2Q3vd3$96}^UJ|6mkdd>@tc?)t)qrX=xu@V?A|a09 z6e|Fyw=6HCyJKI!d&9d>2;W}doQ|7J5#B>1YY^eik5)k|fR8gWtO{&qye$<-iL;MV z=I?<`KOgRIQ+BhBp|u90@A0Y=#9^nPtpG6wVf6Cv3+R9+*xx*EZpXZ#8(Vv15cTXQ zqdrcS?^>g2G5W^B6WqCmQ(^L%*ch_WXgr{%F+}Iz=M=u?YFOVNm1VNRhPDStOh~bE z2&%nW@n&g%sN*bLB(7`SiMWn(c ziaVmF52pO;QvI&iDt42DH}dEH%ZG5m~KBCn^NJb7I{=HhJwUl%00j zu7H#?AXKBQU09nwK#d9F;f`eyB^W_%#_PFJI95A#%X>{qx zESfss`&Qh{AYhDi*GbR6S^`wFUN3R`DYLjd)zb5zBI$w;kQ%88u|G@W)a_-jJ0Q`j7{u|pX%5!zQ>b@a*@IFr4bB#rJ?vRqs@ zrD%8`n;Zn4txKkaj@o@8T1K(x?33R0%Ym~5$0a;XKECT|&5f^L>>rtAC5K(|($IC> zd~AOmA+}q;6$fChfUUtdCwU<=$c}x?E}7J z|7#7!1OG{kXb^guoel!=O5BeU27%p@f3TB0bZI{}+4RI5V#+(0!iO0mTDfkZWaoeV z`T!UDkV++*Az;|H4EQSlt`DD-)H;x4DlTZs>w!~bNTFj-3o>%>eIgfY5Brm$*rCsEhieuKr2zS#4y%X+J5!r zx3#c@w-G_7r)l6n73JG*zcZZ>B}W6IVGg{~^|4rs<&+%~5##*tl9joy^-pO!K!9Hc z{{>eWG?eQ42I`em@B#6Wk8I7|4Gz!7hpqfT9gFD_2fa2Id#DS8H{WzMx8t|iy|vUi zaoS9(j`JPP`CS|W&~q2odDOT}iCAa!NPzoOabn79mK4e@{-o#Z4UL94#E?l;X8~FD zQs~~HEH6McLWxal5*7-YtkRYY<9EGORfFR@UMO>a+&L9%S$|r(guafwXs&J)4B+eF6R~3>#@D&n zJ>e}99I@DqVxc|>_a2S6cdR;5b@XZO$5YE)SK9CNUgn#=O25t`iA-Bm^^vYWy3apt zC5NHd0i1%&tOofdQErW$?BnX8Mq=ARzQ64uQlHhpC>z~u^C80X<`pV__<5`E8QF?c z3e@+DDOS2VvJ*rgT+s-xk@~#=Um0?A{hoBmucl2 zOiJygm}qZ~BW5y8-S@+c3!{h3xITVMnxtN-h`DX#hu?J$aLkQ|Wr;H|%uQ8$hhT{o z#oJ97J=3)5A$%(IG1JjaF1bmGi!b`}rh@6hYyD5CPNX}e4M|k=_=*QHsKPc|-|J(* z>;4T+-abxq!Nxb&k_&zL5Upi`#*bIfRZ=x=p{-^`QCG}^^~qz1rSHe8FV)Fd(?D}T z7O|o(pPDf;GA+K}^X_A&uv$bRpN?kjmf;(fE{+g8im7<9@)Y!PoOe@c?p%xg`MP*e zZ}4AB|IlXr;)u@jHVt3HOY^9y9k2D?$N9d;tM>~3aD}!a1$r-k!47(}W+8?m#?P*G z1trxnh_lc#U{v3ks={=<;USm_y{fjrALDz4ue*LMa1>OCv*=#J*bD&jRmCz|$SoyG z!&0a6x37MXxFPNVMFZF$qJX;dP^w-9%eH&U4MxtAGaZob_wX-gmSOb-K zx+ipjJIdHZbcyVA(mZ;gi8|<64ZPg;7eAI}jLN{)_^(nVZ|rYaOmJz0b7JX8+yg%^ 
zfP)6i%XB6Y+AgZJ``g(4j<+z?Y6p=+4*Y5mw|{UX+ASGhKUzy-O9V|rtqG(6v-!(*Og|pDz%hhJ~}(kg#D@#z*{eR(ab*7V7H_F@SR8OwJkFR*)xw zuxt4in5*F7*sQY9IahlE)OKOrf%vECV$kZdmA~)b7*C>gK%#KDM(wc(RM-4tz#Wj^ z=wd&CTpl-dCe~g#xzNTjvSQq@F|cU&3@MkIh_~5_=Ez-2TVkQO268VGD*^Cz0~!n2 zCL=VJm1K8Ljc0R_;>;YIWV^AezD~HS0*@mNU5oqoFPhK0iuF;TzwHo%sQDBDwnF`N z(E8vRG|VU@Kfu?vhbY(``)0`osPK(HfrfO#oV;Z@sXNr$n&BR1B&B%MTaRPGSL8DD z+S-w+wEF^fbIoF(;GL8L%zd?fz0nvPK#x?F3`WQxMlv+;iNRcsM3tSIbN`0YxM0NB zwe`S}8AY8OrKRf03%u|cMngnRMEAiUcyaBTv8&1q6n({ zm+x_16K{dRDsss+Yf*}}83p6-l1QjtmU@t1mt)5d6}suK5YD78xO_6**e|PHhCtbh)A9Te_R&-s%7>$!J0|>yM=F%4rcO@IaYiw5NFpp8; zqD7-A*EPnW6U9ofx-)A>SCpzp%L%7BdKRKzGM>k|Zz1|Linj&v0f;(wDo`q{-_P?Z z4DHY14F|&rr}{q4_9+Wic+1Zkgi&hIlbWF)A}?`4ji07H4Pa&5S-K<(yCvZ>mijvPb-2|M1)e=S*`WBvQFd6j z%iqX7V_=(f4d3ftYEqNg`j>({`l*njJB#W9@JFqFxjm-uXo!iz5PA&9NP!%D#=t{3 z&gMQwBt@rL{KK#|A}ZeoZZ>-r_UwmzU?ORx_x@623Eeqllg@k>XLA&Q1F-3TW^m*W zXgQ_#v%5hAN&fVBO;Jk~__?|^DTIwGEe1(XOJ`VEqIh$CWaLvYT+b&@GOgMGx#_0E z%+T9Ik~XVmANbv`>FG*utCWV_swLHQj&V@U1p!J(5@sDyikcPCF8C~o2KX|D`irE> zv4nYSZR1htZ9+N8Tk6uZ|52Lzcb>*>z#G7yC`+D0)IVp@ZRUdhbuVYKP`^s`wElfP z#zGQoNv}oxsxF>dGj5v(3UQotM!#x%ODC|e9I-oGPQ~2Jex^I0xBzfCMC!=8tim7D z9aPuhc?}Hk+A}8BntwZ|Vl^qrBpaxki0vxlKoD(ROA5A?g&X4k9lrIW-(<0>GgSO? 
zqS)2_-tT6dv6*U=|2-L=#HqhwX98m=1*cyqhq(B6Prs$g{KH#MFn2qWaA0j+VuWmD zb+7T`8A>LX{Fps#)e;5X@%rolkm*%>jycDYYM8x5l>q)bJ{1S$?Y1>4l>VQJFm1WE zrfX60hBV4K4;(yNx8psxDGaNo;h!B2rJA#i@gW6D1qf&Wj)_y@ZRgpX#OE8oqHHXJ zNfnouLS8}(d)_1orYjx0(}5#1_~1`y*h`}dK}fMM;_x5AwK^I0xG{9t66AbNe~AG8 zT>jT4pWEB^JlN$0GU@JDNjWrP?VepRR>!~P`otNtR)%;M*oOMo&9i5$(LIOxe z8Go(9OoSdI$=2+Ed#eB(Zaqb@G=FBsdIIu28xFpV_s1s63&go_8#EKa*8(?$J%3)ou^k8%m}G814#Y&*j639aGEXiowDkTI=P*w4O;8uJg`MV=G)ArU?lN}FR>68)u_ zCnO^qN#|Fr4$V<2u!GTSvX5wY&GrvpeUS-!jfi5c93>EK=m+R=Z9hn~W0c<>#@)SG z|14-pY7y{TX|{5PCvgNjJhN`?AeDn8E;G>{fsbfkx9w0^Pn@49*kR!O$l68+MfI-yd|9azbOO&hyWL zn4f`!Zt3h#rxf`pPS&8#cafAj0LB@Afpv5ypD zdDpzJRTNv`+H7a>EGYF>mRI%)=fZXSt|yIAosk$2m-~o8L^N$PUQ1rMf?peE(hsrd zjOXGUk+NXqj&ow!08YlaF2U&ap&yu$_+=txHo z{I25|taQOtAq=BrzMQGZr2)pHq{J(OlcFygPzQzI8yV=oRXa!5 zF_U)C{F=*zjsbc|2yzNig6>+%Y#hTQ*h2Wo)y@)m?zLKi?(;!`zC$UP3Nh$fn z<(HSIopRct;4)+}cdLRNSN6Xi&;nvBP5`GoX`2=-MTl$*c$Oy{EOBF38aD)2YNAHI z2-f$%JDZ=;GB>9&ZGBF8&{EbC`pN3YL^XX;L`ugV9*}nuBCFZH|1Z4J$Qu~TN0^pc z#TMtDT^BUunxOSK%f|!CCQ;M z#|SS0{)`%=vV2*{|jj)95^vvlu&sss^c8^s6{Ry}Wp9yma?z zWC0swsE7RR+q--uBHL-IWzYh|m5I*!+GS4{{vcJ6>-$r#2dh_tzvU3VukVg-)-DXv zf$Eg`Y9(1mtQMQPgZ?K+IA|a)yeDYKN@Y^@IH*|W12RAl+SmsX$V7?p)*1NRj=&hJ zJK@%9v!9y?Bl|?czms=fD^m17ZeMmF!u(OjRNn7)P)yKbeNW-0b~w@H$%u4D`v5&- z(8aOOaL9va3Mm1-8Is6nNV}IQS=$yt$kWw9+WtGVM#_aR%WXXFfetdYlR3C)72`r; zb9d*6u`TT_L&CM+=aML~e=RhAToT^11r>qMW=^%99v}^#Is~6^;PKHx?(v6Kls)0* z?!C&Ki=J(2if}`1dY~<|%?B-bS6n#N)9QZ$;#x_B*U&7DYia`DxH=EVtnB-A_`NMq z|3$@XNOd{Z6qosLv0F$)+t|wXj5e;NnuSt_NaF&_z59$P%5}J|EKe4|VShAq6=;M0 z82M-ohuG12Cd9WiA@ZHI(XGc+5=2L`{;gQ%)~$fylx7gj<2Z0aY+%1&Y3fxuQg<|0 zVW`IFYXsmJRrIAJB-2EhO3|ydAjMg&ZAK9YMXay|may8ykdgl!2^u@#>FM|?!EYXA ziGFB6xO<$nG=&&UnkO~~Aq-A82Kel2&rs@v3{j}5!P|1`ITFE)r46y{s!5!^;XiU5 zjw&0W=~ESBz|tKL@)R-(60B&cytGa)@kXKuXwmIk06XP zYykmX0yh?g)a?I4HhBMIvm^j6755Q@MT_Vc`-4G_Yv#&!kdr>)(QwY$%Q_xisTicJ z|8(UzZg{OMOxUZqYkk)329U3+6LTSgK?Ckw`$e0y+s{`U)Mtg|g2l?$aNq~ig&)%D zHBpfNeD+n{sNa6TnHFiG-{lqxrG~2lCk9Xa&BgBb1?Yh*>cUNP^JWz^=CK)&7$UZ% 
zoggb|S+p#XoM>p=nqVrE1gbs~-r6>X)Mu|eNcMnNKXm0J%e_&awlBzIBC!0f+o3nB zibX0eqo=Ss$Of2ucCCJ3Nar-VVr#Q-X)|Nt(e{yJgBV2 z<++WG1X(ec;s!umg{Y~jGp1o0lv5!~w=8HMzcQiZX2Nvw zgJpa;@F9+qx?ULa(}|u7NgWq2F$vfeXrEQv&IFXu#k!Eh3SsQGDEfS82Sgn?%KMpunocUX$ zecNS)fVkp~<8yj7=HYhSU$SX@(eHzf4|99NGDb(xgbH_TfBdd98yQQaSDO|gdf$8wE->W93;j!BSy{uIqkhG9DA zxDGoM@-k-I)f3!>TCN>&vPXil1lKrh_ufM?PtGn#JNT|ctCb%tKgTO(g>y*LH~JY! z^yk=CZ+xKmu{=w|0+}E;t~@pY*)Nps@{Fkr9`0B1E%_cA9%JdSIUXJ+ ze^wOHRY`ksbDW9s^!zF;&iVHccvWG`OCUDCf5(VTA9DiYL6$7?Nc$tOTQRp+M_O;N zZik%swD%sO)sJt9?7}=OyI{9!u8zreGI}{WEU|H%i!v8!xn_zurk=9^aUDpPOXCt& zdx`X0uY_HCdClzUc1k&P__7C`Kk{(d4&V=b@}^^q98r~cA(8zy%N9hp#g5)Ivsnhm zIq-#`!zlo8v{whFJ&;%XD6!e0#Ol#ttj}q-G??)_mN!UyoG|S(!}fc}+DwOjKr#H+ z741GPHD02)3FwSaHi;ppd`t=Z>buWC&SuJ%IW8A;^RCM*SkPdxH;At-tWdOgg+RQs ze0Sr7g1N*MUMNiQ>=(av`Ok~9&EI-;tjmYzu@}>mXO5_#2mn2}LGtOYpQm+-Ji{-mM^EOBzj*@+cfpC?Kl#h+R7#2F>h@0a5)1=1EQsA~0b@GK#R4UDLp*>4B)oS1AJ zLe6E9_y}F7t{@y1PxCeu1Va7``_m;g!d@+VN#E(r2?oPK>xeMtHfXH zk;EaFa!68rs?~HqSb0S|J8dxtOq=QMc@PZ2EIgZrccK3T4MRgUV?+kB(h%?*9ut=t z-PrtMr@)B06{9aGm6t|BXD;$Ui5?9zgrCGBIQzf^I>u6ku}y{_zDQLU)%1{5Or%Me z84+oJ9x2Y)F#&Lxa~D_VX@WDqBqyYztLJ|u&ZXZ^w8tL~^&V~4S2%65#^N}y277Q{ z<_0`2Q?%XGFIT*0jnU7<9gQPB=ooBh1N?bYHCTsE1eQYO-ksSBKp_H0&eFjV-$=h1 zI6U0UFbZ7mOB&1)uR6WKd0icJ7TNAZUrM_^M~@hel+w<@XZ&y7ld(pL{+$v#eN!(G}$O z#xl(B%T}_;dkLQaPRW)G45QPVHJafHw_BFj8pnUD$bg&48Tv^TVQy;MLzeum$dspq zwwBykjCGqV9X#X|#se>mYWsRhKJ#`;nm+)}|Jo34!lG?LOkd!f3$Q0sSvp}>`r|0ko~Il z?)4`jcR}FKZ4e4t;2dEkS zow@E&v5V2_Y=wxrLuz`eQer8xz_9;0ysa%b>rPb$-p`o(yB^?@InXfK>`lXmBA-+; zj{%3Hn!k7A-%?DN9SmG>HI?Gji~fAPZ3(hft<&*6ijV_ID|Gew=kc;SafIb9x4xN5`HCw9gA+W#W40izfb;o~vYyHkH65Ev#58AB;}C2+S&!REl3N)}NLR1J zsHrD6BUS?kJ}Lc$N!9Nxwk&`n!N zL(sCT6zJ&_ZT4W}zwVvI3UKDAS41^t2@L#f6yM*~Dd;S)RySFdRy8F?;!{Q$A{539 zQN3Q;wUz6JQ~SfT?wkC2c&>LlQ)~$DEpU~nwF$=zoZ=`?HqrFg)ejU?aRKv`YXc!c z5}f9Nw4!7Np}6+FtMd#_{qd-l^F0E-X6cLs*l?7TGtEKkZ;gvGWwCEr#Hkd|UWziF z-I8WT6pE>P7$9F9&9$O@{<(TCnqd>}H+?UPXsr>@V7Eqaox(Ad9qN!ivmBqSSR*li 
z(OwAlH{RtVAR*|Ef-SApFyEbYK}m{n034}{wB}L8c!U-%wQ7HyhY?cHl7l}4o-ECi zieuN-qEV20?pw<&VfUb!)$@+iV)xWzR}6Mgv~7)^aua7`Jcrl-j;4Xe(*(ncJBhDB zy{<**$}3O$axx)KPt62n(DNG6ub*}Jh*nqhW|hE;@=((F*!XzDx>Y`FKAS+me_^j6 zdcXHQ1zk`Z36VT3E`AP{eDGP0C7-l`Go;|=XZUJ3bZJP4 z<0)!(s!>!oZWl{JNO(mPLX4Vz&Q?He`(}hUZsOd%0pJLpMitVUUG6M*e;Td|(XM}#cLY;9yEk(&*HT_ntzdSGj4E1e{{4-wxyt_B4d$kND8z=**E@~*2S z5Nz@K>r=V1T7t$3p2XG4aWgOx(mcATex^iVOa3N^G)vYinDP{x-$Gj_AOvkU#b$=WDD%L#X09ag)^KX`&tKu}Og>UgR=rQ2wT zgF$BczJ12OabQK<9e)T^u<*y{^X=_9StdV@@-s?arDAr1HGrn*FA^xDg}oQbRDWV~ z=i)L0A{*JRxQf}3?ga99@B8jk(GN^WSp2)+i1WaMil4XaR$*Y|7c49FpeLx&Czc<# z0PV+Eki%(}wp&Nziw2Q-oSp4?XJ}~L08x`tRM?B1*7rPZl!7b;H)JV5J15z-F1V2l z;GPX+f2iI!4-8E@r#J;0yPlf9nez>vrS9b2YmaCVtZ0hfrV1J*tDnXYNh>N~x zb3q(1DHA6Ce5u8!s1}Gm)pRXQ{}dCNq=dU=Zu0JOs9QnoZm*!cgliCK&;&>1EjUnr zIWs^O`rfx1oP8Y1`6=@+`!BLZ?;AB+QMqDY*C5z&_D1hjWnJ@26v0UhzkBlZ$Ksfq z)2W8{-nC@j0FvIwVBB|%W!9(2?Q>AB9R^eGhX#DEJMp|I<7`! za}(t%GL>YQE;spm{4?@tvzRKGv~o4qyr}v7d@`QUiJ7SDyAQb=?vqwp+`4E`K4m9S z69oUK-}%zFg|;fTe+OZ2La9 zrRT&N4Bs5{>gLt0FFL4jo_b@Ofwv1y5Wdu=W<^?ck^9%gw~jM4T)3bJ#fjfl4tP_I zvWN&r0MF&zyz6B)bQCZz&cNIc&n3WcR2qpJ{0$w{qQJE`AF>N9;-9F5qa)vqR?gY{ zUXwewWU}=kxBvSwuzgcsO4kdsXo~;BKUGgdy#jEM=JF7YoD_rSwmV=Naeh;(jwdT- zLEJwEvqH-i=xzT##K$*wsh4tXifK%eEXM%Zw8J}SO&mA4sFZf4L?=Gy25_|dPZx@i zxw~)sry_N-u45G)ofSvl8QZVD0$xW$y&?mUWYs$uh?^_(@4AO* z32ruva+(+bd=|!Czt%dwzC9pjuBf9A+cw-j1NLC`o?Jaon-+F31Jwhw^>3T8jStjc zr}xW{Vpf=|n5bha6@-`X(?a%b{}Tt`ARhJPS7gb4+zcD}F!_d4%T`-o(G9oHrf#>@ zp|)CSpcTBDj3_tI{&vn-?bi4Kq}G&RgIs$t^aLW;^V?o2;JYs6ZhsvrOB|D^??BEk z(SltywtWGQ(B!~?jwA--rtN}QPM^caeqd=5#A47Bd^v*i(k7agkQ>e# zI?RXArgy@xyx!55w@2SI0Owo-s6;eEKuak1IDf#~9u8P!dwS7uKkJuZKXA7(Eo^am zKe;AD)NC?K9~j->R5io_t~K?|5z#K+gLoD7G1d1v>M1r-V_J~74q99_*jZ2?UWpim z2}|QQnqeeY6pbA3-p!i93lRj?d_jbFVawv8hPhjM0XqCU1p3c%T&&o2o$tN}Aj&lm zXEDT322oNX_4^;F)pT&(U!GU&puhanc3iqH*7S6_f9i0tSk1zfAKCi%@>2CSC?opK z?SJK7lKs%I`(B4vMq(Y)1_@=yjO&VB&Q zkagR5Oq8?zJ(6Hz$HGLQCZxm=9hQ4OIHz53=8$^_k(wq-k`VpghwUToDy*1x^KcC< 
zk6J5{OiAw@O&WCwwqgez6^79SdBGkntA~P{fOZQUvV)nc&&^6gK9HU2k`LNFGEsf} zt62${7t+Y6g1WNSl-;V{OKgy?aY%tKPi)W|xt$8rY+5J7)Ks}0*!*F~Oqprcu@v5( zm)6r$Vx1u4J(eX~;?|=NRsMjyYo=+kJ|&ikn4)8Mvq~;?n!F&FNCQh)(RBw{HBQP; zxBRslkR#A-2>-+XN^CH@tq+p!X{s<%h<2L!=QZ;tY#6{vm+gK(`uE0$eIFM}(k@cl z-lCcORvG|8u}iEpHXudSg1|=wnw_Fx^pgkm2V;jN(6MWH8?74G?1!y<(|okx_qjHO z`m3JT0m9o#VT)BHoe_&Ahs|n8HgM-UGFtnQTPlcqTW&EJv44BM7HnOj2wJX+3soas zEK&uhNX$5j55S-c0Dt}hTjC_gQ1a8<_3t>FU$qda*ho|n$77C*NA}I~DNmH+Z5HH} zz>)xyNGQ`?Cltd>OR;d+gAlFG4B2P|PqJ(QI3Sm(ppX*f&Ci@-`BHb=QQR(79;ub# zrML8-ZskUTE`wU2wP5`I5<&d)LYSa|4~21xt2`v3x#Dr^kUdn=S&RTq1=t3w4o?V+ zwT@Q(6VUBto!Q2s;OPji;~E0vO^{B7R`@?}_qWiJ)a=|kad=NkF!-f+pX`X=Qd#Q% zx%p1b3*aa}1+yo(IwifJ--qx0F03?V#|!t!S8c+ zw@eA}x~~ZJK;hb;t<1k4wzz47A5een>i-FTEK7S=VU@u-L1s&YA;zWu8$uDi8IOOr zyUH%BaN$n$kGBUFIsl+2l1cV->}G?&{K4G^*i z7nQlb415gQsL6fcFVLx>XqcNr1m_x9=5w zNCN+d^SvGu8y_AAg@hS79bcdM-|UiB%)E9ktn1(;Jv= zbktyL`dF=H2pxh8Hvb#8bH6mz1c*zKdY=p%XtiG_o|GaLvZ1Nxg!5$jpK)F$?aCcX zVQ&y(n>u;KeMbSa=*s!wT{u_T%C0L*58=vSh-9u?pSgoCfb;j}h2Rl9PD}KoY8LF* zVM?=zyL)#c@cuwGsN|5jF{6=)+e#YhDg&*hzGk9jpf{aDN5E@!C6P88*^FKj+PNrz z!$;LrxDfT?=;=!_=9oC)Ey1x~p)KjHIDlrl;esRnp2k8A+N&``?n?LcyWq3YUdj&& zZfI5)gfPcXe}ny2R|)XsdEh&vyMm@L9TNO0BK}?Vm;Up?m7Q7d55hf#KP|mL)ne07 zMpRo#7^ZD#oiy_HXWEZ4N#>nd_B6(Fur`i51H zSeI#JM7xsE`7lP=uI(+exn)s;H>B3zhR=85o|j$mG^C~`r0Gm^&ZGrnL7lc(=s>5? 
z_3!SXRzIES0ebMvCLgZ)iH>bWXypF81qp7)ykr%D8r@9lL7hvPZ?#R5@?_4?Liab; zQncLfzg;KmQ0uTtDrbTcS#0zX9ct3w_b-F*?JoNX6H)wgYeScREdCcJVI##D+tVu+ zjj*RvM2yg~$`PHzNqN7K-l#A_I}P!(M2J$EaK`jK>SxUtJDqD5Ag*Jw@uBNG{=(s!|_c^!?$8) z?K$h9Y`|Sy!-V_Kb_HxSk$>Njl*l@1lfo0XQ@jL1lp2TvB#H~xe+0QpMI z0go|<1OpM4ZZuV4lj^ zjV@w>;qN`|)b+L|c#n&g5|{(zJRj(p-G@DZGpCkwee?(!HL;-u_2god(UIYU2yft) zaAubz5@~E}G_hd=4c9p^zmSXq%JtbSzegFg`v_xOE0P(w|*+gqPd?+{9pkVO@ zN_4`?0|M%HzU4$Uj@``uDJ7Z(z=4N5En=IGpOXE@2ltqQFuGZUZ;a>~F>4 zucEb$>7{!ttg`Xk$}ul+={C(RSt1^xht3&ov0DMRH=Q;dGwte#RG z?*4-3vGKH~jatq8nxb}b{#iz~7KPMS|DNLO31rFZ{(3(L+Y1F&?BTP#(b1 zc_n$tiozWeb@x!wtZZ$IiIHZ2836JZ;9KQzzgQaBek%-1?387$I)_;<=-$5(nSMU4 zn}=Pu@F&0bvt5Js2XJ!MX}fkX?T6ls#6+0hWW3~FI%WouR>M$;fjt9oA>|V(LoVy8SE42puT1bZL{&j^DG*&*F^RCer zsi{7H*x#>}P1}o@_}KbmCD4ZAF>;3WZALEYLDj0wkaCCpjGw#@BV_mJ_f>VJZoU2`q`AiT@YAdu0rp<)G39ku-@vL_g}ToUkA z1f3GL0$034XFlVfmvty|YQ&xTSHXF^` zfr@96(b!Q=J%WGYnRfON!{qKDFvX52u0Rr@23FAP47=)ky~@eRS!u`9ilaVdaxo0Q zOHq+1@}|_0FXX>~w?yhq6@2RO9nrx`_Atc^v>w`$&xik>AGW_s1qDE)n& z2q5c!X5i_`G2ai&PgVsn05Sa&OPe4>d92X@D?>Z$2Tf2fLsEOM9#_W#*X$xsj&jky zMD_MzH9mWhm4DOm7`g$a^7A{cW8pr{Y|%g042($%llO;xW^Nyc!SogJ6{?!STW z%)nnn_o=;lU_x4qJnYd8mRD2lB9LhVG2us^L~7;EW{EBiT!zQ6p|{GW13{{|r&L|u zdAANeLWQ|y`2iXa$PhULMa4!FPiJTeyC{+*MWZ{qObol88NIyZjMb+owG7+}m%7c| zo=Izwt|IeFS{V*|r_fYx#l9H4{C4U>LR=3Zu3Hn2+AnLKpeAV_&Q;tT^ife~+6xn`7m^B}aEnm41lYe&CCAL5e$HQ3*D}OXf-TSses=j$$tx!od|4h(5n6BB) z2p&)fz2YpUjVS4r#}iMZ*naU0{=wSS2!;Uz6kS!aINot8=9qzgfWp1ns8Z(M(M zj+FFH`b3vU-+9n0@GT81S^B8$eZF$gLM-SL!T6eexU>zW#bR2{{E@`t=KYFc5QhT9 zWh;?8AKe(S*o2_CMGt14?i*&?>&qEs;h(vPR&|o<&V1wz);xZ}*93(IG&=hP^hAoc zXUj_yZ3#9HN6sht^xelbZ3nyK6oy70Bow?Al#rS{MQoKuNnJ-!f4>OEyMk1Y&a$x#^=Wwu&dIKYd z>qvFetHzpad$xTXY4aRMWqq!37@w#L=_h8s!5pBkwLwaAEc{r{N ztzAK9`fuwSn}Ldxz0vTxt@g+G1s<{UKanjZP)L;sHLt`*9v%QEdC~D;u(K|o@J7Uc zqUJ*OM1GJ2*v2T07CwjITI}Xl9fEq^oh}g}GOqwQQvotOYD&k7wbtGw_A0yi<&yF0 z`+gaW&*?0jFSDh?R(0CEug?-vc|3BAtT=88bTn)CWZE9ks5ckAN9m;NgO^G}AHtPP zFv0XGQgpkI$EZso2dNlwfSwLZM1Ez?N~>YDEyV-MY+m_0gk_4~%stjoQ8yBi-Xyv` 
zUW^os+a83C%*!@f+D_xf^Kk`(&M3|Jt2n1wATuWbPU%R+h*66g_&X0V{eJL2xQ4mc z2Cr@eLbwaKa*&CWRoMDMenKRlK`x%JpI5FR=)b^hCT1Q<*E;*vO@=mj_DBGnUf}*x zg+q=7#b5A|3)s~#RtavtLJ8++l2hjr=#R148XAxj6xu>h5Mn-KLi^DYNd&#Sc*t$2 z{=c}lsx)JmzrTxKG`ShzCs>AyqxopoYckmU?KpK|=^3;Q+r^G3ZN`~ps0-J>crMPg zilYeHy)xK$jWZK5j;o_J@|)@}p-qVSt~YkrA!tPF5&`G0zNSJ-U7*9wQ3#iob}_@m zyg6dW+z~@$`C;U{ogb@BV-+{uQ`ui2Q#ppXY*kmYXRI-`11k#-cn;lL{^5%wb9)ye zWx#uweo<@`VwPr{`_#ITlIs$tVsjGU0X5Y{kR$vUDGEhbZqgS|C*@)kt$@vRW= z#bo-P?*_J-e2GS^bvkoxPVW1A%@JV`jc$w2N_O_?BY)J5mXEMCmFl47j_jXFr?m)8M$2ycF%sx|T@|Dej4Z6uxdpKR+`yVGN^*uw~KX@^T$PY_TQ>6LMLk;X$)*_OOUD@bh(ob9s=Zxh0^>5+eU|!DZIxHy~oUe zhlO!4t(~4-#dm!wk?v7g+Pye7nR3iy~UVcy}pNs>O!gEHj}5iej1gNp#1|FvP?%?2vEJ}n4)NCriUh~i>0=N02lYYUn*YN9%d_i-c!oaJWBWKjPgbq{=P~J{?ZztG zI2uZ=9HRY5zUSoJXZJ9I$@}zJ{fuCLq@Ww0aoLy%qxt)6Y}vVt@*~FAQ@YJh$A%S# zzjFXRHT{K=YN_FK(R$I)vWA**n6aA45<7F#}7{;BX8tE6WX@+C; zRO|SK_6F20S0v4o9q9OckN{3UL81KwW=%+8mbz`;yVW!zbG;umk)2vbPCv=~)8Kag z-Yxj6lfA{h^GNr#^e`skim@XrDd-e}HhHpj`l=}yAg($#%1n?HBe7-Xs00uHGVw4^ zaO|W}?q98XM@C!FhkB}M)Vw!tss@(Kd_jrU>|3bgEp>Uk&d=QYUwq}mFz)OCJ*vq+ zCH`VG>5bfa6f!P2`9YHQ|GBAP%GD6^hNFO#Bq8U!$v{3%iG53LjCP zwg0lES*nNqHH=joOjqp{8&JSWRE!&*+Y8XMG=ler#{(W}PWU1H53WgwxMs(OYG@9c z{z|Dd_lH?QFdbY!fs+C!f;M{*VlrVG*V0b0sFS9I<~e1EY5(8uMSz|~{{;?8GsUl> z4XLBT_P2(U3RRqYAWKq(WuBmY#&C6oeYv5+qvs*i3}1?1H-Ay?`)zFxis(RTQc`M^0YCbxqL`fLVBVZuvpkQP``oCI{l@5a`ga{QOC`IvJ5{*-U9-oDd0nE+ zFYbC>VsMs~KQ696HC*!f#Pu2#;59r;7ihCngTL%F|3q$hcPXgs)~csupNaph6aeTc zUBhMm^a}IOxk1}j+JS5(SvsQ1<9)@Fmjstjn>al2^|l+W_5Dg&75QO%BVnt_B8ghW zYzmAYxOQTfKfv_$eb20$>zz$t{peV5wE_lEHYhVTR4yWpafoZK-|@d6*VO+tHX_(a z$n$znmY{ z;86W5mPhF(P`$S~1FK`cS=W1$`lj=v#7dLB;lJ~{c3r*@S^JH)F4Lml``tRvE2|?( zxTD>LONGrTp7r##H+QF{c<3%MIu5sAn(>t>)Q(o>j*IFFB59BrDuFZHk|+*;Jojs- z2NHxf4OhAWdDlov$D&G&l&Y1^_>ifnlv2bNf z{Rso7+jJD#C9d$B8{^8GLy58D-Cq+BS7aJv-B3OwFffw#F~@k?p7QJS---Je&|0eV zY~thUz+yyc4?e8q?yw7KLEI!pI6_ypdJUHKqw-zB-pLp2lNTj3X2s{?Q}fJX6o<1k2uZONywcs-N&v>`PL8wxauLk$Y3wxMZQeXhNYp? 
z#P<%40ad)64;ob1gE~dU>a*?-xRzcrm<;HUZIK|W}r@+Pu@d+u7 zGP5-f1m`o;umn73;6dAA@_cvlCj|+IWYX~9^* z!Siff%o8|ad1sX$p1dgwr2_O!UDY|174;3b1cGkOUO7*e@k%@q`n*Jf{9Q7bvndZT zDJgO4YAhUvD8Ye4_$3_kwiOg29xWG|H8Twn8~VGT0g!h9?tf;e6SC9R)W}q5s&XkQ zS4=r57fjWUC`uX!Hp-dAJSRuBT?6@Cr$s#9(&;Z+9@q_~wz6w=o9~6oqzGw9_{(_l z{a$eXOSz3o>QeMx5_yaU-ErYILdDER#1~uTQ1v%Bg_i!cBV2c7{VV4Y(0^?+b5|9; zA>2K<_ZKPF#!bbK_cYRA0siz5M87^#Q+BGqocNa@v2Ym^NQUccpURroea!|gFf9Nvbpm8T)7{MLUGe9VM;@{|-bX*D@a_aE;6Q`8=X zU5K?hL>%aj;9S)e32H%fqKl&$(Ve>K7B8f$krEIDDIrCRHw7;1{pa7ptT2!(#Qb~CLsa6}qDKet<_X-S zhn&byGkD-eg>pp%A=yWr99UQ-$0l8vFtre!MikFqc*d z9RCnqdE-~=6A|uR(oV$)$gVE3sDLrysC!DIe1PX6$F>1C47GlahalLGY}ETUolp zgM?kO7SAXP0CIp3vMn^bI9xynaL8~&ySNmNryS;I*@^~z2n z9f5{~h9p@-!n?FJ(oRICRhgfZQOK&w4j{AOb7w)7ZGJz8MSG$+u6n`X%EAV{L_g&i zMzPW|+)%G?ZvzP=%j>)$#06)D`?FTVzf?zhdT=-TxN5c?{%*Ky-3e#@4emfU{H{w$ zj4$emWvJfqq?Oa_F7XW6l91cIf`==+aiBI>dcO~#YRv|s^F5E_YV;4cxXD3%{S7|~ z{o_a?$%poUmu=TC8W3056|`_2X7JX}V%5*S2)*vY9>$R&%1uOYjx^Y=F8(}9?l$Pk zpo6MpjSJ0=^;@hw@X22T=r;Evf5{IYbfL>Y0UW{ZAv0Yhw`j0GTXZfta9ulU`F~hr zmYeX~HBYU^jKRLxhIwr6y60xpT!|@<*rc+O$+e$XA?LC70;-M1;U>P%TRFRmjxf*E zlx0#1W5&UKMZ}gC^4xY*Yv`30wLdr;VL3tfTQWN4y35*UvwtQ9h+ru0%6S}(n7GMe zsKIEe9s}e6RdIQ0ooQvt6W~&4E$)u$j(#P74Q4C_O@(L`;oQr2{IEpx>lE+{$SEslLPe6@_WRMN3Y>(5s|9KbskafAb%T7m z`Dovp`6nxW@>m&CJC~`n9D)HQrc8WyrieUVG_eFn&ty$egBO5P=c*~bQ2#)X(Ki1h z*V``n00wf1sb=5TB3HWR)h=;u>7CxCylsmz&(n}LR4*$(6Rdjk3HIk5_tDc|E(55$ z@AHG?g1+ngy^=iI=A~qX(55nCzl)yvtbE7w*o5z^v2(nD1HfskA2*+UWc*YSCCJAFT=FxJ&g8~%e)0jHVGgH=!j-~ij(r4Nj#Y-00Hrlj??n88}3 zFOVsxN5l1w|5396(MVSvbBGr#)lfzWO35Ce{pSI(BCL&H;(q!&wSi#q9clo;vCc?I zR7n)-nAh7uqoyKtEt#c-5>=U6==BY^thKyz7t;O!PZcJMM5o99=RRL4m*(SRET}Om4e8SA>{8D74sViA%>A))1vS9WFLyJurWjfB2>b~(4`)8m39ETrmDfT%9=z$&N zm47ALqNCcuDzidIi&gmwfE>?H+zB4i5=tB}lHdMHRxj8yajg3MIY-S`PKgc^B8OK4 zU9b20sP%h~ZT=oGzo0=A8*qMbHulhmjg2Uk9Ly!u9TYW`b)u8$xfE?E?Yx42Sup00 zPnIjyJGMj2uNEhfUecxV_HuzY;ko0CzG4CN0C4{^gOck@H)v|cNPpW+qeo+qlD6;- z3~$v)x|br4uxvUgwI3e@2#*J(QYruSA6%$3aW^rmPENbOa({>k#HNOqG{66M%DAVG 
ze=0Y_!B@dV6E1_QNb6S3YU4aH5q5KgG>aU!No0H|(2Zh*#K_uJ!?0{;uCBQJ?1$^BIyshh###7k;krRvJFe$(WQCaOXLewW^hoRqF!Dx_y-5 zXyB}s#?X0cHp5?!dQ1WuKV24WEMANp1o$j~ff~t^ z@>R@rjT@x31+<|mXQ@Y#H~1Uqk(kPnq|uvunSMr$x(+)4^;ZNffP)uIyCP0v49-5v zBFde=|ElOIO`Y_A%za~bWzXAmoQdsB>`ZLiwr$%sCbn%(Y}@9^g+z-Myh^Y1;b?g$GeE!0);3l&C>qp|}JJzjTQ^|6=I zS=~(C>R?vMe@VQLYB*PzN?H@O6W;0=NBX1Fl37$3jOyk99F$PH+|0dpR<&Z|MMhEWoety^e z7OsK93svd+{5q;K5K@H>OT5bK(=(qw-7alX6gea9!0vDQ(yJgTq}?|-t9x;|?Q(^s zSFB3CLTfFUcdqX3Pyzm^WH*ubY`9_kQtlH*6R|y2>HxDen&Ex}fkHI^p_2o82iAQ{ zMhwQ9ot(>|N?Wrl-#3U(r|+kA7}7g&vTRc91L#uSUQ45DGbKK(x)z3gAuBl>=8w~@ zGpMnho5S486UMd)ni+A?dl>#?@g80F1>fOejBafM|E=n<@!zH6HShmE0XU>GlRPNZ zMi&{(a>bcSIwLxGXG0(Yo0k+%W@QoQ6mfF4!Gg1VTr_PK^D))%Sj2x3m(9+I6pE|$ zy^S`x^@H93y7JX89^huLWDYKqF|PD4b7Q@+nOCv4f$g%D;S*18GDG8FB*90Jm0mIx ztC!0T?fj);g=i_h7`7Xvm%2%d**XDyvIzaTMUwFLY+b;-jpopAa?Jf~ZHb~fA`}To ztsPhdR%#7XJ%PLni}8+X&OTG=+htet?h??K)M{ICNhD<)zh5|6$YEkW`S(uBb)zQ= z0_E1|=)n3nYK}GONg|8lkB&NqdbI<;>JH|^3fA4&yBw0 zTYw6I`WLtoZ95-xGdk9It`1&$T9h6~9oagUsl;26EIVA5e$L>WbZBf-AVnB0W&BD8 z6N^iP$CBDI3|kt~M`%0*=#r9qH6#8MdeBRTgEB03;JgXn166lXbA~<3$PyNv2RdL# zn+^)v_z-J5!+1syu+iRZwVM0nhIKI=oCZ>cMHvGK^RJ!d*_jng^z;b&%83Racie=U zki$Pp1xhADd77>bo%cBH-B%+bwLaH1A&k{BTKyL`JX1jLmA2@v(W4W~}Q0VzHby1ZC)9SZDG1A+$K z*jx#1CGmz4m<^+D*s0_`GS~#L%)Ghpa|%8(=m=|uENtP-px8fWp^hNd^6p=g$*__g zk`2WM&%ew)qby~;MVWI%3xS%J$2hCa$P6qU@FBYhSa%K$(Qv+X6{kN9X7a`pZ)gU! 
zCX92d5}Jo7f|jJ(87?R*aw-F#M?_z8mrG&%*CI^p{-{2(mNC18r+J?%L3#dLJ|9P| z^t(RxP8ika#2jC##>K69A|IX6jw)D5?*Cf2m2PlZehKuo9CgpPs-oz7c;M=hua7{` z*$HNfupvUIqX)N7t}jpK0+=rg!XA3{{yt|-WX1)^SyqTP!avvoST%a`ac(WD+=m5H zVj%DNcW4OIS=i<4iofptm}{YAe^~+3xHOHft)_7$1AHzProjw_xFLi{E}7NC<`r}B z9WYxYlF?Bysrner}1h z#sSJo`Kbrw_76?04`I0d2ABS&Cvtru5;DjT$?PbT5uSEeRWdHuLYkxSi4s)ak_Ko@HRiZG4{McWY)2KFX|XL=j~s`2cSqI)9mKI%B&f z+zH?e48Fgfj#^FV$kCzPU<|^(zdgxFG0p+HFx4sD08tkzX;(m@=q+@^TVYB z>EOj%Dci?5CbXM`_R;g+IKTG05BB>#^uIEiL?Q=dd_f!b&6h79e;E~BBZ_Z&l0y)U zhe2#QZSo30UuThw(bhf8nA8oLm~NsD*!K<*TfV^l(sKzum17b7{y&cwo<+MYAZ90I zbSLyPUX|?dBoOx?vgX_sGVY#jyl$Ei#S#7UL!pbcEprkFfffQot)|AP&?d1VeO~}MmxH{W zS{oQ!eYI``5*pEu^qILJW^k*H^l;vg#DxOzCu~*h*;{6GkPHL=x!_hg!!O`*JuacN zQ70tP{Y7V!$%7;LDG}LIl~le8zS(NzCLmg(!*i4`^k-kgY0Tl`<9ALn3ejUqayRYG z>FulGJ54*)&8uvp+E$mBdM!o~ZTQF%n)~)f#OIU>OObtWokHC6Nn(g^MV<_6fSzWm z{@eh_cYh|c+rnfJgM>!>smRfMl;)36NvSB)d1abz4}Pzi;F%OvKJ#||t#(OBFOepg zUdn%&el-a!A4|3zv*k^C1-EtM0DpFf-C*ewK~2KN63a+yL*^bBi9BFU`?OpaC;KZU z|I8fA1*2cdooFomcW2e=FKefe5b$(mLg8)O#=8OIq?Y^deKa}RoF5~`Bow<>s~B%E z+1;)u_wREx%pPqJM~h$zqOKFj!f%b&sJH&X@3-@exv`Yc%-SI%9u(J|_DiKF6g=Pg z^UYsm3sQGJ-QHz*PNEyho-VLz3 z2FbhHTara8680#f+kE>o5KC=?Kgz>$@ZSY$@u2h{(qtY;CZyhaEK8!bz(ye!o!a;4 zUDi|EBCb1f|ICSWDu%}?H?c~8sU~+vIZnjpdIRR&bh+#*QSb-BC7e5QO-U^#%_ME)ZD3;f|>epOR^lvir{1y<`i8YaWWW(I690in|C)u`)4 z(xq+}%1K{^khvIp@Lo8&_M6zwI-d`q3+L(l{lO3{gsRnumvtQf4ui7VPiKC3#9?u%5UHJYJ*Oty!?#NQ%k0h8<$UeTh=cgDq%6s7a0Yux# zcizQ)1AjB|?3R0vwUY2J`MWqIVH}{Cwn|ri0~0&n`l~@m`&wU>NoVi%!5R{;lLAvo zuIkWLfy^-{1&yN7+vvsbK1N2gfyU^o=MY$CH97ejV2_k!Wq%2_kuQl>d**g(z9&#( zDU75kT_eki)daYtRdV;&uzk7`Kzy3p(OV$kvHy6_?x3rpC-!WU5h0G`b&T`R{ zz156(XdG+izYkoPY)%nmtd}&dVgo9Evsw*~kzP{4WpE&51(6Pu6sAhwp{@rcj0GoG zV^O=X2w>iQpU}e-jFw4n74*4V; zY)Jk}&S7c#=wGLsf08Tah9XBxK0knk1+46^43WZH2L%FbVpW)obhX7atsB*I_CUOP zXiPNH8%k6EBR2q{3%5K5gB!c$MngtxpP!4Xfti&G-L90Xk!e0j&jJWu_}4(auo|yC5Vi zV$)#7y)dbsS}v@=`?pYn508F=!uEUXC?ARfJ~cP_8aGZ{yP#-x^3&LcDS-KULmj(V 
zU2d2be6?ZJS=p?pH^bo&yK)KU!_(OD8kMfDAE1o?7%Re72GUbhA;eIm+^Mc1|Dr1_ zh4&^j8i7{%u0O+_Aa8?pO7XNWX-50j%?7f!v*Y1qgL^G!WFee$QP+#J1d+PMB)QlS zuC_ErAEWhpTl596s&U6%zOpr~Zi9U1w}5LJ2I0*NIl6JT%w7`;{_#>=-Pp9+QnVF_ zUkqp7aZ+eHrmw5Jo_WLI3?R_@6-LaVY2L>7;Ii7OZ*erPcJp2L^PT8c_E02 zTt8eATeB4d%Uo)@DGR2QzFJ?YKXx!ogQtCm427g`2-k}!rw=bv(wvHo)y1X}0WuUm z{mv(OL)p!kEfR)Cduc^ypdsooyM$(GS~ z?q0=kbUXv^3a8#IpW$Pb3s``>ce_M*z%v99!&?r+_EXfS_2z9;w#h}l^5#(=B!*XS zXx_H4KIc^B&bYe8b80VKpwX1Wt-5kwkG|{cUhD7g0344Wd)o9Q7A4 zxERt-E~wKicyH`blx9h9gvV4cgz=)mO_hR_>&=jSRO1g7(LiNW(^~P+XK7nT7c}d$B9RPf;_?ga0f20oNSX4LsKu?p~8iW}oIcxfI z?~u_?dU0;Sa!taW7&Ok8a;2U*Ba$`D`uA}5}yEvRuscF1*~TX{C)6z$eobY zuknwq5ux8mVz`K{fyU^EY(#%fQTL!FVd~>ncRkrKPtW>_zRIfu=xsinAp7AUaJ{0?D^L~Teek8q!<)d0 z@EVLcTbNzJoRjkyHZER4s`{n^fMZFqa*sqe)F|Nj)S*2gc2R`*uY#6+AtH{N2*Nz< zL()c|=`*<={mokoN@wv^6pBYT^^3wv5ff_oww~@Hi}d@ulH6e5Fi%ib`SFMy_j&kW zU~u^rXAlJbCr=TVc(a{nWEKp^%xMjn!@oUu((TD9$;QtHr|l=tzT`JIkCliPx@7>L zQ-i6~g%qKDx-lQni=382I(1cG3@^fPd=-0qF}1T@6bl;=R?wFk%i1@}fOo8&@6rwZB)%seX`LiR5K;}7h z)`E25T6x6FWJqWy$q!QDADI`U^{|{iSJPm>>yYd5!|&rON; zt_Tb;4^T|P0(;ZDlz3rKjM1x z?tj-sriPLOaN6vXtDlhA)mBhTtdTG7+C3&%T3|9H?V7(b?3|?FL;9dyxf-|A_mLp3 zv|}<|N+Gt0Ade(h)j!+*1}R_GYl?sS{EE5_!QY=2*R=wTfHK?RhBG!0{YT|$h1o(b zIXWMmZ0?GN0UJwC$|oY2amvc=#|o^M)_h~o8O-AvY>B1q_xmUHzFXxF7d!KIaJLXA zvUFc;voAQ~!CT&HKd3}Kq_y_ZXD4I!mOC$6duL)TIt>qPAVj~$-}+j)BgR5>q_d9i zd>iDmVW2r(NCGAvIax61pVWi)O25judhX3lTZ=?g%yum|hC2rslk~Hf-OTnGGkUQc z?+wy+#1p7YIUX&*O&G5L{tT{=&U2VipR9}hhQAQb9xh``Sk@mCT($L(ocDy9wUrQn z2j%~hay_R&OIRh*tMa=;7#2fse==Bn%z@Hxx##%ra1qdZ~Wv0vfZ?p+)t}>PioBhSK0XV*}lYT0)=|OlmY`8|M?%^b+Sh;4!<>6|h zd|G$Wfm5%-DC)$a2x4VHF|H-ZGa&|Ln5kZjNIHtl4z?>Ih#5lwU3H@P4hM>Zi^O;z z$arl68**V43i5ZUP~S z_QL=;09yYuLkNMs*Yla-vly@#4U?fQ*Ky(Gf@?I$?s4Y*I(VXVcaF$C~r zCIOs9932b$v>gs%KNNoIB78Ru(+N6gnu?>2C5=Zwz0=S5FOV$QxCOph%HU6m-N4>C zB65BL`B+sl@r3rh5IW+2jf?*e=xI52WmJnf7M)qY^Vmw5B~~^$VL&gBsL)$sOUpuG z>Oc@?pl?8Nu|rIqU9kJcL5kA!Z5>RNx3cs)e#D-n@EO2o1IU|%??AMB54}`Q;>x&4 
zAc`^IjmYG7Qq={sSzKO1kJh};fuG31DB`wldy7eT$*eSeEb-v|F5t~hmHTnzyS@>6 zKEEYh@6;obR>s=^M2XlurN$096#jEXWiQjJl%u>s>4{DeV~OI|wxFx1eSl59(!ScR z$Zu3E1i1#+XOpGxeFwa)ron;`g>mSZ?X#X~Pes;Lu~4aWY7=nJ*ZB+r#XWcMl;ttM zKVw0!#`6;8nPTj1YbQ;NaHAA{meA^O6dG;-e{7IuS~4gr>?(ENz>eGUD9>-jfFlZk zozj=;mw9uzH6!?dX!P4>jB>D0EW_zw90#=Bqep@U)eMm;qcz=6;=}+r;m=kefm>+5 zgp3D8VuJ)9$tBDExU08bEcyRS5?%^R{!G1y4y3(kSk05XC&%pk;bT?Xb+aiHvyAg* zO_1EiP7Cl^1gS`q`GTm-wd>Oc=G|q5Ex(TS(mN=7O$m}~)Pu~7QcP&TNPDp7H%h+& zg?;PPOOH)!a3XZIi=xeM#X@(#LI9sXXXkOtsjh-}7j`kWgLLRrL}tx;J0t0ZZ(o8$ zLJ4?^3bzqjsUF$);#)j*6Tw2DwG_6c$p4t{GPpzj%NlW!0q~*Me~wN4GD%lvG=xg% z24%1LWIevL0(#x^zY36&Jw4Zb4Zbltw+xj~B&pqg6^5B+zYC+RUd{lb?EZ(nv-eO2 z;KL$eYf@-N)nCLIMY9xB@!E{`ct6Y7oklt7SYs*W&oHTZZM{LB0(uGwtT@Rh;7-m5{u6*x6o)YP+rjrKEwLYF=eE;l;FLao z&P4Y^D@GwJr9r~LgD57xQA+Kft^mpqpQ@QPwm(k0CtRC;0BBt8b{hxLD8Qej)p_oC z)3gP(`Kkw9lD-C+^OCz!Ik{8TD#?OCQHH|?ESqd4iQm{BC-sIl)>jEe>%rL#JUbJ^ zpx1b2euv$sq|s%pHf zWRO8REhK_iapX&j;oggb3XIVsxwr&D z$p=p&Q6_pC<@N=GP33Yg)pNMeN+wYyeKvHk>OLKnvgl^^4UymTRbMCj-mR3L=9;@kGMyl+5lO5!qZj2uqO?DzVbt_fGDos>#GDimRkx3^* zZAkSSG@2D1ZYdUxRZvj_-zJyCP>T3tnL5yqz%099sGGb&oJcVA+*XA{k0nkeT>X}T zz@Cx5yNoIC|D6ws-rtXi2;L76xQiJ{?;0qQfi9^0vubGn6x;yRl z>UKb@f*&f}d0mqJT!(5ia@r@Esv6_cj9p-)kFC0~S!!)XgORKsn+{8VXsSq(C6$n_{SA(PLZR0IKN zgV!ox{ZzfudEDNLzy-@)KzAXHdV~lyFsC6U%}NfARB_uI?+P8zEWt^3I}=&mGf@yB zT?R=PLfTcG6cOPpVgwMg<+KBE4va-EA^W|(j>$3?OpO-$6$OXPPtmq5d@>sJXv^9| z7Ijv+c@o7MVdOT7pP_GjkiE*g+78mTA}Am24~H)2bpi9W>iDTy{89KSQkNac(}ZmF z7p$T?wZUaCgJY~df7t#8*7M=sGI$9LYsu0~HewYmfMjOSi!E`V+WQ^JzocB*4S@5% zHo*U>EclT_M`IafjFD{?C7lYa*xG)&rzjp1u`^R>?~Lhd5EoB%-czzG^8ixnOY#p4 z-K##i)=O0#7Q`Lr{QrY2?lHnVPS8l*}|0 zo)_WdaazwlhCM08{YhDm6JQ%$kHL$*);}r_Oc%1&O4*h6!g139e7wXKK=BLtGe46+ z_-T)K)c9*p_W67&PI^}sqn(v`EJxdSP2P<~REzYY5G};iUy_?T{#M};opd;BawRM@>tz@U^YF2yo_QZg zOol8R9uHhkMvEB)A1#kh8UTFO+fQeu+4hgH3%>H}-b9aY73F!8Jv=&r>eFhrO&@5s zzIwYp8QG9y1=lZ`oCD%`{`-t}O)q^G4d63mmx{CNau0Jlv5Vn9T;6jZ zD^{AXKe~2!a$rHEip-3R_O*-4oCZ%XdhYr;Ot4UXP>4ryVVwsl^b(WT4Aij&;3M^> 
zg^)t7EEZHJ;jzK6NKXnTVn-31_VO#zZ2|7+Y1xf;)i^QN;m0Ju`@|f&%SiKNy_^4P zXRXsfRS%CooBekmtk$%kt%oDN4?f9%-5C`lW4xWEa!h?bND2v6L7B*>Ar1fBx0Jky z|6Gl-R3e*bpJtk-;)`VE{|*SR^o)ef%_IltiXO8fNv{p=BbABUQ7}@du@rwR8XdM? zhep_B`h3ZG|Mz31HvK*z0_>KE3KT@iq#P46BAFsulheT=>Sa^h@B6#p>tB(@$QwE> z%VXt$>emBau>KY2k6%o&C-=0}X~0wrAVP&&=B)Hhp=E{$SmVM;qQC!wkZ>u83I?~lb6%f? zOkwyiP-`~HUKN1)QA4N>G&sLM3($qv#$1S1W6T;fXKu!>OF>8y=8~?Yj0$D_rubFf zv$c>jjk=sMv6c_+L@Ppu&X$$xNluyIOEZz-Nl&+mje!CWz#(2um8SZ zPjuOHP>^jYVpw~P@G{Xk{L7Y#RC;$fN2Ye9<&w8zEA5zi!~0wth9gzwWI*XTnOh+| zfAqiJ@2E-Kzca|`av^~ABOBG#1>RDK&FZiq?!Wx&vG#Xt3tnvRI7K{cpmS`j)& zZ0`7YkX$4}fX}zYkF-Cxoo-j86Sc)~B%b%gv{MRyTLcY2I8%i5hkNfQ&fkBr;*r!6 zTgG}&j0lOY;r%;vFhSt{KvlW$Gt_(l@S(80S(^J82bCKvNOTl1;c=sGgDZHl@~F>o0-U_$v+IBj!%b$N=s);RJ*v z8}e$GCu(cP()M7{PTNp^EAO~5-qmpvK;$=xNQE^4MBHztK|!09d0x&prjToaF5J3M z_kF(7x$?glM*6~kJy$8Pk9WJ}2tZ8Q>+o_XeIBf6$QkZTdpQIVkCdY8UY|tp^&xE2 z1MskRqFk6tosA)iIc~dlssLRJ<}1aMF|~Nus(pw~`?5sOub%HGX$cD2{m+s{q~lZ9 zJ%+1|)=&?f+(eIZcA1MF+@sjJw9#f@8$V-PfVV520J_pHu^B*=Ny<{|h)0Zt5Uqam`>|s|XRS|!+U=JMn_bU8=v>hsSwHe|sz)IJ_)wYF zd7HfdDhzWvrso(s@WvD~q}aWXlFkcvTNGfA0VdN+49_5K zO?6~JV!OBq&WgR@7W$-~wH#0*__lJkvcCa*Oe!wx`W|Tqg4T{Gs#Gd(TBalImw+Ua zYpobL`ySV>p~P^zSHnKQQ);Ze>Odfim}Hnu67r1_cp&Z60y}@A{{Maw0N($~P*rwG zM+9NTgv$W~OH`vSI*Ub!9}7;mn@_4Dk!1avkCE#$RWowUcwSuEP4#)e{5ZtokU;VQ z|0TfKS}&Wo!U_<=1XYyf8iR8?s%Ac`Jf(nz%5h8iL=q1_HEYJR-*JA_|BBwzSrDW= zUZzW`cy_QTe=Y0O^at+B#f<=6K}tv&1Ar50m|9#q?|IbRyd@U#DI*v&J`TT;X(}jT zVz+8c=UGESOJ!p9sLoD`Nhx1~q#7gA^htmO(ND;UZZwqxts<)j!0AM+Yar{Wl#AQ5 z0RQ-_i+v{@dD;uX^Kcf5NjQ3IP*|Xk6phJaeT_Sct=a3+7HGsz;-e zrt$q;1UO-PHE+xb4~3&DFQ(RpBOjUU0DooMsInasARPZQ5Xxc`@Q>q?u4}2Tk2T93p;OQBs3ZftxrBe@8Lm08lK9!L!hK}%|}A)*ne^1 z1>KNrFA~-*xkM)_N5BD4+xh74#0!BQkB-ji;0L`Fus44$!j&e5%|bHf2=yOf3{N;yj`9Q;Vs z6EnD)%Tl;oN_h@OskoKBX(_gfa{ygGmIuU8e#T?6yryOxa26J3&4qqv9c!-)|&_7F>#rZ z54nc`PFf3jR+%8Ur)@IPZtrb;c4G^M5D6ur>JRbaS|MRa;d%_Mx*d6qk`)oSm5Tn( zE9f=Lly44K?(|ZzB0u?vYWR|l*P?&s^qjj4ANcP#X|H-x+W1Qh^MC_(A8A- 
z40RT+>l=}Cvz+uluy)tOIE6SeY7->OhM==H3BK#A3-0vy(^sWZ7BPtjvI@(A)crOx+b10LH19M)HK?KwWfX^I3L(FuRi@^9N+q9LyNTJ8pmxk2BC&e*W zR}EbaLfId)AQ{$}MAYBibnHpFrYPkJ?P_ED4@fE&YB>o64UFHp5EsJZcER$oZ?`gC zG_+E!fgl2WNOhV9VrI)9%mMCDM2Z_ z@NYOHXVB73Qc!T*F9Bt!D*|*qNLN-b$-1sa89cYE5%F1aHge_Rm_nNVNCy$%slGw? zeHF`0-CloL=b6ivF^fC$W$+h3hiwL_8m&rRLB?KZCRw^$Q)y zREv>DwJ>x%!0$?nJnexwxT960zs&wJ((VM3%|*{Rb;~Tx9ZlzGL^*?xbn)f{=pr>< zAgvhna~q_?j6f9BXSn5;O3j9d-}U5qUS;WeNG&R02giiVHHaG?gb{FnTuIS@$9fNA56>7<1~z z(bKvns_M`8IYrk}k>YoQHK_*pGxT4x8a2t4)^1ikP|&7a1x|2R0oQfK3cmhH0GWFk z^8CCkkV(r7EJ%fK0ayn?!SsXKl@`~w+#J2anX;)+QwabEKH+E3VOVFaj)*reLg-xm zH2+qDT9G0ck{4Xm=s0y7f*IxuvTzC_m<$9`7^-a{-+)!1_-<_kBiN0lg%K7=676{ z*0eZYA2ho%HErz!Q^qg5c;i^+i-EWdSAgFJb z09U}sCs+6=+ByLE-1)W4P1vUVT+vD+9-H&0ETMK{DRC(vn6&^EO2==_XO)5G#{8qG zj|}4xX)W{C*lEJR;z57)uw}pO>mhpWK?Cr)Hl5!6>8LTd)p10IvOu~VCc8klB+_-l znqdL~!@y}#+RY`kr3GnFoUbZHIu%xcDxahpde8{H3$(Z(TK+c70Klic19NHGWd0(+ zV|XJP*eyCTe~y<3D+>=CPMAFS^1o{pHI@aTjIkC{g;)X{IHvH zAZ4%H0{Cbc_Zd z3{x(KBEF|_Kzht|`>(Q)A7Os)UzlAswU-n+vK>;F4cb@KXjH)G^I4#MY#|7E@^o3v z=wAk+KoFg9XYn-Vi5u67ND(Xja)H$JlDnHT}K~SqYZf`udpg-y*0m8lOmJ0MaHi7S#@Rnr>7foyYD* zHTGqGF78UvUxUEP@9%;@*9?uj#j7!@mxrQ*gX&w^*=Eb%8y?Oxf_UT`&Sf+mdnO4E z<3Jelu=Le8@WVUD7&`b`yd8bh4}@HT@M7>?t?QIX0rSI&lozZ zuu(}!{i~etiBDK#jjm2v*}vina*nW0XckQKNZ1ylLHeo}*j=t5_&&bt68;6`s}J@p z)xQy*>ce#m|3=Lv)wCpb1tc-m+(&+X^*1_7(vn^Lff$K)96`~fxD|)SDhj#sSp&cS zvuGi%n?wxor`uCaJJC;F7=vz8Mo`s?bdb6d{)^|B)j=3xnp2P3O1eg?0D%sFn#3TK zQXdj=252!)cfN599(H;QTHPGS&=9Z=|9e-0tz?fSab?blFPO@vq;=mQ%NWo$DQy(e9G@IbCJeM2mi><8tHYhml z`KjN zfq3Z~9B?B07o{qRRAw9dS<)!FlOE4O_UPtxZ>&8Sve}PhyA#a4!GK_Vi$qM#Z#<{6%~yAFg^4(Gypz2oMGWb=`D*3eg6ex3Q~fL6w+ZvudkEKo(I+Jh@h@KzWc?3rA*N2;cW$a44SuUcMsY}g z?XNlM-))+U_y6^%AD(*UZ(>xT${07pF!2>g)mp>-OLtejc85>ldA^k5!@X#37&+oFGtg-*m_x9L1k9dzkGjx*Cq(5FE@HsCL{bSjxqzl0b2m4j@Z# zjV?}IkL(&I&+Y8#nGun1Z+TUHUL%bg71vp^1C;{-M{ANz8=1=WF* z5yoU!2tf&h%e$CRS5K6c@G;`Y~IpY+c;Eh&z zdb12(O)Wn$`ztZ!cm5&J$=)R5lWgK=OmmOaeM9t*xb4y9gn$(Z*tZ4a_+4YIXIosA4%fQKa4cLll8SDqF&b$Xi!r%Fst{Y|R 
z#?Er)7Bu< zAYBe`tFB{?cmc*#FaUs>aI=COo zf;suy)6ODovey(GW8r4MJu$N;EgF)2=WE3fEL%B6N{e8}v(;RC3(waKwOY_>RH?pe z8Ds_-KOP%?9(U}T_P#NCxwY)_R@JXdBB3#vyr_(MONjE8O`u%^)-&xwCGwz~jn8QR zPtELY_1|_{S$ggUPdc2M4quOx?tK4zJn7B0U5SB9&E*L$U#*iEE*U@uRO_HvZ)&y;Q zIxwI0D%5pnu@81JK>Z(*DkA~@M9kx-O&eSGf=uDhfs$>s*3-hd5tWmlU1G(qyYdeQ zAr&4$o8Z{w5{A7w^|(2vt|ov?rd^{s_zv&PAA1rfS^#jEG0?O%aKwXb)%u{FyMWs7 z7Ts?h8SllDX(Z6uw{my*rnlSvk{E==I(~v4)*X|l$G(Dz`Z9Z5-E4^KL|qt30Cde> zh3HE183w(_|FqXFar*LiLzcLHPUMX=hmK+O1Vbdp=qlw8zfTFt@JG4RFr%CK?*hKC z7ck3=qu1mt491}Eeq*^G`Vd6id^)CW2(*m3u6*J`Pd+$wm7HUir80g`8Cm=zU32q{ zBqyv4ee6}C0pmWA#2j`w8zN%#D_^T3a3J63ID`_o>cip60}%vTED zTfFlMIX`R1g<#R%AJl?$0z3RxGD*_qP0&n_tRq&~} zZT{H6-3nZV^@p%rsdmx1j+O|dg*LGcR`!p|gS6{9slwp*@B8|niC<90K8#H392=Gh zE<+D5K~6pAIls4Mtx01vJ8~)tFv@$MQC!xf0tb0C&lw&{7ir(6Eol%2#&#-{_j5b| z{%FnJO^iXE6Cn4Ak>V~mi&T5WEb#CuyAFbH$cDAD{0!MDsu_tM;@m}3tA&0m$*?Oi zs*|<12p5TMd(JuBzMDHN`1s%%A-xCztHYBs)QUh=_<*C7o zk<7H#!Rr2MUNLgL{Srdu4cFk59BB#fTy9n?4pfOY?B`+SMWl#$W2kMG0gKKyZ2pMXb03@ICNX&FNSd)c`I8`OWb zrOc)Mq9VVZU=mlY?kvyHjHZr{JSfPjK@CN^&kJVw#k%fN_4xfecxwdt1B7sWNvtrl zaqJY$cbp#rxs2amJmn-k1R~J1Q`OWOAaJCxl z@9%S448+WZ(=v+5;amdnbeY>UyZ=BfPhe@8v(pRDcrR#TW}(Zpaj<5%DYK;f(>r-z z#i=RooLE*W^^B|fY+4e{Qd}w@?Bl8jrCoQy9{ddb;Gx!Y*{<)CItPg-`{w5Zg8msmYSLtm= z2_g#wB}(Q*%s5oDoXu|3k-cWRpOaXYcsBz019e7EcaoZ`C=P5ZFh*km0W-ewWSc9k z1g4{w&HwUBHm}qtlxqEyI7Nh$-GyxBrUnVrct9aiG$@K6-Zm5CbOON1l+`y#9k?3& z5Q4KSa%ok35sa~Q?l^hXtbe!@DjgEzXS-mnv1h^^WX9&WZb0ie)me)PhGnxH8uwY@b+owVx+oyl z@Av#X2kEL78rg_!`S*)x)heaZqw@(ZQT_qQ8fhzfitcK zUv2%-Q zXL0@h@GWo*J<|1(P*evSXcx)aywTVAFL&#vO!%Q4psSYN$HXReW}H?({gC4!JsJvP zSWuz8C4x-Ng4|#gznd0Ze1=lub1pKm&_yg=+ucs$(U5etHv-Xzn1oMLedc@r%Qr0Q zfkvN($G0`zwx zB{A!u_*x+-=)aUpmVy%3-~M9Y9L=@%0z9Xs6~KqZli?~oBc8T>TyWRe8BAgdOYc3- zwYCGT(p)ckJZm}o@W(|KaU|LTRgO^mT~Onn=IOK?7n)I&Y+5)&0DoFGc%h0S?_=du zA5SzIhX>yE5?j?2cnxBN;diM#+wru58Q#b2qi1M}9^E>Y4t8w%tO^krT^@@Oub<>J zC^pwn?WH+j{oY)hqfO;xO=lm(3{b)TGDpJz@vmtq1NfN1 zD6?}ChJOmj!4y_=8=&Ja8@LNj+x*#`Xxsb9)&56nP*a@wELl^daL=cid8Epiw#~rD 
zwPilh$M>dB`73%F;7>flV(ubOR%!&Oag%yx>Nsq^ZBWd-P6+AbEe8A*f6t`qk{(?L z^$ri+Jvd@rwIvUsX(Ez#51D5HZpI*pUf&-8pNY@vy%IGHvzyprYC)@WB2ErlS;ZyI z;8M|l`V-H?Ja!zx&{m54325YKs@{*IDCNf@*4C{5$J{x!SJpIPIJPmdH51#mZB957 z+qP}nwvCBx+qUz)2Rr}aJL}&k~i(IT5R!5yH&~aIxBcZa>y?nR@na{rG9NpQmMa`BZ~F z->>wY2T(L*%mEv}_jTT*efpgKIH&Wpmtb3H3v1x}W58PAQym?S)PddX+;@7SW#D_`5DQXz)rd{6T=^kg-BMym_AU zz_9Y;>e^FhQx(BUR#%60OO+2%fhmO`taU?cMU?g76Vnt`>6dCEvR4dO%vf#Jcc0y^ z>+24h|6is~+62)wymHTM*6o;!I=n5Sm>nf^(0 z*A6JLS`6KcJ1<<=fOw&Ou*Od*WNn(ddMVBQ5XkPrSm(}xaq$_BSU)7GUA-ZmkFE-T z&Zy%A&K!0$3^O}$rYdGK^udGyt3c1{ON3JZ#0xJ>k(A&y6+`siAfz+{H%EYnJFbG`2d#$-OMFWd8!VkqA9Gij*uhDi9s z_AN_j4IE%sHf!9$eMC)0g^Q_v*(SL)eNZ6e+>SAGq1m&XndB+k>S+b9|e?0Pp; z?7hHZbpkS3N^_6(m4+*`S7X)VcYUU%ky|Kk`ovbjAZhp+4ZELKvzudue6WNnIDa*L zfQ4JEVofAZ2ztFE4Tm%3#~bI^>-7+;R8YMhlg>qQfnVPUKo9+4*kQYpnH~LMCkBn^ zKs`I{)bH17@Va~+ynctERIkmJtGkx%s4#S4RsGAvxJv_v&eUS z9lOX}h{A23Xe5fszH7dX`!9YCn>0#zNRmrwI98Pz)--cI54nmY_>P^7${U2F2jU>( zrJ!AMHVhTBYLT6t;qQ5I=$iD&6o#@o^`Q3AM0n@_Cev%0Hp0G6uYwnayy5IPJTMb0 z3@q?&)9tt;LKV1G9xllH1zO(P96;}LObqy*ssVAQ<&?ID7ndFsYF%{+L)8=tuSAP= zaVG`75@sefFnna)h&9Cwg*TakoroG?O4L!gJ21w$>HCDk(CVqB^^)lF|K5kddp`?x z{crgXyVMlgZB_KkV40X)5$T`~8 zo{EsKti|G!!Qz7kPJU1#>q}{fy`oEa*!#c$EFnf9=MNvY$1)v_+;5J6^gr86t@U0+ z7oR(jaVOYxe?VI2*l9e=4I$bHH5oc0^l%gp@e$1*;|Isb-~d7M(>4HbzPvGIH&E!} z!m`WglG4|8d4`pTpE3KI4QT#rdhCAZ>0{>s*N^k*bnTmF=xVTiW|oA#Q$?<`G{bL> zL<5It{LV+=ja-__BK9Jl54jD~I@D)N<~6|(fCko;LCFg#h{DV4VkV%82jFu{iJ zUv%2ZV9Rr0Upm8X<;|ctB$yH80K`kgM0#w~EB?0X?k|qzY9#*AsfhOISP9ww&-~h` z0Z3~M4ymh(Kb&H$P!WW^r?R^24{_%>EPqDX9y~CfUrLxY0UUFp{8l$hZ%=Q_IfQc9 z%7O{erujR_zy*G`0*icToKKBpdrYoJInQ9d{r1p1{no}5LC9gBO{VSYTKZIpNa*kLsX>;A`udq@FjQR{y-DjmlTgPl-}6_GH#EGWO_zp#x0FFu z_?Oq3SG@nzAWYur#L_pe5*6NbT6uoi?0Sn7-%AIYJ#5*_ZkA5ITs$r5MlWavHCC1U z&L?mz_mJY)=r4}3_an~$F`A`2-LWyY{g8*kL(B;_<+!hs*r0@XDgQCQb_aH>w2l!Y zTA-|?x0975?Um={U0nA4Uf@RexCs`$5o9c%HQow5At<6vA*31zyP%7XmQn~N%dcx< zPM$({_@uIa8D!=2Uwa)8qtKlcZ(W~@C2Jjc!?rviUR)NL^S_a$tuSbgJdB|GkaVUU zl^F|R;a&z7Hx2)}i~{Gpx1j>!Rg(*% 
zN{1S0k_+N?mF*Ul8FnY$Zc!6!qTnax1`6@o$){Om$VS%}fvqpTquS{u@xEr8n?!N&KjnBZXB72Jk5}pTXnSZrpFKMcuPKfr9fPc8^#QY|p_ceVK z8owSO(JNsI`K!eR#kE|A0E={e4cr0!5bTA5}?$R@_TkQPq7wSU~sa(`B>% z`nyjq#Kca+$cCi7ltiDvsfYkO-$O4o?oEH;v1%#+QKGYYB8IQ64$u>7gkaNHnpHjA z{aCjqHwQi+4v|3COT%USQm?>9_P0^dVzx@@;KW0$3Mc^2M|i zd~Hhzz)3LLDY5&UN;xn?Oud?bPpvq_bdGlXq|-EON$~#sf;yMDC1sB~|MgBwYlA<9 zj@)7BQjyX+Cx{X&S|XLPpIHy!V2dJp&kOXDD4aTCH=YqM-pWFyq*vhm3G?iV&g?;q zM>xuj)^)sUxrU1i%|AB&Bm=V!&I(9B+WFBVx>aC~`<+(@Yt9U!v$#GMEZD<|`XoaM zQ|7q&2u9eK`S8Rh{QFhS4a8cGOB+S(pA&UxC49CvB!uOx4&*hle-VMw~Ix{cl zf*x{GZDPTY-pQXW0FL8N?7bs#S3Dw=Ta`>@UHUJM?z2HU%j(bBPsCxA_EU;kM)zr4 z!D^45u2K+V;^(L9EVK#@C5VF#Tsc`6-K_690|Kh4$BA1~hYP$uveFema^@Hc=I8Jg zU{3pd)J77!mXY`$@T9lKPdynn!b?B4S0kFXqqykG)(cn7VN-CGs^S2Cev~l~*$cN+ zA4n75B+H8!6qdpailty6k~6kaZT_mjIl#mWxP;Jll^v8Tddl@ZO%=Fs=V}8Hl+?EW zY`#1?{+{cYP@|rq@SBl4F?WL@f{17?&eO&UEB}dV7CU-*jr(LGZ@7B$zO~5*Japw^ z>2Wi%&6;JBBQP_SN80~wD+)~Q`+d2u|4~`5bhzQtwY?$Gc+AlN_J-5aWuttMo2X`rn3eB3 zG(gz?Zx-l;QJ%#n5Fb)st{=T<7;};+Fm{4k#-rwj|BbbaWDn6_Z|>0mIKzjbA`WdHR%O>n4unl8nZTBJ>^~c|1+7 zA4m#fyFk#tnv`hBOZp(r(w9|?niRgJaQ9JbZ6L2x1-}d6tXeSk_)<k2n&eU zEi!#u{0CTAke5YutJ*&_rmk~WNTgs1x#B>5V&1!pzf3vv{`QqDj2~8$S>&2$O5M?f z^?KaHK&@0Zb+Di;n*a_Z_^#xKLuO%ws98x{<+P}+&^5eyKUfb_5bDs%=v@v^P~&AU z6VYcrHdui8`U3*yJtT$%0$)eWmdD1~i8HGUfTM?Oz-gc=Dx&ck=bN(~D3R^9O?fW6 zSPner7i%ebvTgoPsF8|-UeO&@OKbA6wAt>bJMTG9j7X^IFJTPi!%AF$f09$jdCx~O zeYAhSL^O_S0WHenlp(mfycdOd|Ftaiu}lHYC^v>SYR1G}{luMl)4z2__D8Kl+=7|T zL;lp-;gA4u+*BY3CR=B~L5UB)!0(F@$+=&>XvqG6?EXzC+z8VamRQA`U} zzDpwFoc>KQv8;}qaa#^?e4n?1EeznKG{2Zhp`<^cQ%@MC86bBe#Q3iO*Sf)^JJ^vA zn9^ys_?UomHUwQLDH~;ecK!4|-yUUUq?xU^MH4syJAfJa?t>=DMYKvsO?Q197=#E6 z?qnG%RM@H0_bt-S`rIQ?4_&oFxBc%Digv6*`pGfD)AHXh2{r)=h&miK&g-+Cyk#ja z06lmzb+nJ3P5;e2b1S0AZ6LFYT%vNSE|bfIRwOS9@TF22byc9n{sU2U5=HJQEPBpI zr|d5AAuZY9Gxy^StDxiieJ8y@C@bCv4^v1#YKoG$xc;y=_y#}m!%HYrnuV{nv@l@x zRj*0={?hf8v23S<4?kcXNw}T>V`vJX2fv{A_Ifx6oJGAsj9WFI?+_Fo?tQf&M;@&p_Mh(D8%RF?`QzWw@!(3 z+_~6vCZp+<-Q@(Qx>32$EfL(LCR&yr*w~ji?{*;Tlv!|+2DdMWvm~x_XKb9w9-lYS 
zae}AawTH~g7_bl5Rr-+xC~uS7RDHV9$q-e3*k60)+i{cSYt#fTL=Yh6&+iwD(+WjS zxlp^~rQA&@AuVmGMz5eR(5EICjp>ejKc9K^J*6hNb{D!PtscdnL4){3@+fYqrgp2m zvS`!WwC3hn~%C`p~&||3d9Aokg;CY2vRJb?Tnj(7vc0H!G5`OqkfLHm7 zCQCU^bSmBbv=LL1(1vlitxFe}sU317iRPhl9iBs>-eef*k`dWq2E)vr{{5E6F7m(^ zf&HET89QtLh5W}bXxvG?G7AIojae09;L|I$@z zzv*7p@`ytYBP1pbn8U8Wiaj4M$`|1}bgSuE3@8485MWobKR8ae^;*LvSXhhw4VaOa z!iidooFQE6MF_u`gkn`v4m^K<3Y<|naN8~^iwM-X?ca4i(07LYiBtKY>sq1j@23}Z z$kf5e9sg|f+4?COlQ_~Q@E|9CH$`q&PYF-H<9s{`Wo2m1QDt_P; z*CUz;B4sAn0-=_S#wnXCkm_>UZMI)a$!3uLZfcfgpJEy0UB6@avcBh84dqa5fT)1j zMnsa&v`G@I3Dd9O`#)dW!ELz7PiAwSj!s}emMv6FC%`Th1ofoq^JA0#m$s8?ctwRck4CjDF!V=YalxEr zyFvA4$rEmA@gg8;-m5g5tlh{43O^Ns^onC~Fp>T4d6t82U7>hqvZi~Sr17Sy0iVf7f zJUX64;13>#fKH-1knjJCS_1A2A3Yvdce2Q))vOlvYx8xY7hd*3N*y5}nF`L~#KNjZ zoOkVd!$5xx+C=;!l|N&h9yxlRg7{*Bd-!~ zYcQJy{37<8ypwQ?_97Y*V&#t(5DQll@K*qbr7K-ag~b??nt66_o5J((>e4a!?xqi7 zW7g&9WHkjZC@O--bXRhk`|IDbGfu{U*Wa715r+XLY#@1i)nrhq1pp_**yhu-2AFfE z5%`?Ut^N2~i__M#=0z|D4W4ZZoggDTR68_K$Y+*7H33*jC&sSvIKprH<a6nj|Db^!-*1tBa$cUT2z1O22=s{pUC7KuMhHjjHHw+AKP+0jX^I@-QDtnqn@Fex zn*3+;rm-Nks7`RoTt7`7#U9oGdd%fmb9WLGejkt>We(*g^@d_$gR}QZNAI6~JPz=zs z7+|xgWpJUSLq`IixR57i!x;2TZ^Nmvt3aPx-Wbd~6s$BFgmwUexQSWv?;6F)n}M$3 z*PmI&`@Z~Ey4q6l4FHGg2xdGLsrENRp!%G>>y!d-G%&$sUX_8`e;Klze^ryI;9H85 z9(txgd^nUMWIu0-HxmqL?`R?rooi`$p(Zgy06&Omx*VzQ=TKLwrSv8~-*;krZg!a8 zub20p+1-|2yk$!=FblJFJjf|DF1|9gF56<0W?K}7r_xs=S6*s#z=TiVd2C?jlASM$ zFA>VDV&x0jboa0Zu6ReZ^yDEg8N&oJUDU3z^@WDw@E$uuzkcJLcb83SB!+#vo7yCy zHeTlFk0w}vo_ylRhEj#Or$ppx#eaH4KK9%Q2v~aFNw7&i*?LdlvdC6vQGp2kPE8fX zs_5_(*)~sKMi@J+aY&~Q`)tm`K2rey=t?hunx72J6~KxZ6Wq}z^`I=sG*vlDI+F8{ zl#$3t^Zil*^5~fo!~mrv9sK>(&PXk%J+eT-*8*I2Y2f0Y|Ida0lmaG6EG zo66Z@JqSa)M0>9L10Q}|%davKCHFS0$h~ZCl;G;$b3Y6HuYwdA~k+UBg`8f?S zRz3G~TqEV~Ks_II>0X7Tw^eVZ-NG*Zp}%LwSKkKUpX49zj{>9Drc1{hA3*6^YY-^t}sGZrIGH~+NAth z>{2QM$~)wTDF!iAF!Z$`zT3)j%7XQ*VZ;=Jp4UGKf_?LJfj#9IWe zu5Y_OLO>A6y(bh;$moKXSOzR*RE>a(yF^|mvXL^+{)leC&H2?icaq?HAn3a|V7J%n 
ztzL|8?M2-SQz!h8Ebu242iVo0sdMlJfpInrJ0(Vy5RGzii924BpktVfJ3^XTa_LOBSjAriG3fm?W-n% zh{c~tXz`HvK~_3p+KtCJgr3LXrYQhUXmULNGVq7s2N?&7V^MI@*MfB=Gs6xb?w>IM zsI82XzrnQr&Pq-sG87@jlIYg6BK(1Nm=DfCF-+yNTz|C@Nv#=W07^JqjRN_5=N644 zB|r3OGO$YZp*;-H)3eQEBTK)rFfHu2rSQ0YG-(P>%AL?XXs|>2v*ZFO_eEfpr>VZV zxl~0Ne+B+U+;AVyof6+K;zW2;_RqD_;rG22KNtg1tfPTCR0hPU_Ni~AU`ngsX4Aiv z2@nGW4v$XsRFrp&GsbAZ?)ufF$M%+|l6QOFVdB)Jr{h;HDn}+7GQRbI)3S5#TA5E_ z`?d7f-D8wwtu+?9DTwbYGxRAESd?Vq$sj7x0b#I93W%hYsy2^GU%zcys%L3ZcML0=0xYh3xcx0^x);@=xN0=l-kkzoa!g1%8!a zu#T5|*d`$C^CG2RC;!*f@8SsXvxVWo4i-TN5Jwk`Yi`{RZ>oHJlj;PIAb2Kbi&rf{ z;tI>GzrJ7)q0fAIsxu4(-o7(h@Cy(ZheBvl+BJ?N&mMpSkoAAFtVQJ%nY{PG3YyOQ z?yvdFVYLX$_NKFK^lG~_VrTPP=hKkzuUb~{1$49`uXi{2JFw%Aq@fv#SSJ}g3>g3s zkpt`+SP@Ue#{x17G_YOiFhTUuNZCNo2an*S_@P$uFkx98Lum$t;jn$GGSdA1_O~9n zdAEEZT3<}}??$WUuy8b^4uC^nsSr>J+2dB+1P-l{SyQ>TvdqimSD#c?CW#hPp6l+k z(^24gGtWc+W3o!#hF zPKGGZKRkc4}w8o*ywK zvuMgK!ha=?g#9sx3hRTxY3HQZ^A}{tSv)d{K|!_;qCUV2(+WI~iCl#dYZ!&0S%sHP zI-Pz_cUDM^4DAJ=2T#8Y8+c8es`Q!)kLS}y2E|xeC^pO3_iCc}uUwGmbDT<8N)4-v zoUtKSg3kj@)cmcM`ZGGyN|#A(wD~ib<#*o3+WD~1a{UBuN$IqwROIXRh9VcY}EY*UG!%Km+_tcW%emd54Xk zhjJ6Cd7B8EYt=eXmBMR1bh~kPju5KF57+SD?Gz^sb-#98$ggmjHtik?-YwX+T!jTH z%|uhVo5XM!A-T32MR9(iNjyJSROO}<8*;l3F!zcW24|?Ev$)ey!7wb8 zuOWy^Nwz7?wD}+J5I&2NI)_DdD6`{LcSN*^>)88$ob0~qa%FrUO@R93+N-*!nLm4D?5oC_bC`<27|LTx176o9sSFTB6&LilnFT8#G zXR{EhI&UNAaO8bi=0_2x&npENdQ!4q;4o0&hJH{;v)Zp(AyI2+I^k4#h@){mHw`Zp z_hbVHlOOkjZKzSze*v6d?=?G><_DqGm!BcR(j_?~=8#?3ca@yk?$e9MyJeAkjF{ph zAR}2YMs1h#*AyS@-KowieVA5Fb_>jW^mB~g^;*syvY9j7BT9hK(>)AW?9k(FDez`K9#RiVlJF`M3tVUIfl?8HLFB>5}ABHyo_F>;?(SkcQ z8LRaA!Kg$L=5CgsM4bc|qN$OCrj^>Fy7;zp6;lzllvQOAdT`tF)K7L9p2d;!)iR&? 
ziLeu(99kg&hgdhJ?2-(;osU%{$Ldezq9td0CBja5P6eNiMkAP^DO3Qmbf?59uO{!0J{BZH}#Ay|8gI}gU=>8pPm3JACFv<-cyMgE4b zIRZ=mC6%PYTGsXmpHBsmK|5%BgcZ8rCQrbw;i0@PV&PTlyKmxe#8}yE8bO}JdeZ1V zOIs2Ijwm=P$MtNtCp~Ue$@NLd7Z7>`wY;scH_yI>j?QCV%Q*XvecxL5`o|$hjeyVh z-1xH4w?Dkf>5x1gxL*ah)UR-!Jq&wm5O{q1WW0uw>2ae=jaml#hzjV@XBt=tw8W}+ zow4X)5y+k@8*)5@H22?m5*4W>jTIe5w1kiq87GC5h3S3$aBl59M3p3<0S8|00!(hZ zC}Qc;S@q{^&ot#aoS(fwSK-`iS`I064vWc-k>CBoFW3V5U=kWcKigSw1ebx{O6Vy9 z!6tGqKx+SG5d~JX&OSp zj6Uo}KWwaduaj!VaXpm0#=Gs9I*l>c8j#LTgi#1<0ns>?v)zkL=>2$t9i@6hU$3Y> zp?aIPfV{@HU6)wCq&?Jwk^|S&_PcbwrUPyWs+1m!vbpBWu3p)upSsjeoFg#$Mao7e`M{SI+_T;K%0nN%1FT`jfL0r9p8P#PjUNBD>rDI1!l7`M8If(Gk7v+@t!RZdbIsrIKFu9tGa|l2B3WYpd zo4va}$Hkj5$vLtR%GXI){}6sbi%)NnP{VLkoW*RH^JrCw|PK#hms41Wa@sc^P4rzSwIHQw=c5WrU|`=r>d>s#BhVop~WMZP6Z>!wf(Do zuXNfH;6ty(b*3iOqM-infB(niL~BT|DWl9>XFR35oYFl6+B`^AZ4FnWc6_YfU(Ppf z(@{t~)t@*30{90vU3Ol>3CZX#AxYz+es}9CJ6?tPM>w-vI2~+It=?IqDVpX~*DRK1 z^r`ua_YmpH2dPF08$Wgx1IXuO8p6gkK+oavBwHQ?pH`8Nq}PzdI5%!sJ@`O|-SLgR z5CI1XYT>ngalNQt1zzn!Ja9?_X$p+;+JM_1-lpDSBt35>hl~UOXRiE2`d{4`5eGM< zh`1ZNHv;=Eyq$MQM{<6WHEyNWudmg|=>7Ei^t}|YeMpjO6dFIE~jWJpbpGbPr-q}zY{JM>*gbPWnoT^zxchwXu6)<9e@j4Q26N4=bf?VL!; zx4n+$t}%C|HWOQojugme@;iQ?pLal9^$k+>&r|!!_jS{ zsSTDsw@Z0@pR^7z8gsfvntHk#s+dfTJ=wZ}M^`?B?mPi{Mj<~;($83tVj8^7d#MX8 zZ2Gq6c#~-rkZD%zyM0naPIZ9__NN-yQH9PT#^YX^NIa@7Bk3xp$KomXFuJF3`@T65 z@^m+gEAG{Hni{sYG{^Qo6_TBPO_OzG-$jIP_Fmnyp`>{?ACl>Wugo9Wb7g-*j-68# zee6uhD;a=t`L3Y>IGjwqOqH5{sD@yl_H=X%wYK6p#tDAltdEWCKyq6rar&H3WHU*%QoQPK$DT@-@=ihPD?U@8jL+-CK>tDy z(B022S@T6=oGGIBFTmAIT5`B7H_omEw$|Gd5nwkCdD|wBgxw$Te9vLX^pP_fX}NlP z6>FBSd_a{S$WhMOiR3QFyMh{WRrL59MzCcL%`g2?T2NqDUOFn_0qcr_nRwT(!OYf_ zs-d*)2GGM=L-+aLnLZDOpdNENt*q|~YKKb0s<<(HleZ1!ziJ}dfZxX{6~&fM&e=(I z&;@JPv{Wzdi|`UwZiM8U9{#UFfZx}jewUUei@;O3Lpezz(ZN!7qPud_cPykhXnTkW z3n>dNHUR`nYzFN7I02Ya{mX${0Z6@!tbF)(w$V^ZoeCsTnazmh# zz1d)b^AEkZ#EQdNvrta#`iARwtz#Sc8w)PY6@Xp$uN18_(KP|YZ12kCRaPWD7XJ6$ ztsdBs@-6V7NZ)P9E`=3XE%6JxvlWOwjaKaqGTP8_*Y897GGuE(@b 
zFg9l4zj79G)@a>~{v-kA_>1?Kg?Iz>^e$ZF=7550>0^5>Q8qN?$B0$bM5}bX<$R`4 z>*GLM3F|Jq);esQpt+AFg`N!ohjhBaw80Sb`IQ?R?uJ0uQv>3aE1x-2MoCfD3^7{^ zJ7dc-ui4y>^s#~(P;IFU+qkuCZco6;>E1jG$H4d3M9q%Lx28Wke2zlDwfWry!qnruJJ>9&=t z$5E{xc~(*jZj59Qi9h}-K+pf%vW*tMB}F3_Qvs861Rq$n3i?K??vF&`I(z7S?%#Lg zQ{s?`mBgV%a|9{Rkq(QZ$9rurX3sWDAJpW+Wgpjj|2<#yS|R+^@(c9lx^tIb;mxhe zR)EYOft?;X5k7aK1(@X+ID%6S-&qaG)W)a63`;SZh~1#DtOlC(rZ^`7!j&xI`#aHN z0LYl5w(zcY)7&-Dg^3;K3|xlc+f$L|_9q*Q*V5mp2%C+>QzAUH$hP<+VZDE)(t9zM z!L{La5eUuuM67Nt00$G^Pw%LYG$V2luf5)Ov@laY{V@v%1;Q+~$?i2H*(*`{2h}RQ z!c%N5_*#^hsk*eQ`3vSTpji_YkxxBomTLX`{2VzknN)Ct$65rBfI>x6DEi+G2$*pU z=c~5Bj-M~POGj}bhDRkc>|_qZQZT5y@s>Hf5XV7e3*~l2B;!{F@IR~p`}sI^zkS4~ zfsebgplPQgOr-NS%+DJQy1?D?`Yecb#SL*o2;@NJfsnVqc`Ag=#|S;YB#}Q}mTkEr zdLpBk25mY39DL5->PZ}=qLD60h7!u%k0Tkb(m{m27=Er9e24QngrmOj@#HYfsD+4% z`pSRA>uSb!_r`q^5F{7yhI*MEk)r_qv3I+I%tBCLUMmGcNGzfyb)L5@Bo|vRC{0HE8wg(XPR>Mt#G6h6P zs57CrHQXPzW1eHaSchE?6*H%mZ1!}w2Y-6kLB{`0%m8) z{T|mLo1tQW|yVOHQPOF|6l&LQNINmR=$F^4VKuteR`bwiJ`x< z=y-kNo~?q9uQ~zj1Zqp){W~E{{J5^$FX5wE)?msEX+)A%!;@S`QJfUC1V3d_7+_Qw zJASaWTALBEABIV?LO^m#@h}=&9FNbxS$s$YkidQS!%Bped2fQCfWA3qs@2&vCkL%; z7xD_+YbRO8kAJ9gotUW&*3SBz?>}%sujbi>cB6lEb#$g^!qH}0BN+0g=Q04|h0sa$ zP*_;;EHDjiNoC@dnN~SiFLv;^iR(wbKw(e zx00Fc54J~e!1r9)sr@j{R-ZMYs3Z31s!0%_ziV@XJNi4o zS*loQHz)>DeJ8w60#ce?n>caxUinXXxD^nu@Bv^y&0%}Qy{Zi9Fgp1wasFk2B-5uz zKQ6>Jsr={oW&Dyw3)414;a>(x48w=8FNw-fwn8{@MOhM*_&)chG z990*?0R91+zO^KCV9WXPP!HE)1j=-J@pu+!F=<$%)q9d*E5Eo%?nTi%&x#6-?+9Hf zER#>D<#M7NzLmV|9cG5QsBH@Z*k!VVIj%uS?YxzJh6GI8b2C~Rpm{haR-jH#RU0|D zV_|}@tPu3#%}N)j{z$6Pa34Z;sk8r!wsZ2U^lKP`kH8I}=gR0*2oz%q5^o`GSFd_~ zrpR3Va}4dDd77|M;eg6~V(Y)>{2^#_m(jHBG-k4&<|zFFTXs#8A%g(U zmT?J?Nh0n&5-DrkTj^=kU{RK7nGZDy=YcdW4QXYNIT!vQUy97b)C7%H3*D98R^#M6{Hw_P^yO!X=FME!wJe4v zLjj$-s(J|(r93UOT}XX_rzT>ifFxVHhdWE-(Fg4P7kh z0RNP_nO0dbv2PDkp_tRQh@hjEYm4Uno1*Nrfyh$+ZOIXqel`DNKh>tWR`{mqUZgmi z3fi}SRpXenEpjx7qWj(gpeHjn3eT;8Zyru2HHCOv+$8~DJA61>B_vqm5J6G{X_+P0 zDZ$;pb;!}v#Dzkp28shh$Jfe-g(pztG$VZ#bmBV8eg~?-L!Q61G_&cmUM;7drs?JB2OQpp0@bC*yGZI 
z`A7XKVqhK2_q<$ytpA$@23XQcCR5rLe4{<(j3_8e9C7FhoaydPu^$3bnvz4-;(a}V z(;ZPqeVUmRLa6^IkVc3=yL0|p$d-L24b(gWCBQ#tTdVKt$)P$7M5~6Q zKo~HKSrIrt`Je@`ZGQiBHyLt-fY~(46B=fp35`$48QvYj9LD8rCYE#j1aMHH;UtrB z{nT$0o#`QLY?_UR2v0j69)uGHbTj%j`TDRws(t$9c#nrDk>q7U;=EFj`}14CNe%gW zxJ4MA6S1-Zo)v>SAi{ve@>#gLi(N64a*c4-E>(+PJn2W1oUV!b)!9+6EsYMI%dh>~ z_aeMv{VTno^~&}0;1hbSRI7gJ7-b2dheu=4H4YRRBV=!8(JCm!GY%ndM6Ya{Lb}7Z zS>>K~IH2?-cRDPDm8qUK2@Dgj(2OY5ShCxQ<74EP5PuF?nR>PJ_| z7=T?v%`MxZK!orXMg7|jlpe2B2YaG5I-+6*?QDU*8#7eQdB(OVHjxf&{UHzCf{J=zRipU<&?OS=|ki4^*-Pk=H*FpA_;dpX==N3M4euTpgK zSY0>V&P@BGs~tlo$LArUV)%1`yt{m`ldjeZ2Xa=Hxy42C$t2D%RI$Rp>iv5je&d+p z5NT!;YN-+k%3%^68$g)c3D3U5m|`SJXG4+gT;CbQEVgD%&j0@vr#|n>PEo zLA&s{AOvq^>?TfP>F2&$m`8Nv1dbkv4+~VkE;uc#L(fd{KLz_x-`@-3XW(qM`aU6w z41WS$Ry$w!^;}zgBtSJO3^qSc<4*r`W8&`LcUW_~a|7f~oEF%>1g= ztyu8+0f-l-eNZg!Jv_of7YbHY?bu$Yv2yRpq$SXxC4y@RAh=wS7nW9TRFJdZFZ~=hk#nOiC|s^=$Ubfr+>)n=$j_-6se&e z4gM$hQo-{u?%A1gY+JG^3gPA3Iefxk;YUDws;%a^+VME3n1 zje5col=ruO3eG;E;ZLz@@!BM+4ahfk-!FC`cT(9}I;tk8ahJtx4-wzCm;nw6 z$Jl3K=YK3~_Z_+NP=)|K3w<~j=X06P4$3+I|DfRF;ViiXG#NN((ZF4?*k>5{Hlwz z6_K)E#oW!l9tFbMN3&!Pxg+{O3Ys>|xQxzk0?U6?_!wtU@BS`Yo+Flh05}CTr?~VZ zd|<>BziHqRo_d1Cg{-b)ZpIdpETwWCQw;GlE15u99TecL{Xd9JBbBTS17rx_U2~)S=bS;ps$5|m23YgtCE0yxSxuU=;zA}W-If1yUyjWAt0rs zI{?Q{%<$_>tmaN@jZSxKX+Vvxn6#v!BnRoWL(prv$^z3sKV)g1cIWG^#+Ss0MxOes z<+q8$eABidZQS916YtNJzT>4+ESR;~6=!*fnLTQR45^%Nb$_8;9zgbj)n077iDZv_ z?t}bW&<%?*h8;L$=|*2Iz^TqDgpx}skv=I<=Hvjtc{yji%^|kj%a8SA62i|5;CJVO zOCna0bkjt%jSPkQ#PZHmOUwb{nq$(~YHhc^JEvR)r-YbiBBmXhiRA>gxCC%Q4H-Ow z^GKI*A`eeSNyoX>@xrpE#-vbuhEC=1;xv_Zx%|zRpi`-@*%2d?tSua`uUz~OI33sv zYUy?`JN_hp&!NI$s^%>B?0%|f2n-RmEl;oj@UsOfB&dR=GG|3cuJK5aum_MMDz7ZOx3o!pP&L^RjaCuI zMNUck{zTJSUu6pgh*%M{^bf1GkkwR5={HL-W@PhB*vS5FvA+D= zWy205Gb1RazB{mZzaoR+33n-TBi62R&Kfb=v2iA}s6;0mzzK0tT?4_{Dwpg;)11-v zyr<~3YW&M5p0DX@Ec=T%;o7C@kS3i;i}-`m75Uoh^R1*$TqK9V{kE8^b1NU;uR#OA zX$>CEXE%2Zc_o{R8_UFh!Ed2|=YM{=o$OI+ zi;$rOg3QU4C(n;;OU9m}uAK&UZZkRNJ-$f%2vqvr35pz%AcDEEZR 
zLIJN-!(-$J*nq1R3y2sq;|Di761M#Y=z^>{Gj(ZvT0yq=`*T0N`L_k7y~f@hp{zvG>R`sBhFDTWv`KHTe`W|aAMpiIls@Dgf7}l*7Z$HaT-+S z9hX-^DvO(}nMSHy`6(NvgfRl>Rp9rAjlR=RVG-)o(;5U?Hne_UUVpV6A~>#0Z(5t( zzVpVu+6Vz1zoS%*qFsRJwChnYV|Y<==mc$OyCzm9|C`diwM*`VL<#x__ey@&buA;*W`4^(b3?N=MBssS}%a)zG)boTD_XnK8`%o75K=rfCYv!R>yHqA_ z1i+B#G=@h}T*pU3Exrk@h;aDX$MMU06(8&$Mz+#tw_X=AeG!G8>=Ie zGT!Dpy9W)9?f0eD`2UuH?(~Sd5CJ1E``k{TuLf=!s7i4F6hqP;5y1pdmQ*JBMk6DBQV_tyZLFv4{*X9#FP@ZFmPx>r`d6j>9 z#cbu95`c|?$N2r8+v;7@`r^7zzNyL8vz4*`9D7W*KxDRU~e9#OS`leN6G*X_HHK^arc8yV1jkyYP)6+LM1!%?aggD?C||aseFW#(TCjeWY2h zS76wEsqwrhrx0ZyaQ5y47^E<3IIJ<=>vV^m4nJ%A58Z#)9ca*5b%#OfBvqaCXFBJs z=0B~D0Dd-vfcmT(4bk5l(&{{i@C z5Px}QLU=w%m@U1}$n)VKG*RxUs1o9kxb^X8(86p1H22$ zY7SBl3j;h&_&F=xcOBX-23F(zYbU-{8G%58g||0)pX^fc$bHq{D*=9;8*e_Y#wPmp zjYXofh*;~6kx_hNx2AU_lu7Wtlb%6k;R3hsc_8M))JP=U3Ki9D)7|DLRm_I)<)@Gi zU}DgY3*(Sq_?)1TxFzFkw@P3mKgMtN1fB6xXsul5$&@IJvVK0dYOF{D;x%#jK(1?# z#vr^o$fgAEH52bj3B*UYrD;QFjbTMV;IXUD2849G^xtiiUGYI!lwn%P=31^?G@Mf5-BuBpA!rfbF%CzE7i z+qUgYY}>YN+qP}nnb??UV%y(yu=D+f_o&b6s;bqy@7~>Ot@culb!9rBNRMJ@GBr0V zg;l@|FjJFek@!i~67!jD;d|`z&Lce1Jpz6fVewHYkKHs`i2%^m8^5JiNz?`5H8btI zAKui9k+OWvlvplTC$G9(=!R)KzqYaaoLPkkrNo7N9kjHcIZw8M-$7mcLUI8|hJP0j%ms6|EveQLh&YW^3aCrTE!%DNAe>G|^L8zdig^ zily+0Ki;ED;bgxmU$(F7V~_5Z8WDVhuL!%s^7~sC!fZpY;kM9Jlazp+yp6*9^o@bV-WrRD9-bZ>9i`+stip)-%!XyXl!5IDoGIwE?nn zd1z{3!n`dmoXb^pgA+I`IeKPwVm2q%!P7c`sq% z9La0Uh%+fcz5Baf<|ga(v9p^fM6Sd`DLJW{Q{BFy=Us7ifeEsSUc7{aXAWz4`#5Y4 z>2*-N@xO;h1JzFl96w)xd7*8>TI7TCPRSa#VMP+F08NwZNcE}f+niN2(M-1q-dbFO`Dlj@wdZmSau zXeA$>cjz3mHd$mZMFbTeFjoXHh!O;hcPt{D*Y~-oJZkgCEz7>jY;OT9ckyeTI^v&l zcc@JHlhUJr@kvKHDL<7g)a)hov~@gNc7dXZpdUi^rKrix#(f7mM}lHL-~D9nk%{+h zXj|SfL|o&6jc(-FeTw*{6Ad9~{hT4C@?GjMT1i2EaFWL8b=!nWhnA}pkCSgl24k-e z|3lDLdZ*D21?VC&9=N_i34^)DeD6U@9+DGDw-|Tdle?S1?!sVA8(d1e_Q$ z4Z#DSw<}}+h|ZCNiKCdVl@kQHDHyX~8?%$FC=$c^v z%8L+$t+~kqcpdJ$pg%v^5z+75)%bc zzo@H2G%y^5kPSGnQ^>cWv-=fr+Y#|TW-9yb`e@GDhoK^LqvD5qsXU?=>_q;dg)~YF zPBdcf0sxL$a(7q&-M&#e3kJ16Ocr~iol9^|kR5O!qf_fozVe7ywzy4oXhnJ72>uk( 
zz$o39%n|PK<+}KL4%`tZ=?bLp?=raVZyOBD@FqES(EEIQaN)}U&1K1VFrh1~+O%DW z*TGZ!1=7Du*6}GOOW7vYZ41}k(`BcL_fG^B+F`|sVL2{?6DvxCzy?=-T`{05$>JqauDW31Q#RbDjW72+&2ON%dGvPM1D@5Xbk^(%kkUCTy`F z>-gq#hP1t(t}X;3U=L>%pM=h<1thfH^d(fdFtWHdV|>)qCvYd1fRgR|KG){m__V4h zb3;P2ICB_W(9>WqApFhdYK#>WVgSD(d!WkKpmp*Gr#BJ^C{`?y`n*{MG*h8Oc;kcb zcBLD|-s3&MA8fr#m*uvM%~!rQy(`T0a@46>$j4#0MzS97IOk$*|LMQ3f>JU>;l@so zpw*8b_E+JqgjVQ&M!BUsK>G|P1!w?Wo~F?|jnc5&9&`F8!H!drTGVEfVCOhyPg_-f zBO$j3WK3(6MxE zB^aB||J>Mal=y{Ak6$w~4#0Vb*dYFaM)FmkmlG6)NFbbV;6E`j;$j`= z+ErrKfeXs_dwF$G$v@+y)`J3GSAB$|*o-lnnPdJ?|Ux#MG{@yIGb+!G~{ z=>+q$wJ626j5|o8!i{*Z9zPRi$|ipoYoEUw91QZ}w(-S2?g-z+NLih^DR9eKZGxgpaZFWxKoZYbkm#jVV7`jqv zzCm4%kMPui2%-s|q5|uEoeeJVACr>_F#~j+%dioL+>S_zCiQO7qF-e1%5%etZA3o| zKz_ad#^^4o!A#^WDHPlMXMD3>hL7m0)>RRk>&UT4Nhq&g$?|UV{k|vdue2njBb}Q7 zyxywu+TffHKL?IWMK#Y$0*idFSY!E|yl3n}@^>hydHJI7ZKnj?*cw&pO1p$NEd%~;>EOR#}3odI<1 z0Hfexhs(bBvdE!WocyI!Cd3aWQ^-I|i}vR5jgd#Ja}EwL{-^R9(0=v`-b(0F)z-N0 z0r6tECB9>O?upR235eJAeJ{dMlH*W*Kio*3(*3wDfRS#??rSP^|jK|K3066htyg{o~l_o)(Q64cv&0JA*8lD(5 zzxia>zb1sWlRB50dRv)eGiISyfLT`WAdy*;s8|l{-(aNGdHE^5M-wsux|AFlBZu20 zb2#T79J#&N!cVr9ZIGrC@#y>F4!V>DJ*o42z#h8gm446ZH&G|!^tmDKwc{hupz-Na zt%;2Yg5 z*XtwoU&)@)$~p{oX()@zSvS(v{qDy|hjanIYDpF+D_SehnQ^9@|ADElyG&#{$Yg-f z(2OMdZ<(W_b)PJ#tUyk#0EeL@8(oqedi?I4b8@e**w!lf`+UBNuLTJ4PSd0eca&}S zGinl(uB-oahdVO1%_g}#p3dG<+`WCU&4agTp8qff&ytP8rY_3X;R<-3N^R;M2Zs2& zUf4?DriJlLWNvQ(#x!-W*LD6Lab}13M3$`ojsn71K7}Yp(_TKyWD3R&x+Mvyyta9_ zW;a6mCkymex-l}JJKy*Bo#|&&2gqyMb-S)1o7fwApH_6zSqsYvOdr$+SrTL zfYfCsvk`t=RX2MA@5g=6t$Sj!4?u~Jc`XKxffgL}4PNG$IL;LU=5skzHl7|PIRr5JP|83An+i^?3H$~a!sf=zmkK7N zco-%C2REZxPmh=K)V%5IF#yOR_!IwlL3$i+*N%x;2xj)Qvnx>DzE)B;E2*J^%~71Y zSuUMO!WY#`;TIK7)@Kmp`wc)>aq};f^ju$J$fG6Ny&L{$1-!AVI&S2fmQ{GFtegn4 zwtz6bNbrN%y&wLtj-5`>$}f5fzn6}YiIXZ9+`Ume#sGXIdvk2_m0>pX@M@I)qURl5 zz5iE_fKR5R+p$Wv)fQU~qdsi54wP*(qALrmoQEg7bX5=GheDYOABajIWDwVr1;Du| z!d+@_DMos-;4t1)-Lt`9{yg^>C0H|;;T82!%iaiM`UK6LA_mC?8|F;~rH1I4->v7{ 
z(6uuWTRtbY8*S$Y;Jo(vPBC1^C#z>Q^2lFhB*Q@m7)vHsMjQ^W{k*PBq-4(pBo2@Y0rKFwk_NGm!u#+=(pI zgid(IIeH|2l>>>AZPrk%G_k^uFy_HP**YljvEl66v9FWe3W(RVoZN4T6#Slow2IAL^nUYo%As8fEV~Q!26Xza>OmX%RZ{GPhcTr+BanFxl6JrcP>_! z@~&(F{@{;LQf1@z8a_ZrS-bCKFo2P(P&y$!J_@4D7_o9BO59fE=0n?~>fB-MI5d4c zRs0U2v;g77O1i{A-fmBwg8&2IEJ0LWr}h>+7LVNiye^8)wMmJbti!c%UEzt#mvEFi ziq-nNt~lamPS^afJ8rb#dg<=kHI=z{7hHyA%?j`*2DKq8)+lrIU#InK>#PFS zQi@hdFNmXOqF>-9yxb`+R;u|M5^6=@7ix^c^`A7uv+ZQ-Vnd0Nic)VLaP>RZQUD(% zj7u8gX%XKrD1Sa79IKeJm;F{r+U-=cpV1`RW!&WM2-SMZQ3)G*vg?&excPSE2;p!I zKAAL4J}pn31Op140G#T*zEhp3;xnkA8#}eFmBztc=0dZMdWNh?lg7pSBo9i7se4RF z)#hIBXB@r{gHw?_Y>I6>roiW-U~_NZFp#(<&CbXaP#4@1I9iL+R!vw;`BiaPftr&{M$v- z-E4nGXRUf-1mFx_1aEQ8mXxDl0B{1Z2x?3$3_b#^=}b4=DsTdQFhlWAtMg@rbd~0O zsq0VkcXcXwhJ8?{IuVd(veR7-n;gh2=LW{LmJrE?KE!ANI2YC8+|QtXcHlFq#Ox6k zi8A}=wQ>i~Ch*w2^7{FMRQcdxJ(t291S}5qVH$Q8)S-fje9DgSa$&i@pGy zuiY7sNb|S*XDa$hEIABqGq5Tb$Xsp?Y+=F=*Oq5g)HSeSAu^NTvG%B}pG-VEhm$XY zm-uIRg5cF!GnWhJ-}}r`+}`XzrFJ{l&ofz%u4-9F6dBE{gHP(T6_)1aPLWGd{8wa5 zGV2f4{qsm8OQj2Q+-3g^H${oLt?h)?`A4(9&!aX>X`lE%LSD~&v<=--c;IWK!HGv8 zs)kKL=V$PB?=t16A9)q^*OowreY1m%uu=qT2u&A1D6OU zhaNmOU$^+*I5fQ$hcIOqbT!by-S>U2?)&F}TX4L$8(pn=`D??uN<0Ljw&<1-8~f1u z+rFc_=my1c+KKDMej5I4xpo2H4$5Ax&H^S+;@2gk^o6JVi&U_=Jr zZBF(gkquYyztr6ieIFdex3;>Hw26!nFahn1Lt4gd>{q z0Dp#@tfQgu`^!TUkNPxdsY70_*e}(E_vq!NWL?aC3>S(W_f)ED`HfsRG!E$VwZ=ON zY{rw8C&Llbpl&yCvogQywyKJq4$?qe#R(bXJh?=XRDMQmBgGCS$Shg1PB3c0v&JNx z9D#13#y%nShUKhJ3S2baVVWnc9F+z9uJEEAX8u0cLQ)h%ry{of;RKqr_7K*-T+)7Z z!wP@w)Dia7Ypeg2n+Mk98>gsQ5E+*kC1qMs6zRj<9h{i2;uqa)X0o#WnY0pKmktJO>Q+bY6x`*r2TV-1pdCXuG=8E%$ zyMB`)aE_I&{ZH#JpiQkemO@Hx|O>~FxOYrRp@^>i8O}>!&6yCMx}o5e=3%1i$9Iq(OOLT zd5g~G&xtC{)fJZh$uIRk-5+=eC-#D673c+GyAGvv<4jcADN+i6&YC0RJVsz?MTZ3K z@T&p%=o>G$R!#)|D{nyE#&5oVl0(%^xK2(Lxq(q(m?D5;GT4Fb$GCo@4rlOT^3qx~ zDP*Qfo^(0i(M_6BLYhgcdjj|afcu{r$VGs1q3cva_ZkLHSOwVde@PTi5n5Q~u%;S3 z{?gkVj)avTD!aPV$(R|~vQOis`iL>JL z@rErUAniL?R(gt#k4|>D;TxS=Tm5-Lv<6#tY!4hIF#&h^Z)@U{BM2@ygx_9v0704U 
zyAM@eB;>hm8&P0Vulepm!zH>Uw1_gvKxNLMf!6-#vBU?SiMGYFasXy)L+Dp*>Se2{ zdMbxlfx2}$)<5KjQKX0+03Vq`_{M^}Lj7p#U8#idR|0u9{Z;&TfsX9`qp2{gdPMQ< zoTru4(g2InbzR8~_2fq%AJrK_C1I#?dDFj{Q7qs6Gqs6yyldjf3j9!5<@MX^^+tiA}1y`J)El6Z!Dp zO>ZFado!5V}y3Vz%d{bsSLJNQEUHRkX#wgRG8V^Ry8WiuiozoYKE-pMc) zmS7cUWz3(ti0}XJJ}0$V$7v>Jh`+*Hv_&cT(iSVmYgBp|!%+XMRbwMw zo?vYdIB=O?|7owxw^$lxrgxtgIkP@;rX~63ioq%b_D(6eEq=|~1TceB*b_9LVa8is z{H_;jEnftKpx0nvxVpDIeW3peb#6q&XMs0B%HuB>e-Ih8x;IYYsy>(Owc=7Ecfzl# zblo8*0C0Zp&__FV5$P;!R!ZV=wpO73cHs@sTb3S5gT+B5CrY{&m(wJaZdtYU&7Y6J zbE-w*tDCk1QtCgn5i2m&vC045j|E`ieEz}RKmV`-rRdX$Vl{o>%df?EBG9ySb-g~M zz#^i#Pg~c<+)@=BY~dmT?`S?zq3K>Itm@KirjyP6NR!rwLjz0Lm0KdPOc3EmK8zV zhCsH{5L(3CLHy|LzSpsN82NG}Pv0ZeGDe!CH^UKG8M1cZLUv;sdf?94`2L26P7}jh zCnuE?1>72~1ks>l=1Rd5~T!WJqw0hW^R$!F- Gj!+ z>7IzYyHZOaRpV}&;4as(jL;>dPN z;&`!Bw6Opj=<^<&)x3>gvdSV1d-kXn3{h&xkU;9T-tAmFv5pM>sH2~=|Q>;-7Q zW=Oj8i{-m?w@&=eZtDlT@3P}hvjOu*QN5xF1mDk&dFPEtdmpl0FyDR+bJM%YyW2(@ zJ>vO*Ys9DYF?{|Hi{+M@c_1#b#+#I`=ugq6k@eyJaaJ%cerh&P)TUV^G=cM)pvHe%P z)-vLLWCKDiCQ8^~-=DjxtoBYNW*{D#Q~)0oTsERebBd${tPMU`vbDAR0;#QVgVG|s z&Bcsmp{+GwvrR)iB9{&Zg`&3o0~i9bBcmsGp@ItF3p}baXG~9P08S{1fi3~uLHH8r zT|rvVpk4Q8EkgKiRM};hj^n@6K3K~>^k5zhXDTM@7lTcP={s?1l~JZ6Ut1T@xTsr!F#`@=QUb{k z{#6t5+d^a<=B25;>Xgo#-UH8P!;>hW0=kuRcYZ1VpU=jB z62$OdE;m>sE{k# zLJ50`;@Qb0RViJ&{yL66N_fGdm4Pn5rNIf^uheoaR%J@juG;V5SxunbIM5W(GRhgG zYw%Cq)0^n}?k98Mr1)<26lI7XL`Lk<)%!IPX>w%lt-5X2+}g1%D;!pWKdHw2-GG=lwr4h_va0V4RF6gn);)~S&E=iwCR^) zDemkpwj_afwZVt(D-qbNK$JqIRuCn(StsH|ksg1Cg<1kBlmQ|@7a1ujWd1V!7A-@4 zh4l)ofHmb+sJOBJN}oNj)2Lio)<4ms`;KvcGeI!19pjs3eEl_&l6j~bDE*zeextNM zMs5H;KKII<^-u^~%j3#D*V~h?m%|2~S(uTL-7v9dqW8Y~NuRh{3ASj&82of83I`sy zGJObyzbO5=vWXI=GO;}CwE#G(3ZfAQw?GMc=`C;AloMkWx3L~v*sa>HG ze54BI5}Y_>+WAv3k*l;oiCQlG%$0FqV21Ofnc4P;033NFX)i6*TEY^@>?VUNc8IC8 z3-pke9lyh2f#MK*u9!uNr&;DydTy;TdOE{Ip~0UyP+YH0U~8hoV-S)S4x3y6e`2u} zFIvcO-`n7Z~Az?e8&H`j}9}?fm~H)WP#lugaj)G?!`8OK5VgxMy&Uv z=*ixux@h%PTG|5e!OhBv4xBPEq&j`{l5Qxo-WthchOA=#Q__>azoq1!{NMfgz1dYB 
zt@e&=S#cuzzhS}$&!&k9&^WYSxGf3B01*J6**M-x8Rk6Ntf?s~`zbpu{$oQRWwLXf z;sv=LtjRzQ;V&r90Uj#RPG-w3Tn_gqJKMX?wlyFt`+eHyxjOh9KL8&uKcRc>VSl&H zaKsR91c+oJeJGe96MmS-rLvID^N)^#yl|~veRVnV+J(AU8AlLjjH9e5|Lf(-}Im9rV8Le^LJQKa4XZ zPD+7Hix2SK70~RqG0x;>@}D>W_|Q1(%xTUx3Dm|Oi9Xlk&oo1>LIO`wH0%9ABt*Dj zUQ3+7J;3sz&DEX@kSQ#S;YzNc8&Ska3mr9@e1+W%jimhW~Skp zf3hntT~#@r#d@g+TtJy4WxN&YmBeWUqRENmJeqXaP8o>=o2=8szgaWCL%!qV{cV8S*B;}0GvYZ@HATk zs*F^Ty}6*3Qikq}9HfFu`gxrmyLtO0CLD5@Wd9W&tAKl97|RhStk;>&kHixO0`0Hv zQ^i#aG&c}{KO#SDy!W{5qT_BXhjJDaPMt3HeB^VGt>xNT6LKtk_|;?uW2C0ph3H;U z=_6#{K1L7Akd(_PK;4o_ie4&6lfV1&LHGBngIRj3y_nd&xjBH2`3&5dE*kV>#}M zhO%KNU97F9p_$SkE*gkneAKBwZx<`Fj2t+nDIX00U5{o$B$WsGE8N__D>C1#x2o(+ z^FBc=1=1h&SsHn^k*pe@OC647CUeZgwK;<#1Q^vqvx+)zZQOkQ8NGl!=vn}LC{4yK zS)$LJ^Z|*-}X`@1O_AxVTSWkAF%gN?&T7uYA_;AMZ=C z3@8Ax@-n~}5<%+5g6pk>98c)tM<0%S=NtdP8Ad2K2ya|X@a}8#eI9noJuJ^})(3JBj}`wFpeilEZQC(XiG zJ=*ZnO~$7Xep@g2gJ6udYUb68A1p2&3k9|M)}aXPsXhv2fH2J0Fn6kM>n? zmM{ORd>V13b5;$353FAizlKQ_NFC-~)+Zk@al=O8@>TkzlBogyaQ%7`13xI+=>6O< zUBmm~_*Ebve2yE&g4WKUDaLIe*SOj-@VEgL3m4uQHw}f?yD1Cmy8`Q4S)b-@Nu=H` z@_l|NG?@MXgnKnFuuN=M-yMz=YO;SH;udp}MVi9O?71XK!P;wQgVk-W2v$jD0Eu8m zRDWPbd2NjDT)BFDw=wJ~2jBxxZMQ zGyS+BD?b#irl0s7JC^vIV88tt9|7H>au_C*TN-#R0eK366N9FYh#!j>QQUX_7}Jjn zD48-#Sx&NBsX0iF4Kbk~oP1whOw$s_gd>6P9UFBEm(=OVZx@!p+&GIe^GT$mj~=MM z;9A%TQF}xU|9?p~_Rpj%A-UIyUhFQ>kT17kv{=x=bwzo+sZMiQQACdn= zLNQw^<1VuRUM*Io#VH|22J2Fod`xd4XyX!|eWpMIq7(r`dLXpL<>xiLbUKK+w1D5eEp?5(@C8QhTNlt6yVJ6dxjMgBL<^+W z>BdnB;KQ{36z}0u%yO|)aB$3t8XiKNb%0OkKd4j!UxJx>uME0k=2yr!-3Ecj&6jYRp>* z3Q#niO132REN;+h z>9xy%dX!in!Xpvw_kP2vZh0cTt^tnp-G(xYO+t)&*{IT)E`&kzmiRqfGTMWirM^E`6rjOlp61vPu{2L_(snrzohbtXuZ*k`I8hg*ni3YQ!h=t}}P<7C-aOTuii9qhK+iv@jTq ziYDpS7tcb>1C>Gc&l(;jkMf^IyeRPGQzuP%!LAX$S9EpdcOJ+X$z@4U5Ju@ka)*3t zg4@Jg{S^i|4}Z!}xCEF1qE-S3n}TJEqL?#JNu}L2FZ(@Y?tX5(J9P*&Yy#t`NC$A#sq;Q6KmQ) z)XftIdd0ig$1^>$y@b?Ai2wuehu-XgYaXYV0&g@1I}iSn3nERf zL;b&EyJwJ3m`aLFS z^JY#1ePY#b`kCr$xsI>7lM-jEE`!A`01hsxX06+**J6KXt<5!4G2LzGiNuTz^}o<_ 
zP{y3)WatWnxuEm>?kyU9Vw}g-t1ar~1ZC-WmX(Tdqn@X7BZKe$5}*gwxi=}~zKqiw zYLEwJbc2O@zE2~USF`m!<&x=072w4@K%GN_c{Rg70+Dk;pb=PeqWsU1qR|v0{?CsXdIv;4|{x zXlk!IHF|4d%o}*&$LaOJ@f|O@Cg&IaeUHPsy!DDuTcZGD88y*?z?CmU5@=9=Fti4j zn>U!s;KhEF(dME=ZJp`fIqDq0~2*mM_u6>nvByzr1Hmi zy?fGvXb~D5b63!ZM znL#{!da5w}2(MwxT&);utMu3qvrJ;Op6abc>p{Z8mFuvbL=^W1)nkgJ@>^ z*m6}joToAyw-ns|v2;zKmfa#trgQV{j~S!VHjcsU>QBU0FzF`foT>fXjY*b%l6`3@ z5GkkPGdiC3k!b2;jXNcLo>hsVR`XN()m6>i8x|D|OZ5SpU(ngMw3c`;FS@=xfDW|HLtE{-vSeg5B#-h*#u2?t$QyZaX zGnTE|r2&b|#ZmtGn#1S*T^9-Kpc0@9|8FktRl@6m3=xexSLu->?Du0`daF5@ME=(% zpEC2?@nUsHGNuy|#*3FmM}Fc7-RpRNl^+xcbB;DXK-bs!d|Fn!7UfA2qg8K8m~fHl zU;jx)gC`kuRW?qw!tM%vSXwTBf!Ayai2jzE0!w>5^;ob{2 z_(SIaTOmW<4HvFzqceSZ3(#Lzv=l3de{@3isMTkPlPK?87yn_cZShquy;+K&SM8wU z^+;n@MYAa%h0Ou@Jl`U6+x&=;g&MZ>D2}`bn^S@jOxFsC6vh`=*|L9Q$+5!A8{TIwK-tH=jZhFOa5;?+}0L$1*;K z@-~PtVErlhku}pC!u)7()ZG3=kbpQbhnO(hA_qnY>NZvXGc%&%1EvsIjQcMCoon#U z*&8#v78(1H{^WN7CI1W;;Q{wK0OxY~#sO*De5#1)+}#xqbOXQ)OJCG#4( zt{kL5B!ODEx#JP{3vUckYa`Z=*OeF)_wN$9`uHZYFPCH$9nAB6Wn|w8fRC9uorrw( zDqFna0Su^1Z$`?Cylh3D)JTEu6;r63FquBy*BRZl@fO zbKV0cCx!a2dEzPIPxpkQhE?)&fj_<`E($qdGevG>j;}<5W8d)teXuoQlIT$hOup=5 zv*v*5@#FgipCgLZsZGxaH7&^V@f(>8oCq#Tht3(YlM=dpE2Xvhm@9OKkDmmK369ue z5x_?a!AXyAxgtdMa^Hqws+Do=$vR&Fgc(J_IfTBO5n}Xgko8cYU2@PqH3}=Xc&2U4 zHdHV?2uapUxV_*VN{{LSfOGb*Ibc6(^$q^YFLkio@s?vH z<_;deO&g@&hE6K}?>MhT2#PD2ukk9X>a27oHZP|yYBIew8h6#yJG5TMmV!K|tJ(IfgBf!BA2xr4N0(j*nAe%*K)4 zxo&qD3*0S)?d$mYkAh;bsa)aA4 zb0<&O{ZZl_`PL?tc~|NtDF#N(WCn1IA|#?eXU^2JD1^Ow$#$Xk0%1QuUY%u&veTy{ z6ae#3nWMmZ#eVDU=S22z+~pdzDhQ0BP3@;K0_gpSpD-Y7vRcP{M9D}m#B5SMA3!Mh zqQW$sG$|aZ$}RVK(=0ym-+2xn2st$rmJ;d7ll|7YtmSDxZGQgwInk^g<^Kj540prB zHoX-;4m-BR^%^l3fb#Basm_-QZ>=e7U@G zf{-}QhPXhv(VGDL2_2h9s~f(?eN7aO{dt{OEKt5^ctMso(HO4=o;P=)5}SOa5p|qO z5?H4XSFCBsJz20T0P0lA_ScKnB2BMO5Atcy*8r41rRyd~||R;C$fyIv$=$ zZuGX?($;>}>kr09P}%MAs^T$4u^N*{n{g>7+rbK#}}_-JviVyReV2jAy!kY6gs5#4UFN%qO7q{lM;n2#JT z$zzPDSpWNRzG*P6-PHcWySCWM#;6!>jZnwZJsjk)`s%wrZimZ_+O@X0mRke~w%b^C 
z0OR!x!iFpe3(2w^+`Z)|7jydxvP<0~60m`-L3H~L*|U^lju>4M%sGA`Sv(C+&3Bz| zTS}yH(0hS6bFYr5^lb)c<8OXhl)CcLp=M5(Ya2AbYBUu~WT~`8|I6g~)tthT=c@s5 ziPSx8l~;cxtFIcjbbu~sSvEu>(Lel;7y-%Yv;^TDUhQP~nd(P~Xv#Uple?I9CbV*D zs9^?CoO>?MQ*N;~L)4Dd#=CAo5T;MqmCehN030&q&Kj2Kii_l&GW;^24Jy&u_QG9B z9@>f_&}dPl^;bJ~m{gi(wPE+?Ux_&Z6eRo@KTMW((?(Hnx6>`OL5xNK@uD#^E7PF? zJ~_E4U6;^F2JP|+BFVVhy_y>2(VGS-mvk($BVH@PS{7YxV6SGQv(-(wO1C?5(tY}} zAStR2i@x+pt7=AO6xpb0vCl>jTvp( zo%kmkv}g}VlHDF;M7xFe(J@9EB>|u-lEoBbUjDWEoSkQLtEppvsbu)C78HSbj>ZFa zB9@bCtXuwFLIZ3Yvt`JuDu$0w-4fQ0P|EE zwZ|{SahftvPi|vdtEIOcT&N zz@N&+-?!>&vqvKE^3ww3I_W==#Ah#Uu5KClZ{su8lijq(?_2EXEMZ8jjvkWhnNF~% zxcm&4aUJ%a#dWM}W8KpLI3=3Lbh24dTC~X`w<`ns2G?;q_>a!0C3W~+V8hqFNUEfy zZ`${e#d;&zP(S!You!T7C(o_KcfVQ>_Q@RFcQ7bq7$xD7i-znNS zP9D?s%5J1|&yLYYZvQvNVLX;F&4K3C1kr+bfl-5(F<-e_wV2cCQXJA9*jN$y`BoyR4`wUBQy^v&UAhj5Cz-<_{aILQZoFu0%R|fjm5EzD2rx}UAm%&T`PwNgj?t`MN%Gx@yS2T+7PV6$@3X=g@KhHT zv9|!9PZiS;T-0C6eZdH%P^d)sQzM3igBrz@v47~ImD}S^7nz+?k>}oT_TrS1wlx-u zOk?_%y878e!&4a1ugusS;s9_IWD%l9i4|MhUtIl+l^LgpX|QbAICr1_S=TE4RDSEa zYMFySk+$WSrb$g(0UI#QerT9Nf9talX)RK0qxIvg1?c);8|1%)*R@v#8^2P5X9>YO zlm^nP>Sm71cqYB?;!`YoX0W|w2G$ljVy;BgM=^vjZ-DzSFPIN^7G-c1FdfKroWAb| zJO3K*2x#g!sa*tdDSC!Ip9Vo>iFGMqI=E!bu;R#mywj`B7!SZmv{%yySy$)NHDPSD zZhHPi5Vj%Ca?4=q)&v-X9hR|+xfZKCSnwQG2QJT&et<~smP#c&?BKqx5eS#Kdat8Q zXiPc}q50QnBZDXD735vecOFLENAtg%*#_f;XaEk%^r-UK3tWo>DLk1YF0w(ZAcbiX zTiQR`R&W0~<)~hslwwF$UAMs~0*+WHQ4`522~8{|v$15Y=V&w`5Dl>Jx*NSOlNO`S z{;9!cunx7mkfTLtX?E+wrLJx&wpm*tLT#a9{k*lD)6X7eo>#LQf7?X+BB^!ocA=G_=fS-krHWA2>VGi#bQ9NW%hV%xT@iEV3Q+qUgYY+DoC zwkI~fXXDQI8{VznJCCcXdac!~yQ=!Ygsb!%GSZ$9UDexWww;MS0<)uXJ~?>-IGNya zzFg1HOhcdBV7i;fD`gPNK;aRJ(+V&Tc}42SC`2S71l&F>SLUdOo|=x|OLiV(_K&H` z8s-!YNL%;OA{r{73ahNyUXB7%zxrJi>dAp~7wP=ITo^a$$44+ zUB;2<3jD{xGw|REbFv&c{i&xE0(N%xc1cKCnSVle(9{qOjH()%rW^q{BU0g!85Mehk|Y7fvhm?{B1zJS z59>WJ_|qxp4kyfo5sX8vyyElmjaf4!M*?acXO22rzE?-*iH?uZVa&|k-}}9dM6dWU z^S{$yRQQ0L-e!S5JP`v(p7G$}!QLGeQGN;kR>wIsjnJdi*zMubUH?m 
zT4=fZvy(^q4A3J1b`=<8@zm#JLH-sVI6K(SMsbKVa{Eajkkc!T6v_tCGYEGj!E_M` z2|S2XyvA|s(5YsS+ZA-~!UiPDS8x8E-)of}xtwPEi^w$acF=xfVldmv_-7LBtT)ag zP9hs{FI>q8ogyAy&u3lONY|r(zm+^|#p9iQKdi>#AcM>0EI9{2kL&b=6qC0Qip8_* zo4jcfBS}oLLFLaK$R?tmcuymUp&!3#Hx)s=r+nexyB7>gUX+upY1%Obi?L7LECl63 z7`*@-5v0^?5bT2v48hxHU;-!e3jry`l1mX~@BrFnR9*cVI* zN;DcD#9by{XcGb;Iq_5B?{m){^H=#-icu=)-^NzN0gW}2r>`=o@=c{QuKXR+c;pf0 zyvaR2HLq?5Wf_Vrgf~qVrx~oGEG$ zsh-%8wYcDg~|4EiqvH5hM{n?6*St)KxQ=f&$!T({l8#U48MOp?7dW*MQXC! zL7%JcuYOuwXlevR!{bZlVH>*Xg-}_ktEuFzn^cq*D1cZkdbMP(36>l zEGtVz3xwoI!tnM85Hh7=Zy7Fj(5W5V`XV*MG}|Rfb^E-gf-wjn=o{1MM7P^e5m&zJ zm)l{>UPxTGIc=^tC^~9c{w`>9HByg4+wz?tM6Ho4V*b~L`5ASuOCj(oj|6!1aTQ?> zyuU|m%CyQ{a!bDVW~l(+A2K;X{}(uf1bFobNuhh~MzhxrnY~kzg}vJgT*x@#BZD8{ zt6)2?e^0HB(kSR>aua+q1!3Q$CK~P|u0ZjHKF9!^iL=9<)W4#h)q(^Bdi8(6pa&$? zME9-pepFc=BH3kEjLi*~-~c$rR7w9t zb?#9|y0w1?Ue>&;+hP5hLC)w5XS@`YOt@&0x7HpV|GjK4c#E_|9>wBoCthLf?1T|t zp%7>jXUEC#o!2DYGV;e^%(T_>g_n1|qMA(ec+8|w^=yHB`Za9l!*K*dC4GGgSx6Qv z(>?sx_0PPb{_7bM^%a%5t-#IN05Re3b=3E?{$Cj{%~6ACv3PoKu}9T-3zItbE1CDr z{$6MKZaJ-rdwYRuzhXsBgV>%vu}+i4^Zl^Xu$Ex%{iqSAri!}wgk`7M1sEdzT!=-p zb+}PLvp_{>)-hPm2)XHltKRHW2~9C3HrD)%n2A-e93I5|D>iXrrd9cgLA?jz<0dQW2 zA+#xr-8_|#fQJoBy}$jObi;@_i<}U5k2k#J)`(gJCXiZL%qxY%(hnv5_qLR9C~ZUIFNreV4%Eyv7`nczsH#8T z{a~Y-Gji6zrQt~o`jZ?xq8Yt@>1~VKv*XKpLe`kX7o*vZN_F@vUXHX(qsHQJ#L3Kx zvl5R;3ZKZYJ*gMzmOFC;{Csq*)d54HR#lp63xV_nawA|gVh}S9rPPm3H>1MXgzUsj zbcNymPhpSn1dsh$BFd|c(R~_;0hZyykN|B)rWF#vIp1w2$tx}erxhq|!jffs=3qU4 z85+)^7SxIdA6+xLYtD)*4RTebs^WensfsN?Yg0BcmMM&Cb*OqwcLi|xEG4lA zvs}sI()imhDV0Q*J!wNJ>uK%fD*G$|5;s_vyxB|S%e-T z%`!3=x-5x)0^pcMwl1W&F~9N|h{F1&B-A1V&Eb{Xk)@Zv`vv*ya-?F*1olLNk4_U~ zWS1=xl`P;-0OzltCYJLf7xMb!Wf*+tp}k{NXsa%TEqmn{g2m`7Z&qn0r;C6YqxHxuUmOcaK&p_6cMUr2_2n4j! 
zpMm9waXz4kZZC!@9k`#V7s}^bjte7eqy+gVU7om~5W#Zr@(;*XtT(&q$uZS8#19dK zjotY&x%}vHG^7oNrrHn4rrWUYrB>M0D8#4 z0x{u~7)=73)gtc08nX*8Lg5j|ekGOeNk?9%)nUJ1JCoOEBx1s8calO-nfl%Zg$Z4a zgKZ1lj)!Pol1aTwlEXA%6&IGrQ04?F6~ z93A~xS+vr*&2!zCx}6?wQIydkcWf1_dczOk#1y+sVymPtdKv)to&1)TH0P?`z!m#b ze%%=wl6qpTwOFe>7RHcxtaql6-}8L+585{u)p`5=XFj;Vq$L!B;3t4nQZ%ILH5Lie z_>X5jrhweW(yk*BAutB+hRRlaqL=CZD25f)Xx@qTUplNV?!a8bW~(sw$?5GKC}QHq?O_v$9IXe$>k(1`Eb_0bG6946K{#^_ zr0i|@dYGz01)}rQBY2`Iyi<=K7c8_+N>kzH+Z8hPf1T&arZDQBg@R%y>pkoiqrTTI zxHSaPr^z<+xOvWK4p!7)b3{sV1KwM4+|I#%Sw9Ju5y}Lq-aN$W&+fY4Wz%7|3RqJK zhq}co)FU8rHQ#Nuzx!H)AML-zO8QmZ{D^FFYn4P`@DY1jY>3muQ*~HTI{$2DH4`-? zEaRCM8h@C?d@?l51rg>e4_KO=rQgEzsSv(Bo6bkkwJrn!9WOH$+79(#kp8b)^ot=d2ceWl#$BxM`%^&j5J?23 zGu@RHiW1MH;IyV+S0SOXoPLu|(QM*6yp~R!WBO2%-1fIi6Y#X4P>$Cn8S0sz{@q^s z??kIYv=Rjq5HE0NKgah0gXVfC8BJlExL;{~w2b5KxYOmzX*NxyUhyt$Uk?Y^QPNb^ zG6#o2O)|b#Z+HDebZ=DQ!ci>~SqI$Ge^o$xON9XS5tPil?Vibvo ziT6r%Dol?&tIs61NA)e5QS^F#xP2RL)h<#k(S-7PxCGBG?BDY+us!8*0o1f}K`5)I zUP1|dQzZd7M9P$HsSU^D(S}QGYEe<3z0Cj-s!@>3(UtpbSE+(=1U#wQNXj_Zbg?1h zJFnmhI7-)7e4ut#ykmNg|2&5@|8_m>aJ1BfuN}lLS8t7&kT_@!DHV>?A*biA6qgbD zL|*o^T|_NhKn|IuniSyy5HEn6{$~b1+b_oQ`Cw74ql`Y*O2S~4z^4~WIKH&&S{-3` z6x2cA<}O88R$4?oGM%M(r8E3;zZmx?6mT(nmQ`}AhrlxadQ2Om?C~bOhC(E!Y*w)1d>8Q7A*~9)&9Z(s+E{h1V`A_8;}0Z z8-zdcaqpxnU31*&LenVOLAgb0SOI^G9)G-G5xe87^K*BFo>~d{C!FtP3lP%vU9=mrux!bquTK~txECm$D z?Qjdw{Id;7-M!fqKh~hF7%J+8rQMW~e(>B^vLC=MrG;j?L2smal!nJb*pgHZ-7edL zwu1aodh+-D;a$?T($dS~>Apku^DIz12 zbtLW#da-&mjk+*WGc=Nt(GY0lx5c-r5&=64F-%ok)@Zg}uUPSY=3K+lhN?a&jmZV1 zir}HMc=LUD06l2SNEZ7JSR@cPDbr2aliSCNT;s3gbo2d+d4p$Ty2W5ZNzWP1%I}~} z94{X^*{XWi(nIJK0bwNm6>$G7!ZN#r%iN8|F6nGUWg{v{G6*Yzxn*)Q(dkB1Z`Eagxsn3xItGKVt#x5unXKq) z6Cho?%gThP%|J1Zh`^tdvxta(nf>U)d{c0ryaR2>CmM-JlZp@%A?dfFl~j2ou0wG9 z*AK93oKoIv_vV1((bCn3O%)N;w3f+_-Iw*O{albRzb2(qw7b$P*Oqg)z=3cWGFNva zpHsge7IX}#h<`L?+t!^}8lVR=eH!O-_=$kZyE6(g$@}N^R>EQqYJR^*fOVpju@&t3 zU1Nnt4+c(JV=IRHXCeljN>7#6w7;}p8=Oz~9fINaIS59E9er#lgn=9RwKX3@KRUUO 
zkMA`2Fc$w-S=wKn1*@&>kYoZ(Cv_P3JzD7qTth?Wk`12UOfDBb$!sb!PKn?18MGUP zD+&RDHoLgoxxM?dXhqe}X9=Y85Ds**-7L!O4y^ctkh2;~K%l_#W-(IMCWZr=3Qii)C=~YoDvF_%(6Ge7Pt=ekxm*+D#NDI0Khq6L9JVuw zMiv<=RE9Y6YH_4n)npHkWfuKT2YjFCdmXN%%h9Fk%y%CLtR3o~65E46ucdMczq)^M zxNWO!Qp0T%hmeAGYs1S5!5ccxYU*jPG`E!~S(MgD5>ut(92vh1h?ggYk_p<2OAGGn zAG^Xl=ij`Fi^yKfeV*oziS8lT8E%@sR0fg-h6Wrj$8xnXEMQJd>Z<27NKYNds{2AL zLe<~(O*hf%aw!FnI>jP&*d2-3Xzw&v9ycu`75cXiRpLG(mgU5(f-fDIY&fCnA*y$@ z)vNP)7ye{iEs_dxqGcfExod#9-|b2!(3t(1as_QoScV5(@(^NZZ2rTDxpG0gS&HrW z+o942b^bQhY0_sX7fo06gN!)u>G4zxH;l286~h-r{hNbh50W}EMpXg?UUInm5%KEE zfEzq&3u{BZ+PEoMMnS2nnsrH!>nK1$C*YQa&GSGWjoG6l_&XY3+mWAYvePUU;Gg0M z?L>Hzdi=a{S$DnW68uh&9|QB!{uZ)I6P9!J{#Ckq!$>oJ=YYWQdC2Rtu#2 zZjHAGzy=g=b{M|bV^3k1jzqrbd?_74!eR^%KPyNWF>a=#UJ2+%b}?8>Zo6oPE|QC$ zAR1c57ME2(W`Mf-Zd1>8ip1R5R@z40`=bE<5ikw9pUdNs>;N_hbWwb~HPn;$QeY&^ zP{tTz9$zXt_Hf?S&`?-2JFwrfnbFL>_oJB6ZbD#iNX_JYl}QKkuM; zcyMG{ek6G>QU}6zzGKJBI~P_OcC02tyD=j*|tl<)kCbHQJe}pX|?V( zVa}`q6=376TV!*0r4xGEY{NV&xIQijSb2g-W%3p$9i#c?w4vcsh{Pkkjd|)xkefxd z8e07@@Lv<2pd0xTO1P{l!smx3(SZQR(Y+|rQ;FVLE$=MB8q0tIsy%D`qZB^c9L%7>728tcxu^tykSn;_XOKyig zn%;T?gSFkpoYQmP`=UEev$Xb;G>k;k4L8!!cd@qTX!Pd7`nM;i2ZS$ax_fE?dS0qsZI{|C7~1&T=)fz5W%8c(G9n>Jw`2=^x3z^#G!;ghq&w7dsUOwVtL$9&7vK z`UZYIdD&w zoAqwsJ`bc)n69-aZSHO18P-r-+QfP`eP3Bm_}SkAYB~)s58V*I_c!>2RXs>p7;?qk z8T3~XUpsjD)*0uc{u1KrQNjE)W-oi_YE^xUVWuF5*S}6OY3}VTY|~~;ckJ~bGy7%V zcb2>l&{GfQ8$L^PdHUwPB?C$Y)GQ;(%_Gs>G7jg2XqNWFx_b|F7(X61I|m*5Co9}D zl@cehpL=0W4=n5=^X8asU&?pBR!2|r1^Q`(&p_5dRQQCnr-`K)e@}SfGHO`+$?gMz z3r#9eyw(=^nTOBH3PaNLl27$VvxAX@8-%;T1>fLn}pQBJSp z`8^!LV2VjwCHRF#4N=f>_N%esreg9;GNcQJMcTR|eKyku8ie=d8z3AyI9a|EV!({C z8!6tXH)~&Z)*U`j0;?*>?Tp)1N11lb>9n9hV|Z+c4GzcnduF)mWyx$YY|`3?@k=vP zM>Ul{r_k|-SISvpAb``(S88ijb5Q2!Ptr|$_{?9)!pq;aS46S?;|Pb3@RyEsL<$IR zQF~0T8H>7b#ZLEtyBqm?iW*^D?9B>Eo=RX{PZy&O;|gp!yQc<0~$P2;3$;5g%K zS0so@XVyYf8*SQprjz&yl#m`are*LwV4z6LHLu$D!Wps0l%j)|pe;!*yO&SJd$+A# z((ONjCYJzyzCsrEPdZ+DqbHvrvr1RQEc{w6Qrj&!x)1Zt_?7sMWa6m9>8GO%!X#_G 
z5p>z28lj|8gjcvVaJNX)io-~!t_a}R^mR87URUAia~x+OEkLk%$ozN5T6eHnPR8Nx zz(^rB8;NCT!1w#+p?(eCC1ub==vPVR; zQqQNKO@N#bezY&lpGT^{+fjH4gZ4mh3$|Yoa;*ULSm?51XcyJ24poAXYQ@%{daog4 zwxwfV5rCG<+u2#goP!ucV z>KwoXPOA}n0352iXZsYx8U6aT>fZe5=NW1r;oo<+tZBh_)+7F*$1~-mLL7*p1x*_Z z@xVW5cpp??p?+d5h;R!Eh0(ksdRl+?YwG<~N3?GX%&_q(joc%>^_vTm3@`9%eLR61 zft|P*&4uLjbpC#5W?4<@ti$s02hr03e0&C>HJ!)XH& zF6PS2;mg@-K6_p}9-NBd@BGz_+b>b#%G|q8JOfXG*wE<-?yh~FjsD`sFhpIAx!=%# z?$1J*aEt`zY~q#BQy3}^UR@%{&a08MsE`bLOBdSc0eTVyh%}LcQ*($+0*R(DG**7OjjM_bTFWN_lzLu*tFetovBys);n1|O7 z0iQ;!w$P|ngs2~5Q_3(~Jh(#8D~jTw`u#IMmey}Zhc_uB*=y|G6!aazi=g=i6;ULb z^_;g{h4T|%&U{z_oSd9V$WX39yvfR4+XS-TASWt0Mn+d+NNzEuSD7pZZ)}p+v&@_A z_kC2A!^r3C?Vx^b$XmaZ4ajht4`C@n3|Ikn6+om!psz`3x2*bq-5hx~16@n>%h!+D zlCuj+zU|)!R#72dV~dj5Fe%{v>UIm|2Og3RHmhTSwHV|~w9iS)_^wCm%YB~yZ7j=L zd_NEbj1acC%CfqiN-}=JQ?iUpDNo{OFpSCA7&sbg;n^3zhs)-G8huO$i=9M+@gDE} zAg`JJ049484YbbIX72D+TpyW*(Uhp zV=q@bUF5!s_H<=k-UPv>`7P|B&!HtcMTC*N09 z1ukRWqrzut`x-Y2phr*H$5Q)Ah1I$X-6O`M5D(?=gJ~gT02W>xndGY0AR;iH^n4Vp z&XBo-{7TKP^ZUNs-)?icxvh2xiq-%O1H?T5M}Q^iMiW9=*@qNo<_m{141~0LIIy77 z_GH{CVkhf1%ujc!>h!<0cqnpwSEA^4A|zdN321*8MsnrZ3Dyb>*Z2F-vcq$d;tT@i z)sSI$M)l!}%d&Orm2f^{U^4b;?d?xanO_vbIK(|48?OdpYLj_iUg56+Gegm(vOhyi zfiv6;Ab@zi%=jHS;>Y)ho1m|IB=Ng)6(wuM-nD#eX~oPYXKHZn-)61rH#0G0N8bm+jjMW9)Q2 zxZRw08Ru9KGjQiv;--(dU0(Or+DuLnxbsJ8s^uVvQPF7R75n1>lK(y>4<7 zgaU9clMA+-B=i9TJ}3Yum$7(-bpt15HtM%it8A>6j6RvTDg|yF5Ln*?@|@;Aq7+aP zI7+5wS9y$fPvZUhoZVM@WWa3J_8Q&bSPQk1~ryb{V+j2qF3mfhMSa56_Cbov6dQQ1Gz&|Mt>-+uJ z>oNaTr23~&dYF|Tk+(#9?t#m3BZFFGXv$*}F7Ta@h-YiGzB?4R_Ckchf>o{DZ(P*o ztVJPYbR2!x%k*`#p^SBe8fsg`=&w}R_6^U!qhBR5k`f>3t^KvXTxhlStCU1jGKAk* zj>DM#6>p9KGY6`+lfM@B&=IoWXDkoUv#&({*#Psz{@ld}A|^$z%Jn!F?j6mV`i3iA zQA*^*LC6wClcK!jH;EdvE*;72odB#+nf;5km;ubC#2~bQ^Dn^9Vn$ZW(V44AFo+5N zeX!spJd7Y(2)U94Es{L0^f#-T!1RK8V*N~O6@KFzh|)yU_@k4$d!J2@%7XCDElPp_ zkpnmV<8{u5*4$G&(`<<`%u8%No5?F`51UERa$sC-wpr=r!=wQTu+Gj!NP zn?M^d65UZUEAIgQ(MUfT)fSctXm|E4_sA~VYIm#;uLVMrdF!LnQoY+jqdC0G&O}0z 
zi3WM$JIIb!2t9)EDawooC76w{eAfM|_r3mE)FR4H7enCVHEb55EbY4Kac!Zjl>S5(JPe-t~#D_^c!Lxc}(k! zT;$g6Z$HyC?(Mu(n!;72Y(p*#!7OCzQ?a1HV#M7=0!uFIU`q2OU$wd$mgjkPq;MXG zIjMtlYw?;zkvNXFbi#85$~*%&(DyX5=7{~CMW6dup1&=&H=->ZNxob<(yu02WpCup zbc4&FJ&^ElAOrMzlMuZsjy$ps*eaI=8#CYXHs1XDzW1?pIx^X%MJnTtidFJD980JU zgrR?cl`4%&g%qw$=;Z?EX$|~!%C1CR=C*}|rbjjZv-E&N-r~f2RI^6l8Gazq1<=!P zBUr{2sH+gwgOcl6KFl_K#xh_N)8v2sQ0qRL`z1YwwhT8xo0p<2Z4Y3_q&tyyOf<6x}&hOrIb7AA)UV-oZg!Np^>o9WrGt9py15G9Jcn>BRm)Ckm z@ix(4YRbu__UO_sqm>~dR}R@80?HZWPq$M1P@lTKl&kXt$q1peP~YDRz;yhd8QdT6 zViPTi@aF!-qKFAb4*khFPL(rwIkKKD*TPxDyFLin8-OU2|=PgUjCY)O9piYU*QofCfjF)kK9 zt1d48XEQ+>*AMB^l}lo)_yzBUi=UCtp&#{ROgsiT7G27p*p+pCH2}|IZZ`^d)O4a` zPMSK9P(pQqyJ2fKAmC+>AC0-7j7r_R`!6vZ#mRZ+>aSAe{ee_2n7~W;IW-PqvLsgE z*cJgk3g8r;J7$b3VcnsM*&?Wfukcqc6f(7GnIpckjzz7&RaBcVU|l`+n#4-HTtky? zigs}_msuT@(7jKwSXArhJUyxd?4pQ+KZ;K!=aruxxMmR=>xXfx489)0yQOaUILfJ8 z?f3sn4)uvlk+Y6-BpIr-D->!jD@Yt;m6+D|Z&_I2&^H30XW8#kx)!I&z+x-G1>X`M zp=;ng&-%bU@LB{SvGfb}r)w=dXQ=0J>gmt5*Ij09Xp|=9%1syr9@P-!dHs3D{#pQs zzM_*eEr9|8hG^5+&#bpMubS5PpdE(xZtZ{;q(qt1*6z%!ZSy28Qj*(lKJ#ph}iMkOXk-S z=O=3&zm!;4dLp?JJhYN>OjfjBBnJ#*o{0VLedCVkzxamq&4^Y^71qQ^#xxu`$~T&Y zbfr_Ih=GN+zDhxFm(0~!kU7`u-mUG9eH^YeLH6N}!Z;I=A$2lPZZFjUyC!#)7a*-f z+0(}qkH`o@$I4bgEghU^OsmHkaf#bck5D}k%YYFasEe)j49K6Cn`>%O;+CRfpQ({! 
zJhm1|6u#%Pa-vfil+77QD+&aqZm=-2Ps_i(l(uPO=ci&<`JU}6wGZP>^cS_w8Awg= z1jecGEae%V-F@OjxZd2AN$5VWZGawTQlk5RqifI{(p5AVfu%qP z#H&uG<)5qqb1XXvRd$Nb=6{vfQ{7cuMU7M0W+h6OuSwbJvybw5+G_&cE3A@|DssyB zjEih@&%EZJVUlzGFJeys&LlbUn{yJp?DcEw^;A5OiW?7}X_^#2pQOs4E^vBLnl9wV z$lo^PN_RwwN(dr5A2&ED1X_zLAQRb+SSm<(JYE2fm}>dp04aa3SUC8D0poCp)fxzm zP_FE+*!QTJIq2oD%2l(jXS4u3aQ5UPTl?A%FJo_`>$KgeShyj4LFP4t?|tG&`k^fcAhmlJD&i_5B#ba!2;bVw081dS{Ks~!Vv^NS@|GaFs9d1Ak z+mML9z39uZ0ae~^iOFCoKx8(c-iz{mUdb}?XHTCI@1}Lz#tJ(rJCIKk)K4sou3BsG zZWd>FFmfs51FOa_93{9n>rl5INl3c%4sS=7q#|e0rADdJj&v0e_qD0V;`a;%#-AEU z3Y!p4Xj7|#i55ewoiGoq?WdKeUL7n6XG0MQ*!P?}zG?GpJBKEcKt=C}2`D~4z`+z_ zW(5G8d{g3rJ2<03yCjROI~DY|y_B`#uK|DIG6PK%=VQHn2{^6t=YVVRbkG#*NOx`(2iAxIA#| zYwDR8W=%O(@daMT)Vz84>6N`+_(7*%k*^1N7(?Ci1kl6pO$prtj3}fP4l)I{GMNvw0n=dbeBWsiTC8|kVZiH8il)mv@kH(=d z3W2Ou)?9iYvEao(ROCzGf2m?{v5i#{bKge;Tb(pogy$x-p|x>cfBo67RvT>{kD0w--c&-v@f4b zJCO_kOKhTRvK{G~Jgjrw_+@mKm+%Tldk|Dx1{OWmmjiEFLP=Q&v(^jr5&gr5ekFMw z^$Ww=;GEq31mf2scFQchV*Jh{n(%7t%-uKvYqx_6^9lMTW{M(cf(?0bC_J)jk>BW0 zqj+lI&!~MYVdcjek)UQ=f#Uq9fCDz4pUZ;l{g^JN{R^N+Z}vC$z;OyU&Ecuk=G{Q( z&FF>k$+2ny8{{aYO(d?ez96+VKAIWv-S3t$qe7~EA36#Hrjb2GOt;s&0tjjBK0v$# zZ}aB{2Udur`{rWMRrJSIo;fM$q>Q>6v6J!In(G;ea0{V;e#T?-%x9%?+t=zFWXiX7 zMf|wOn|8R0kD}#o1aK}@BbOUqDz8ln&`(Kdkx}$Omiwv_qw*mMP3T%>GsxGPzNlxo zN;@yjXZH>+w`owuq%?g?18we4h;XG9LHob!iK1op1`iNptN!cEH2Z-u?LTH5wp0oQ zW3RWx>5O3Q81s|G%$GgTWUScODUxI~|Xixz7a%jShU-h8cIC=EwuZY6&<_cdZg>oa;c$SArKw_Rg-BtwfJ z@H~1()I)nM=jU8)q0h>(HJku*QUN&jl!b@aQDLSb99bS6eFym}^Z~{}98cJ93#6+a zr;J`m(qR8)%5hK+o#NCRN?b{#!FE@^YP37!g=FfWZRT260CB$v_7FpaND9f25RFIK zjZTMbCd7BuslVKwOVFL4Q~5-6Px@MUp*)#6i_WehER!;go@ZqhXW*n+B$v~|+rno7 z=n1w3(~MC^U4)mFcCXYZE?+gLpqa2wHr7KZKy1CT{(7ePOje67YL zauawH1*!9C%=O}ZaD@WU<5Q@0(fj2I^iCv?Ua6QFF?8@u`mip)C>$2F{wcp1H`J+l ztz;V`G;{J_W2_&3>^94dsRw%5&hsQoosEegRj z^FNN@)gpy=kq130Sa2AR@0RB+C60A{5g#IKC-9X;9h1X91b6^?rt+`cT|9LUb?=bk z&MEJ1C9ES(bJvN!`dIq<*yjn@)bP_e;DI0xGC5e*`O?1J2N}^iX6KhUZo|)okSEZl zzB$x~8y{CwbWv@uFTlv6D-U)tAs9t5m5P?)+p);n-fVUJUKXE7=XOXe+T+LwTt)17 
zu}kggnKODEwu{ojS_R+#aS#?)uw$Gk1QMS(1{*Nr&<<6R-?G^vg-aAEaAH!8rlX3EeMSsSKZVN5T$CGh)Qtj zvEqdl6J7f%SUi+;DYPJ?Usgb?i1Q9-YL}s)|!|>t*Xdf2A+0SV-SEj ziJ9DdrfTdYjsYChy$`;0d)?nh>rb+{`(ii91e9ZB$?2AKnRnIFi@={%(U?Q@7ZYTu zu3oW+#Yxy1hL`+9hF=97?FW4+FaHym$C7m2w= z$^495jUp%M69LSCROzZH;COceVQqP5Wh$auQU={l_n{hUA@3S1RfOxGi z7g;}f(+iXteDYWg*9Q1fK?t{E?1&k$>ZTsjYT`g$_-5E7mx=qR>_p}%K6*p)HuMHO zo_t1e?V$E(P_KTU=Z3LMgk0j66^{BSELZ>1&+3W`h{`;m`xoYKjz{F+>_PITtC|R3~)oJzqC(;lmEQIZP-Ir*7H@X>kJLUY3>2 zhvKh^eChKqvQTOR^o{M|= z0HMxj|b|`WPDdm?%fc323_Y)>INe}Ix<5AfW+Qvi+{MWf$ z_=$=6=q2-q@e^V38{-Hj5=sI7aWd1W{TIwk$@m7nagF^rnPT%4XPHwo_?{9x(PH1I z{6g1Z0&@9iE*cN_-F2AsrXFYazc!A<+~mvwk%@%Vr?!zd$t;m$#c>$ zp}vV#trN;iF$7sz1g2zHTRx^dhij#7zyqQ11Ke{kusoWBZ{IBbNXNWw4>q6tdtZX} z`4Imf=jVf9svJgxoyYE}A2Om8RK_`@JZ8SC&O`EFm|)P^LjD1WO?cbN%mVbB+gmm_ zSeRCiqe-*-&6UINd*m13FNb9QnqjVY9V};38sH+oIla(o*l$=Oi0J(v! zzcX-e$1SAo_Mq`=u4Bdd_VXb2Y<5_PO}RD%Px;L)k&VdfLz#W5S=*4;+n5B1%j1S_1=`Zuvj z+MtNFyuDb)=3yOs zlhh?0FW-6Zaz=uXqr53=xci2d6AjJ%m}!;?wI6m4;!1g?!+9gr#ZR+qkKF=|Ej_lT z4PMnL`5h`2g?Np3aq%H_U#^REqyW1Po5JeIhjb?Sl5D5k)LskmpVx@WGPuRaBpbJ{ zM8K&6tsjOCuid<+$MQm{Rf+>YYvS+MwKXH=(*kjV$}$Ce035LXBVyg@zf#zK=g~zP zBA%Xm2j_vLAM{k!N8l}==oQ+mAfvH5FytTD_pfO8ywSkC)htbMIM_K|njM1y4wBz> zwfANYQ9D$`bM#8hn3=<$yEH2Wb++o3Kh=X#C_@;7&X=$JxNhc1dS)qz>eqQ)5)(Wd zcsPO9UVc$dhWv?ycZ&c#7p~&e+>RJ)$VC_6msa?B@I4@XO?~y8QZ*NT!1rkbb9M#^m+C1XAE|SnvU!0!yy_uC`w531p8XFG_Z+$5?cYhBvp%9z z3O6aiWTj?}6o6CTIkdN<#KW=B{k$2QvP4}~{M&eL%a{LJ6ccKBz))y@`*D(aUUm&! 
zX9gU6CyjAb4Ok=PC*0^Dn7W^qPBYc_caXTpGGVa24L>8`qO9fIPvbPa%grm-3#?OoW;`^rA0j*AM3`@Y2hi|GUtNij zz`yfy`5xA#TM5{{3R7yQMV=Efu3X}mS2<<4%RGk(bcBetv9Q$2``Ngtrnu*m9((Xn zrN!TWop>2Xuv|v}ULd81Y5~OST`=AKx-SqG#YrhSZ1ai}BSNj|12kDRn05%}%ONz_ zv~V+ei`XaZSA;1V>YScy)s-eNT=Yv9b|;qP8~-T43cKZjMj zn3U^SdYo#K4wB)&HwXv5H<3lo2wH$m{9#pPxdoEAt_9HP{LS zHTQ^Zp`+<7Tlubw1R~)vIH<@e;0{{QTl|r4Q)N$*$CoK?T{+P7Hmr5ShN!+;7iCs2 zKNMn|^o4rxb)-U_zug0=@w3B5gx&Uq=@AgG&I=YjO(y$!mgW?q$pL@i3-XUcsKjjW zc2DOR(#WYtJe<%ikC>j#)x&wbUle9Xj0VfaxXlDMQeqMR2ngd^zwbeSzB%E4u%WJB z*g)t#R;WIZ0Sh(S{~6vB+z>mHd5-}pDxIj@ck+giZ9<>sVsHeNLzNQGc1#0CA(~nc z`{+9ON!=A;RG0zR&VABRS6}A@~k~_ zL{-DR-AxLwasktz5p>|Ckcg&Ei#Iqgn%nSRifoILCpafTh>;OTs4V0u-6M9qDcO+u zef~~}Mq3<>CK^lhum=4>oPLs^d^3VvNQ3-lkRvwE(-4o-M)f9q01tM;lqvr~GTYYW z=zRT0d@V?RcCy^+vL(?Npod8CNE#obromY`3+-Y~Ok*6+l62-BY$MvkhpEMWNPhgE zU$V*>#IN2Ym|LMzLEjGi&!_9{51fsUn?0s_eU34}b6Q8siL^3&n7y}#;;zmU`9Sxf zF@$+*R!GfiDb`Vb>!y!Eeo{EA{>7?46RGtI0k0We@LA7d0rHi<+tQg4o*oL|TrhR0 zG>pT3tT^cS%I2k1LM(Bv?vts)M@lJxJ$7rMk<*|a$XO-Ho=^Ajy@k5T9qlabYE7K~ z%VNK$ChihC|L%+B$*hfz@>%VZVuJN|irkm=yhGN+!n!}slB+r2tzg26IDRuG!Nbb+ zwAjWP@}axl(}@S6d|ub@>NeX%{XT~Bd*3)Zx0IDLd~@H5CN)0`iP}t-D;ctST(HV@ z3u4aq7vvUl8g6rMkqSNno3qEFYIefW6_di(1$8cs;kEu>KbhgIXSy%Yh26tDbd#qaQ6&RPtw^tw7>R#`e=8edWBq; zfUSS$j~(3bPjQahuhL_x`i@h~qC*|qN_R+Y=bv@t@K|>iK`f-bHk^x?xP;3N78vAk zy5ZXE6vz->mEb@tuMbGD8FvBnpv}hlP%W{k4`UZ6-!b9+ckb10F%p23Rn-G`quwE{ z34My=1p#{r7U5F*-^KQ1e|=nWwEjvEu7t4d2lNo+!a4xrMN@k)UdvERBl$NVF!$e8 zd&y=E?#%2W!RjBC7|o{#3EepR1M7cOHyiPR zN4XIm#6UE%i{}+jHrhX2Sb?ngQ}dy%J~mHw0D7tiqt9k@_Z7mdXY~*xqlYV;|G}M# z>5cw6b#_eJ*A42gL*v24Fj22$OyVrr&)7p4`@B9Io&^EXx+AH|26yoX=&}7X%$5KC zNx9MotmEwqLq_la!UxLsYGfn*f6Sdzdu7klhLcQe+xEn^HQ~gzZQC{`ww+9D+nCt4 z{r(Pi-p}wq>Z5&MPgV8WYjt;ZmB7nrJa$ExjX>FlmNFAuxS=L)wT;O(SjuO4!a1?t zF)I^dF!knleOhZ}m3o#HQjKRkp2yZhPMfj;|9j1g0qf$wo*elgPn+D)T-0qiOBYFz z>Mmks6s|m<+EK6#G*kRe?OOl#F*eEodLD^sJ)%#T=WA9>ZR1RqlFUj?L?(AHmK{bD zwq<})IUX>I8D@)rAT|ytza2c9&Z0Q76B{W7be&u@x0xB@$z}umgQjuxnO|+km{F>d 
z`okPP8_9!uIGpRM;-*OjGD^nFB1kBdU7}B(u<535#$rYP%xfgh9RuF5rtC>_*1m7zDoybe5HGY-Psq;sq4VPSiC(ecZrYJ~U%X&L?1j|Ta!m#*N|@ZM>P;<4HW zLE?|MkpEWT1sOg}E8SyP20#xqUXXDrZRt{;qBC4AxMGDN+8)VabK0nYUdXYf15n#R zw2@Iiohr+x-Hgo?*|OPLh^!2G-tP;&hFR8_b<)}Ib@vAOhHc9ag?&}AxxYpL}1I`*N|x|f)Qk$FCD7q4kT;%T{es&J`u%MgJnvl8dg7mpT!eW zY(%#&$&IIV$w<4-jddBdVyAsqUF{nuE-5*xXccmVE z^b>4CYb;u^X0+aRQ%AlApR9KwW*wEYA$TOM^IJ=+lN`rrbpQ+e&MVN)5rn{rT#%Y! z)@F<1G$2+c&HlGcym@B>;=YchZ|MT(q0!$zmhCZ&?DSD^$Ei+IDi|b=E2r6a zT50t;-|%q=3ec)lsaS^mLP7*zkw&Rd3vyBHyVdZ1Fbj|<`!jh&!DIQJJWqxxF97f} zcAnXpyIw+g$QAJ10}LqwJpP&-WXv5>fJx3w7ACH8Q~1JtLDDDQm?8z(Hm>0gTtusF zITbj!wT}2HglIL2Ex?IcSD6xU3OVX`&9cYB+MK0hWW)OKFvBKeA98XD!`JmWm&M|5xAX^F3xGT_ar-t z;2^Tna90UZ!BozI(~!`LgT3O~rCf201OF2un2X=s$Yw{U7}LQ+ZZ?Ls%w?5)uynjvUd%E@g{?e+(^j8jQF5c}CjEVH*@Q|CVzh&gXNG*WqN=-4kaGZDEbl)jjDFg{)0rLGlCM!#_#boU$?Rp_Si{VXTA|$!_i?5tU z`Q6r;WZm38Y35SW)aW+(K=*uP&xU@5K37juvSWd6@tsFmpdo*G;(NW5-4ty#^ktYr?<;`kCzgVqbT;-7ua2GVre_JdNn!u zVD2|QdBFTB2x?`+f+omKsXCx`jIR>2D?;Al74+(?Mm%`yktiXxH~A$j!Tv>2{%2{2 z*|sVr4yDFhsx(}BlHwr1Rh5jM3cz9fp`|~^Q0Inr#q&y5TlPRjllOU$)oMC5rb4g_ zp{Z+K-%|ba@`k#>-GBv^Hu2?+ zipsukzWAVYh4>U<*eWGgUsxXk93~+RT_&S0(9^T1K{((CzV-#WY)oC?8*p!mDDE6! 
z{}@t44|xbNh8)Q6ioz@h?_o4>Si`{&62P)pbdsK)7&8Nwf8=-F1Ms2~MO5j!kWshUbB^a1@IOD&Sr2sdc%Pnm>GK!4Hhe>t zK9Xt_I{)2iE!?r#*=ZHp@k>o#{HBd_r{Xly_pAokHTj2wlH$_P8@1%DSGRo(8zP-w2ut+_q_I;~5gUC=8tVec18v-Ts{(?FdZf6nb6 z7Di!F$3mH_?{m)%Gc0Dx;;e&|W-;>t%WnRfTl(kaI*aqNMi_EAKB1Y%aS^=#8h$5_ zCM(1c>pzo27glP+3PyHSbwXy{0$1{X_b&kPV~v%1Jnz14c8}MQdMyi^7STK($B)XS zW;s*q6~3QzEUaqAxP-sYdf{@wX_9@9{@k8DBrOt>qyCJc9lme`#H+@5a@(5YjHR~U zZ^DVgi-U|iOyRFfr8BsMg5}$|6~Ymedx(mf)-ezHAC2seB7MC;;vsCw7Rcc>c}n$p zRx=F%XZ^gTup9IBjn~WT6_Wsc&59tOw2FHWpKt3Ti=ruO1$kMk8WEQ}wuH_#A-U)%>+-aFRJVdv-er5jAtSdz2XLJTTmjM^O z9I?|db1az<|NGadW+Hbr7Yw-wgzqQ#iqSb|;By4tcb*=a5?-?!6m#l!Jg+~+1;HN@ zmZ2HMO6qsYRBllH8V0Eqkl>5nCIf`oYjkceE!EY9krs---hDr+e7l%qTt%3`0deP4 z!ZTv+WDg#J8)(=F==SH;%MxH9#ULb46WhfF|JS5}kd=IWBXseA#2BBr@DGTyUg4Q$ z>jU@Y*jea!Rv}FRVArd3Y}l%xyD!zKrFd}4%fL2Ia7;9?Bmc?s2aCRKsTokcEj43p zm=_Ud{G+mI1^19|d>DpI3pl3$*@HgXOW7KLvw}5iok%?BBG7<)MzdjWOw-8B6%b`J z^u8hd-jz*RqX|iK;oQbeXny134;q5EX7-7_y8UMGggZ@6Sc9?Ttp(ud!5u9MY4RE+S8)dBaM>5tA-pYQ)e zjr$GjcWMz-eH|^9;NAGF*YMy(E`O@5>Oy-lEc-JH29db+h)HdIvZfd5mi6jeXL zD*Qf0XywN-7zMj|0Eb%XHt1M5ZBv&i>#6uoDzDoNv)#-f?s2TRQ8%D|G(u_8MQrjy z+lnoPSxzq{9o1XNGujw~db7k7%-+Oe@c0J6kt*-g2LA_}*JV`&cIdPcj{Hx5X0cnr zdqGg2-OAWo)KSYG8+vhyjfTyaj~~Fhouv69XLyYGa_z&w@LfA|caL;V5BT-c%GU`6qyxeM-M^@r-8b;W4fCQn z3`|V82;cz1_CGU>!OuOaglC+a1IwEI%Fn%k9tb>8+tn!A;n5ZiF^K9is7@h7G3*B0 zjBF&71c`Y>olO7Lk!+09r6%dh0OfhP@R8!X%2DTf_gBcRRr-~KG$>!wo+;#bxSbI4#3 zT_csw;qg>nE515lB>h0!qNhM$<-CRWVn3cggk4{!+^1z(wD40;tEjel!w7ZO_#UV_ z5oh86&(by^aFa!qD32~M6e_x9i2RqGEErK5h!Xvj!KAj5d<>zn^o-k6gSuzRi#5p2 z5dJXvhMo#rFI0}a4R2!p>HeL6c$?-U>yGdT_n=~#!9mX#s}@h)tD#eljIk8-Y^En*!TY!r6!w2%S6l+aGbxR!!e}6 zyRj0mZH3cthhJ6>Ndhx*K;nU-Q3wz&mW$JwWzaIEVAG9?NK1{#0I2hn>KqO_H(>r` zA9>0DjQ4XQ$j9f($0NIaQ5}6-D<^A3j`@A>ZeHyES$1MAZ4Zd@P{tLQE?X_d+6E$>;m;Vk~!u* z8aOC{oAckkSgqUZ*SEFH0BsrbLz=43{%M8?dJ_?i3`!3!6pRcb0k`8;L@%c|j_r9; znolQZVVDcx*c^LvW&MFbGpLbc1@+oa1={lyx|7faMaj+XWS!H^Sn1abB`9#_kCo7# z`5LUow5jpdmzH>&Ky7{P-V9>6`_5~s9o5`mm-~TRWKJ7ab3qeB`i@-;aTjiiYwS6w 
zJtGiY%J+;V9FOWSV?&$r3#CRi95HfLAr)-W1L8;sPrC~8`@KU@B?iYTQYeK4A!eeu z%JAQ>l7eq4$|U)q=YGpe`Z=He<~z6Q9VI|j@)axOtm4+S9!cgm$X^vqPPD1^$$2J zz!iz9+wC*j-K=hD>jmyU7>9`l_1)7!WmMM5-Nld zn<5lbV#4df?vgQc5X~mn^jImLlT?aefg@oULk@_0Q`-nwDUZnx^ELEXIY@W9kQZLL zFD&kx_e3XBt#c(9mflLb<9c4VLlt%-JV| z1C5;X?m!S~EKlv6O0W@QXC`dFcgLVc8K-iQx1ad{5@oG^m^| z;rn@yL=lr3fWxfZA6K#eLmqbC8)hPEBYUbF7)f2#g3yYjUOkn{>;b8{CZ{6{w68&s zD8pH!%*pU@>AtQrmdRB0U5DXq&x4HaYO2LF^)8NpaGlAqZbkB38c7u0IQwL(INO1~ zVR8z07J|7>f}^w#2cdslgM1B^H?A09{u=xpUu#10eGKZeD@t8DGx^tcngQ| z#{LRSzQ9EGtSCyw+lZz=Pg{I{>&8=4xvgnr5zK^$0gECihz$F6-F_>lS~CdX=To2# z;zoURB~?|81K8{a-uJh{_td_BxUo`mbu_>;ctoSp_UfwDisASY#{{Pqz&_f)$YzBk z?vRcNZX+t4&#nRUFw@?pPIqDgXZO=Bv&slnE%c_BiZ$xe|6i#~XB8x#^VsnXrnB;D zu^rgRtIaQ*ns(oNpB~XZ{5QhcPjG}eiZ*~AX(s0>2feF%gUZA*jK;AT^?NCLVV#F&9HiovN zvE|$t0d^g1F$~fAIt!7Q^6`tjEu98N;$nA2A|IQenpOPF(%4HTvtl(n{L{_cvYU=@ z&jT+FofiY?d+<35qO_1*>`mwi&=VUkFUX`oNn-RcDah)n860SDcuo5F$$Dk~ys2Q8 z)d*|N=9kldo0JQ*e$6^XK>Wv5lK9!brg2|34Xe}{Mf1% zj1pz+d!RtfiC0i5J6yu;DVzf)f<=i5joQ2ed z1{K0NV$zb6=KHG%d_RDb@Rn85vh%9m8*&0wS97(x39pP+M1Z;jZD(O)d2* zr+vI8|8i`(_5ph2wF78#8GlB4Eb+p`6cp+rz!PtLopG4oUjc^KzIx$V~dUfGJPuVdI0XPkhzvbPMA&HlFLh={J$m-+piFj3bh zo`mIWcBE{|tjx6Pn2R1qdLW#Q&}p#O;XL!rq0dr8qUM(Ku2O`-#pfMYBJGqml!|?8 zQNHhYhvh!6QE;%QV%p8GZxu-(WLnc>5X*Pm$#$4Rf-L8zFxwQ)Qi7w97j4i)SrAyY zA?xeN{^IASPzLp%#%H-34FXK$J{9)Z0C^|V(RWX1?W3EDn)$YtHAZ((H)2km;8DMV zjwN;w@BL&2FQKDP#e1J9V?#}~xo8ME**Qdf>9YXY@4V$dEHL_t>%afqGK<9t+c>3& zmCMsI#}>`?=lG_9|2q9rLQ|a3AO}+p+(Pfq^`@yAr)N8@gOV$GxZla|ht8RH1L)au zQGsaQ_L%Ad>!KZj){lcxKpQT2cKw(y>SUHcXsX9z_F1~kMv6) z*xhXoxw|pcW>G!A`)E}D1I=QLb}FTDAWN+L*rUXKC(kCU3=$@pc_w6zni~HpeUZ~1 zfuC>w;+*?Vho`G)7o+1lI4@k#QM(zNG&)uY&{MHLhym0Gy>h2H?$uA&aN3Na ztri_ic64*46dmzI#J_`Yn)SPCfY+|#$YuDe0gII4HhZjKv4%jp$@Kev%9c$j^V@5d zLD068&nmMI33f)I3=7EYj^EC1Gq8HJ%e>V2+`rZK&vmA~vNOYqI7_#J=wUW;ot&@7 z`PYVu@i@RdOiRl}9{dqA{p>->5rh;+m`{k#9_+Ia3;kaez!7>HjWDMuJzSh1QSu~5 zV0uF@1JZz)*bR?N0JKS9D#T{N-Msxfwq}tFBbIl!M5W;m^jB7ygin>v}@nLPJPT9LwXcKsWI;T-H30;;rUGY}Ar++?&b3uK~g& 
z#ys25(@l(fm4+8FGs>G50LSi9>8tz$dfzd3o}g)^ne|7wLfeV-dBQKR`)0w(WY|?5 z_zeu41j|>f4(Z^KDEo^^_$onROQ$~2jy_|MDCQ)2M9LSRCSTWH> z**12=%_~()ynw8n?@pwoxINfO%wrK~tw21^Da7_UV(5IzMhBY+(1V-wq;-ZLvrPbR zy*2PvFw<_VUNUrgz%mhMMkPRlq_CDmpd!fe&=Q!26-0QjFZc7K=hvlZcd)pYf}7dj z{!s=1hgnMAJN)6W%3NK+6)v!yk_Q{MM4aNphD(_pyQKVig7f0?Dff3&9EjhrMWr4& za&YK1u9N=gQjBT(iLM6xt3JT4I@tonL1f_VgN@y=7IT#BI5-X zT{XZf=%f0n-jIhn))_cJB`!+2ST8+&nXrF|xNz*Y=Zoc+9@^IFrze{o-lcx8!$Y~K5e)hx3QYMx${i% zFxWlL-DzLqO9wVJWnX#U-w=%@npvt}7=C_xi4{}^rcT+A!?>>Zecu7#rvI6tzkwM8 zOnl2bSUdl|z|GC87^;fO>mlTqxt4W(vy^ukihC`ne8Pl%{okHOM!{0Z{}!+caMXlHxmd%Uy$7p66!^-bejz)Oi=O5^d(u5>dK#hrMdfsS7_R8t8O&It03lvp$XOF z*nS*Wb>jBh_F&|aKl*cF#RAC;RklG)s*5IX^wkReL-ZpB ziwF{CCw*_mZy-Ikv9)zre_C4R*J{e~%l|p73M!2X|E?=r;F*UwrhI=n?CM9|;zM@wcaV5;%Qp;z^uX4O zZSK08U>X1i4)5(8wOnb-iwN3<{oQq=)3zxc|L%`5_)bCPnb7r2sZSh18(N>dgzi-# zZFu{TomtB#_usC5}*5Qe4x-@sUL<< z;Y92&Nm4%o`*z`^?qe*-jctccpUARF{MRG1gw^EnbolMQ7@!A+x{eP?nXhZIlU_`^ z>b)jl_Wrms_4@HMK}z6un_%rMs7Fp+fjYZB;MpUFYtq8<9VA6n${1ssbJL!976N4x zpl9SjfNbFdM*KX%87xn7q>tSs>?d5q{mQm*8=3s9%GBE&&@GtM*qA8TN;2!)2Zutq zAaRb7({2~L(|CQACffIRok=m}NP;RB6kFnhlG6Pt4<2-|aLP^qXJ^gatvAFo$g7yS z1-=hfpiKx3`i%|8Y1V8>)-@vs+H_XW0t_b`z1Jdcghs6Y zRexe>sRzfOGbE>bAsl?Y;NF=kflYA}jpL{h=DB+NjTI z6-Sf|QG@wTfqZJcUD{6C1!-n;eVxZI-GZWe16}EXFfZEHYKa7}%d3NhLdQ|r?&}{K zp)Xwf+S8E4jE1A{CaXLM@8^Fd6EV^LTt`4+F#Jf0k3uf*Qc9ArwTb>BIV95@C?+r< zM*p5aOdmUh{+*RvrgVeXl3Xc7VK46gvTli*1PYj%Bb17@5<~m*el$*emLy_X?fXOf z5$L)?W~Uu>kE`hECbsvKeEWwbUwH?Vm!Pd?{vrRb7GXw+H{Vq=9r@*MgLRzs0KvCW(=n1o!iYj#rb}6Ye$bP{TZ1dBV)gjClUOpOWOV zzT&rRQ#rZ^>x;04f#xFQuzTfnfZCczxB!jY4Gz`bblx-h6N;a{uYoSP4oK^2%LEQA zB*>7Yyz$|`WAyJjiiz|~6E`uKy5s|~B|W4LtjUW=ce%wJ7Nj$ViFn*}hG}tgR~lPR4JL^Boq7_R)wTo3aMt zhr8*?Vp6GqAfp5ho4;|lI5G82$9o!Why5i{D|gA|7{*!;UPJB;_x(PVrtuar6j!Te z2J3>3^XuO`vo02c%2>w=$7Fpy6 z0ExV>}a*q!9{QIIW`TEz(JWn!H z9&ioMKa?(FnN4LIG2bYj6UJsq0Cu(Nf7K&>C7g>cac__rVdW8)416`@)}3Ne?o7h% z0=FB)+5U7J`$;UI2u6b!OCKWQ`Y z_dB00b7gDJ1&86I_mBetjwZ8m;jol$nk#F1kTmXW9c3-OO#LvsZLnKbyy)kjB)^N! 
z!L;a}7uM@{jQSQ8wpqBQUjRQ_%x2?A&!pvXfU+nUD2Wi4p5u|l$3VEooS1g)czWV3 zkPp*#Nths__=G-qMsK(abELDw&$kPWrnrv%dN4cs-Us-&oy5-C8H`^bK#SvLl>miYzQx-x?!}R{lP|5 zx4_tHK37j>z9ZptXk&C{=2k0&RQd7K;t-!^xoCRh`6A1|eq{=kw9E^io73G2KY@LjB{VvPm z9>$LrJvU$>Ju`+&{Wdx={;FI8{1Xq(QmHCPVKjRz?H8ND@aJY58~KLt_Y_poh)$`> zeU(r?eGj#}iw?t6!~sb|SRjlW8{^;fHHVGNJyfs+KJ4#)9B-C*s_ra)bXPfv+NUgy zp0-@qCG}@($7+3fuVm%+&qfTM$eH4qK>LvvOPZ3S^v%8LXuTVP-ml1E`mtj5cs~IC z$*_3$X%s`9-@v;s4RO=#@ANZ3-1LE5Azqx2giOjCf5dNU7~VuII9&gcx} zf(TJ<>!BZw^cHR){@$0su@Je)Q-|MhzJgE+XrV95z2V!TqSY>Su2KG|>{UB};YQ(` z(iqU2Dey+%D}Z1L)z|@Gh7qO@3W|+t1Q!QS*PUB8EGZ z)q&$Q`Lfr47EkU%#>m~Q^%b2(w@>HB+B$bMzIN$A7Jf=7jyqw!_;d;ZaI(ki1bJha z%k;aT22yx1PVmco+QQ3XXq-3TL&FkRHEQUP`j9kuvPzTCgB5wbG+2=L-mUQ_VqhHL z+^?L*sA&Q5G9lO^yp&%z2|oR=pq!T5@laBhiq0Y>e0&ZPwsG%#W!5gaLkvv3LSq%* zI+fy|AV}`m(qP$v5t9xfog`4O^gjSStM=KxOW4s6v&odfeF{zIu8pBfgk<=-gRUN; zN`_gzi{Rr`_9e}}sk`<2$;f0>{i6XVdIT!=gQ!jKmk5z$?f?$2@8$xAFY~%!SV@#d z)#c*p@vj=hXPVuZetY)UQX{(qn|AUM=o{Y%dC<{bv5>gkc57eJ_3Do92J7PhW-8_t z07nt-G@^g_Lpe~u%70_Uo#+D_=Fp@awL#W={$}UpxAe1Fm9{$}nJp{B5@l`u!-_={ zwTcVMf87a9`a=(8CvztO96j(le62>uA9ly7l{|{%m48NZyQo?yl_~67ml0SL#RXN@ zK|c;ZnTwx#ON~W-Sq&b#6vN1inUwr|iu|3uQ9cUb$p2ZV6_fmd^dP%#pw@k8nj5?K z7`c^|t&M?RXdZd?o63VFru2axqAq;Jv)D7yG2+MY$P!YNNqxax&tGXRZy`Xu_^r&z zt5;AP_p{Feo(7}HZ|#yfkciS-hOe`E_ciwr+O$F#7tBqaa1s%6d4OZN4KIVkPug@+Wr6w!Mdp;Iyb1Fbb`+XrmgL-FZT%WHTtOvX%R)hH5|bpE58!kt^QS&7MTA9|dj0))CdH3LA;bMCQ#F$j ztQez&Jd5?ZPcPkihC)N_yNNV6lnI<|0fj~!La7kQXas)X)-lov;H+7X#1j8dqRTg% z8j6ak0As#y>%-Kn=YF&Ot05}5tL5{TRJtZvt{om0N7P}q7Cayp?5fv@?_xgL!ExKQ z@@Fx?KcyQ)?OaM{H9kVL3lh!fy~gcANfON<+9gYU#N!_0%F&xuuUZ~Li~<~<%6Vsf zD*UY1`^yPos+wR!kA=Mc?B8`ESdFD&Ec-5ATV{bK2J(EUEqcY!?3%F(Mfl#1B9X1b zN%-Qe$P`){++bx?qihqxsGh6*u%rxD#)^MOMCc15e4iHtP^t*!_mX=`QmVPOkJ!_O z+|&vT`M?DI3TPbC`G@uE0DrVTT znOx-DbqmO?*pokwEb`L{qk_p&lCqtB!}ZJnJ^yP1zel8MXe>B|m=c%JjhK=A{@_0~ z&EeIPp#H)Ud!fN5Ua3ZXA>W;JG-(?SPc_Pxm8AN8yN2b>iFAon0+PP6~I> z6QQak;z}I{cDhWT8aO0Tl(BHMD*Z8;$Y(2y>|%Z{u)(rLE(~D|&@(8wJ|)`0o3zoP 
z3-O*AR)AEw)0GtlU4ew*uu_Mmlo4}!9vU7^Jiv)Zd~7r|t!!|z$C+h*GCzGU2?nw; z8}prqPk65T=AA-?@c?Py(O^M}p8BkzD=iy`C@H{A(e6BG7OKLW8i~xGx)3$KgwK)E z?3wpA=Xie0*I3Q0xcPHvza5}wJAz?@;xV{+>8QhuS+x7~(=CJ#XYV~oo!Ug`UI8OZ;h z5j{?Y;8D>5Oix5=lJO#t%~#502+6urBiLJ?&p-XW+sVXg10tl5k~hxvOBQ{C4O!!l z-#>`gx}Bi1+`I3*6vEQB4-hZqUco77N#(j5;*?K@c9`Rh@{V^fKYZ?SK8CeCvsLh` z?s=V-;=JPR(!lBfn*XL^F<7Gc>r#b}iYp-QZ#+Avaw{QOTk(?Vx*83o_n z@Ry;v*oer&pI24xToXkDMnzcprMK3R1ox9#jslFG0UAjS~0r0RbP2<9^LI}_s zwm~;Meh|X1XGr<1lA4+;c+BIMmR@=sq>W*x0(=hbZ)bJCj*Hb4Yq)i){;o#R(DDH| ztW;3YqWe6I#UJp|=Q1NXJ&Dd(>r|3JHxyU@b$M7kQ;h84z7vdeR zp@-wC#w{!SWw09Q0t?_^m5jvUS6SI_vy+ESTi_Xn%?Z?*8GQ)pAk>8=HEtbe1ufZ& zaL(%{=!5j2W{N6kdTY(ewK~XnUX=9jJ$a#huTM|bRxB&9j6mo2FWE^IMCSpW#dQl!IB-oxypF>@@Pd z0pfLoFj2Usuh`B_vZ#0#$oyE0S5g&qd>zw8a^>fOJq+A9-J5-r zI6N%H7Ezbf%H^!tH}IwT&50jdAW=5*tGP>l9!v`M(JpI31oMZ}X;l$^gYt#UuruvV zV&@2Ql{#Qoy8^Y+t+PhIOXc*-4Jk6SJYaQncLw<9xl&>Jr$u`ROLn1nkm_61>umF$ zxf-Cht^gcWc&`_O2?xO?c#IAJ{`Nqvt>W2|$ z261e^Da(Q&fTIE&TNdWk4(eCRyc5)EY1Qgp)9^i(JTtw+Z*fFBdq)m z1O`2jZ{t1A*vu?xV!@UNp9Kxld=rS|w#!JPaV#fjJ7a&l>^?vbPX%K$1)+|oIsL$` zx@biqA*^X#Z&kqxhF1r4BOGZy!Ts;+=>sXmn}=xR?pjE#Y;Et*9UKB@A&CsLCj!NX5?Uj0nqcmHq?K7zE}5N@2JBjip(>2G;I&f8AW?h({;on=uMO)p`4xHFdGly2wqSU_wb@5q?6Q+)5&p_l7pA>+Tsqd0Gv+SB21 zC`7D@C>S2MP4+T&=*IWT?3@h6s0-vgIbd5Z#m z1o)fZMB6C06lQt8FG3&2q@_ERnG{y z$pKudGnl~nGIpb%+0^1=#V*|Vmm+MAFgECpWGuO1r!AWw=y(67ltF}8mNZtl1xNUL zsRth}CEqFb9a1q$JT$uWOpoT@Ho2AK6{pbpD#h$mfHL(cDd7r2qS zyCBprJwX4khKNMO^{yty%}SySQ3s}u(G1217Zjwi^5O{_+6}Mab;r3-X8)?3DXAc$ zmI#BB$vp%t_6`E{lvjkOI@#;mDx_#_N~(iWgz05Ij zNscxlRnxDTEp*cF=D6!>RL=p&;Os@ueBW#Huyo!@J|>;e)Q959C~@K#_(F}#lGtTP zg@rg#pnSa$_W?E>=0Y+b)Q`VZ|J6yS1m7ocRFT;E(lZgiuh&ff0Pu5gPNu!ay&+1> zJWnBf!xq!otTRjoUm`1cN40b4FhR|LfJ1)bok*iIq56~csMy*Zw1VP_MW1@^-#;MT zaUAI1>u!Fo<5mS}rFz%}376y-2p&UW7&V)nGzqH|Ei_Zh@uJo)AHQyn_l7W-n5ABk zVD~Kv4_-F(au8Lbp!SSraJx8wf9O)VP5+|3dJ%8jqN=-t%FG|tLH|wz+w%ik(-Hb2 z-hdlLkmW$}1u}p$hH^eKgi65F&mi3`auo%7M)f`T-&i(!B6&EX;Ad${4(rjK 
zX+xzv{t2t?PS}B62ckJefFt-dFYoQl^>W~KjvXp==38^y+5_|$I~V_|Dok5+OFWM) zK==RYhe8AL}gq@I2K^ z0?>1fn;g`dqeoT4teS*49V)Si8NE>-XQ@b9zgIGm@NT(2RrmAsu`y1C{w~wjG6l?o zx%JQ0tOb0>6U7#o|5xMpJ)rzI0jdH}4-pIHJZRnNkGvjE2CavkiA2OsuSQ=uSTpFCVo!+n&jDs!tFzT z{eV*H0`7J)Tp`6imll1Kz^FcP&$_ON!MY`IUJzhOO_z0*CQ=LNABB{%Rv2IJcrVV( zk%iwCdj-X=-mUT&M8e9bf8yBD3obQ0%>E%cDuJRXe&ogzQ1Uy6EH!Wlf8Pfopm~ug z`uzVLJob#!AnuVPNc)6~_H5-|PBp~j)(316RQx%KbWsq7 z4IDbjIWxLkpOqg8R~~yPEIeGlamw>AkAt13Ba7>>DO90anQ@%~9BKAMD2XPjW2IL$ z_Qu1?#2@n#Z≫>}q2fo|C`3a^Ec%A%4Pv*pjN!F>ejwn#k^eh|72m?cDFcezKtX zSQ`U4AE`ZmcMneOjJ+nhhKym&sft>E= zViW@7fls48S(SX-MuNMY7D!^wLhmQ%*!MpFW-kNtwiB~!f(a(Fh7Bf<+$RLJ*Rs5q zZ%foUus267oKFoYniF>T@846}e(&>@3mgt)<1Y!+#r0?+9a<1%wg5d8s?`i8)z?O1 zKR+jMFMZD*$Q^pMmK4Vye{M?iY;+@$7(WoV@v5VOd*&XL_DMR!3aRJUsvc9&bSE!Q zgHY&B0XRaoSLv>vr1Jg-l4Zb)|1~-~i3ysC4eu+_LF5+SLRU|uelIXvG#vdh$1e%% z)Ru!*OuHa#*{XAgXxGfP!Fz!R#A^zKVGUAX%JRze^9O7i)Q$k!5-y?ZHR~-rk3->G zRjp>mm>wh|j{gwyY!9gK>_tGRnWXN-a1>+a^I|)jt^N1?l=u}aJnB|hI}(p_I&#eP zRwvl1)sI=3z<*ey0WN#8%a_*XsVuDK#!3|@$TnK~yLX*1Py$>h(rI?WhAH4kE#LdJ zlK(Ko#cKt~1Hj9!QyCa6oh+RTJsaNkEKZ;H>ik6^kJ*Y$qtR)SI)BB)T>U+lnK!bp z>-l>x;WXBr$R#Zn@ddCe@!ZV8HtwECe{-B#m=t$OpZIl&Nn3n#Q1oXmIa+nwM=vr+ z!%#hXe(~nV$yUu$mRpVXuhv+CTro4%KOEsdzrPc5)kc?f{>vJ-7LpSFsi~F@e^F;$ z=G+L8nc!Hq{7`}qgW2H;V*%7y_Ntk%;A~sES2E&^xEb;Sexz{=DR(3s28h=~rI|dU z)`#4L5-)hO&xdUj4-1F;Z2EE66E8Gcu0j$ensh!Hnp%V?YpyBI+SY-QeEC=?jkbLo zSswgUl4$k!ckk*xI;%h$WO+qW?pp?0`&M^$qvdXL%eoQIvVvo}T`kEWrNWR!Uzq11Q&VU?VMt^qmKJYgfDo?ZH-c1Te5M^MyPgb?hZ zg7Ia~$3K4)lFm>el}GB*G5VbXYqhSc9j_q_YplVc2|@wpTnm6*985pdJjfBN^p9HMZ`pG9{zm7o&*8}xDQd3SdVf??0OB=&7=fIb|2^u6IR1#qY|&$nQi z#;hK}@^ff53A0ciO$o@q*KzPx&I#7!OJYdqIt-QQcD<p<_gbsf645u$B*MDmSN|(YeJH!W_})hvnvWM= zCpHv81)GUCJ7ihU4YFU{{<(wcA&r?m2jBx0br8vjFky~0pXI*OZwMKW4 z4mKyOBI|l4;Lf_GFQ10#uzB>wb{je|KxmZ=GwXKaGIENd0j)4bL-x1 zT!#H!e8pRZ##ghQ@mU(5`Lw2lu{c$x#qnWGmY6*bS5#(Sxsxy4hv)US9Wn?79Dm7Q zmpcG({@(^67e0C_)A-Fxx?Zl#9FTN>x2=}ZSJ0`hb5nUY#L0dR{t@HAr1*W+de4tG 
z-r)BMG*S_=%<^JrN*>f1AK0rI!24&Me@i8AW#C|{IZD4j;(4zC`P>}-s0cH|(4FTE z9Kq(wd2O-P>1T1DrSEWKeLvP;_SAq)TN*8%Akj$@#-l7vfH6-4%e8alhku=uXlv9L z2>TY#e(JjDJy<87C&nJws=Sm*qv(~QaPa%{dra4k*^g)c;$y@#AX3D0sqdiwq}X!@ z;Be;|y?90$b~eE1E#eWcGLoHc6Gd2eldIWlqe6c@2!^#nJV|xfxvGmGZ|sN@Ywf?b zsI2MT{v235!!GvN(Ao#+@{xd;IoTjSw4oI^gMBw$46UBPvM6GZ$g)2mTs_slc|o^} zoN9BPTb2tCseT6&Krbil4GE<0l_>wCCA&&(RS4jtsfL;|Kw@vV&|mg1aRaD^t3wXs zuW<>2gSOS3AJ#bh`G220YF#4)V3F%Y|9nVUrD{F@S>)M*bWUSjfS%QdeXr}lRoTqO z6ZVMV3let8vbMc0_?;h}LrEZgy@(TWQ35CJri7}6;!huuF3N-~snJO4H-SJrNuMU^ z)y9S&L{|5{^NktuDO0@ooN^3|2s^BrE;4W^&mM`i8{HgCf6Ya7x!y~djvqk!OlFYRgunXE$d);2W z_Gq*h& z5Tyq8xa5;g6rUfNH7Eyrnx)-VV<2kI>1f$F_xP=!Q0`o>f` zS_r^LKoLKj@C|onVa6FXh2gg9hKzUN#YM-K;CBfm>$Z4PJYm>)l9fh~&=I z^zh?b5b;k9@$lv`k!G@nBtX1`l!MXSy?u6!G|XUH2*b<~e>=9b&jkt=jjVw%A6Q=q zqK1dvcuCaB6HcOlPxP7zDtp>e<00K~K&27bWZxTL0Ql^h2(K@09Yn?m0a4yon&g@W zAgS&xc!*6!p_Z6MEQnN2zDGz54Gp=k=|S8C~7sqZ#CXQlj6m%C%?}QT?Oz*I-4T9BESAl@)#5u@(ZS+CXL9bXjmlU zvRvVQ90iXAh1=EFD@N-27@@jhBFaNJq_m+o`|>Fq*wZ;M`CDo69sgzYqJDPnkpZJR0Tm_GYFR$Xf|zyic8 z=P3}I`HwnGHv-0fKvO7SgQq1x`MnpNbQ0WE5kZ84Ep9)R3hli(f&t3yp+!&Otk|U zmCcLaXQIvzka_U_aJO}#@O4(4U$!c;Q}dIpu?0V59Sk_{qKW!&T)#&ZaOJ+_qdZPB z^!m&2YJcYgyen%s!9*-ebMQJT7VQq``g|;{P*^N^vmS-H*>X9i!k)hREKFQk8I))_ z&>@%&Z@g=i@P-a)*$42UNh}!pAby+the8H5thg`E_!hIg%XoMmH*7Da zF9#wVO#KM*%UMK;7`=wrFmjP^=a-V<<6DMF?`GoK{P7+(X8-HxwQO z-1rvOtSVz~WPRb-0$T_GpPo+I#IFiD+>AL11%6dt8=)=kt>Yg@tqXtr1`YyR2RfY` zB47EGG+DEMY(s&1&s4tlgzTJ#H{gBE1a&h+UswTf28Uyv)yrSQJST1fCYa99Z+UR! 
zbVZ|<6lECjZ7)-w`p^?H(mb~i|F156I<{RnUO)PHR)QANXUd`UOuSV1< zauXyN?}HZ8Y*FX9gn*>OksLlfU?yGmY8x;5lhR8@!w6E{5J2?Z>OscHhzt=4kE{M%IwaB;b zW{hLu?$cZOofo@KRvqJp4f=Y42#=1so752DOXvKTrElQB^B}^VNzpa9S4chh=59{u zGo=xs0f8#1Y@~*|j3T z?lrWT7Bl*Dlac}pA!F|7yuzmez}d3VqRPu`TIix1CMX%t9?Vt?jSY!Iv zSntpoY~Dr|9W!!8=!b^@I+`34+pm^aeEatwVt-PhfDyj)TTWjzQ;djOoUt}q1}Od1 ziam2A3BP)D@y-!SzCI8e*p4_z^ir$LMV84Ia@-K;6eufd9;iJ-pQke!CUt>*P~QQ3 z?9aahY!9XXc*nS%M-lK`^!2$C(-2El1kyyv=n}?WQ_AcjCY9A)!V#NtV*={2V& zM1eV4X(RZ1K%Q>!`MRI*z305FsK;|~FuXRd)&X$#oD)7tHcCz6N(?OC5+l3MIPQ(Y zwgZQ+Zb-DmZ@T1}Z#SkJE!*;k16gU&5_8kq%>UdS_eV>dK>9b!O1ytiqnXHD3X~epMM6tX3!~-J_dG2 z&XHEbNN5=VaLS40rapknX}HzVtnI|!dTUGji1l#ocM=OEXkOoVE`#^rzYIYFQ82t$ z%rEa)=eyd5OeB`u1US`b@+)ry@$3LT7wr2rFJa5#Ovnck^-CO0?WiO%cv(Uu9r-oY zCrZMSK#?9Z?f(sjF*!;4#m~ZcE6c89>@>edN$26ww1CixF$uuQICY;;^Nk?#fr`iW z{wLZN`F`rt+lOuYBZR0O7Oe1Ynhh@{t)>deUTGaKI&4RQ4jmUV%JWbn)RiF%7Df;> z-$J`Nf6DpE*A=9P1aDvA<_TJ{^-B=52x#G zcz*qmOkn#LW^XHC7_&&xb_XWzbiFVv`9)tjREZpFnRIU%OM(_acUe%p$z`@cDDU(v zy#S6peObZWK05&511etr-%T~O{@H~C%~?cdKkH~L^P*ZnBLm7ve?*UGYV#U%dg(G9M5 z*!DhC=bngy@12ady71sc+BNibXG|>2namwmxH<@K5c#8)u!4^G1R(AtZZ>J1UhsIQ zKU=K4)_92f%yagn`JEJ@V&`NoH)oxq1$d*f(l|u-e!SHnmVhnC#JYYfS{=Qiak(%&}cfUZfsRmrpv4RX@ z-<(!tz3$&<`d$REMOd~c;Zd-eYD!iVX{x{dIGwYwtaQMAL1DQZaqTaMMA^BHtEZFP`_m9E2}L49Oa8Bmt&MdmK6Y^6tag z&l0eO(TG>ZY>!ic-|vkGsyjt~k}fLK2CRnLz@xh>^a#O2FZ9GKDY+D;-YycHc-tJh z$xt^F>A#vKaJXEJQnn63Hm^L??-zkH{_wQl>kYB)-HDyw8^Cvv+~$*z*vm6(zO+K= z9Mr%o?sECNU~l*XF49D|`U4pTWIi48cAJ&i%>5ds%ot4Bm7bfl0Z`v{E@3>}SM~ix zQhbL7NCI)0Q`NiP5eWKyuJ+wyideJ1A1d+@Nui2Xi}a6@eWLZ|9O(6VUTlY@6*m2Y z$A(fTQbB0+AY5RMd>EpfdhQbeqv+{PV|6dpbtNc%i!$)cg9Yb0PEi8940z_ zOkh`WFtdccTg0&9eWaVm6MVl4NsF}HGWXkOeIAbvQh?nQ`Q*O1v7v-K|=mn^2`{*Y>1A0& ze&xp6Gh@rhDcvRn21=z&YS}q|@2j)NInIFc@4ly?V-(#wKEQ-;ZoxXstlyy?oShU- zBsJ699_?)~RIjE#Jem4~Wt}P?gqWv(3C$9c>?`M^xk7aVoE865qnp!s7Jm!;mQ=Kr zE#P%Tg9b}f@hXjHG*(x_7fdZI@Rm}TC6kP3vA-7$QzvCN-2dNCPrBf%6CEpiO}_67 z5Cii^WV5;Wr$v0AFKT|ka4?yXg5W!~_PDJwe18wcQE3NiEx?3S{MldK#9gJ6t?i=4 
zMJvU3H?pIKHTK~t^1Y88Rf+=jH0(_^DX;;)3OCLaITQn=$iwPqA}=^mT*%i6dbgVR zU`e^CpjqL);9M50W)cI*cvzvGDi?8hwZFPK2$(;m=SmKRKfelVvf1M230*%4ceH23 zxYx47GdxcD>|b?^t;a)Gc;@sw4>F>id&Ro#9QNFTL|)=nH`lMR;Bon}0RB|@zTG|r zD*Zl~No1SNVjzvW_P@-E7E0}nW*~eJO#X4AmuU#wP<3Viu_KiMIdatuqTzz~W?ufP z0C5`_3T5!UURCJ!qCxv)U{O?fZt&I;t}URT^^ec?nh^aBVJVIc%dKO;cg6b ziqu38r>>DQ7=D6-xFfv$&rJtHew7%&=TBYp;EHE~z%f2b@lrau$aGQw!y0+Gl5rB^ zw6J0y6|e7W2~nUVXWM;ZNB!w!LRD0|w4T`BRn}qM>wNFh4Gc$N<9)frEEiufGbDGhz$nX8cEuBY zRAN@HX5Q*dZym6%BM7Au07qJWE`OZ<(fH-J>qB3uEGpZxw6Z{sMT@2qUH zFT~KOrSkICxwj|zLpeGxJ#jF!=P}*tC@`ie#=Hf9uJ~S;-!gl9Sx_7VX&@I?pg z4A%$KdIZh4-X71~0grEi6ctuH?wb4%T!|hTqkb(yJ#xgZ_PK{RnwNhFVc?W^8D?cO z>Qp}CQvy+Uzw`Og(n;RWO=X||=F^L*xDUJ!ilo~1w*NXxviF9vFHrH z!cF)?kcyHF{=U7W;CC6kirBa}vr*%}MF1Zhh!R#~_E-n{py1Cz+!j^#45nj(cI|OV91c3ZhQ5 zB$fAExNEScvO^p5@?iAEjwrfIm267zFJ;NbRBWQ4Yd$gD*tHMR?VSzV2Rhl1;ahOK z4Bl)&ypEy?=t42@bo?NP>m3FVmf6^w2K6+M&)dL|KIf>plDZ*u9d|`9uGY=&`W@jp zNjxPHRpDbKr_Es2G~Df$+s6So5r*Ke^`b^UjfC$;H=I-td|@6Hu6`a(Mvbzj&*KtUfV8TjGE$NakgeV$^v zX~)lL90I%XFon+3q;CmF82TLbnHPwX78`Vw=iY`{rgP_Z%KKtN<85v(VCfw;Z{$y&glS8tnNI464EuU z|KFBB+jpOwqO0m8kgId9QoLSnx_0@2vA=2qdqOv|ygm35nM*Z9J6_r+*7f*}??uQK z7{^@w3|vpdprlcJ=b)LpM^BGw!T>%KEB1+W*DH6_U4cX-uEL-*P{lh1`F*(fR-7gx zw1UpzB?M&!!z^ym`~&kV&mT;I-D{C+iM7b`EhO8|n0Z~tf-AQ7CA?7MxQ?R6 z+okVuNFhc8sL0VCYti@jP8^>mDyx&eiokfMDp0(RQ?Mv&CmH9FnAJ@$=O(Llzdb|p z_9Q}|YDVfu+F`+f!As6p$rb3eK6HS(-syYuwktr_|J#7N{B<^~;8G@9om!fT=XQZv z)zvgvl|(R#m)2Op(c*-CHr9?)Z`=m!SEAtD)3M?dZ=$Icqa5_5nuEc5-?#vYM zjGRe{TN>{sb9%Di5Zs^WEg?f(3(@SvHV*(FSW~gAUz@gQ6ns=A^Qal;R%Lsmran3C zXT=mpl{>!^{_&NcVD%GRr#2yi;k5q>He%eg(bs$>MfAdoC5?3PB`6Uom626 z`1XK4QS)|jt3{W^_Z)27Q#%UEOva-_^6m=_6f{g2FLQ3HPb*4S{%fY3J6yB_lUd+j zhm+*!cmCKT+uqmv8raGQ!C$lJePGN!$V4mX^roeb-{piUPP})p+0Y5kFHNrxtjaba z{|0%TY~)5Im7IjUXr(vP6R#*^0lM~Q{z;fe70otV@sf8NynX7PHfpt=n>f&6Yh1gx z%#NJ@C4OCU8<-?+Xh7O9Fxrlp1x7BVk`9=)5Z<{F>6K;y=t9|*)RjvUF65c?lT#VQ zNt3-1qhs01`AIfx4^hnH*&aH%*P-|qi&Fwy$uJ?y{?37(ylnM1a=VJHVAelF%JlE^ 
zrn0V2!h=`J%tg(S!4-&?(=C`o_AkoTLNs;rbw!JdjD@)k@o%JC|^Pn76JT>qjos2I{UdRW`5 zvM`m8r|%WMHnX=~I`#zFxRRmxq$F~>mzozVto^u$P*;~@&;{^ej(ceDE$i7Pwc9O* zed053rJ54nh)src+gKRV)=p`Q6PEs+fh+2kFC~4Zc?8Y%YqEuUNHaETf+7zaI}hl7 z8h|5T==-!K7XKJo#@7v%EDmVulRFZK$_UEY2n! z^F{I`IHxc_D_Ejpt`Ttp0LNEO;7FK#yer)2b`^dfVN}2 z!nZ$IYPCPV|1Sp6L?w>J$Eb!=BB&9Un-nX~`q|Q=G&)((0g&N(h^{t*6;ageI9-@D+47HIQ z<>HHMcIaJ3ku!EI46H$gVJi}&_-|R%IaHa`QkQtCWchXBx)xqEU5>D9k zk$;vZ&vg3TH86hq{B7wr5H`#LO4@oOi5>2&A(ej@$%LD=YNV!}GPYgg8@~AB$8O}4FWkqN(I@VYX5jE6m^EN#; zlIG`nJ|z7MO1#lOOqc)uK-)T1UhTW2Hl}RZ1T<>qHCP)7J$!j1LvknqOMnDyQWB%rO#QnBse!(bf^oI+T0E>kp?@^Ut^89{pv}e6l3G9 zJr~+EMqtNKYn@w`ssv(0a3as|x_$uM|IHw)5**J9muaK#avO~hPF!8Uv@^c{J5b^u zj>*%Hcq;=I=wAB}^ul@x{heuxLWndV4&MjVq4R#PYm@7_B@!fPK-~X{$tVU6CN$)m z`y}0JSB0Z_Dbxaq@EXB@6nPhW@+CP1{Ok3AJDokENIHxPVNKZ@wd%k9Dj}Q1AVW7N z2F8H{IAYff5^umK)=$b$ho*xNWVgxz^!KJd$)QE72OkAdq2TxB$f()*1!yNs08=7! zRz6W%iT8?dbLH+8K9}Q2UbB9O zYwhMC2JyjCX(TCi&mULWA*dNSHFlq6H*mmzkZ$b^JmYGTIM?`f$HUHW-9xg!%Upmg zH%CAnPB&+n;F4EnSxD8M1Pk@}iMxLgoC=;V-1RN zJWT1&(H-hCnKijT0HKrlv+M<2l?g^e44byTQHlr#8&dX+6}_JS0Z|hWcThWw`qfLP z<90Es#NCjs3;6u9RJU1w(5LRU8VX6R^qmecQva-7{#2jT&wWWW6^%yBVk?QO<#CCX zoh~NjLlgj?rFhHhrF*o*@ZpY6?=#-w3B8!A3s9b@}pPVTh zIJpUE^U%lq{udk+6+SB9$4{?ZSpXbqc1!CoXg#;Yzx&Fc3NSOIZ|v~DWm2lq%B#;m zg;?3%96E}995CdH+%`OShR_tJf&%X-W4Ol>?3i39jAIYN0XSSmIBaf|=#5d<43-F~ zU9iy`(~RDc?#LTOogOVxUdX`DYx|-diK^BcQ}?U+i3BUPn-~c zLL~GM)ZtII>OB4Brk*GRQ(=u{t@|6gSA>ZetP+6J*5UGVRLO6MlR@UW1MJD4GJX1% zJ*kt5F4cM_uP3w3C8o4>1^*MMV)y1rt8Sb7GKR8Mn1Ds>;GC*>M_~o^JFlA?jTPHm z#=#1DMZM)iDv1&5wV-kv_>j~5(6oE^A~=Ea$M(w;@vX;ujnkHGSsRc8QF;cJ^AdJT z=jLh{o{e87z~}K5wm)WtH8imZAT`U>=Y_$&ss@nAK zb1Jd)v*4^NQn5*0N}_EN+!l6PWccd@GZ2dvUCGn7urrqdb*}u|Cp}lh8;d5*=*>HS zSOfU1;V0VATt|wskC|YRTfN`n_9?5uYFty=;U%aousZ*QsR+tdZB}m{mPPuRMQqWS zRNRRw*|mDt%oAggZANMs2*AlJZVaCp;CP+bGBH@m{8zT!7j-oq0Wlbpmlg$VPjgmN zBaQ32>AGbv?@ntLsTJ%>);k@nI*rqxg!q?tnGIbMfa5u0T6|9WG@M;BLuXGn7uG8Q z_ZZ3ivaFIonzhVe5cncEJH4Fp5A(P&c zWgPR6Le0t{&TqfqSPZbdFV~wjC%}U!E6F(ZfaMAmma+-AWaBGCn>qx 
z*dRZHlju`z+8#0Q~Atyakz+gD!NR_5x9%P*G%GfUcdS3WHBJeuTus z_@60iXcf5C41$Q77yn(9CFW?8-b;EUT0n=R#lQNz9K3Cd*z1Sj;vGT`UX16-5d__k zJ58Mez!?=QB4AfmYS-9>?<4!nD?5!RBo16Fsx$@Tx*g+mX8HxhhrWXfN&rgpmOHYi z7dvu}a_{SUD|r$@!3S+7Ag&IW&p1Rhh1cC`oVL=6Owy2ob8Ps-th54=EbLp6vY5O5*&~ObPzo&;5H1Gxi+H| z&a$xK%h;>#odPC6?X(O{t|JrsX|8F{(OW+h8h(2>70&8gYs$QEnN;R6hW+im8wK#e zuE(N%UN%gb`P)2jA(m%bz%k7ewxux=yh6tE7Kgub=o;wpRIYeyKfF??z}@zFEYH>4 z^~xJuaz1L-qY0i2fcd{0suPTe5N=0JGov^lx?q~oA{s}67K!ad z**bLRn%jYH1d|Al!=E|C_N3U8nnI1Jy4mLc{G|9hoV&Xq+G8I3JGkrPyRXP0a%$^` zni5H?NLk4b*6BZ$_EJJW#rXa}xa90{{ns~lfZBH_;kPvQ+)Qjs!okB*X3dUnn&c3- zVY#;4Y3(7>6Cmzi6QeQGyxWwtFrG9HsMPCQwPM#K31xT^38NOgE~X`}%%JJ@kE<&I zc^7eyDJ@6}h*g4%n z>z?AIg7d1>$NB+w%3T#bNc=3WgH?W;KB|KoOTQU80>NvVp7qnG-}{EsPAEth1dfNb z_5gin z*?}N3QqB+f0v?3n{6CBj@Za?y5L*&pq0~YfC5Wrmv%0WQgPRuv!&a8#g1d;RYj3}j zkT5h#eb(TlvEeE=glajA!6A|41aj)RKaU5iletfyzkN1!0ADJsJ{TRD8s1O{{5f=F zF4pyH)Pg9kMb-idVVgSP$W`%6J8j9AY7&Z@Cynx^Rm=#1jYRgI7B2e~pOxyn@0!B1HWN-;Lz0=+vY1 zNG=+?QL;r{G2#IckavDn!zDT|w1|;wum;3k?uU!Vg1_=8ME9TW*`y`zt^~?-bN}{1 zGYByVM6X!FMRJ-{TPVx^dBULagvi+)q(z33!2qe<8R$Gn6teVCF8~fq2pCiI7g^-J z=%Nb5D$v5m@Y9*!)(lw$8MDGPyt8{k)fo z;v)!{fVfLq+zSaz3=n84JkCl3Wta!i@EBft#b6(FjV)o>=dm#bbYELZGydsulXGIA zTd)!Imj!8(xru=VreG#cb}_&J;Osl^{Jhq<l1>wEv!7RO! 
z9a7^<^CfLvL_~C*Whxt}vJ=Eamt5{gKwR%Nr#6JiJ^k)4F&|N3#%-=E#gQ|lHo?yQ z$^#v-jewd^=+!7VLg@^DlETw!na~tdAj7mUi~>D%>Au;{gbWi0tEvlEu&=%>5&`hX zqVT3I=30-4Ny8su z%^ow&O*A#9_c#rmX^X?x191&n9c8*{M1kda0L}`Yn$eOr`Y@=DPnkO^3VahY7R%Rx zpNW9bfxZIr*V7rwgE2_Gez2kpi4(W^ogmgx%?yowORbVCGuq$!3#~MO&yf&Q@14qF(-m#z|{t+`SQudaCZ&n(wfIDIOrSsz2Kl#ZA#`@7#jJd?3%_A%#nk21l_d ztnaAH@sW|p6d0q`+51gF+N7+E(GEQpBB zbgKt;t34Tz=)33(RPFsd9wX=#pnbub*`w1|)!xz{ECpR7W>andC)x8P>9Upp zP8j#`x^ylcC01wPzoQ(0E=pxb^3W5B>#y@`cnF6SQ%fI~g$CDM~I!+EV2 zt@9DSw5#pxg=b6@)!V@N7JmO#U)tEX3r6g|d<;2=7GJXRl33N(@c=0k&aK0Y%Etgh z2k<#-nRKz6!T?zCzYfE;EBg!64<}`K9={;BKDm{j&AfW6X1nKY6mg*7zD_~}?}s*> zXHfb^aGzvZvIo3kd!uM)061IS@BfShYMm&b)llo1psXTnl|i)X;UP4Xp`X&@Y%zdS zcy^kuG?&fff10Q66@h!;7;Uip%oJ~_Z-7OIQ1NO2;DF)%MA>ly)(nEdS{{Csi{5l% zXA)^ZeT6Al4uEnFyy5~2NfVytL|%%!TQNLvws-CrX`U%UOEKuN8NDb zX3+=OqG+O$`y6vQBDLS+&~H_C*OQ4LQF`sV-@DN-&VA8$>p&J2&Dgd_>@EwqiV|{5 zC&!UL3bc;uD?sA1|Fr zzhjXEU(<}&_qm$zW1@y>wyTKfRCW|Mb~xafUHi>$RG$78ubSlzfW!JiEK_Gi9kU592(N@n)>SsNW({b=O7*sEHs3Ev`CL;~$!lH#fJa z9|_-iAO~aMmyM_C0wG6vzDV{HEPch#1TajWpRA@qA-_fGNp+9+@%-cv$5P6}Y{~xB5eNi#=6-@^^DdGITIa*C{mthBsPIG?|A-$mWO8Si`p|@j z$rG}Ux3_IBdSsEk{2aWtiFP4X^srAoPNS~YL!wd*=ILZ)vr07pctk$3)!oDx;19!a zYS*=|o^oPuw%@4AsmVSA)@0kjKgnuN@6Z>gpMKFsH#YTBNJO_sF;3DMVs2~|U~IZX z$!#n-P|p!n6g@%!T{#tphJL|ce4{{$$#z%Vh0em2YFukd9;8BaNLVJjSMjn8(O7jg zv39xk{!C;TY>`}Uukmj;a(E8^orc1nCx4$igvneVx~>ew<^IO(!I6O4eLHzZa2EcE z3GATYiFvQDu4w7Po<{^{llRLYfX6BY8bNM0Xi2Rh^}pg3{M9`J*$B|JaPZewO3-!1 zAS+o!NccXk$a1cFyBD{9>yKmT=Z_JwXnBe;=+u1*8hK`MGR@G5jQdUHw41F}AR4D+ z<1K^9?>eA7;pI>#qIw;%Pc0MXicKN9DY@ceZ5N9+b!k|CVqJBs#ETE~~QOcF7W zCLXi2xi>Mn@AJ43!2v$+YT}z<&snkdaNrK73WF$OAsSyZ&#(?9u>gG;tf`k!|EWNK zcYn{y4UzI(_V!cb1zpt)pG=+P;w3Ev1^(}B1EA}ofApp!w8I^=wZc?MRT)7OHl5CX zb(T4&VF(357nY!Ij7RNDSr&K(TIBIIL{r#0JCb>ER9_-tU-L^L%Ms-JeK;|8gYh2y z@ZT~K%1W*y5sxWoH{(pdhNuQ)a{G9AeWTmrXMt?B zNEJ{`+lJn{ZnyjIN%7ZLgjkQFENuXPq@EY5@6)mq)kJ1a(fJr;vK?jz)w7hK*xfXt zU3*L=4JaG$qmtV}XV{Z1n@}AX;)-5_G|zR!0+xQGJ?sT~g8}^M2BB#`B9={~h`5HP 
zE9?Akisi=5Y6@o+Uo+W>Oj;F!+t!#LVD&$~Sgau25oBP$Ci@6|8F2aqdF9?VwOjpR z0`Qq7J6&klU(jDfqsoL~h~~V&$>cXl5sxV&3e5P_k=k=z;V;_@S^fI^29%|Xlpa*G z_25W|*)a5^G4oTC6IYe})-B*9DrA89qtS^a?Y+Pz z-$mTLU|F)q|6xE2o^QdI4Sx<9dnI9{n)$4S0U-!%+DmuDEYcR^m#>`6xfqoS8JiLRGrHM2#YC%>f)AwIjYeBcOEQdOG?fa6O7*H~TI1Ae=jmxTh% zjqw^QDg|p4D|#(T|F9v?*`)mdIK8;B5gt_F`oWN+T4tE=m z&#pJ`gm@TMaun3{!j4&-rbLi5?t5%!$NQH zqWIza4F8KbzFi5nJx!q5D|-tD?i(;SIP$EUp8!4;%B;w`77;1ImqT#P#bE6LVjGp8 z3XGJwT2`uAGiaU(x}U>NSBngT#S8DdLNmt1>Es zyzFSSMI$Pg${J{`u#KEU994}Xbn$}X_Y5+E84N(s&U0|woOLlCg?6O@IOWGFy zE!m*MlM(ZHCzmy^;p}a11;qVwSiF{gxEeh^tyZrAUW^!qworK&Qx)0X-)8UE%BuDW zM~pxZ2WGlWAda#hTjO}5aVj72b#2P87K<@+lfBIE^IUz_63Q3Y1ZqRebJt>0g_;&~ z)A>Y&&PZccN6Wq1;$ae#fxV_#h1>LO%)wS%lggg82fMvu$h6m{wNhQ!C!6m)qK}Yi zjQGsU6$wxtA*;fMI*Qt04g(5|S?;i_14dFsrFFcj%Ho;aV!_qFdY&ivt0al!9Up!y z?9>}=4-)fkL}86l&G++;w2!4;>_OD8pAQyiy;Y-y>nn|;6nzRw ziurUWJ4V0;1y|@GjzIymqHc75K^7aR5W)(M6GK^+%yEGfbHpHc3h?<;6^|yY(My0}a`l{Gnr%Ysv_zgTu~mA4!5m!69DD)S7G z0>0sQKCOMk`V3jAixUKuX>Pv$T&n)UuKVq6yHrtIPOsNKaBo!fC*E{c(fw{c+1Uq^ zooE(o*HKT2u4ee`TdD3!td$GEpZ64&eMimYcw93S;WN68z|HwP{*2^SO+`UZwErq4 ztB*X1#bXi?zG~T7QaTY#A8Q1%=CFy!NR3Twl{$u)`t2QMgS(0 zHgry*UV;n}j+!hUTl9zC$gO?194hEKQoWi700-J07w8%++Au3ZuDtAOeDK!qA56pX z^`apK!=!>iXjZ1)&3>nN9+s7qsmboDIa>}%YhBadmzc}lP1EQW_tNk03n6v!M#$3C zbTZ_}xK|o%Ds6_TjJ6SoGq&m0s#%;^qJk@nL#dnroVE+|K*ZNt@66BxL4lu|gMqL- z&Ya@M(T4y&ARKnsUU?i!$}=%a`4RnZkTD>>5TJ!Suq(?p0k}wcv+% zj?6TzK!yq#mO_$3LQic~p9RC&TL7F6&s5bkoL32d^~3vWt%IG z_r0XrFD%&pp3;}DnS7y)t`;^z>b2Y402g`=&uoL3A&ZyqeNh11|IJ`xPXw7wzpj`y zxbRoVa&Z_Tl2UH1+P5fd@C$6um`ms50y~v6w@XP^RiJ%wrAbd9v0_uW*eBSyf^irly`UuBc8CMyYs6tMLf6~;G@a80FSEh@O? 
zK?-5(>fruhzk)Fa-DMrU` z%9UU2(yXkA|BUF$H#1(c?(DLtQ~}j&yn~R5|Dr_ErhfrAOIfw}6lOv~a*+6QmU#7E z+u`9qdsgMydnUf zABX2R2#(SQ@$u5I4D7#uIGiB2;^_P)OBMKi-`7uXLbacUiq0VUw`WdA!h%A7rPE=w zkSwU7Nv^6~62pG+?k51}(`27qMPH9bOX!c~*Ct4U);-xcO{g66>~L4>>c;F;?361g z*)$hCnhM11Xv(>B#^p|~n3Qx{J@@~Z`=<8Fnl9Sdww-j6j%{{qCmq|iZQHi(bZo0* zr(@g6d7pE3&TshkP2G)W%sFeVT{SV59hj*;djKH-r$tBDF>qguG(7iI-HlJxxt*ru zO1W*42*PR<@7!Z-t!a+wUTq+~`w^55PjxMP%B(iL^g~j}N1KFDpl2>*AQ#}De470H z&Oa}r*y*Ar8rzxTHa$+9rNPxn8b=>v!FZpg(EW`Ld5q=-lVT0LM5>y9{0D=PoFXQ2 z{P)C9=Q>&9*8zOm1{y5=vE-_;#{CxHGISRORST+Wvjg9-;}m>ugVNf_t)|qKb*Q!` zj`PV8Act@h3Hrs<3~x-zw*M3{#pbX>0dN#0CqrdV1>?_dXB=KSv?yJ>6YgbV!+&Um z@sQ|(W-m?{>pAqBaJWR|oHhJC4uSIQu_J7RIP+%a%MNSpI*if-*!9TJ>DKXS48zuQ zDvz_u3L=w8J5@9o1Ise~9u0d#lFw#LmuYA@I>1Su5>!PG)K+A<@c70PUP9utXSBn>W_`9?0O!SyQ)YuQowO*ng8j zFqJKu%|sLhpYCov|AAgF(W=Kt&AU3u_W7LB?wKzs3&6QK!xDrUO+MvTiaaXW_oBG_ zM~B|(gP}au^mj=SSAA_5DoL(rO`9@o?2&L|>0dr+fq*ll5nA?Yg4KPYsXNLS0LQnT zgu02haPsL{2s@1~+K-0^mA!P%VYOoUSyxwA>XS~vIUXx&HqvxDxzm2N!xT63F>7&G zJ4`>OL%I8G76=&-FV~hhD~|IT`98BdUgtsMkpN~PZO&3-j)iL#pKY!n-NaP%Lu1(f zvi944a5)d&u1H#N4=P@h7XQ;yW3IL-c~1b2lhZ3JvG(&-L0wZ}Y*C~bNLv8{^0pV0 z!WVMKLh7@)@6{$$NDH33g%$_9wLXptnZM zX3pXGcikaeCPEb3;}ME?k3r5|$u`$I@Il2@^42V>yV~XW6As#1D+oVNiJ&3*^;aocviI?=Tjb{^1LZ1U52ujg@Mt;2}dETF;(l&mi<#!bT9K1rv^^mindY{ys>4zQS zz8bq`m~>u@edk~AVkQY&OFX8Nq+GkvY>IflB4aooat^Ahr}Ts~j`Gx0IUUF-Kfm8U z+#SgJDv|B92hR@NGSPdbBnu{k$Sfo<_VxbTdYLMH!@0qxrGc~7S}XNZvbOOsVfpae zri2@ZLS`0{V8+`4-*q(Leq!C;jk!^I?~+Ywl}0j=lTFJY<7Mc37%Dap)4*|+2GA+) zN?$Xg&dvy@dv$~)8*nj3CzgMo|BmNdRW;wv0dXH6+N%%YLS1&Ey0x+|0T!>i&+}&4 zMAj$%V>Q{+#BYXz7nUN1Zb*0WbT>&N7>hm0Ym$2lG?17!#D1li{6azi@K0PsDBkQ~ z@!;G9tJwh%%jm%@sKYAaMtsS+;!ArNm*jDMPORs&cRICXW2N^1vJ*xCF7A^_wWr{( zl8Rb&;q~u&T-b!A(hEygC(oZeEoDgj`RfXNm+nMk>Yp0JgrwuLA|p{|nEdYzoOjy4 zdq#}c9rRgZ$#t3C-RB-}q?kcckJ|x!QbZI=^~Bf`0;mn5#9MEM8MbjdP?n^AoLzPe z1@)z~%9ulc&>u(X+c=Ih>W=&QGnfrnViN^V#vK$U1YEZv9RYB3Nu_ol8Ap^d941r_ zC|A8p`?=hOAViQhm>b8*AZ(xOI72M|Fp3@+P&kk?v6dTVW}YHtQF#H4X$m2E_Q<1B 
z0C0k<(=g{>CQ2YbK#jo=;HO$CL8(Oy$Lo~?^*OeVgZzG`8^*@?&}0n9ycf(6o(O0z zhcX+6S_0V=^`pxU_`Q73wHdLnKxU?PLrW(^WBd^d^3vvBEc28z!Qed+GHBxhT5c^H z&g@3@kGgLmLmQ|y0(%(1p0RzE-M-D5N-eIjzx!Wp05X{vgxQ60lKx^H)=0H38690S zC$BY*WExxN2+P!H`N-v2@qKnAvwI8-L0O?Dgtu&EkeAo20lH{Lx3<$}umA3Y^(7=A zjH7n-(62BbZrzA04Zis|R0F=cHoatlOog|~UAjUwaGK)v>@QN^oRyHp*{c-5#gjH~ z%Nd!rrXOjc?+gp z#_(Iet|VHIV{~Q+IRjAnnr24Pm83T&f&n=76rg3^3suJ_8Sl1;D+Q9kIS}iFhHIjh zJC#S#E{15iuH6ICigbzab8-?}1D4KqX>hQ{I9QUftt>TE|4!j*0Q(1gL`qQ|0g@}0 zb@mNdP9Wr$y{eO&wMdv~PlL2XS4F%-;$Hp7eRjaZ3mhrYwCl|{6|V>AxU9f7Bob+c zUZ(Z8e-cG{>T}rcd`l$+bmR=alKP~uSp=VaG_N6WQ9=4U?wM1RTh$DU>^gUGT>O}+ zU|q4LQ)Px%8*rSDfSO2s_BH@^pTEWG%bUgsqLPdYWEeDFDz zKv&56X_dn6JXw#U3Jt|een%E$)L^9KEoM%JA5Jt3>H9rD;YUQIh&E%jvm(;+m!+M# zLHVQCVR`ZF6W@nO2dvCMI2Tthj9WYBO>N)R``^9pMXjJt331iZTqmzbEv7Px}+TW+|c(*y65wt!tV?o98x46TQm?`0Q z#vRGPM+@GlFF{z?cV1Ml1c_PVd4Z8_ee5GB9BUJ_L7#`FHZR;>TqGu|CRSc9E7aVB z_Yd9k{$U{R*x+vk>D(bEVJCZRDWYnLClb)_zQH()D%zA|k;Nz?>j=_KFmp$9VA-Cx z>OiN3ph6{R!+4*;aaI;qohdGzt}rCp!D-qpL4}IrSyV|u{O-BnKiuE-f{ZHUc0Vwp zpD7}Ln1pf6;K2C`L)Ld01Bo7!8&(|jRv)q;IE&F!Z@F$rM`m_dD_E@{ z06y=td@qi~U&z4O@L+ zoeDmTn3bt|lQ+-i!C{S`)!}*;kntt9D~o!2m#+G|jaXJb#0NeNnS0iSzI6|rofwYv zj;>BEl>YIOpaYp82PTMEbN2PV8=q1MtT?piz4iuHV_i?A=pjF7fS$mu)8x>Mo|G_G z7U1Z@mFp9H|1V?Z*y+F6Z%;83Jqb8~3)d7Zw$>}WC~dhi{r6u7lPj=S8qVIEN63QT z^0)?F)|7R}^5;ogU5H!Sa~XY5(!j$F;OG37lpFGa6A&_C_PSmC-Wj42V&HD_OiDuK z%WP93XJcL9NYBFQYNS`no*iRzBUZDg@~JaMY^JVh zQ{Vm1G~I5iS9B3aM!=CAJo4(;kEg7>Z%f67V9ampf8}kyRLBVu_rtTK=COttnXwP0Su=uTmico@IZm174yk#;%F_-mR9M#0vINI zcTJ_ar6`I=-Soe=dWHqqxTZs5jt`^C)mkTY;4&p)0q;ZJbcBCy=#8WNWcXkjNrIch zPaX6U|HR;u8|=LY8C#Khp)BtFOEv^KWD|G=0$9(19utGh;<3YXq|+qXrOcwg=XW+B zO3Tt6-F|hHk1I{6`7qf0D0FdNfH_T80HLeTZms8!2&hotw@Wcmv{CwjCx>|;U-A~R zw%nhmbb$*Fm2mjokF9LYzg&FgqL&FuA-)dOPy6+z51FUIV@3dU_PXt_stA0THE*tl z8Kgsp4EB!4(CmbA!G#YZR1nTE(>Ses)2|DNyEIX{+F`l!NC*!35@??dmf|D?f8q#t zg#wMRJ-RW#=CuJ*K%SR5OR$os|sfA za$db`?*Kn1^k@W-$H0<*lGuclP?)`m*{ZUxwjknH_$q+~1N$3pbS>R5z}`HM67^IL 
zn(bJ*J-+`MD?8<}BtWTwjep+11&G8FAytz5241e7HBrLlp2EFmi!u zCqA9+SMN@@u0YT&%7=(Dp>s$D=wlAXsuk7mDB@0~k^}g?s7u5_Xe% zA3tV^Gnu7rdrpkP>u9^KnY-4mI;|leeJntj!L>sTgWXFBdRQE=_>z0L{*&X7*1a=W zKAZv-fa6mXtNR4T)#3HV8<;OS*zR5;TU%0e_s_XTAb_%~3FI7wjIzcBh%+JC4#Mcq zffG4U^|~64XB!wd3e(f@j{SH4>S+JhzrXgD6x+7^rC}5Lb@%-3&QoT2U1-$ysoaZ- zBBi)Vn$oDONa*Icca@}~eel$&6@&4c+Y1?$o|ZBo)KXU<_ehpdg41PE8xuJ^6%nA!OKrz6;)~_ojW90(zK}eyZH>* zJWOC540WMw@_#$~uFL3&2}G75=9G7622xh|Sn5u~68f7cADtC_A!hTfne*=8=wj*b zqpzesAU3)%8My6Qi&$0aKy05&u=xH^Wm1sH`L8MBI z3dI;oh`=uQaY*7E%%IrRm@8)c-H+V`Hn|)A12g5#camn0j()72txpQ@T(}NgZ1hA0 zds{UX?`nnK4csGDyGxl2Ns|?{{>hm*@BvozDokpxJ<^);>lnuRC|JJr)daJ**~ZF;?@;cCywfpMNPXx40X>B< z0Otxt#POYPe$FH8wS#FOIESZH>6Q=OR(ZJRdK;j*Jd_eZ+@uyp*diR zV3>Kzn;^)jcelfWguj*qH}yj36ixtR;)Y`Sgqi{PESxU+m>xtr3nCWErmXaE>*vj#SxPslhFBLnzy~k^nE_Oy(D-+ z+I;d+^M>n)r0Z7@WF`$>L?05tAyryug4&VdT9yz~2UAB~gH{h zCS;s0wt;FNveoxT^zHV%;-nR2z6N_`|1g(Ui(+|{t-6V#9k#3iI1If*Zt4AMaBaQR zfB!nS50NC`n;{fE5a%96Y;DDeu3%t#bO%OUg{LM^Isff?p5`3tfsy3%=y3c2tL00b zO8x`zd~LA#N*Tr}Z5f)R2xV7<6jC6Q}K8rudw8FHf3Kyir{ew%s`h&KcjKg>)NDNBlXP zrIg8f`dxnGOR-V0(ayx_ofN)c$`;`wE_38UlM?if}jAAAy+9Zvx2Ss43^^B z1Jau!ANEZ>)*mLif7*^UI7e$;;ceB$5(9#UAEw%WPZb;2U(OEg&K50n$|@Oef<;KKr$KLYGD_d+QSaI+uLg2#+aBnl z<8{X5fCc)fF?H&EpTmF96l=xF2E0a3kj!Ll26+|6K%-#-W$(uJNxD854#*zVen|bZuaM!`fdThuG27GWp#Md z+apqohHO*tw$?XpvdOO!ZXR&+UKjOC&hn3$pW^PzD@5*(#IWJ}J zozH6duW;BgYk_x;pM)6hu8YuXdSQJvlh;en(pI<|Lq=Auu09M!%9irq{()^E-J5m8c<$+f3^iY* zom4>>QQ0_sq0L0m9R+q`W!}yR7$)|TqINBvD%Gd7s_GrS?E00~z~J;Enr^ z`+&J;W?zxZ2vP$G>nl=>pOjqPhN8#aI%=;(Jh-UFhi$`g=qwM`b%8e z4|pvRT$an#(bydU6kLP=fl=+4&}pbt*uA7^d>ecs>=G3MsE=S zoRwdlbzl8d7nPepuf_dNP&$&oAqtMu5;5B=;wh4MOSaP7OsX)-h^$JIq!Lr5*7n#C z>KxH7Z4a%A5|`D}rw{=+^q~fYMz8IsExsbjbt;pg*hF?byf^#%y-)^UqmAS{Ue9eS z^qNz#7h{=jj2GE65{Qdf;$%)v0*nKq*Slyv-|xeMg5pA!z&sdEjrWasdIC=z5hSYi0HlvZOc8qq?(iA&>M0{9G~%S*47 z&3&d~*(kdG`g;|>(djJkI0$pD4-r2t?b-`D2{*u6muWsjlkA7-X&Kn}CoJ`-0!mP5~jQRcr zMj+Sf#uLLQ54Hhjl=f2GP;_w(9r~W*xF4cgWFsXlM``-6nMh1TuYX9|T~z$}fmUg> 
zCog9aS9nT@fkv}w@n@YRJ2Q}mgow20r3r|un*k}snk*PI%rC%xZajl=KvVYW>p2X! zYcI_|CCS}l%ZEM4*?i{KGy^)5N-*cw1dBpoj!N~gu{LzQ#|aTmjxhc2ZsQ8%?=;&w zjScV*-qYu}^*RxJuIwvo2TgB1!X<;K8Kub?h$2N;RJYWc7fQwM0B^IsK}8n~{lVTn z#n|I>%H-g`)RzjhBF&kXZ@o&#Sm-c?O3@e6u9YuGo2V1>Sm!hzm<5I`)|jqXU0R0s zoB8)g`ig3gp)EYC6%40Cr{4T1H)0rje#x4&@+hM3Vc2 z^^|LsR{72UmY9p$;Nr7?m^ToAVZcFJ%T6=AUO%h3T5#Ih`V(R{Dlt-#MqYFez&R*o z8)I$1wx|>@SR`9c+GC!ee=eAstYZmm_dTsKpl?K=MDGu@8%eq%F1n}?YbYIgFuJ05@|6o;BJK%`7EfjXn96&x9~t4OEw#t8GH zT*jRUrp2da&Lw~_9-l@;lhA$l$&5Z_0YO`2({gl||&qWn3PAG=o+{ zF^$T4Iu@-N1gY2c72Ac24Ef zVR-1s#_OuyHC;nk0OB=OWmX=kh@m@Uh(yt+{jtF@DpDO+I-I`vLg>HI94OUx%FAGNyAkfK^VosO7vLyq+U6&~t)u4JzR(8T*w zd|UVVPqKU@32J&btO9349gHxHrBcT1ZAUkJu1rbH%Uww=rj`{)`V36_xm3Tgvx z#Q*68)qm@vXL&!(2gu&9wLR8)+57T4^CztiMUb!gvJeAjvOkN_pJh_ZoUC| zQt}eEILjyv=~zhPfL_6)vx%Y5y2VeJogeKl1EV)5FTYYQ^-y-=h=cOVb@TV0_493h z0OF2@&P6MUDDs~)G;#rLd_LaBows=INfDN=1gkOMTz1W`%fJpo*8v2j+zSoi_B%c+ z0mFPk?;6dOA zCq4yrK0D^bdb8Fb{@{rLN8j}eW@4RC1-tjjJQu+DwhV@g> z{Os}z0fJQ^10hV3KKrWtmpy^lMEYb>?neOo17hmn=cFyx^M?I6(+hqijR^2reqEN^ z>F$?58^LiyHUQ^;ZE>;_2lo^j)~Ti#^do%LfK4(^8bkUE2{IcPyAXUr*uh+HGnRJF zg`B9M5CB|F#)6b-`*!uao^Ll&`{o$Cb&>}V_hA(Bt)fS zgv!pZFa_|TEvY#}65+>Jj6}BT9}6MY-K-d|t?TtqYt}U3u_VvqAiOwN#SlG5bFVN~ zJnV6;@!)5s0@0h_^uONy^MC?gfbyHE;@JP3} z=NZMceGd{}J!VDhq;(^Qp18Koy&+KTq$)@qTP8%&$4c0D9w~;4dhktSmX;)glwdcpF*t|E-=%i|cWh>o3?o7gjP|M8ne-RI;NPoTtSs0>W9Sa*sAJlpQ ze86hF_{;T$Te_}{YF%b#NDqFeEb?S2HPK?-$b|9P+{}GCZ!TUw{bX)MFHqIM3EW&{ zO2y&l>vwlE1;bAH#byS;!MpR=G1Dm720`XzK6}GxEpJnNVpor0;xP?A4H@?Tg)@AU@7LG>>ha#t1Cd zMkX-t&$TLZyQ(iGBCD%uZ-TsrD*gUe8+vK9i>_*$DCnQS>}4!1_NTYO-{&ak@>q-7 zI(h5fW630H*Pq}#NP)|lY!HS;rNsLMugJ1Mmt+hz^M=^*GCfjW&rzpJwQi4)Aly{Y zWrbQOqv?v@^Y6r9nsWTpZ%})yxerzRac zzVmfI{je`z1dfkiiD#u3EDM=mToyVsXv9#%A|x5gT)49Ob)_^+S1|Eq$dbD_9`w-Q zsI`60hD|sWoNnC>x(qis0da4sAPugoahBbbMpI`>r(O`$0q3DTEjY?Z>SL7c*Y59M zC(B>s&asao#f1yF!DY5I4tYyKYPZcE3~Gw5L9zHgXVz-?oG1lL$=WQX#lRE(w6Gk@ zTs*7-j&u4E%@={!U{~P4DOThy-LVxB?iqQiapPZpF_{;N;DHHAN$*`5@?Xoq|NG?> 
zwnsRCL=C6D;?pc-rD`y=Z{}%eP{MZAz`mWSAwi#3(itbBYCg?y18 zg<)yRcReLG48H7c-+!-hpGZ?XA5M?4ts(~QE2wnZXAk0`)t}-@Ju5T^b;v>IgFG6M zrAge@L@b`6#TpS-f~jL0k@^oFz%CvfS!rD6($U#;Lmrn`u33w~0A-ZrG^M?)tjn`l zzGtt$BxRK}aTIe3j{#NoTRu}K*SJjQ5}h}%RNzwDW&@G{oS}li1McjC%?waDTp}ij zE?2#udbUwRj7AxTow%KwqjPsWr&k&ITU_7;C&1U`UD_-R*eI^&*OM`s+>4tua^L6P zikO$mG6lCYkd%S!z~9R#&M(bpMaHD$#rfflD1f-Z?#BssZ9+I`_Fs1Cf&z*|Q8;-1pt8Oo8R zlroW$7|oim>^e9~aNzuLpO!Ixv$^;dh~WMO?4P(yl(6ej9&cQg?wA3^@+r+sF~$Ua z*S+7v&zj+r?@2R5h`-vz<27XAr^_-a=Mt`5*Mw-~fq?qd+ zNr>ZoHZgK(>OXOon*}tPNANQGl>B&!3=2pKKOW{-w)OR zod30DFD3k!W<FJ zAg;?TO=r~WkA-cx?Z*UT9{1{&k-RJ08||SI0qNrS-TzY@)$)EkW34p>N*A|pk|r@y zeKW%vVn}H`!(pu5LpE=xJG9(C^r``qH`qVf2T)D=>FYwHEpT9NLkIq!q7K zk_S2Gdki;QK7Pzq?u37hiu-gn@;#DjRzeCOHdj>Lq1kV>6x z_G@7tQA zO9e5ya+s_bhl;Rb+10@)5TWy3m#M^It$#UdY4?xKlh}VuXDk=H)5dK5;F%)Ot`~GO zsAi0fUo04tO!wIdD9R7w=Fh*ypT)Aj4=~ zb9K0;1$5pp7hkxy4gg0>bUodAVmhrA6`2N=Z{0)-4?k@D?KRt+PoCJUdC9H5@Aox{ zD|TJ9p3dOUzog`eSM8TPeXcP{;Kiy(JHtcY{RzRmFb^NL7f)!j=j6_Qun9hYJ7wqo zI>mj`9z|WNy3n*eSu>-edd;EOTS$f{lCuOd&D;CaAivaMrqqYVUiz&8;x!j%y5I1O zyjW_+-RnWix8%Dq(>z!WdRi)%CXM&TGVOC$n&BXqPPJIXrkePb?v z6MqD;=Ju`2S0iqtsn(Zacjj=GjOcmm!>}K^%30P`Po>?4RR1!IajS7q+_1U?RbdUA zMyuDlA>HzFTMQN3*pM)@7tmk5`kf!6cwuLZSFo70!V3rsnvu@gtBjO(kLQHF#_pry zyfhc$nuliPdUQfHXJ`!(=230!jP!*RCHzYb$GET}))P434zMd3mlLJ7{nK+*g_3v0 zKYrPfzbmMt8Sc!?S#9yhWyQ#Cg@JV{erLA}4LqKGsPFc{RUB%swqpz}OC8b`za-Np z0Ov^L>Oe{c67el-?6BM7f%+e)r3pL2lL|q>?573tb$xU?sA_uF=ZpO-caeyXyd9Dx z!t~Bhp3a3(gHWZ=N%^|~<$5=M+135oRw!U@nq|_7SA}{KGzSsH}MUZbxf6=iS z509E&o993~`lZhNx{grbT{zDNg-Yk-AC@}h*3`IJ;%9UnFw9!l z`tUNXf&hxPVJSwC-nDGDR9 zRUQzpaqfVJftlbrfe0g74(8*et$7j|nyYO4GB+CvxkK)~sWED2z6pjfgUQ=jpsqCX zH4A0~8a@S~>>Qm&hc#ixG5{YzlV)rZ?dU@A9uDvJX76K{ z@q~;R^QLq48epO-qgC&Ak~Y+5;pHk^Orl`;t(9K_7^epjIm*OVLeE62W1!!94wdFU zan=V??rFK*TuJP`3(HKqL0UCB84j7LJ$`Q<8Q5hLQe#LG27{2T)emdepC&z{=Y9~` zUJ2~@X6-BVEZ^rbFsgu#9CHhS}}{*I*>?dn@J;CapY)xAprjnh|RdO0m+a<(EoAgF{hoEWQF8O{7x2k_7T+OkUQ;7?QD zHdU~K;KpZk>syiauDUk;uJb-m`ixT1tf73?lO5@w`suL6n_oM9{uB47`SG-pruOjW 
z$+&sd!NYgGX>v7TSWxZAMh3s!;I_qQ)mb)SPrE}ste(M6fx2P0?Tz=Dn!^mo7_FNa zZyxQf58*#yNYQMw(CmVIc${~u-ESQ#L)@`_nF3+~E8B2boY{$|K4CR9pFQw82{0O7 zMh1Ayu23C&|5UaHdDY{kos|b4>tUvbkrG{;l`o?88ZD7tLV$zA21mdbGH19NA7rTN zGF^H;5=vr|<8-5=?`vE~?ujy?t3%xxS%?{Te-tjAa@F-#TRt5AcV*FpfAqD|$!esf z0pQH1m(?xr6^}(#1D|`r=#YSlv*PmN$zXYPG`EMEfl%rQZBY1fo@q!rtf<(ZWZMG= z6<2+XlBPweq~N(KSFx%<1kbM8@KJKy9%xdke<)%#F z&JS*$J#ZJ8X?k3ec^?;Leqd4wPidY|J+~gWbASN$A&yixVZ6g31WyyCBVO-4ZAO^9 zye_2@i&DE3ArFXQN{lYN+IeY4#IKH7 zv)s?84p9VEgCSxZ~-{8L*!pQ207y9yp1^uh~6woxnm;qgXY}=J_HA2LljF3ut9RkaRXU@ zTMEwa;bAM@MKvH-Z+2%bp&-;4wx>)C0Cs7W$w3%a&}0HZIS_U36&Jp=Br9^o2a_c& zo42|I2^}PZ;e-A-ni~EY5T?g6<0)CK;lhvrmTC0*bnGto>&*`ez$XJDAy1fb+GG5$ zosP!T80_TnnqJyA1zu#Bf7?|Q>%D3vlZngN=4aWaU7Ld~5me}-|74NPlPEBe-FTyw zW{4*M2eUT4Zoj~&cShR)pS__wY%HSw>Gz(ee9*hxb5 zjsh_7&@5jeQS9qkHd`JWbY=h!j~ypH-3m1kk{&LljskziNSDjLP@O6Df!a$t>Qpu6^8Ovi`Gug7@c#jt>5g)9{2SKcFKLqT3kW)rJr{c@PdyT3Fvi_uB6vP z?Oyg+n3cr}5ZcM5q)ctdwBPxWRCnV336%hd`?9lP=0Bg~o-(6g0+>4>-Iw%s{x*~# z#xA)`znbYc6o{MPYv~RUI0R1=R<%xK^+~~4{h|89<-;JGNAv!~CMp1*l8lbK7$Ynt zVlQ!O|Hz;xylc%m_pSx?9-5@>MkbwIkxp){d1Y9mQWPrpsItQb0(a+E!nr)p@qz6&inYQbN3L%b}dZ z2mMleeLK9E%h)M}3yr5`HK}IaZ5}qKW$TGBq76@t6K1>sd?pw2j5uhCR5pMQm#a}UQb$R=31L-u@AC;(R?MSqSk$$I5KBB`{9B8fK3Dgu z@XPT=f=9R)>TC(<4}1~x#h{r&4Rv+5cF?7l?|Szt_F?A^fpI2s67e=caZ#=0Hxnvk`0b1g8gzPGQSm^kL3SMa?ehbCDT1Nd}37u|s6 zN)1sOARAhXO@|hGkM%sL8YJ^=jvwbRjoiU+zux^EReKm+q1&>k>V|EbbNqw=p~c=- zl?e3D^{n`=TfKG;vE`=yL1by4EI{Ol$S_cY(6OEwIQyLzzVp0tqp8j>6Q7Jb2#*2= z8zVS*Oe0>rBhDFMKeVmYyHfL+oU9Mv^HdnAo}5hN>u7YUmfp(9V7<$uF@OEN<$blJKTN8wq7PLl1}-)KoaVyJ`A+|K@}; zZgh_U7v)#9pcd!q-|DZUlY*cDHl*!tki%=vMWmSd4aYvjLg&&ZTNY_G6Nm7hkDms) zGXOsR-8J%ZI)8gbp*~obz&$V4!<;wE!0KLV@E2JM8PP*o$#dCvpkO$(0-waI9-5qq zkvIJARTJIPGz!JeQ062h0em`riVwS)GNx6txQ6j@ro=3(R!GP@ov6FSMP1q9x+=st z=S$>D#xG77oT(L$j?pw`9}hRwQ=7{uE4(o*kY0EJd;oC&GYiW8@TEZx7siS~yhtAt z+8m37c7fpP(Pni-NOTN{X&f=HjV2T(ZmTbk_6a-Na*Z4OG(OK*mjZ0Iq`bwf2z@ue z5;rnf2B}k&^RU38+Y6hj_-aNMj}F|8x9R)8?i1z|n&j7`;x9(o#E$Cg6;zeu)s9QJ 
zrg*iG+z%_at)^Z!hhqSoVy(WPK@@z~PA1qr(6Femy1vA!p-Mk^!pAdM7BQ_G?0U>W zG_Y#Q4^uB zfBc3xciWh1!Tf`BM3cg9F7&a|e@6o}3L2zc1bwy-u0(&>Xj^+}bq9p~J@1f#W0z3g zBQ%$gqa=nls{yMiMiy#we&t1I^w9-B=c*-kCbU_(E-Jx2aD&EWbtq|1lPm4a*8$%8 z)Jq6ijMZWcunQvt!wdW}X;a6C`8PI2PC*OCO2!$)E(44|CeD)yL3;?DsV4k$rRWz$ z%CqaW03um~X=L}dQ;~axk{{U%SN0cR|Ev%vJ!w;J%-EPYg`|ZuQOxoz6WBn-zgW*& z7MdG^8N*CfJHU3Lqe36*W{TlSC_IOA`mCPTKtD_M=YIcRv{%`I23-aPdkYGj znh&X7WK1-vIjajB>1r*+m*~k^rDF|AIy1^DMVFmi`wpH6Zsgn_PAI*YHagDqXEhA-Y?foEQU^_fS~ zdwyGTMZs&+21o|6JemB+b5cKO_;14tQe%hkc$Onord#&58c*_q$8d z>+lzfdt4Qjm7c-Fb-9{RYdGIzqe(XC7o<1`WfOm{ z%HfvVms9&%vS%>STLRrKeMhFm6_B2=AOrx%y!|PgO&38&Binxi>Mn%_TW|*1X(nITvz##%)iNUWCo2&ZbhoaEwq?@Q2gUfo*~XM+-fT94{hX$PbJsE5)km_lRjSwtT}=&AiG4J7M92rg$h*LxFzxK-X3x_epa z&%c-Nxg@oB>q7C+j06{xBd70kLgeRvRZbyazJYbGS|^ghC)9?EUV)3U*fIy}orQBn zbrHARc@Zr7gMQynn9(nXM$E~=nAKgX+mFFcmH5Lv)bD&0tPdh|tUJ1y*~%=gATkl& z)9tqWRgH$?HY%yd80s5&!>_$D;9L_3?-}=w-h4Ap8MyW4I%&x)2Dah=35pK!@BK4s zM3=DCAu%8e0#^$a;7iiCo;=DPZm@#ndgexJgx1dk+_&nK~l}68Y{z#zE zW#Uu7oHHQMY-d36Yg0YTmI|Y=vwWtn!VqKva2!uCA%lOz!Z|9sDrdhKCsvVGCa#9W zds|G4QTWjwdt(TG*&MefdNdtDv`eaD4~%MMjCwMxv(#Q53ipL*Y<>6PlV=kk!JJb; ztceCvY5P|qpyhiwBVN~|#lOjtu`e;s+n*|1SvB6z+b(yUdxj%r*d>-X@<&M)0QdcT zu~&_-e+TfP9HKGmQ2EjObjvFJ8}TwMx?v<}T^}S!&9O;Z$L1`!KR94BcTAv_Sk4+o z@C*bk-hd`>4FT*V$G0tDwv#dNdrosb?Q-%Z2m3I2{@4xGB=yR5HYbW~G9Gtq%OUiK zFf6etVi7P=&WzaDknE3CPn}6euf9y=mK^yi^5#ewQ*WU^fc>29wqbmT&msiBc>eUKeZ+ibHl*)6?iui)+h5gSY;Gsxd;PA2imb&J73MTmiQBPlW#j zU>AgPbotjV{}@dr_BHgbk%lPzQbHVKh2}XP1MwQFu|5n=(Osg?Ff`LPvJ8bz>x3u= zZ$w~nv6zGxgAU9&s?#@4?qhBoT9g@Sa+P2~n+g0(?!zyZqW)_$J~q_~^Rb6!AXOtl z1)7j5?fm8{8g_oXTx`%%*H=0Enp#+!MQ-2o@BR%s#i6r|3Ig}LC(1kL8QWXZ9vk*l zHF9lH3PR4b!P`{wOw=KExJjfL)cEk67I5 zLu&_zh|Nc~80`V!ObmUpE4r;Y6Du9iJYo`5IMdH~N&WTndf+sR;3>OgmomWhuWLd9 zd;$rBK8VTyoPd+Hfd;J7@WhB?!-#GHgyi4OD_gw48pCzA4hKKN&;lDB(|_~J78^pe zbw>|U31GN@E>8h7Pu+$t43);VJz)TF{?`^ek#$qH<`~wFVqLTJmxY>fLbVC`u|yH= z4P>yWA5Yr~KA+r1zo?gfUq4C|c(o~Q%n#hAyD>TFtlei3e*PlOOHK7b4J( 
z+KzDt>}Sc}Ja;O5392MK83E^Hs(8sDKz1Cmq!&V8cNhB3qGhQ`=UF+49`@*r1GBsO}S=U>|uNW`1}iI;W5RB4imG4T|}gC1+8p+!nn82vi@%@>j}e{ z-B`dnIsOHCGQ17R#SfY~UEL?ky`jLPY0KD^DMC8%fR7r$$K!XLA3_kWP*+q_(!$M1 zs@|PDcMvI#&|S{M=}G}H@`CZ3MtXMlCd9(3J!$v61lV0toKd%NuUTPAz~IBGiHq71L86^jlC zAN1Sop@BU=>Gw>hNB|$0w6`md=I2S$NTk^ZlG6ZvqByJEl9q9DeiPvpCteTQ?Y}S? z1vK@p4Y~AZoqXAjie2HvI;pnPTU5}=J;%=F-~KVG-4$S$K-<<&M>46|&-#!f-2YEs zj%{y-@Cy&M6|2?sH&i71!j?S~rA>Il73F}3kmG+-+ARu29p&>&x;)?gS~9Cw25J_B z=T=Jp$<#Od;WzDx)#`OT1J54T6$%|h*tA~h{##X?&?!rIV4?kPEzyBMz?Q-@LlT854 zYsp#@cbP?ssa|kHj9P&e3P`8vH8C!2doUPjvQYc^FIS z0e};pmX_QZ2W&W>Z7eIlCb*+?^sJq^8m6tj+yDC)dDXhaT8{bA`6Hp;9|fcWJt*Gr z@&N%gvz0~kRS*HI{`a5X`_Ok%lAgv@7>(W}Gc`tW79pzM3@SnI?Amu8V{+DNMPG?TK(_n?no zF~_k(XjD$x-xDJ-0co8y`94EW}9Ydh>Gqv{R8bgO-4dKrB-t1zS zUP&4ytuM9S9|QPjDE-vPO$&+f#My=ux|Uy|*jXy?|1ft>?Ugm%8jfw-wr$(&*tTtS zY^P(}9iyX;?R0G0$$k%3_HX!(>g>Lr@r;^l)f_cu6?0}8+qK%?1q2%qfs-d4(zDt* z$L3&-EnaNfO|W$Q?Azl<-+@Yd5Eh$vVHSX%v`SR9E9}gf!@q*uJSq_De{up{$DTZ1C!)tD1G8<+4p=_Id-tQoygYOt?#r>@u5qw zKwT%vU=mOCyE9OyUpsTK!1RD(%Km7DZZ1DKPl`-S<|N%IM8f_g(B>xw3;0A01^B#q zG`OXxldkPpUMR?OxXNyN4V>w!YVPMcmW%MSGUxdDD2YGOIERNRCqLo3|My z#y6qa+`@~Q7$i^B zE@T+chZ(@1G3CuSH>epmwsLR1Cj*8U(t?6wm>$JTboEnit0|+c9?Q$SH9yxV;;#!n zJzZO*cWib#j)!w)qw=d5)bk*-2moheAZruE227-oMx;;&FOVj+0y@{Z8VJ^Uww(g za0u$Hu}`irktu#!(7ZIMQz<~0WI?*sb^s45mmpGSe6Z;tF0Htrcp1x|8A;SW`d+Wn zUhwJD9wuC%CMEvpL<7N~e{YU~tp2@KFVA&)RQlr-`3>V`jzRjbXE!+N)H&R}zB-j4 z@IzF)v7fMIV`!d&0>J0pTfgd4La!df4)LQLk-z9l^ikO0m~{1G*7n&`lNK<KS& z)jbt{-p)~=cI{&KX6$K#x$rqMZ{n(^4-~ujpC^Up0|y}Ic9Q)LSy1}{@Mlr}v}cLG z|AJ>oA0=$yylcXFQm13bFS_+_=jOz^Di*=JWPD?KwwKG4E#_}kls<-DCbdli+p0rm zooQ7F@)r>R$Bk+N)fwHXj|r>K9yNqqgi8GcSomt1r5PirE+5aeoZ|IH>+UI)nHEh9 znab+q_C6cahLAPq2jk!{$F0ep`1ku)?I;-CjrxBVtDD~NEIyHxz?pzi4>s*(9z7eb^v#jIJT&c(;+|LdIgr?Bt6*(_JL&=g#Nv%=m!pp|Eid-?3RP6 zt}2^|(|Cn~)(`tHZmH)tfV0J!I;+N)`;YYbXHxyHO zkwf)hdA~OO|N8SQrK?g=$B8#Y(0rOS&FOzvF+QZ86hni+S4QO3gxb+)f?p>nGcM(EUfw0t!+^_Tlm?v{9Mx7U5hesQv60+60?7`vQ-bY z5VslARN5c#=k;5?u)Q{PYApZX)BUKPyokxX$%pjZ*pUfTj<&N+0khq 
zY3nqI*&btV<@d%C&eh16-G)#u_-Syp7ESjEl`vyeolBO@=`^j|Ff8+%Mrn&ZVE%Aw zSh@d{LL8c7-A|fn?&Az@XL;RcY~i{>+y(bqF{Ulyszc4kyN;J4$3jJvA!b+HZOE=o=D{N5um;aQs8i;YhHu;g@`$s(5z4BHU_eXbc4S zOc3mm!UX845WvNIqpfQBB_t$OAmRBu7yUCeC`lQHCEyp0+~dNVzMpqKs(@IqF?r$b zITWk*rm9CO;8sNB4IJ-7q!TNiXKLz8Xs5`$-lz}}wsD0hzPL&z2?K9^(y8FlDxLR$ zKV%?I0H4Y9f3y`z?--Vnl8QCqmAC=R37e|>4TCNC(hFZF?I`i{Cj=+%9B#NU#<0(@(#M{FzO;HKkJ9Z|5$TDgkrj{(RW!dSgC zpTErKi+s|subQn^e^WScUL0&q1S2|*fxEEhjwVcFS(SA%V6fq#%T{+0JQ$c1Yn|V% z-ZeQeM@b%%%s3T?@tyax&0rS3pEODD(=u-ob46xS3pq)YwG%f zr0o)UxzWyh_x6KQJ@=hAT-=Vde#3;3QN>LFp95P&-sKuZc@1rFS(xSiwUgRut1s{o zB;aRzLbu7cU0CTYs``H_P=f9} zD;1J2Ao&HrJPbA^Oap175i}QFr_>$@YsA63kM;igy+wx}Bf%(>Ehu`Fk`9&gy8<4q z%Buud@w@K}fcu{rVm<`&H#f(yjLVv8RXVkE69qqJd&2QG?d)#n3TRA&l$|3o|MKGb zk$(&A9$vXH9?}XSN2nUwm>u~fGxF5jFo%*Qsn$)}3Q$E=kKLk-dvs7QJ`Z|h3 zc^tf9@lUh!>4SUW`@n6j>9g(qVnA`91vq>gs1_7&}@m-ztZEMve3l-}%6mCKuvx^ztYTxFW|SuX?*e@0V2S)qnT5 z{3avG6?5krJWgTK7^3+b$e=d!xSpMO6;URnOCO0d($F6W)uYU{?o#VZ1^4BwczAV3 ziy3bWQ?{=JOlGxC?FR5C!kJ?N<~hVGgxO?}Kc4RM@AJ=ElHGb?f@-a^?8T2{@(9)R z5z`%gg||dEowmK-w$?w`8D3`!dG5f$)WC7L4*~g_pHl<5m^uC^*A%@Svb5Ytv_hhA z@PNPyLiFRZ6Dug%Ak@ZgdI~vvLlJpL0hasvpbQx{Z;8e+|+wH#gH%zA`pRWPc6IFt*eeHZ0Q-b$j2i( z&1swL^xgihAC{0?iGLC(Pwci8TpyGsh(xPtL`-dfV7ZLzDHGp<|Zrd`4Yphgcw1fj3-^U`x@X6Fz6k=wA7w6G-YjB zDN%uWxzIy)HLFFyY7K6AhAt(afX(NJ-Y;O#efra!FPi3>66f(CjYNFMuQU!yTV z#`RsCsYHFEtL)_cPf;OSP)LB^Jd4#U`t!|qWXIV6q^?%42+1{L5t-n)L=FZmIH@l#1&Otx|bHh;{iIIo)WM zs!)?j*<W$!I+gmY|x9h9Ld-=89EZ##(4-<=Asp00oXglI($W{~q#87KmiTS*W|Ms4N} zf{q$1;fJ)8Od}C9&BXAmJL^8u>KK z&5kLu0b~P#6;b3b;UFZbk&60pi}0I@#A(ILfA=Q9Pw8O+;$k9F5!Er6jJPHDs+d?# z&5m8Y8+4)YtH9+g+WX;T*cKlo>2TL3WeK9xbdNGWkDcN1(8PWHngXa%C;xpgRcg; zCAJ_hY4At4Tdh4$HF_;aP*F|s*SFZ<1xIr6UjPng(z4~?I4tgezjKO#@Z-_1Ifj|n z%}}!@emzcK4YAvx^xc$e9?RZLqtcolwECr zJl7$SHmox<#^Cka100#MHDDkuW$3$qK5>^m2R3Q~3`5uhcOhY?+eG~ePCxO(y$Zdi z2n-|x%AZ6M=(?9B43E@u3zXFx#9q=8DcVBCUPMo2xGtEu1#8i*OHuGFgsSJ zbM#1{w}f9jO(!s0m1@K#CS|BivYq&%*BS-6V{ce|FS>xdBYdgmnrZxsGVYuhnR_4H 
zuX2EGEo>`HooyCLI{4t8zFn2pVG_&`?HEBIIXX9xb^c9Al4H_<^usIV0be)uAqJp_ z?v&sR8FSNx@94#Kca8G8#a8Ply5A_WVo)IJG-ww_FuxVH_hILod@ZMre{*q8zM<#F zWsi1^R8fxhh256~Gk_!E3A>&Kv~nO(>ywa{G(rHGOv?eETW=fG@sm!ay2TwfDa3Ei zcOE4e7c|BuzJSD$GB_D}1~bYB)XGmhEyJ6l0zoFkjFPYv&h6s}cU%9_`2(X{2LrwBaAS1<2dFk8URn85 zl2c--F{Jm`e;-mE7c`fhBd7j%#l(whryXd%8T8|Ddw zAM%pLIDR#N^I_&gmm05rw3#yB#PNh5UJ37 z?(#-tOcK58fBmOh~ekp5bjWag(2X*av{_*z}L+N z=%K}N<=*a^Pxh@c65^A?uU8gq6F^ZEW_RLi=TTZXM)p^g4+}%_p)iz-8Bg^gte4-f z*j;wvznD3x%0Qr7S(_jchIIAmnJ()(<5xDlbV1%58IW$%%*BK|6F@}+GMq!m= zW%A*hT>N8cl+*0q5pktd)9BYzlpO=Z(&{?JzWa!rB%M%~-K5j<`?$j?2^y|wP@s)? zgm}wAXj=Kh;ZgoP1qmakqvuH)Qzyvu<1RDi5Mz{*ll-tLk2wmbAE=@s0r+#j;f>8& z$eF+dcD^O(V3C8yUX_$xS|uW-J8qiX+SUMdY7R7kWEalb<#$?|(2uIcPH%@lb!sYG z+&mtv13B=$9(!#7H%Z5Pa*g&ptdtfTcH<;ReTRvrh5Qr!k#=uvx#W@eV8FvT&a!j- zrG*=4kv0qxqF$`&L(*)DA!e`01;+&NnSI!%Kh66Cu5UqN<=Ol3ICGYhgEfxZn^$ZZ zd3I8Y!sGcC`GRJ3ljF>G z`W50)T7Z$_X|p?W`Zr{T^mbc00vX7(is^MSKPrDO`^LnDILCLTTG&3QR@xV!Ck_ep z-BEMYnykm_^J!3of`UNdaywkil z(fK}YT#FYLT9&mu!Fln@T@OPOa+*i+Xb;c>FM1NXW$q{kk@~Q6JXhtRye6wk{7=r4 z+~`z?%@#8>SQZIc4(@tzJ?kmL@#^|$0+0eqOm3ZLoOxBqX&U#;D=GI z*awBjiKoap8jrH+Kj+7C^sr0-`3nCDlXYGf_&}0l zgCo}FhxHC3mS3>Fz3NKr-4_|eR!7Lf-~biJh6eGvGfWGJ%R+2);v_Y@QW*`8&0nCA zh;z=L6=FmqsURx*CK^nVK|=zCIFWZOF2_PbM`>GYe=Ac3?2UoIC&sbPn5`4Jw(BN(s?*4X1p?NL z6t@svMwe25QRYxdZUgktF#HVT$p4LbxFcH%BZi(sH9$CjLn}Eh(!xQgDRe3GTAz<4%guPRa25j4xnU+HQz5Y2-DC51QuZ<+jYtEjEo-BHQm|re= zsSX&)Wg1)gIzKGx`n4YVw~)M_W@_kE#tSB4;W#+FCQa@=F;{p|pY{tGpyz*Wn0?RH zsG8!`0Qs*NpLmT2-x`7BLiht2npUT|`_rrV0#e;R&W|VZ!Z@g#sRJUZi%gb!IPR9_ z>#tABD2|F25g#C+%D3PjB@wUhI`>xSwpmdbCD#0UkJ4I)QZ{1Xc@Z;3vw`>S(|DTV zUIrfsq(!UMEJmrdgZc<};vLW5h>`})z$qj^S{RPg>^6P^C%;RoDYy0<6Ue1t}0TbH{UDdo%vX!1se zVEy;^%ji`G)!_2W{VMcG*@G+v_VaK*;irmg=8Xk|ldh$lt5A_6NSQco$siR)GTQUI z&V-MQ8R*W8fSQLA^yffvt2qGk*|PS^U#4FQa;oq-h+Rh|c*i7!J6))hnYXD`6U6^v z{1DSQFce%+1<|oR@aPJ1DZx^kJ!PWK%fBMb>V63U%@Dw8*v4kiDYiu(7=>}cjn6IS zLh(KecLQC+L&!QV?QOVbx&$PUe~hQ@kqh-G|@1 z5=@j$%U>u6*RJ& 
zShvKO(a(~91E2jEP9V{6)F&6dv(Ytt2mjsQ(lApGREqd2kx-I|(I^5#_!Bv%`gp(e zLAom6#Vcc?F!c#@I-wBH*@yP-{Lc%8kf!4#b{|o9$HX|3ZcH3!<{TjJv8ziHw%gPs zmuOwIF+iR*G39J)im!wm@0s_u_wS^Jicd;3l!V~UG+8Alqec@N(@CJc1@%|-<){gB zKcg>7VgZ~i*5!F>TkhOhW00|}c4BzMS)lxFD_IO#KeU~9zW_#h1ZL07gaJO{AGI9S zgk>8G)A#vVnxos;#9ClK)pYy5f1gbeH`*N2aWUK-A47j|)rcfX!#mj`r&w!>4GDlU zy>5zts?g6G`4$j6*xqq*Sor^_C^(fmRCCV-S%DucY99RVqviws3#6_hwLCdjt<0VS zKkXTAt$jfc0&*S%$FvYm>4ukzs&=P zOXf6m`@}iVY7EzTL%eHn_OMAW)04m1A14k9q*n3RT^>qmn-m!W_w%sIiv_;UH&>uR zl~YS7S>{n|0u@y+pARq(YwJr4EEE=l1|@8ugz(faaV!0@iQ?Li=Kos$76=RuWWsjg zxCJo-rRA%5sa02l6vHl6k#IzS+D-w}y6%$AQv-0c8EhhXBxDhi5nOgQaiPojn8WsW z`K?rC^;mPyeNkCjpMD=n2xc%b>Una|xjdBrSihyT+e~}^?@pW_yJ`uh-~G?fcJ65D z?G45MZC4Ya;?yN)YIa&#Z#jt8nC_mcAKuj`MTHoRSltoAmC6@V%?EOd{2LQaTBX=& zo-O-zy3?2QeSa=}{A0A4)2bc4qI(_04*DIzHk|5VWYSk&1ONKV&))JmT9C zyl9_om*`3aA%u_`!whbJ?229^ll0g(r%t+18-%?-sl|P)5ppOUeJ&{#^iDZ~IuXC@ zxVQ;+Na^?nJQSQqUg}oHy~xpFb_$(DkSt0`53?aF@?x!vE*ant0W)_B&F*TPFt9AM zu>JkKHYt}SxMo9l6^~wP@4qaxfzf6~@;w(ty4J1pi4f1LW>6-S!Z8Hzx3WU?skqYE zDL~$(Ajw4zLI*2%Nci>}iIDVt;`j{8+|=CKjF?pIX9>Ibu|E~TX z6%2|u-OatRtJ4!UH-P2&u9I9u4@~}YYxYoUwLnr1OQDNg*~862C`vzdjgm**iNpO$ z$Ka(ip*feJTrNihtu{1)YbCn83ck#(9>&TC^Rmqc=<)QZK#pCix|%c0oCEiYgF|4} zf_0FN8#>%i+SYCFigurAUDmC!Ox6DluHXS&C@OxiTqOjZ6ToebRK`8BavYIXtZf8E zgF#J_M2`Ezeih*JB}F1ckK^ZnH52C=+!yD3%-OT%4J5&;V*2r)!-va|X($78OKGiq zBwy;5lMP?aOiDt2U_5*+G)#uB6|2^QG=Ls6cZZbT~3BnF9(iYt&^;8d*UwN{ghQ)l}eKK)pYJTZwX+Zy~ z6?f~b4s&^gwuWgMgVE1KP%6Z_72^#lYqtB|(=f;pR_A1LMZ&@4xGrPC0GS{w9?^jb%X?;Lz!8Jk&E6E{W_Yaxsw#VQ0DB zw6=U2j)K>sLEH%^PAznQ1kBDa&zKl0ni z836NVbJ&)*x66bo6H)ZU3Buj+-(QOM#w{dEe8hmbiQ2qEbUJUN_sNn zJc9I?|1K78;8vgBP*gWtX5H73)d@3}TNyXD!e92*ShAI*KUzec zOtCKXY7@WQPHtZeW5@sO;Zvz`JX8|Lq<(+D!xukV@Q7_>E0Z$JaBiTGMDr!0X;>X4 z=S$bPN${`6u#;TxH71!VecKQD{AuvVYdVnm8QXk#i|EOgFcWyPnpx=4AeRlg0yH(O*@t?ddH>SHQKlrV8+-*Q^9N;?KaF zfh5H$2mpU(tqVOZBOsuFU=(C5C1!zP3oN%-1-H@^C1$4yJI|SSZ&3<` zwD1`6pU!3zIJn|_;c@i=Q)QJx8Y8#NT;q-Km|-zVE{cIWH|ZpYxe1(k1W14|0#xD8_C{Y 
zUus2)#}U>Hp=C*9Ak>MC@g$zn1AaXVKB;Yt)QRrQriDSoXx=aH0A zGC|-@j2K7xc{?t8Q~>k!dV1>3;XM|HXuur4U{xhxK+2?A?tA^bIBP*KRpDqd63vem zgT6Md+Y!&F_+)}q9CECClALLy$_+H(awU7oo++`2ok)P!VY*l?F7Db;k^y@2c*RC| z4)FQ(zCKirpdS%UhRkn)Qde{=`D|D(DUEx7=LtG5#|oCpx}Cs3a4{+C1T9Agn#M!p zWfXUjkz=h5NGcbW&mX-Uz@a){O^D}x)0d)Vcr_RpjX|8Tq##GxB?wtoPHSL9;^cjT zDLJUeVQXpIwm2~j4c{j!(8+x;-Te~=dR(c5VXg(pm&QaX^e&uM*Qw{Q8?`%>6PD4L zH9`PYq%iUI^KB+@<3BV>%P$~0OL}@(Toofpw7RgSTp%Cuzq=^D!i$?{F4%0`}+ug&! z?Z!m$4IezWARw-W%Nqf?r^Z(ff1sqyA9YUDoRs-k*797ab^28m9$TRG8?Xqk9Kb!1 zB^^DM`?3a6s6tfD)4dEh%qis88QagQ0FFDD;7GVKcql$>wll)!Mv`fI4}=u2itl5G zrgf?hdusCuTzIghx~wEcB=hrcsExXRf4D}P{v!7g#UGY`wkmzEdo}5l78sM45(GQ2 z3c$$oSzA*aOu(d`9^#qX+oxzkrvl}?W&*7aVk!Zidtc3L-s#6KSDR#JQ2Gg^r}roN1AIm&cWgA<*038f7m zdvMPt;qH7|yCdNvL?|xY*#Uemi5Vy+v}klEWq6#yUC2*NyOnB|Pio@lN{1;G+#rVz z>~jLGcl9Vb4!~#`p~3n{l{!OfOO>30*N(^_;m`DK1?b^v|IAmpPcV#s+Q%F?p$FBS z!oiw*teg{Agg=&E$bOAQlnSncuY*W_dRT;1HKm8&5jOPO z%(S0%w3Pvchc*Z3@n>u{kP2P$9ZnKiR-n|Lk7+WG9qW|-85ytcpn!W|^UI%Plt|fG zyCD9WM$@e+-=sXMXJ3f`^Gz{jGFKQv)f1oxfcu{r;!$Pii}Yc87sf_-f>2Wjv5Q*A zIMm02%N05!ITAI#PUejyX_R-TB-5_ci4RgGSZ+L%ZJ0wklvllh?Sw2V zt=x87JQ;t>|r8DVEmMXM`|1wlg=;B1U9@Ld*2%?FNrY}+_aiS7LIvRC+P=nJnr zPNFQ>f5#K)r(Q+CH4edjm|k;P@(cfM2{?y3FX7a|*KiGJmow1;=pnY{-RDLu%Inm8 zZ?7cblGiUOx0W{nait9hP!o@I1 z@7gxno!1k(tOFJUOYq4E?J(^fie!>F;I|7lg~g71JwRN|2`C5y)Z!%t>#1#H*ds?m zlG#Wlf5t3j;^InayIJb}ShX-%Rzat>R#PV!kaC+cV#s>ZVne*dV7Hc`C^bO7&&|MJ zrNYek){-%zTdxe_y(I4NY^@v+6E1_RU3m z*!dAyhjVw0X9e0@xR*cD-bk~M#~wdKIs#VaF8P46!!6_OA&RP4yLlD@v}Nr-O!Fdu z9@dYwIeZ=M@K?5hyMW2ki3#YD$_q)UIgV7z_vc)wp36=jj*b2eMWrj;d~QnZ0aF|P z5^oXtub*|Rl3fFv^Y8!;JaL-}&!7k9Io@>Spq=zmr47qmh2uL;`qAelsG`JOZz1!b zRM|%clQnrjUHB>43y(Dy%MkT($cwIe_C84zV;}8LZtab&F(sfG75szPZ4p z#p*QNe_ZNkxeAu2s-0_03c7Alm0bmQjoXX1gu;OK5Lr}*!};0{d^^Q=z2%Rlc$Nzo z>wjAtV>f?z)G;h8ih&_Agn8Et!k;xFNN@DZLzQi|h_4IpT*Vt^=Y=9n_t`??k3Id( z;JR(|Iyt}hVG;K1cDtw6fQc;4Pb$4~-5%U5$i}JOzg{+lLS*B{rTY0NRDH7=%glZ| zV*9L34zp&{=GftaE4LnF7;n)MXLAFdi%9v>9h3kuK1N%ojq*8P66DR5jmc-%PAGUu 
zv71ZFz<}S^y~dXmb-jYKH{`>6%az@Vow4PR>S@mjx?H%lB<%ps=4bD_BmP`%t-I4i zh3k`N-W|L4u_MMH@m+)D4TTUotB;IF=S*4*x1~#~LcHUR$~M?e%BD{huZu1be}R9f z4Z!C%n*H#!z9e(0{|=IJaI~|H9X9slx+~Za8~S4*@u=zycf2l$@!9_ikItBji7X+M z2`-^=h65C0pNao7Z8VSR`+O3|L}YLMywGJQj6GIc8yb!23SH%GQ>ZAmK^^W2RJ)vK zgQGq5zJ&q{c|6-X`7*YmPElyI@qf;yf3PHzOLLg!0eT2|_qjB=t;V_G+^=@^S#%-) z1{34ySZZn|<|s|xt0=pMBS*8n(1Ld|-NfbagO&WFNZycDS z0;R-szO38IwZ7Mxsc`Oz`Tu12e(jgkavL7(oQoqrs}F7xPmM6ns)H7K)RYSutKe3P_?XnZ5>Z* z@D_d%1Ln`Nr;$R7ovqn(*6e~pzBggE8v zvz31pV0SKRD!_Fnwkb!Tv7ad4@3{cG|7Ql5`kXTR?T4Dfz?4fLPRSR$EiTbYHEG{z86B(n=gSYUb~;& z4Dv~at_}}P(ES(Kk5>BSC&BhTpACTmC%j({C7o0dHBXc%4oSIvS7RS|KE@ub7?7SU z-VMhQ`rHwo*KGo@!%)QLVPU~lyvZ;AUG;%Np4L&tr~o*m;7S^yJEaV7Ci3?)leQWY zXvp=kM13&|n+M3Kk3Q=S%OPI^6#Hf;A!|!=NpmQ`U|gOFv%JtRgXv%EIw5o4`vt_Q zarg;xwey`i8zOQve|P=B(_&E{?=4ju_|i zK;J22l2p^Cwk+=edRE`acim~Fs{DBV%un7MErMP*PIzzzVI6l(k-2f26bQA{VEAeE zY)-Q7co~qucgYMDvjEK(#UBjulkK0+{v!i$Hrp1yijt;M7T@B26daA7MwxC@2-1bZ z0UJ@lOE6+$!+4DK1QJ+|^6Gh&H{(>kT1usuNaz_KpQ*7ZzJM|9eAinV7Vg?;r#|c7 zMCv6gEsdfzqs)L2v;ByiIe<{{Q0%OC z&NgTf0p`!?AL11&*lDD&lh9$2UuCRfC%y zpi|=tnhrksYn6qw#-D#)9Jl03@vy!)(neF#i236G3i|$E zh7{Vh&pp6(3IFhK9}KlT$yjok?xB_W6-so&uv|#wsZL}yBz4_M@B-A-zn0?U$hooD z_qk_q&xZ%G^_V?Q1V|T}F0bqx4hOZIL>uh)vqrK53 z`pK7LWAt-aqTyZ;<+rG24lS2u0)5b=UxjUC+6)F`Xf_Sn@FrBHN^WJ}^>E)gEZ3_) zvOeqHt)#77x?7{DoTqz6u^7)u-8j;z4uRY(XBj?K5Xa*|+1E6 zlbMZ1S$( zYPKn^H*Rf^sUN1Q+ltL!Y@3m6j+1#e+@<>QBUykKoYICNx#^aU;M_KM) zy?7fMrtlAA9JcFM^P#ryzDoOdSIwAZvKpwNiCCY5BN*;D#3Q-lg`~$_zmT(0bZ&33 zO|?kNk0zJ=TBZW?qi~**aOkM~IM46jRogTNCs0@9ZP4}cMCin?Tc>c2 zM?M;uCI{G;8uRo8>Ji~(E>Z#3qjYI4|C1k?$d(eLo%59vh{5EUKLvl(WTXRp&d2^D z>6Zx-iPP|H9L?7F*MPrfO;P}bKg6$W&+t!>N5>(IkTLJD{YT}}m&>7$lj|NIpA)IxfBP>^*H%9iHyeSZ(So>`s>JB+KTOm0Wo%O=OVb zDpzDrj9(NiQ-2rHVivTO7{VX!dfp`uOLk1OyPl@S0WKDZiccv?0H>ojKJKxc#^b9* zTDtH}idgh%KCJiicJ{@zPas$SJFPWQh#LDE zwH)pHoGwgiq|8!@%nz`37d}qwEn`LVL25;aj7-F{s3IpnTVvFN^<4Un<}+PqXM844 zpEu{^{{0{^=6*j&=nM1np@i?}f?%vMiE*+E+!uX8U67#dYxn-5g@G?|zd_QtLcLud 
ztBK4@-slB!`C8oe0(I)_a30ClQx&dhd`Ha%^|r8;^ZPwl)rlibJ26<^DDW6bn?}PB zWq~#YR*H_c^{Dd-kqo`B=q>abf1RJ>^krc7E7yfst$OFmfj+(0`KjjuR_R*>U7_LD{iCDk}ZGzY@X*UGbl1h{>+JmP%fi#iIhMDe3CQw$mpxSh=sI zLSK;?c?h9-eVu1A06zb(4c&~NLE3#jv-nQ$lOy?(!I!R6CX7`A={GHj!W%X)l$hE9 zCdzL(yFRuoN<&Tx<22 z8MNRUlsX+XKhR*MJa8#7%!XT8`k~_+T-^a}D+JKESXrFDe&tLOP)|tlP=L?az(dIK z;p*0%w233)ol$Lu;o7$w_UAyG@T|POX-(p|60o9r4LdhA&O^ueHOtE=p??Sr8#xA^ zaw0Q1iY!~90UX||M_Ad`tUWMBpWn4M(jhJPV4Jx>6@Rx@dg4I@f+jyjG8X)W%p=Ib zX_on!pe`69{hOElkT1v?ro^5!&y6bpoP{Gp+20OFar>ED=E+pklTgv7fnHGM*LMGS zYQdAF`D8K{K-p(jT=+!p@PQkzQh_-^x+aB{f7$R($MFc!Q(u{RcBLDhLXcMbr6_#*;W|b+eO-D>eR& zc^sfes#s4-AD;2@$lKuyJbi{>GYE(ZbK(*W4};0s>yoGdCX3=LB;0+XjxmDfT~FY# z5w5@b5J#tz1g{Jd(hO<#yRTBH$@9lg4d(({oxoTc`oK|!YQ{tC8C*qU3!6af(9Hr7 zZjuVA_`t)U-GWl%+zO??)o&_NY)-GyG%GD{B}dSsfQigu9yVGoyO)$y_U4R#A?V8| zYuz(sW`qlyL}k~11w_k9?`V*iJbr3wClsrP`g;y$!WeW84;*d#tonYDib zBAYbgpqyBv4#jf-&Tx>asdYD3g-?{l4yPx9tDo)7cac<|~22nR-yIat+y z!dvOf+PTblxg35Ov)%*vVdh7M!IMnslAq(9%AeDgxf0e?>{pBwq zFVVs6y~P#h{5plLd$cNas(2q7ApBIS*Cg7B7ZdrzeKEZ5+09nZXb9j0@A6q5+izNl zw}~zB*@+#0wo50JO>=p4Wr@XAt}Bej#&!^8=V?t%drhEDt3hX`{vl-Je^Q z5L;~uZ#}NQ48h^4X)}vRBuJMOpeJW)q#s73K_h19!r*>0&q($)t7SxQ{kE;K|1wEPsXD~r1{8jg<@CrXg<=#Lp^vA49=B3!}ySvpjrD(Ka;^E5UDl z*#fnHwU}dKBr_BImzznTFXZZh^tS+x_BO_z*h5(`W+HIh7C+_iE0JaZX^M*Ih*AM4 zH(dHQIvWUae!1Djxbu}!aXS05<^ESI`CcXa?Xin>=$ee=cU>LxT2M3%#{>S4tl5rL z&X-p@&*kVzv4(OUL7BcXqgoTD(27%4y|$xtSYKd;%^w|olFA@@vFnHuz>$#?j64?f1YUJ*tTuk$s`kN!ilYk zF|lpiwr$(CF|lpj@AKS~b^nFmdDHLq+O@0ttWQ^0Rfm&?{XQbL)y%kbP<0J}YRf-= z3)}X;_Z-!%fmJ(90_X$v)Tx=Ezz&44)FLe=rFivd)3VHZ?v={)d!CGicv2C6zl*hT zf92O1yCA=eTHG3`a=*w*r!-ey8*?!+R`K)|fY065xKYTXz(nMT`Tm*s^U^AE5aJ7GbN0A~Ap}}B# zoHhA*EGhIV8io!|U*wHQootoQQl3Co-dA0TNy<}fsF z9Y}{-YJ_H#c3~Ijp9*rDwZrr7#1IV1<^BMF&8<`G`iJiRLA6_Q$fkq7W*_wo@Ng*m zQM;?@eG3-kYToA*k+yE~l&?jkbsZF$UukR|;hfy_8OLjZZpluIHw#dtTQHat?RZbn zy>o6nAhg&pxFRxa9hHHzF97-x=momEFLcv2%57Y?*2Cm0uDFHz$*8Q-z9ll7#s6-$0`F78podqSY=J|X!hOry1{T! 
zO`kG^XoEY;8%%Jw>T6D*2GjhE034Te*<=v8rtSIT2_f9l7iQtzRLOr$8*G=!vG(~M zi@-3~tu7Hrw>@CW_EL0aLzRDdCwK||sQC}xLECtq_WtICWO&tyRDqQ# z$XcRiEHu$$fAJ;V?l86d3NM5b3!NZM_CH5A{Kyd5=3YmNdAt(WEviU=OyD50-SzR=hg-$fl^A+G#-)8zOGbB!Amv07Ixk*S!vgmyZO~+8-J!T3*IU@iV+lQ> zcCk(^nH7AS43GYQ?;P>?M73Vb)VAbL5s6VSe^6)apJg>{Ae|zsFQ%n7HB%o1k6o{` zdJgNW#D1QeWh>u+%!vCWPHi;4J4DGwmu0g>eaKJt- zf+2O68ve0RpWf^X$9>7tqQ9EG1)0&7w`YE|yRFprXO9Qm7|V|#)5NHJYrk8(fUr3| z`0Dhbz)OHWI1`jBd_3AvLiKqyHOG}EA@7k*GH|}qPB5eg2tuFE_&jM`QJWM2cO40y zo`P%ij!rs4#%Ly5AQV3+?bjJZzT@Y$!l9;P(RF9c%y-5hgT5Ft$nP0DW?^@3EtSF0 zzTaBL-FC#C;09BD+x+pwBt$YQBU90teA)`%@=T zEzAa-%X(ULBa@OERbrx*J<}F97w%gk+!I4nxxUu{JKtplXHvOQd=6OTT-I5c@dl6 ztTh=_=W@mVNA2%^AA^Z_u1S(ci7!IO6oU#hhX>%}t#eA`aqwt|HE}+YZ{dQ%vCgA% zblJ^ZE1NY6+!bybYDJLJw=>eLO57@jN`l5~>hG$!m{66OhNg<5PtWW9%@d*eX-m=5{;;`Nprg#97*LI} z^r%Z*$%pC{0B40Ty?j9_W5j!&8@mSu48_wp4AY_fx#DHd*?{4Z@jBY*c55w+%WBjs zayDU>7$)!r#-Nq3luEkWYkc5DI_*1O7tJQMc7`?46cX;2yA^xxco>uE030*JnztmP zY~qa$?Lh@JDzKNzf1gHf{ZQgp{&VO;cQ+q*jN>Bqr@(W3MGGsAjqbU7-2|(g@)I?bMn^-T!=a5lFPY}It*j$0TWXSxN+Kyv?KpD& z%vMSy1CE=oSfeYcbqeP9eeR-2#9FGLjnU~Tfv8mqS2KAKubV~PI&>#H_qwU~Pt5}y z)MEHPu~m32hp^Z&u&Sc}dPI1xYI1jDhL?apgWbQ+?+^78=-@d;E)LH~%VTb~&P}gQ z+gL!4rGbuDH3X>OZVT>gJ)lOA_Ecs!wyiJDDz+Z$74JAZb_2K+xA;oHRP_LTvVzNl zdjg;syszU_s{9|PqLCWU^CCD;>Iv1v0uI#c_6*Bc9Xe6FpY}q)OiF6krS$P!GjJlA zrp~LSn+J?~zWcj7bV0#D{+N<-e`M#(xJyfl<&~R%9KZFKq>1r8nT`_+^7Hi;rzh}qApmyi#0nIrh z*IMzH@r@bCk~lZ|2Jgit#H|OrJ~t(BLd{1i#KQbg)0qi* z7HUOLfe2I)Ha0XIgP=HZLFej3C3@6!#Ba6izaPV`Cj|idtTN}xy_~^sK_QX1%U{NF zJ>Bx*(}pm}>6{7`G=x?Co>{(2PFi8a5ivz1YdPDf%ss~OF#m7LX=>kai(#@Ga616! 
ze{H#eCuZlkr6({ss7O=Ma_1sN6}%i(^xbpm44`c(0ID&6h%z6hZLBQ*7tP%wsw zF-->UtvEgO_Trr#voQF+&mAKjcau`B-|frg;K=e=tMo|V&wh?beaBVE!M&Hg6tFbgQNgB;)xY_ zk4Qcja&5zL_W!-L>5_mCeo-Y+?HW*QhU5nEdhbPT|G!a}SHo!K15lun{wX*x~312=A14$KC^>CG+85p`{{A8f}3LHSx; zqv2@oo0zjmM%)C3@|u#Vq4R~jPQ*~L0pNt`5Z~o}IOkZ1z{@jP-Ixnb$PL@X{5Mox zhDPz{6r0f3Dtf603xCHSjngT!9?p8ZKOk6`wVoj%6=&h`hBsQ@{pKuJc9FfUQW}oa z_xJw|bA$RFAyu!fV)WUakcuI`Lpy6g@FFbt+&>@G1WaMiP=&wCbV=X!4oO2H>6j*J za6s+=`2P`{->19#9Y6(-N&K(o@IDtJ7Br^6jUx^CVmL*%#t%d1>g>eS5`&W8XAJmy z|4K+l21cwR#q_CQavZ1M+r=CdfU|z>VpfrBs(Tcu22_~NQXe>Wp^d*}GB-gr%Kw;Ir1HW%y{^M+oY3GPCmyyawX~P_V6YwM{`tHZ(V=#m3KsjU$ z&o1V#@r+@U>xhy*N<*-({y@b@4&~#RE~42ouO{h2fyj#BaOT!M+ruzrKs*yB+yKBS z?x7c8VOR2&9;wZ0{^ShU-sW4%V*E%GY)@&_Zg;*t^8fXz-)A!52!Rtw5D1g9TZ?1A z7)d|v0ZNVyhE+LF0>IJoFq9w5P*XdAe}?`#w*JYDiLA>RZ3>d0%0>5N-rmO-}kx0@>)tsoW}G6H!f_B=HcTKD=^bEZ{>MbQ9a(> zh)Ws5!S|BE{Sci0d?5w!aY_Y@?jUD)ld&UUZVpHqJ*-r|h-pqWbQ+eU$v9r`|R)J0^;xr3d`^E45 zPRbtyCl`ZPVl|%Jb)al+^HTE6PBL59yHNHOc$Y=a4MgY#Lo&2r(s9@q&Llq2-D8%%30o%>t zyNe?^BkR-&@Z5Qw%Vbkbaoa&>sdWgBGf0Ea9C4A+ zmx=~sXV4SR;#VT`W|4 ze?1@{i02y^05}ITuk3gwJD(3gYjn<8KB`F-Cfc7ahQY?Af1ij3%~fH}=#RHQXw-az zF(ZoJ-`5`t=KH7ff2Jb1n{k1oXV!ejp+e3pl_4nD*N!C}rjXL;68lsh^hQ}Y^LN&knEq9 z_XxoGA#5Q)Hbr@tGv@e0tvETo5IbOz-vCmt=v-7b}VfS3_Cf9-z;SerU3#c=g3YFgH`sG+6$UR6@vf%8pRC!{W#rZgVG2 zYPY@^aCz|}lO!2GSr2+Kb%3Zikkg{$nNSX-2ry0Ux6XsqMlLd;yByBVU0tcR4o}pC zau?~ty#&8k-Z^)4q8To+(TgV?*H~nkWk!j>hH(OoD3!S>1k_FyHUM9$mwW7q<# ztrjoYM#62`{k})@)Ae8{)MvQ(iA!`2nY<&D_yap<{Ra8!jJ7)3`Tb}IQFfQ36G`>g z$repMf^pwupmP^rsfDFRME6!H3i@eY5rB_+K?Jp8(;qB!Wq($+H|Ns2ZZKI3t-n>P zzsJ~q>TO0N9#U1H#Ub+XNIarpk-6ECP&8Uq;Gf1jm9VkP5Ad{#0B`^@{m(1~mHHrz z+`s;mfYxe07(8j1bsvuwF5yraR;z05&xMs7WW?~^?sj;u1w}}e@P}}^v5!RC4n8H6 z5cCbJzuo43&yyKcZNqcV8fM~tHPGt{z<*-yZQTQfZH`8v4VpZnL~MsK|BwqtI%R}% zccW3WbHS9HQVN=YNP2qFO*7CNMN7;9=tF!Yt3&}?PuMX-Dr7=sFBbI!jR&6Zcz@3a z*^K7pVVCJIA(RtBjjj_X9~8gBF9dozu(Q}xU(H|M)j)N4ByQjN`An@N=Kp3nx}!u6 zH|0)ul% z8UEolOE#0+HP7<%u&Cs&CWeTlV-b+GRf3Z#QU7{*X@EYU`o7O3K3Kpjzq!`YhIWnS 
zdWxz07NjQwrgUUOf1WRc4m1AU2zN{PsyOQoitQ;7YrfkcDEWALXOkDk-FW2vzPG{> z2@f_I%4Rl|%ANLL16rcxqm6xk?$E$jn;b;Zp&K!F$&>eRVgz}5m_0Qx)zflVQYXPa z2B9nmBiWI_;jF?1*cD=JgHP-JNMnHhTWubt1|&@Tb?md&&m#Tv^Kh@h&wHc2y7(YF;C!cnWf6#0&K07TT_ZLvZNN? z?Un8VjG!@z{c4b@m97ScF0ic~Y*~Mt255eKzHGGBr5RtD;r6gd3H1W_cs<@y9w~_+ zYcyYofh48co!_EC8o%R75cf%RB4@F?ZM^w(H(o=UeiYR!eEnCaP5slgNt`eGpw0hr z%Y2%|`+L7vzi>zFwc-9=cigEh?pKvJ=Yr1yTDrJ|B-gZKYSxQnID`#=P1Rk}@Asgp z<1{j{2>Hy&0PaK9pSnM5h`Q>60`SQ?oY-4rrTWPk{$;ivj>qTy^KJAEe?f!{OKNa) zuM7QBYU-F=&?lqrzoA#!UK5(|lud#;96@(c&P{qaYhwZ5es7-;PsbH%pi1`Tu#6?> z*asRlAh)o->tRwJ453niA42T&ecMJ&V$h-uR=HqX{n+eu|L<$A!Xll1e|DMp`H=w- zKOgGwTjbbk=)s{T1h-xILB&-cz8pDlKrik z;M>AWIeDJbo!xYe{CEHyXuIlQ;Z}T@2vqhDJsC$Np0S>a6_3|OOB~Wrnl4cDoETi4Z@4UJb9*y{i*UyY5wtCvzZk+bGL3tlB zWRLSN&tV0%@nZ@^ep!90Dh=sT6RB)M?G*y`Okn`;gqwaM^#b!#GySlLQ3a8;0j zwNi7hcger&!=K1F|G#JN6BdRibqQSPp{WJ@Hi|Yr`$wg^tC+KT`=CrNPt%7^X)+1} zEjbk#=$<0*OG)DrpwO^=uaWCtvUmZ0mxr-(soaO|sBrtq`!Px@(fY!Wsj?-l-?VH9 z=_9tXY8e>llo2qxiyBU&#IdEXDy`HK23&Pg6-D|vJ^7SuAPT^la%KPG_LODcu>Why zYKgN{Z2bt^_Usyk=*uz~(S!y)E3=HwRFoUTych%OY0nv00a~`V4ttpPMvrro^uUOILzhR*Sq!yxuYLg8*Ksb)bCJ$?4gblPIOXm*Xp%`wDl`7{8Jl6tEIB4 z75R=sdiLBuPb+3e4rYNT{d2XFpHc@w@44t1*tG{Wre1XPdG~a}=oKpLPV|{~L|r<2 zStG{^Rl*@)|8SK+j!7h@WC8jtH;Ipil*UN{dDxx(`E2we^ydAiNqoBigRfA~_uk6V z6N)68DHI@ayL8Gg;3c~JOsYSGkxh3)iRqs=+402qonHmO{m(3pf()m0W5;Tui*~o1 zZIDWkl}B#Yyjy7XpII0~Yj+j{=Bd|B#)s~O1SNVVkez%YdcidTnH=#b2@+u&rjxAS z^@kuWg&P?3u_C=Kkv^jLOnh3Nw6ZaAS&KkL$rKhad%b^yL3sE;y59bKRePv?#y+Hy z3JQGG{7AP3ro1Ppk7oKi&*s|i1cVwmqC0CyaHsWn7|7;6az-V0rG0e>d!Z0L=OBpX z%tdeJkK5I(Uc&VhyBq~hpYz!ZccIKM`8jxnOIrwtpV-0)iBRbvLsWHEYJF{NuKlvq z;f|1@$OdM6A$9J#CgZ3d4z~?Z{LobAa&_ni3lBTVU_h9j^+ghw6g?ib7Ti|FUhAKGgnuA2qB@61GSp5G@8qUBFzrzhql!&2BQDbn-_@oD}y0@u&xl z4=Mp{4rRUZAP2zHpXb89e$CqGN|Sq@XMy9Gr*%@*CC0T5*iWl<;klIQdAFk8yQceV zwzWhy7tpYB3Zw+(%q8d=Dns79rk_Thk^s9*bq9R{<@D9vSElsnOeQm{9u%Kx<2Mco z@J0QC)SIaALkg9<;h$^tU0{H0qv|AvlsKmhS znwk_CNB@gfy;U_K$3862{Yw+)W7WsKimjlSCKZzyXS-yk#%O@Sq3l4tU=8CK#f8Vg 
zR~g9;;N#%9w*S%UDXMc2sFq9x<7SM}SgJEwpBCjIz6!-iz@p}T_x#>>B&40}Swi7J ze@B>>IHlUybwA+#UtN$@Sm5_LSK+Sjm`?>sckp$O!_bo!Iimrp%m_1)g<#pz8l#L` z0J%@x{ZZ<`aQ66ol=K2Tf-CU?Wxfu*-g6B0o%QVt?>i3RL#wR3K5oOkqCrLpwf-(i*KZqp^b?p|l>tc$8Y5X7Qcl`A^cM=#iH0qi!Fs-kO|Bug00%+7EGHABKxTlBM;`p#A(>vtFh`}h*f%PZP8YD7XvYW@U-Bn} zO}>QOpk z<0;Gbm!KJ;QN+ej#j&2NPap0)$yJf|HVg;g@ZwjC7PYC^-IP5yqjP=z6_v}#Ek8Ia z<2JL;m@5?DfN#ii{<+YPvun!3S;2SqZ(ybB(H|jfj(svup@wr&wgTXHF)bn_JNWx_ z?FW2}AeURD+F?U+gXY3b%eZj8 z9b%GRti49m&dqmsnowtL7lsGF`)UjJmD_JK37W^VXw1FBv+Ulf?HUIiQWN+aG*9aB zT?NtX{3Cj4FrOS1f@_NE*fWZ1Eh}W-VuX@J-1cZ9G+H15d@P^-3DD3yz{0uXj=m|K zHc)Q*akIR&Zi4yBs@w{bv!Tx)NZ?&2i#?V#zNW7C5Yy%PaFT+}%vXHUtL%eyn|f)rD7Fv_Jl!ELWWpJ zL`BgH19WG@2W>4X*&*1#BZe@ere!8H+`hT*Jd!H~M!NJtiPK#ze46qvn0n`#=7YxM zN*k|Kac8co8qL~eIytKBRJ2FF&})m`$H?|#g6Fw>vL6SAD1`$IlwRNU#*c#rp1j%Q zKGn2gSVKY9tv3uk+iWX}VK2hO-jJ8!x>tb%sXv_1^&3IkNj&HojOor15`1-ucU!DG zGZ*A%9G?ODlzvogh0SUO5F<32<;j>KNV6U&a?ij*P@lAs7aF!o)l5axe7u0Ub~T3{ zQ#@%a-6lkq!}sqH7VgFUU^(j9{I1*i5|I!*ppcocD#T%u#pzK}OXSHdc)WT_dADos zQ2Lq5XTMr|Od79QG1QDlt9U_OOd%q`1H^gI%zMjW`RLzS4cHe&dfpd8+aj(XfpH}#3ssS(=NQk2?9W*?%6DTXo|UoO@-i^# zk*))iY2S~>CT9CwfwMfkDf}=KT?Cp1&<7y*|ICtbaQ^IORh21Jvoj&VIO}FB$$2z< zm7K|-L&$835$-OC;n3bxYzkkXahQI}fBI7Bf|rYIDp00#I0SRBpn+}xuw?nQ{qu8y zLw#roA>RN+ZqyS8s_y<3RnPypAh7#G&UAKd9h-~ns|;~JmQW;sEgG_Tir^7K8_jR4 z?c7Jw?dHc@2&n{y1k}!mSMSwgvD~;!f)P5B378ZFWwP^edJ0|y*{`3 z%LY>AGZ93o%${JazJ;S6(4+2s`b&V$!blxxvin(OzpO^cG=lrH_jClCn?};3ZCK44 z4p6tMo_iHcvJbVpnFuy)1@n(uew!@c$FX|~H~hKm+Kr*39|GVk82JL_q?Dds%l#0V zq+diYb^6-JhQs2$A4lkC+sF=PS%_AcbnW*N!oWCWX;%n4-tk)uR6CB#5r^7=`3?Fz z8lVp!P&(+u;QeN@tnWblk4QKWT}Y6|Qa<7+#JG^wU{AK@oAsm-##7Q4$HQd9t%T$#YoRKroF!fm>LbQT=pZ)%ASu<3dxBRv{#3G@;zeA=NN6FI#bbazHm)vb zCAzHPpTwgn>;yE1_AO_q8~=R6@DFs3-*N@M{aqs-89mJ329 zw>2^OIEzJJf6tSFEw=m${93z_2p=6_hRXMkxfYRT6r8OWejof&EctoLNwThipz&se zv>1D(<@u5izOZyn0TfmyHM8;ACA40u573A4JiGYP{9c6tvSjR#$e~IhEoKLcYNaGH zhIcV<@1rSy`D;A1nW9Cwk18?R z!rqPP1fhIp1k(_t#^ML!VA8lD`J1CD&{vwau`-0z>!N{N*I8AZTt*<#cKy3H9s{yK7s?jKM_vf`Tl; 
zt~>;oBuwev+-6F9rkoL0DG-3pLq*E89-lD>a2HP_n&r6-l%onj)M+=dKx7qBkV_A% z7Qd&jA5Htxb&FAi1fBW(2E;i$B~5*CptT$7B@v#)W?*=!JpLK& zRZl!1ksW@V)HnMr8Y2WF_%Nh@D{!2g-az`WOeHIP3SEI|x9gAii;zl+mL23x55T8} zS*xkQ4n+)|= ze;%{CZkKWt8xey904L}3L0t02bh6I1%EWEW7nC?_1dAMXL>KCa81yf@8_{a3lP;yB?T%4Mw{(Zv=l#bG|UeZ=w;85es_grh6qFl<==KJuOM5}1cD5dqV7R}K1WLa4zmj8Yns6E68PYU z;Orq~)&_7F)Nol5?penHMd%MJL|<$QcOm}}QOEw!?J}U8>nZmull|e*r$e)lK_dp( z^%SBHo#VKTT0w}(VLH|pB^U>+XI;lc1T@i@)J%1XlfMt|`glNHbhes_uFmIyDE41v z<&`oneH%~Q{qo1B(3tf3K#+Kk#>e5*4yx^>Js+$OJ^RmVc5>sl)Ownmt&4J zlo$XuHs)vha<@_rpj5a(s!EsJ$2vEO(b055faZ%eyLqg!kOJVWS}9ry)6ir8GuCVd zVpo+2YtcVe8xyI#Uhz|Jl$T_a$1$n!!0o-Rl{U)F?fD5a7$giCqGf5jbGlOvkL|QK z4Di?6FS#j@tG=RyeBQ9I+Q~FMlT&^>aamkTI&6!{)nqZRV^%nSU}#anME~Clg(vU3f#b?KBmBEeVi5v={MAw7UkaIOT!X@b>LqbPR33c;lDgb=S4 zU1Lj0KeIrOyj=EQL6~^iOigB5&2<+KBhWz>)uyo>Gu|2{DHh>*a{XGo@A-8RXl>&} zeQ@vxz6KEJJiEk_H5OHeV3JI0Mqp|;GV@tglSKhyRztN#KM5RB^ybaG*0V-77A`+M zS#k1}TtZht0CugIpLp^)gk}@<{HuX@Ft3jqD9PkVZ;LD5{D(GL1i=el$Ud{WzAR;Y zK&}?n7yMyq$EsrCg-qz3wz{!&@bF*l!T$w1r@Qb@EK2vEP^LrW>?dzS=kf>?U}w1& zq;9A`xW$frejfkqnIoQ-duwZKO{+11{1W4~)!12gDM_b)w_|U?`i@|Pt>k=Ay2ng? 
zWQt?_$a~un+`pLlv*@=LGrxS{hs>AO;X-%lI$EL|?!~D5kX(|T{(G0q{|ao4E2Y4Q zAc^Js+>MR@dnHL{G8z1%w$6bS-1N5OW#XL=_+M`kVM_Mg$JE;oQ;-5_o-34ri{EOx z2@E%^3*F>Vg$4HxKNKXd2@Hq;b~!NkVZ2omW~Co}>|uf6=$X7BPaxvGycXb~aZRq) za~H?zxRdTrRY`qe6|`|g9FfaHdH)#tN(L#{6H;`~j{L4;bfGvbua?Fb zV-X3RPI0hNy&>ppws)^K@TB_6{WGa<%+?YL3@T~G!EYrAvP3U8^&#gz=kE`LMh@V^ zFd`1cMp1%+lA5Ng{1d48OBOcLAS>E+mTL&qn1;>x$APj|*CmnM#Gc^uXt#8435i1Y zKvidR=w*{QnDY~d3;?IVg-VoL6C4M^mc77`A@=+NZ|G|T%l}nhMeAA!4W8DvJo%=% zh#6MZuOn5RaMet8htR^$dCDMLsyq5u6X(SDcgwnXH%r9WsJ-hpC0EcQTB@NW1n18kd{QgnZfmT6= z7M!WWo~$KXR|_E?Z^4}uF#i}gQiA6PbK;Y$fj03d8zMRGDT83&3m+)t@aHObWi5GZ zv66RJR^Sq8t7&l|9BZHg3Y{ zzq_P>=cJhx8L>#wClk8j{>k%V!7Kouv*th7HNsxNpGZMf>g}Nj#(z5MZaK-|E*QEc zMM`?ql=OIdse18kuK1C*J*LtRjK_mCx<{kU^&bCu1+L*66c?YzcmX1VZ#` zM>piZ=L)f7Fj3TzFN-=T0Cw?LIk$!3tMtE?o&-oJnsLmXg)-UxM4KnvQFE?J3ycS?8?%P1&=o0wCP~#vruV+ncb>Q zH0r8@;=hYoAQ$lbgOm2Fy}WyjUHR2yCUYmcr528?GRb>YSV+GY+!u##F5uhmTPm=s z552?k%s&rB5$Mkc=zK@AhlQ7!I8Du2WAascAVe@vpftxDHaYJ zTzw#N9Y7NDm;ro93wPkPArMY>P0Eg_iR{f+Zm-x;_g0iK9?2uzR4kb}>sJUP z8+(B*#7cFbb=$~VQ)cb#p*CE81t(3Z6OI{lli7V$R!3;VIOKyjAQCFb6wU$K*4nC( zzpB6Mn$-1_&05m?z2!ieHWVC6F!SR*Hr5_5s=Hq(P1Qn_4w_x%CP`=41#Wqllb!@OvQI^`pgABAA?-d4bxm62mT;p>9v#~MSVp$&yJiGj)N|hylUCPBA;!|0qh5a35efg< zWry{|wKk?(R<3tk9fJtv6_pZxashvhNveWFi~O?qWTrunveXcY64F5@#?0n7bKXF& zwZEC{N*6qJKw$&;%SW31?sHH{-5hPLBQ{TuVCmq@Zg7x@K zzu;DwGsG{(*OTPjm;Hi5|7!ZtJEpFzfEIuw@FVzYfHXpzwaR_UXNj67>K&*fWd~Y8 zfAy8`o#7>Y+5*&o^_8A7grm04QlVX=9BO=XLZ-4C8j?aTUNZq)7~pB|=Rzo6=+8g3 zyEc|#WKp_#!7cf)c5)AYPo83en|4u#mNI_Yc7oW3HzplR!2=_rs(Rx{gTINO1UvJh6izHq<)qvB~{9$~86GNYq?OAPVLx4aU^CZR)~{QF)e zJ0QdX)VG(;G-R{4K6}&U7RHtHqJ_2L4cj1eAe7^4epmsRfTaw^Z|>iuegik*P}aUC zk4rrg_=th>Ja9`355Qm5xk}^#8s}o2;MwUChIk8b^6<6Tfw$4+v7=|8k#b)htI4a% zrA?}lvmt>{n-$qh7C1}c0uA1SDVr7gg(C*y0Da1kM-_K(9z7toV8aREC||q;vQr(P ztS&3nt_y)7lmdgJ!l}mQa&P>^ju|v3KTy94tM%0#2tWy1$vx@A=Pwbz|2KwK6q*q% zlDW?`3+eiyl+v--TldWGg`r@7VqK=j_1TJUB-~rFeP)dgDG7KH4GH?$YWFb^2}nff zX%BbB41f38RkT2^i@Y$|^ua<~qLRqDk7={eC+qRd411OF(}TIrj9;aJ3O}bOW-L_( 
zz!h30TFpqdHLr;?0#*2U%ZYdwE&%Le>DEwu^%na5?g`<7fpAY~ews+R3xgw^Jg9ty zXSUVfKKGJ{JD8(~!|3c+3Kzg60(zDNCRV-azlJnECE*x@$WNaep8^e5n%)*Coq_hlN0;Nw{s6XH$X*Q0z-a%hk|hH(MsOg)h5MuvS7>-JpQII>-K9d6=$v3og%`H#PSk}epV z#y{CCL49oY9V@q0C(D?|P2IvIGJaom@V(*V zx7@K1os=}2ARuZ>J?u>d?qbE%&GG@AHmglhJ0H0Halp8lSz$i_-ABUN^FJf{^7_6$jn{ zB2^DrnGyKsk5!uAgJp6QhP}~94<2Xx%jP)rUnn^K{uxU906w3eyhV(f`5?fUZQMOG z2DY+>Jw@@xV1!DJAlO&WugU7&o$(ZkCL07gI3(I_K8_ z@l!snq~O97s#wz2!y(}rM$M;FP;8|TeDrngdvhq}4rEm0T(2|TsTD}C54K}kU>xz6 z({g{X32L-UoxuoX1tm(LZO zgM}d~>sASC0(&x|Gs0% zY2pDi2&rf-E1^xa+YU5=*5RaLslz2P^X4pr0GT4j)YH@P&68s(O2S8tUKw#vM6^)2 zQSmLfM;muF0Ox;g`7_tylo>u$VnoN0R1)!&2>nA3^bW+oy_JN5QW;+JhJwm0dG`HG z=3Q+VyLOIR_HzJdSEU_Pe4V$EW3gP(P7h#^;Z_$VTS|p#daBd`_`uL&O%wqsi+~e< z^Tn|ESDxiR3_aRN@W7i@aD_#^8f(>Gu@C>8ByUgt(V3t614VZ)e(%%dct{W)Sq$Om z1b9i;`5!bIa5{!)!IiFo(~Wj1RHd2=rxuC1gXOsRwpL%i^P!agA*!0?FO#iS|Hkwe z!&%US1MumiU_39_o%TrYI}7Y$NmF(Yw_afX^56ZXN4#M29*JA4sMa6yGG3~o94l@?4I3V4@A?a0zaywZ6 zviWMQ*n6{J_gglkXgP_;p0xbtY`H*)0~>SKI%UqjLr(hp4uukXHN z_R?5Fl-lR&ee0`#egly{(`95}!LCGORNTTfmsm(IZ{YjmC1M z?69-z&%T@m!mMP4EA~5~BJl2JvpNcGqVwe)!2WWie3&E)-A1wL;k7oAOkRnfHWipc zV)fqhMo#vo1BPi?H_z3KrZQHx?TBl!*sC3$H4Gc&@h?)jInvfh;MH{k;F#pk!>6i+ zp4}CNC9*eBUSXe?q52AjiuW_6)9ye(8>lr{bR2SK_oT52-{wRy%^ zdk4|dYhnVxf#2bOzdQlTQ=rV7pCzN&xlO^UWEhPf)`>-I=5;6+>&GZw<-3rh%)^A& zQWB=2HX3qpiO<$QGV|pVXl%p3{r=yW-FZaX8pO7OEB@O2GMpQ^aw`@*405R;#4cTU z%;+SUXAw03X5|?a$aQ!H%H58d#kW;umAytUkmYy1n44bwJ`bJ~T_}7QdmH@XIk(DR zV8)}XcRKB1Ac-z?(&+3*M+p5xFW_e8pie3qjk#i#IoI_>k1z`9Ot{4D+!ebS)W`gN ze_Hsk`p=`P)N7D*EmHuEVnXPN;DZ5#xo;zvaKn)B#N-52mV>~B`55#m&4AXO?3UM= zw#x79391GvFGqn%*<&7{k76Pz6O44xZ>WY>X(($C5iL+dTT0<^;mg+eIpW=FQ@jHC z3wK+`Qp=YRk*<;=^!=14+?D*M^--Y1M$nr*`0H;RrFPMVHQOGU;kD|-m6*ko%5F~l zx$Hken^ieS!5hbaJMrsnpCz&yTuz5qDyoS|GN<1t2nWN~foZp@<4GEy0XRwMtj-mY zKfSXPe)s8mCwK3JuMR9a=kiN?D3&zWH+Wc#vG~OJLx9=!g0+(w)V_`*sbsie>zTaw z4aYK7?nOrfa3Fg|aZ{AHN6s5B%`NhPQ)y>f0;6Xw;qts_lK=88be> z5-WodEsT%J>?8bn81@t@PNw%9&+Y{5_i#!=EF=7WAns>ooe~97wesy>#SlK4em&Ss 
zQNrWt{KO)ue$N9vwoB1?W$>FV}>=xRm}iCv8XxeC-lLV(^`Rc z;Kkt7vq(Sbu?^-jl&Xu|(w~nqulu>X>TYGiHHl?^CB7RDl2Ob2>&Wp=)K7nua?8?x zeJ@Zb|{v?BQ`{1 zI*KFCV`~>uzV?9DCusTf>ea7g*mG;4K_!=Vo%R<<DA>5< zrFq0q7{xM{L;FRnW91~HnyL*_9*o4crYyN3Tg6K%unu2A$Nz^`c2e*Qi}8IULY1Mr z)4k~@K<5)&Y^Vwv_WaYJX*tm!iezWaP1bJtu4CqpM|6qnEo1`J(ZAIGw6&CpY=yg} zgx_h8kE>Il{XqQawb1w@=wO5cuuEJoQhfk)Zj~8_9ZcVXeV1{!R&AhEHR!HvF^cn% zVnQCwl6R->?_bw48e&_gUYgE|LsDfNLBMA>bB>%UY&l~X;ec%#d~N|RM_F%r_RUJtTW`o8n( z09yYu3u4hno@j|H5KmL&8pzFum3pBBu%0@lCug#>saSvL4&FMfU!CzYwATSn=wlT; z5W3uB!KIEISH@8>f6Tt==J&aKeNallQ?!}rgIGNAjfyD;bKF@AYmvxCL(-u#nc{_N z#FNEB1sK7wl+NFH>5sf+==$UQ8+d%i_LpwVD=L!d_ngw?KwFw(IQ7gp?E%a@qe30# znP=#N2ZRKsB)0%7WDjq`uK?3ImX}2-qh) zEJrsfJ>53TKi+zz`+XHpI8ZN&s8;VF9(&jsbASSP`e`9&fys z4rH;KF_lP&M-VA`MV;DY0#s*i+2|Cnw~GeQh^dr1JFRqq0{E@mT;A>ygmf^@90y1K z4|CV_pV#+vV;gO3tFfIlwr$&1W81cE+qT&xjT<*k-tWcpynn;*YTvET%$hmp>@$1K zY%c>iV&L=sH-;`~kagUG`33Bo;j{d7*RtJDU**?r@laCi~Z|YP~f(F|whMBMC!23bs-5+B{hoaEbx|J{ctw%(za*rmX+EF~iR`ap; z@=N)>bmK%L@S_?T@Yy03KEMUECHd+0z;n=%Z?Nn$BVl}ibuDjqd@`KTJV!keuAj<( z5C~%@<-L?vQqSS9=UyzLLpTtjcR1$*Ng*Xn3eoz&WN&Lt11l&X4=1fCm}whjEzf}S z>zo5rGH&PI?$OPAs*~g6P+eQY14V6w@Enbe?_msZwTC-87U_s>-yu!FeVVwowXS1QA5BJJ~XDW>#l_pQ>Q~KN|sPM54}AW zYo&>k{9yJEJWNpR^;DFM!8_bXG5$(^y3wA6bhCH$pSt;huvaK6_Ve>k!M4Sx)`PS1X0l&$8Y@h3-Oqkz38ie|8BudnuF5I}IlANjk zbHI1gPA78)SXUohVK9<&q&)~pu$(Gm-*5OS){MhZM#ObMUi7#zE}E`!Lf;Q|j$09O zR)dvK_NUS}iD_C8$$?w4c(*UTsb72&;Fnz6L4^VbF@*NlQ4NQ+p|?kgA(y~?xXqF2YzXC8Qc4l~ zd~GM|bo^Tx;WQ8a@qS3@AC76GomFR}9A7E~(QtJJ=eX7>hlGOEb}@8E0zBEXUmrQA zg|`5EOs}VH9%gjyl{Ke@;`NivD82t>=Oof!IxnT<9BM_*ZCF5Ue!$Xxhj(n%T^2x= z(L!ECF0Lj*A%}+3L$O7p0qSHjIr4hgO4b6remV?gz{U1t^r$5SOrJVm{7Gq`DCLFd&x<92PK1HiHN zB)vDkAU1gOIHNiF%|>aH#!GfS4OcbGc4xbah{0hDcFKbZrc8w^7}DH>KM`}?EP?bK z9G#K!ddC|Y_KzEQejy#6!L#@`UxVG`hqTD3=>B!bTNG;upJv(!Mk@|=Jd0@qy#7I(=ciP`;B7UN&AKg2FVF3-XVqqSKSL~#`%dB$zA3MH{m{2$A1zrO zg}QYyvj*UF4?x_<$$cC92_c&H?bnjgc?S` zfsB>32Vqp93B^lMh*fSufn>bu8X$j|kGAM)QC%uXyqySjAS=4I3K9LopME{R)JCqC 
zT~%!rg&?w)ACWSRc_TFqD=G<%Ew*J(ageokVBDAg?oT$p1mN)KWYbG>sJ`e5KU!vJ z__~!P4}9R&d%=~Brqx5XQsGH$9x0h_8qUPl1(ShF9bP=qK^h_S@}jG&ga@PubS47# z8w7mMLt)c{-Pp%NRnLN0sRVNBpiK?kRWfULguLd0NB9U5=u0`Ur4(Y^c;P?V(AgHN zevO0C+?$}UC|SscW&rgPJNK;C3>6%^c(3?}cOnESh}XZBm@zdp(y=c5^U*+zdE`h`2?LGRt3wyf1fR{g=s1DPmRw4i#s6PbI^gpu@ zTlZZnd`)8Ecit&pnqDA9{j{MLC|;+lniWrLOj(0aw_)5a?XFj%%D()*68XucK9&FL z?XaTwah0>Qg2i&j7r+m3f98_HMN+yobq|*_%o7jfCB0{vAVfjjf>-HDT5qbYe(m>K zSK>KvNPGCc_BV$b)v~7=eYGx*kLjfR7l;U8e?cCb6D`^D$k+>3+Pd89Ri+M0RERvk zyGt%2SPP^1-1L5Ld8M}+t^d^6@&X%QR$?=l8NQ|}UY~7hpg3mg$N%5MBGI4XuwLQy z%(6XV?*F2Nk<9_4AzdVd`EGUV^pmJ}C07=yAYUpHgp#+u=9*pe36gQ6)FQZKX1Z{0 zzVrAymIok0gf3rn8pD?V?&r~_=2HVzy-0c;)=ctc?p`A1CRf_Q(6tES49xbMP1|6y zwbmK)Pazl*SKTl6yi&naoe+{tbpiY+o!!ZQozFN_FdIje**MH zv|DIjaCD3JSiuPhK9ubpsBi~zz$)l?RrdeL;!R$d-GRw=AabP@ z0EeEXIX`yNmUnxYe*J9NupulN%sAeu`~xbTh_!Y8xZ1ATBBD)jqFo@ZfBuXby|p!? zvO%Pvz8k6*l_ZI&Jr&qDvC6&iLszzx>-L0$g8tUu3|CZ9k>MNDRo8F#z;7=-nMwL^ zmp6=VxQx^w+S63!5wE>sjWtwc2?u?;Zeh^0#xVeUUOSV_%B3GJg1gWnnZ%d5KNi7s zyJ!yjnarGQvmVe4)`L| zBy%!812yqU6)~5oaeRv{C!%7`FyX}V>#C4cKEMTSM$Zc{M5@HQ;yEW~<&B)`h^2uF zY<67A_gUHm>hOCw)P*zr{1nR=OkJ|kR&I9UStO%gj>R0~XR3dsWQXsFkvIp5;`h_| z<6=(+a@pm)%3r3#a9vj*%*jHiAynf7{90OEls&xaxcQ4G8_nfuP#?F43Z;HojJJx> zp`5nB>@T@jychV~nt}Cb+Bj!;@YZcTCKyY#HQK;|K_!5cHqHgWf!o5uDOq;Q8T}bd z3TttqS-UG?Wy0{7zYGz<((IO*y9h=>`;X=E+wdw!P>F55vdp zM7q1`+eY2JZ@EKV7ZUy$J^>m^T!)3RXs%w4 zf^eyzziB{Ci&TUqs^Ay_{$PDBnnxe=lfg<**SbWXARq;r?44Cy<4Ffg4IB)>kGyY* z`$Al@S72DzkIuS7U6;|XZYc3192Xq{wU!`qWNg*% z6Pvu*zXk>zN?ZZ_IBmZf#ya>>$LCJTzL{h{{^pebCGl)1C150RwOxe|b1+x0Jds%9 z!@eNd4?b&z;W+Qz>_KbNVj;g6)>U(uu??`N?Ort2(38^yg}j(M8Yy95WY4Q!)MNK2 zjBo%;DT?RTQ9`ea>*c)r$fWS;yACLS;HA83>5n1Q>EWC}&UXGu+)k&s2ikb>iT;FBkz^nlnie^ znV()q>N2QI`z3_+D9dNtKqno}X27X-c_AkTB8lR)U31|S_22!55vFV~ut_zKz`lv^ zOT|y?O0gxEUiRCGeWY~(FiLoVcxVnSfdk(68NvTp`DyFz<+hPz)yR*wJ3U0Y>-GL? 
zI(FgtDVw8ND^03}3y9YngL*>$VKO62qJsSoGlwC9y4GHfYoW2#u6u1$BYZu4O=^@; zX$Y=+vhP0DAYYI|U5Ul-A>#>3k(+s7=uHOxQsye!neUt+ZQOS`%(JnyIIR*$HbV4MH4>P2^dUBo&i6f-x41w zF|f(rOV7>LAQ~c0kq3Xn7w(rpxd8U~LR#U>RZ6>|8!xy5~q3{z>e=3&zsRxGQ+|;t&rKqd;_Q?8poMj0K+}8o%{%4kOo;i5NJ_b?R z5AAJr&QR$=1K1HY6dGe9!LAG4LVgn)VTPc{{aG!tBVGmf z@W$FPpP84uqVn)Jf3_rrB*P0{X@&QnR^-mtZCTzQD`js$9zIUM3zS)`@7Qh!b}evm zH=XjPNaRPxgqb~prT^9`x--zegO0@ND2$k7frYvvoTqxiV44*Gji`wW4D0=%`lSN! z%ewAUqas!p#5aKKzq_|-b99wAh`k-_QlUV5y<*9CBu|tlH(3KdNwE; zH1)%C|5uxo*$qCI(WPk${9*jkQ2JMTsJ0JDdKUnPqdZUG_B+yD0QwfRgrpg=@X$>d z)98$L7P~;twA`qgnb{bs#Rk3JCAi2osYj9f989B2f%C8ORDuVb6bmCf;D0Wcx0opg z+osc2)z4Wr4zCJ?zp)c^Qd0%c+@s0MU_0RdA}DygU}zU(%|#e_+ZD8YgrtJ?_cEad z<^J0TiEV3jGD0Z$uPL%{8)2*V`wiP2TXsoV zdb3mS?$iv;usXB44R+V{Y_(z(Lb68#aF$8ASKqlGR4i7xvSWrgqOdELd)3(75y`=h83&07Q#e3+n2sG$e-;xyGFsxtY zG6;rqEOa~zAWv&%e)t_aagv2vcFVe}$IvhPxO+}9l^1(jg87@;CmP>Po+cAGAF>wy zd|$UhHoX(Qo%?mp(%m9zI=IZ_18fDULhnvB6nz+_S3SRUP@!l)C(ou*j?ms!>CM?qVD!D6YjoTHP3rvl*ZX_?-rh#V7 z?k{u$>Ky2Ma))BvJf6j@xp9ry>KklZ2C3(nsZJ)Bts%*6V~B6#CTc;;=nP-6{9u~r zj1Th1dIyt;cf*Wo!kdjTz?Ok?T;od1+l#4okdo3k)F->XZVetZ3Dl!7*xqMD@yyZo zA25J%#c*#hVLP3ABj3d%+jq0qWs4ek)F z<&%^=G)-PYGUy`16S5;<(%hlh;8UBaZ=b{Y%JHh@4oN(#yT==FKclIX8ArWavV%fiI8c17Zbt%Le| zd{#2&r8m?A5*hjg)Q>%aC=VW5-ThrunrZ2K>xg&{f?2p?6)um3%sKe5q*GX&Li zIGG7QR4;OY{lhyXg1UhQb14P<3r+v>CRw};5HF)n;_KAcU7keO5)uTOcF>#TLr~L0Aw{8aXz-+DC}^1cHDxO7&y}rLn9CWX(8i3p;ghM~ z5nDJTbtfaj-B?RGYo17vmIw(j6d}B0(%>3?wk%ZY>ZnJ!?InBa_ z>K04b(^8dad0+Jh_Vnj`!6k{tgpQ&OVfqTYOJi>zu6&?R3~+v(6lDcBniZJuO3*rd z$vb?|TKDZ6t7g`@^g5JvkYjXT_PIV$cL|mItL4? 
zc?m+R4!s6?0a)gOX-#3k`I+m7x(toNRL{aHavir5dI+AfYPTv#D!h_@HL3B1Ry;suTst{^ROqguY{gh!5fHyS{ElzXf&5~6@NQ|?RspCX4;z2%?J4V8Bqx=^gg zZg17!&E9t>*JxmUgXhLp?!-F5$=(|d_DV>&LBw8f?u{Apax@6sLVE+jRU~C*OmaDq zDywE`B@9ya1~Gf^EE$T02Ql%s2iG^9~$FwLK^+O{~ME5R&?xRLWdi@^Fe|})4#icD@1OjlTB~WSO=K7f{ ztY8NJ`;>%xFX#C@ZjxI^4eQNNICb@PLX+lQoGq$0r$?I*9R7VM|TgxV~X8?1;^9Ybf9$hWe{LjI;p zE$v&qBXOKx_LBd5n=O3*5$Ox8x4=J8;&Mk~IQUPGwi!kr$t+d&o=^n*{pV7Tv*31l z&cRwVkg>~v3>6HDX?c`5dsff0rX(SO^dei^0@_!>4gus@!g5*g8)uNf^gGvkBYPl| zz%(rySSwpN=(tv@cN1grM+jsO^0~yrUJEU`%a3Eplj;zZS_|l30!PA$)J?PHK;1i4 zl)M6U8}*svby&NPDbxweRuKHAPERv$g5k+cn4(x+Iy}TGt#Oa1^H4(F9qsx=ZGduv z2Fi5L4b(`1ITzUi;QhnOyB0vJ5)#)OxA5wvJTC)u7kT>fX z?j)7_H+G#q59_dX?Wi{Vxcr&k##Zt3c22Skbldey-2sbjg~dNr{i+4k{AIfJA@I39 z4J2x?&k%t8A=?CrF?vY$G7sE!e@z3KO@~x9Y7?R*LRilX5_Ka@wn*9bY ztX#WKXS{;u0F(ccqu!%vYr2Af-Ui_3`uNL3VWNtBBW zN=~6eaHBYheu*o?E%UubfU(xZ1m0)4u#y+zY&lw)&G&|Wpl%s1GUMaO8Qh!0({CuC zdIz3z`Z9L%!?Qyxnf6qO2?0|LwQrWOJbW%r=ei5WC)?y40`zv|AcJY%KPrOjP82UEj#`uQ42*>orSMim;hDdr1r8k&S7Jef~db4|}##zDCE7 zVO6N0UEf!e=zpJ$QVYhw_r^lkgv4H!`Ng^-4R3Jc7H5e39ZAa)Zk@Xn_Wq6BhH&Y& zi~w9RXkzGTNS|0wacd{PkE-@9+wdD^)`sll3vhm~BU`d%oT^bu;v3A@sO8-ICHKn7jw+^OP&vt!+ z|6w@Yh0-H)?b#MzU3K4xf31v>eNO5zKHk)PC#NT3cZ6I6+Xed7Id*$^n8U3$XPO-< z4>KJ&I6^gmKqoxRiiH_<7lrL!JuCWVw$fb-(Iz1^D7&!2744hVHp^A%GjcAq7 z8xXIqSxDt_jo~AvDAV^=zXc+puu0>^VbID|>N#}cj}<4CVBx=0|6QInFXJSAiT$!f z9b0`k$ay$5wuhk{q()&^4#;O7RPR8&iy>|d7+7*kZ*ja|8F7I_6Rz^^^-$a2R?*0x zH&c=oa1Us~6A`kWo;j7*LfE+PZ%in`vB-PHjxWQ2=X!x)l<&+jqo}hr9aENAHm8jz zL$NU@n(_BE;zc{krAAUH@(Xut&Q_ z(iXeS%j>qsX3mXq?V_JCGjpGsU?1l*mG7*s>Y6ATr+X1hGwegu1yt`x@J~{Nu8;4- zvd;a+Z<0##M(v#de!zGwCyBw9^yQmsSzZtz>kX*HAUeJepf8_&aI^(9-|8_J=B z5C=PKiPaZ9!?hlr*$?gKrgD|`Yf!qQ*BJpgIqll?7?hrAedk*UJ4R?u4LqQ^Fv~}X z_8iWOHr43usA^HsKZ{wTqTIR7ww=+2Ynx6Xpev*&tV9NUe*Zox1pc3DYEZo%n+EV4 zD|vxxtykqNa%daY(Bt{I`(qdH52P)?Z zT{lbsBY{_})YA}UkkLE2>2DDZCiJ5x2+LfJ`(JumD5VS$G7#2REBaj6CCO{K3g8Yl z0mH`#C!y&K;T8WtzX}kX6~X~H!79^R-y+N66{LTDL_=2%kRQWNDFsT{uQ6mDC<~bF 
zWIuYB79ci!gLCgH*;pNHzOxareZ5ZsiZWb>`(kY?r~cQuup+T zHLz~hnFo9TqEx44KRx_n&JH1agxwG1^T-# zu7`Y|8c0q9zo5%4fai3KZGr{XO?K7cWh~zRE;0CMl5_e>p2-}1!&~ygYtzLb$y`Qt zU^5{`sD&d$X$`npP);Wr{k+zGE51s;qI&5D;KZVA9ZruhftX*QC{{KZQ;aCk^T^YQ z^uISJ^Hrg>AA`pPo*hjQg40XFc*S4FKya-poL?0axkTLkb(`;5hSvw+T$-`%WyLz4 z==rTJ{FIrY3GwvUs7VeTBc1s9ZvDjRT6a9L|Dp;zf(vz2Pe;G^CVgndB9tib|N@M6YAlO3+GGo_+-j=0C{ z|2_2YoZZ?pzGk+c(NO13U(&8R2_RmErt}p$nGPPY-2pJS3oY!C(06d%vjHzHl~)*2 zsd~~ROLfK}PZTSHP6rmF%N36l3h&rdNWutcSwF#HRVZ12I(*%()nN)|ky=yJb98S= zF1-VoI~Fl6PTxVAi+&)PubFiQQx7gvdPkvLpBa zdt%)ep}X{zF8DD_Cs@rkWTpriT<8PN8<%;zU1ctB3!gjn&<<`rPA-1In;sp3jNuXn zXO|?cm`wyz$EbfVmMs9_*cB|R`>y5M)uw*KlX+>sE*+eTq{C0WBivm%pl6$i%T{kM zThK7{h^(IauwZWDCb|k*VQJ>LQ%4*)3Le6{8tIYES`uZn=UED4u%3rQS(cR({-D}EGP)S7LWWRDCT;YA($zG{6fSbvp!lG8BKEx@ZjH-&>l7J9_(tsXmlNO1L7r|v_T5P#Q`D1RR3U>t z{ip0-E?LbmfIY+KSw7k3kLKnrj7BUTTIrnPa!|YL3WZ1xCM`=neeJ$A7!0Q`-Y|9k zO!;smFETeCbObX>h@n zJ`lq(X^=1t*+t)c`QR`$1+$}w$B$e5vIV@nAp3?FA)HtZO?~rICq=A6e4zZKQOJ9z_2g`!8U(SN!2ps9^zX9OK9tojx-^#=*(^a^w z41{5W;1Qh_NP%;oV5Cv-^G&h!Fp&85%ksnkXZ~#%h9t!dd(_du@3#&0jwjbxDb7wp|D}x{Cw^rOaG|P#pyfsWFFu` zFTF%AhfRXQs`D%*Xy$iNo+x^gdnrVevY;gjTT3KsC_|C7rjP|jvj_4z)DBQ8%mp6sI{NHK*V5aMCLS-P0 z;5KyN0B}59Xg(=8s#P-{kq?=O@L;tJYQ`-UTtvFpkp zoH4?o>X}4Jf;`q=Kq?7a_;j{I@I-oyp{Mliat!^AXFmgg6B#~V-%UlX>3rNXDLdK5 zY~ds?XoRa*_=L;(jb3!KXl%wxx9)t>NM6d#Z%O9g#LT0FTzQxqbp;r`9_t-u6B+=g zySADLvOhuos4sep*J9Rimi4dd%5^bGfPtdP`yI5uz*)zI`AS%3q zLwE@DqP33QnCQPNBHGbAUSV7lMts^>X`PQ=%GQ`Ffd9)__={@s!f+J@D9x(S#%6*4 zg*75d%A#54DLT-27bmg^%pFbZxltar?WqRR_baFsKc+NMO8{SICaHhxfS2qMz=(=- ztYpQw&(L#<HEYD~_-j4ECVf_(ta*P^6b(mWo= zOB^`Z^pdAf$3clsh#~W}2L#_GQcAzn7^J{Ih1BASX$VbG_~M zdtDNm@K~+cMtAy=HY@%@`p)A4`&=IIE4!?R_=O2vWKL?vr&*w8S|p9cVl=0S3*wH) z`qn##dlNcBd+7uPex3@Q685zul5^=k>lZabYh>@;pT)cb{6f!0u*`F8Pf*!SuvM;L z#zxwnnhY~;FOD;*i`pr(ap;W7{?!6z-pl{T8l)Wjcs>5_E4|fDxlbF_a7?siHzqd# z2Ukxo)bH++@HD?iP1O31=<9Gh+-qKjGJl7Ed#A5Gf*xv)#EyHS1|Qb<5!oY=sJ0=a 
zO__ITjEs_Ubxq~J$shoJ)*AXL8LLkrZ@~lXnq}>ya1NN1$Y;n-}%(OXZld}X;8&`SKMfP8S`k>?Z}Ek3f5d^>!bqao1^L#YMSQR?MD0#-`aqMTYEbtlBl` z6tq3R$EPtna%VP+8q+2hG3;?!UOezT&dRP46WjKmpa-I$Nu-?V$4O3&nKC=f5d*G` zMOLXy>2EWT@-N9S%aK&Wwqhz*>^-(;Z+CGmXgW%5~93p^Ut|+#{aQtz}bNc+ncML(s~)vFS(R>*nr_&wB#!l(p$(1{<8G|%GBFUTyciGPWn;DnV0!mw_J#($Wo4=LprWFLk|#=wzYq~?dg76K2g5*b z2$CydoAZMs6cQ}e;R<{9N*oDJqh4D?%8gGLAns{_&->CrJo&y@q@bIsEQ>%+mb*B0e+v13QCO*Pa$Uh^)(h2!-#?a*)7ntxz!c=%)0 zow>67t&^AHw-a7I@%%Y0CjZTvtVqX7hXlT#2T#mhX3yc%=Y_!2p zOoagZvB;LOUwQPd;jFJE<>qB0z#iqEkVzg8MJ5LC?r?}CX;V+UDgu$Vf5r@8Sl>Zg#NC|pabcaJOWvl7Zm<o!YGGvEzMlzC0PdqO96!_hE-W~|MKhm}>0c3W)$ZDF>gR^?*Yc=@CfqpORT_-~0O0sJywjo%)=mDsEnoUIqo8d{&)1A%I5 z*sNm;>28bq3eW-(UAiXSZ=cwCX|C)$X6#B3};KSQuj6B+wscNWl2You={r2Xj1(Om*)3kh;%zesfZN_Z+Arh2CpxY(nW#-i%DACI z9gTrkS<-~FBDG%x$K)G|So(Vb`#3lp+cfrzA1wN`f83hO6|E0qLlGd8{rcWR6(s4b zEGa7FtF40b?<`#cfyMx|+1G6s)HvYunDZc_l31E2yvP(dM=|-66v6|<+XTg!@wK>rHszY z&ZvTBc`kWTSd%ngLp04e$m3EmTmc*fxy+X+l>nUSQ2gv`#rKk>N&)!v_6oBrvISyt z-m%nK3Mj}eK{76yf?|d0hCZ;J9pUdTjBsQ4zdprFH(ze2|M0#wly)xL0dP)iHeg-p zAz;0pFj*{T6J`Ew^jEwzBuWNM#4c37A^UJv!Q^j4E-K?oz)U=)=il_E6~P-XebI&r zfJo+@u}>BP@~5>%y0`7aC13335iv!jKCPjA=={h;K&#-@ks5t}q>fuER*k)O`43H| zcv=@3_(j%eq1pQVB>i7vfnSQMS$Q7-92*P1f^o>W^WB9l9=9#OyN7yH9l?*|M)}kp z8_?_&y@I5HEzqWEF#0qe&<(#y)*k}9epE9FjtFvn>FN*A_`vhZQ0iNv`*b;~mGWE{ zvOY8>*@QsK;leTfx=`^A>cOYRfI!T#+V1btVwqTfj90h{oD-&lM$VkF%UjiY^GjEB zfafWS0a~PBr#Ff|ZzIED6MUFZWftjUXOb<1I(u0#@y?D}6TQ<2{~!>P7p46TC(L%q z%rO=9Hp0r9I?!rMnvZ-E0DkpgnZ3bi+@H8TGB_WYcUeV#z|V+wO;(Gg{R~QYwf{~N z)gBU(f;K&?c>5J9xU^@)tD<((=99h;307MqjmXQPSz5KsqIW5A4hH?pGBLZ zZUjO*xUCeE0_TR2w(`XPuzz>w0He_^gLhgA^6+e z{x|tV3tCS_`BqijP^+m1q*xORkI3ODC?F3d{`3bhXodws;>=RF9M5@j#)mMEtul0T zIkMM1u^L08l4x>ks`4LLI6sSrtE?p7wcZw@Cv0HqgnNFSUG&pB0&pCB(P|sF{#wB6 z)~2aS+}9v%|63@BKLQ&j_2Q3{o)eoHp-=x5@##=PqgMc?Nxqe(a4Wr@+H zF~>Irz+q2f++1Y~Q~{TsomUNk+-}J8MKve{|5e-jBel$~+RNT2Pk$_nc#lNOm1{9e zX^Yj~-08o;GTyF)pd*zCni-UUxS#Z$1?=K$Mu`b#kyBS9DlTr#2@#Gylp3|W-XayU 
zS}JnQSO}6i)4(}nFKCwV&b?l^@1>JewkOQSK8ekQMCkzRnQOA&&%DA`DdBgBeH&G2 zmZXrW4y|)Y4dZM^d=SxNZzdW7iR0VE**N^FJe~rXme)<*E0jxha zVttR|2q)cPsiar-OPMMU!hzs1*DoPbP=dxqgJw=sl)i2=t#r`xe=_AXRAVdx7m4hN z!0ActPaF8~boW(|0QmJg?cRBPz4BXBdY9lb>}Is>%&&6#8O1}PkJ5>XogW=u6wF*T z*XbnWT^af0W-4xu@T3|NMH1WEFju3~ld&7n0KXOXXy%Ap`;0jzzZ6q z(?RRSAEDl26j&Dpp}$HNXgy?ACxQQQZ?`KFIf);jqU%ylMi>qHxwBpl&g7!h3tLaS z-SKUl5qLgpzSsOsyn+qSXJRBNpSdw7{eD%btQ3P_V-Jzal>mD*FGe8{%_up&H8*%q z>GO@EjxWXT++Nl7Fo=1T-#WnQ%?w_DUP9!GN=YRL4@3{bbAAckKp)Y|1%43{^f7;`i;*i5G{TMRNX1L}b*70)j-oG%NeI3rU+}=qnIgdvVmod6i zV_$OrDTP;j#horpsznG6G~udBQKoDPA~$`8G&!O#9goEUjA%%F3Sd+ZH>aXibxPo7 z^LZR5sKe<;HBK_?(wxc`ao;rApF}do$I^o6jxYI1wJjk8n^uVxO3(@5yJLw2(@RVL)nU)zeW(UTaxOI3aDQn=UR8tl!co%>=0674s1$K$>9?6DQ0)qZJ-{2`bcfz{WTSGUyFtoqwWc~O;HY!h} zEAm8X)uw5Qjrg^#js$it?KM;hvf?3gpc=f)5a5?}R+;EzF8wJ+E%|y)mz6`_%90d8 z_G4xHj{WRWb2ZZFH0Z8*xg61HymHLw?>XRn|S ztJHEIqAW6aPkBN*BbJn+K!~I)M}Rs{Pje$X(6iUdf$Croo2#r{*LSEk$j*=w>%L|$ z1EzifVfX_hQHt2I5{a@DyG;uN1-5$0r)+cvWwEnvdXPk4;5}I&{K6P|vMkO8BP?~> znkVh(s$UaB%3OA*je$&QPqEnVCaKFgmN7prSG`c7QjGi`k0lcxTTe-0DPrvV4ZC(U z0X#R9C0BT7#v)QwHe9pSuD{dfY-5puS*@2HJylao?RS?$I+ResE>F`5Y}B4HdUa*_#hvTp{kXV)r90fpIS!|-wU&(8Y88#p24 zYYGhQPh12yz6|5-MrPr=RZ}d(R-$qSnw249TLgl0l<`GA2n_cdF$e8Z^&mjp5jEs9 z@UPjbLL^I88?NhXSqxuoQWp>^{u9xjtAbaYlbt`vb>e>ac-Zi$E!SS~leC$?80PZr z=Ue#jxp|G32hM*+v88l%lTI7L=IesC2|8%a%{6S$EyJblW02D68H^yXy9jZTeq@k| zOmffU@8sJy>78{JGaaGxk1B)w#sx8g79D_8Rryr)c zAYlR=i5fiYTmKqQ@#gO4Zzion@~_eI=E$m66DO=l{3ysdmS)g)ZH^cS{aBm+Q*b>vC>u{1o|xODBF!rwTGWSG z-z_|&wKwT;Tem2gal?kIc<79jx)x84@RS+i|8Qtd!UX0IalIL-R#Mtkhd1*Z25DQn zR^XeB@g;0c2`*QCm&0A4GKRj=x@x+U{_0%tk~vGloR{&uv0ROIZ3D*jKedul;5k0` zTk}jpyR2U?xOvE5=yHrPd&3mXB7}o>wgxlDWfYl>c(0dq5>6-D)NUJc*Q~*anBhbD zP=t`nliFFzDE=kjIevBEU6%90&I=8sv3Uz5#O}V`*BQSgZmM0yzGzcV%7D%l^vTNiI_+r?ljCXJ4%UR zE32&zz2b|#VmZeYN_8jtw(dByF9L_?Or6LO^$*zONi6=D2eA!XxR(O*N1yk4bFOJ^ zU&>-6-@&Lx76qF9EdRZcjF8WbKeIjIBmE=&u@8hcFv5P)o5-?#6Vfzej67(I74pYD zsVZ@}odE!c$LrV#DiHEztFnbb1@lOwSJxi)JXLsB~^Ff<8w?F 
z54DMC0d-v{=Ep=lQi5@`?{TOCCBQFdzCG|?VlQ|HS8vcK6;wtdjRJ;q#6=vB-e=k= z9KOb&%wSa&HLho0zmYWTNvsz#W7k+|T{8q9b=%k=5a!o`dUZ^ZrRf;F+AR(~XWuP7 zOz25`?5YIfImlsLK7Au@Wy2mKXE(KISEy~PvQ}JuN9HcnjK&md{vAT-*w$(os{)3gC(jdCiqYFSMZNn`~L2lF|QBLI8po5Jk;Np=&O!Kw+AX8DeUC9 z#RNr9Dh9T?7qN==2!ZeC09%(*aB ze**POIkCnOYl+#4R`d&tjL>nubtL{6PVTypSOoB6phvQOoC<63K?f35mB*>YAy$dH zKxm@ZR&v86+EMc(?{;z=elo0>RMGWqRu#=WdVN*x>*nlx9gzi5fwC5HEWoe-wMD>% zLGePZwEHqdzLV@ddB%P(3Syp{^PyDSU*p#y%2vV<|K^G4ow&3S3j(Jfi@xhnQJ}uC zW}pJ)cPgx)v|w5PH`1>VBGk7k?d3<$7vlldHfw(#My6D9*D> zU-Iz-KJS?VdSs;aPAwn1Fh4mW#vkBjXxQ1Ds%AUEbj!b;`TUV$O5HiDYRq+UnGTmV zvrf~~Eb0CJpoznJt;ECNf5Hn0xUF{OVb`oWVcHC6L@5bm7`NV>`o@V(BCFN`Q_^T0 zy^B#6NR3+AP`NtQ^Swlvq$a9zOUgW@cbVqlSTM84??3>Kp$df9!^wsR!78H*zb9U> znHt$nyRr9^e~^6eN)MOxfUJ=xBxjBC;1Hdnp z%>hD)R1*B2*5P^Q)3%Z}%7jPncBhLrmF~Qd`}!DV8ZpDtWRJz)r6q4n`sHn=u4^a{sXZLgRYyN*;5JPeE2%H>E<3DnfXHBlC7 z2oYv}Qh}HE9YuqD6URixPNBcezb{Ttk__Pg*R$XyUhF{F-vWANQ^as2w}|E3UQ6V8 z55?Yz=wuAkf4?0+Dyt~gJ@aO`@*szBgR+e<2K~A}zfQmk`)5ips)7Lgu)FR;Je9W% zh;2HAh*;)I_dq$a4EB?`L;sM5Trx+uCkG>fAVOMN^HFmS`C24ds|Ck*VT#1z$$W|n z!<7>k0`8;U3ZLI6g3fqz7X&?53C0dXpJYX%>cpLC+~#5ME5iz2zD-NukH;D6`XlkE z8%=*6i~D|31P@gzA@(gh??u=Kduv&Yt2*x-e^ZPXOPF1ak+t+_0yARd*x3GXTDZlLzoh1z zOJ^lP)<0eZ54rAfX&8XxycNJGbgP6~Ro2s7$fUq`vw1V$J@|sC6#vQXU&tnCZi-|ujmd-3udv=2(6o6xZ=si!JEA@@zN=B-iJjs{ooCHot zES<9!E!%W}kbh`{d)h-y9M;W6;#P(moRK#v_>q);!_Nk)T_|&7R1jnwfJ2e>f zdRDrrMo?Q;o%Yk9tsN8Dv>+tq|Cl?+?#i03ZO69Ju{*ZYv27Daby zuje44_n8v9WeB@ZlQ`gw=i-okq{{mjX$6zLaJ9b*yG|Q0S3uC}4cWL$j1^(kCLe`lE8h&%p%qFe4X*humD- z-cWtM>?mcPfY0RUwb}PCN28EGSKeQYAf2s#5zETC%POx()Kb8w0eopWHFZ(wqV`1K z_qZ>d4)2Y*ugt8CUB^>kLq929 zl6ZXI6Dsdsf6g@$X;-$M6qkP~m@B_>KqzalF|T(z z1k=!;L*WlZUT*1V7N4W-qecMG(*#ZL&>vExmXc}={4frJ*>IQbq8xO5rx(%KikyE6 zXV(Q@9UTY6&44=zNu1fKTww{Iw7Ug@#SUS~*Q^t)~p>Av|5xI)PD-Q19s27C?_| zw47%H{6CY&2cmap>ngtxoJ6#cw_|KlMH+qO%X3BDI;i~;k`j61QHQ;ir81^NZLaMR z%v|5b$6}fcaD(*keKcot(@e)?(824TsjkK$4Ra>gIiYYyM~{y&aeb`{7&r|cuQoiw zJJ$_4AZR|FoI+T?{7Ot*Tzh=$vm!T_Y}oJo@Wd3%Ib2N2U)yhXMjPqpvH3MPWuy%D 
ziyR!Bp`eGYoeH0bm@X?rRI<7wgfx@r4Vpe_Wr8&rBf>T70$XYhh~N3@o+oRDXwqO9 zd$c+0#{w4pz6TO!E}#jX+~D0aGY<8x7C*HyGfA_Qm>hSm+f}E}@x?3|0#cfS+XaKSIikfD_ZAHZS?tGZsWN4->pkz zRq0}`J-cW>tK;4hX^=bB#YCAWA})Hg@>`7=6<^YqT^JElYyq7AwPrDh<4~Vn<7rIv zAh=ywezMLq(MGWnN#m}XaNlDJ<=7=jHs(V@pS+SKf~I_TT9AsKqaZdq`)QMLLEvI< z;`#er$z88it&arS!DF450jC^T$2#?EuDHUhi~Gl(9$mBd)S(FPC9)Msfb!bKK%6&i zPNBbqOjvv4RG~MKXI`Jh<#%2R_8Jd;yr3>Vw=7YJDI;NOe$aCK=bLXE5QS!4#rd7O z!_0&E9GWD}b(@XfJZgX#hwHCV#zUkypkW5K*IHuY+jXvOq*JUC?iz4X$?JH@@ZwE3llGkEhIWW>;Mg9 z-1O)$x4RVw&4Rf%ekPBLEZ-~E&>J!Ldh2^Xg-8I6tm;`ZoLrvhyxBBiIqwa1+vi^s z7k=~&t2wx4?LM@-Re}!#V@$Tk>^!P(%!A=nLJlUFkEnmKGjS8da{oQsYu)wM zCClSAFNlhf%Nr@6cH2!>vH{Hy28grRs=1*|cqrI`e8)?eUNTa|3c8c|D(*)Y-s|QG zk#F++dot`O(7oio2nmEN=9x(qC$rIXT$CnLZzsNPJsaJ39cfTBS1!oaRyFlcqH>TU zo6YhHuE0auIsNu6qZrjz;)BZd9B)FZh6WLf`+RqEVnMCaBW~C^HFOP*l5DYJ8ha&x zuO6H?^mz<24EJM?%bJW@;o9I#-ZS*jNpuWmwHMUr$=))8r!1x8KOH67QP5K8R7uiw zj0OhvB94d&zQLEEbl-Uew3=Uw;ztBfYOh?aMS-=7tvEZPCPO^|0wwZF1otu&2$#Pk zVWCJaWZPl7<==XxJY>D{B?xk*Ap@-1a|0c?zUv7m>ni=skwjoAu+9etN=CF;_j*il z5k*8(5Sdlr$i4F$^HU};hKzsCH8xu-^#&p#uN2J~b#bhplz#2YxE)bK1jL#43jFG* zRr3UMK>Nc7;+zVGnxAtRw%4?xBm-Z;D;tUG;OZGx1qt>%^6!nb!5yrmm?-;Q@S~t8 zDL6lJKse&}c@T)@EG9)32Gtc}Ib+3PU%d0X-`Ng31owiU3vl>Ll3qD77#o!dt6u?w z*gjj-FyyOL20EmU!BNNVBlOzSsCaDvJ)kp9lnhB=NhT60)VVv8 zwr|_VV<0$j0h}+ZnkEOWqayswb-&XeyraY^KHsQ&8;^d~eg9nmmH*@2%!8KUXZd5* zs-V!HQc^x=bHq9<@JkDdjz^X8hGAVznzv!%*KO!uYRmg}mv(RysE)p0T7#F{lDJS8 zYv1z%B{9cZzgc-TY=+$Vgbq>Q;>T^Y zDvb(;a9d+B_cQ>G#*|mvAl<|MXzAKQ5UMr{utC~9syLJ>knfS~h}>T|vlNe=f}uZM z9Iot8Cg>?9x|Tbd_C{+ZAwW6wKTIZDL_xxOPA8^uOZ)e zli2$3d8*=+)}>anS^f8Cq;jpz3=Yr3*)x91oWCSJ8cvv1p*ncTZ3q?*7pyYNAeaxG zV>=PY0FD^CLV4CZ2VKB_Ii*2b-P$(|5FR-u$PQfif)xza-Yqas?u5)>Behnv*1s2W% z^j!8o7=dF0Jkfgwg6I3A^fhFaFMHeqECd@&-G$O-&J6$@^R9(4LwaQJ>4O3Jo8shj z$obu)@Dt&XB%g8&epQWl>U`2Qp6m6HZJ6isc8oasT_PO2VCr>_jS<;HR;^rpZ2;$J z<9-lNhw>L|!V-|`1ObmPtlHz-a~JSZ)B-~h&>=H*=n2tEm-K63=G;>8X%_C=Ox;qq z%vg43ndCS&I`_`^du(xUCs$Aj9vfY|A7(5JVn+^ueh0|-%UoipPE_;J>+9=!?_ghq1(F*mMbaaM-lk}P!AkLZ(9AL 
zML>PmPFv<`j(7-lKVcpxve%L62zvfDABI2*cufsXeF*k5Q}@wQl{ zOLXR(}{DTt22J zWhPU_SlQ@|bPa(RdmY{JaO_;tw(caFrT2ENqo!JN(Xjhl6^)ON_K2|4tt=!bUr_=u zD)TuD07qh}IlUAm6KaL|U5>1qFHGVqIYysm$p&v_r{QuZHqQS~j)VU}H~cvpM3)|f zK*fR-$$f;}lF-!$B}`CFH*h21iJ9f-!*6ec&-C^_WWSTN`gJn2X_Qb?^fXRNK_ma6 zL;zRTp&-V~tQW>wb=DR(ZpcV!cpXz6Lp?WzB{T785j_deV~mjCBC^jCfh^tM8x1c> z-d9a1@QZPEZ4R10yvr}y@8LlNqtQAa%F>J=*d%NI|0bL zc6*!otfQd^K#}Qx3=~O}8G~3o^?=6uZbkVVPcdgJ-`BZMLpI%TVcq_eqPg7a7;N#x0nGhFlzQCF*ibRhWo*ZC z5_}7#?bfe2`gHO{i$SC|wFll}kpbxWyuWd^CnvB9&5cGC6qI=!O9qnO&?#3jZs_d1 zYm{O&Lx6Bw5d2;F)`*FO)LXfd3ZjtB^=@k{(S`5SDH5id4dB2kFBz^X&i>Zb4{7r< zbJd7axqAll>QcHcyGhL$7;s^HikSR@%|g*q$u-Gk+MKY^>X<(`X}z%J=}<(`(K7u$ zpL-wM5L;Ew`!>^*lhtpEeH8`Ie&~!4-qhIqwfl}aZEgO05rTY#*IONfLN$h*+BchL zwSQ069l09X3u7v~-FF(G=fWjm`k;EMOXj-$cxxs)Q)k4{e}f{9CX7&7YB%J3D42n3 z1z78pJ#CL6XK!;+)ClW7_N^C7xw!uYiJSH#`5F-CXdZ)ZNZR}Xso|?;DDy>qsRCeR zZWq>(*c)@QRe?xvb%Lm$61mUzX}$(sl^9LvUF5wHzZGR`|0MW{tlnEOwI(7kanHtS zy3cUv2tQ~(Y_!)ka$p8rEbA~#ONq5+fLM`zGka4#63L3Mbdd*e+_Eu8`MjELbaUiK zK!Rh8(ec5(&v2?ZY2K!}HX0Ci>&3N^&_SNt&9L8C{y~}?*96n+V@08vk)X=Ls`aOm z&H{YJg9iTby$dZA`*5w9C3Ung-#Sej+jdk3Nm3#XdI^=`^)&1MO|CcpBB7OzOOM0E zRCs!N4{IC5Sbz`=CoReg1JFZ|gC3?ieYBKLr}1n1zgo7074(dIh#DzE6tQVQu-P{Al>bfjsD}By3-IlC2;Zai5mN-^04|DZ7V0r=4Zx-1v(F^Z=vUn z;C%JfTF*tS%Ty~d40x*-u#D(u5s5ZRqSi^E)PX@7eZ$!!3fI&#|iA}I&x=CMmMI+~oGfVU$NaZIopSyoUHeb{4C zYXz0n7ZCP%a#2erG^rh|*ijH$?#U#fRQ9yr%e#8SI#wNt?^!q5!>bri) z_JNf?y!&}b8~J`js=%PUFQOMi+fBdPl0KF%b9Isyn~k~+gK2pNFN7nR#BB}F0rc!N zsWffZdF?qG{#A921A#HO3>UOjoQ4(SkmF7F=u}2rwERauHYQlqjbu%L=wTd5a)p@r z@{(MV$UZKEiy5B@;FMBtz-v*w23&S-wDC4P*uzo@6Cc zXE?{oyT5}?;0*;|%!kRq*ADj)9O`JLv0@KmtHWk&`}Jq=8lIl!eI1~u0Eep>k+-QC z@tjE|v2g8mQ7r2V9IXiZ9fqjvMz&xDlA4c)rn6C&AQ6zdm@1C<*w-UcEXD`#{X^+7U41)%6E|#ty|iB7LA$!4S5cm= z)YmjT#m*Zmp3S`iU4uwhhM)aps3k&H1Op-B3HwZr_?C1TZ|YgR_OBW_grB`l`4Yf^ zxdCz(R$fi~DN|eYbBxtoKWUvoTjgGzK`OE0=SWt9jP~o1i!(@q4_p$FrlJn03k`Ii z3I4@iu3Pd}J`Q^4cfR_kS^NR7r`PKH{(7Vle_sQO`UW z6f0#qVsG+=!_K?hS)uLrVJO1NFAC(5G@!$^3Jx1f`{kYnbj(%1V*Y;Tj|IIqF}-^r 
zPGd~$sP;XaF{=sJ34V~)!DV2b=dcmMf9McTS~=EYiP2`sY(G-6^5Y#J9s*G_4m

    qt6Z zFfLrg{x24h#OrFYzan&pplGmRqaWL;M4td>MI7x#UJswMPScT$JYREw9-=T$Ydq3*I6aXx(05Du+U`EA z%ct9Ov`-)(5c@Vv=GYGd6T3we8e8^-kdL*=6YAuS`bGsrKC_(OIHZr7s z&*64Lo;!3R5YM3(A*;i{^oDo>@|3kQI8VkcZqJD=in6-sbEPsMJ`^m^pP7rOG?dl) z(5ro%2dP8j$^Me;U9mpAilnoIzRJIGl@_0xS8=OfJy8^@*h`PiZ9#YU8r380q(o|B z1y2B+t~lRf0|h8)7Kjz!--NzXoL_#~O~k#4C_;a#E3D$R(KlgPtCGaB@A5ci#!hDu zg{sI#CU19?U*y z&spbS*T!`f^qP*HYYa(MVjiE}r{@@B)n{NZH7j|3s{ghg{7T-R?PE#X(7f3O_%gs< zc|c&EcJpnt+0S@9zDsrXhA1h}#nn(@wqy>J7wiHI+LQ1CQF2vKqXgL$XJ=O3$5O%2+*^YRvtc z30{bAj zq<=BLpGlT9lqGlfu{SGo1ZwliJF%iNJncAt9P;%^8HTMrxh>J5QqbiDCq%i5W_ek( z8_a>V6(IuKFe3Ry{^Vcsu*LO_>+j-d8LYQYsgr)a2PB72_)hRFb z`y7XBzn_I=1_>{{%9IBkv`qK$*laoU%THcr^s6)JZB5=u)y3iE4YdqDbHzd73Xy!l zFR&)05SyU}vwv934MJ+)^U7ggZ^2Qfy`N%>RqOLqgZ)DM_l_)BICx&S_S}mq3Ubok zmyW`aoPx=R(6TMeEOmWqa7C*|f@x=Q=Hk!vPsPmuU(1_B1;Izde;Et(k6*}g3!PX~ z#8H+?nz^;=kG-jZhFDH*CdE}vqD%gK!)Jt4op zB4gcA@zA1Pg0LI9_4KUX|3Cr@laT)gWD|Y9<6vvncI|_wn|d$c3r<+P_R8b~1$3=Z zOo=qhNQMd=4I9AegW6hT-+BQph+12a`$b!;*6r9912sro5QwkD63|2$)XaeE2#H?; z#-yj{Ne|xoSkL!LD)r26tJxE5+{r^A3E;RJqmYTqen!X7G9!?N1&nZ}FmydV-&d8A z_~GTqOs~r0c!4@Jk&fZMNxiLd`0YK`Y(t~-qj}^ddj)1BbR3F&$A^9wcUy7|rWleZ zWrO6feN%7F9qb>EdvPXeXq^~v0(0f!R6s|umGC|>nXd2SD(-q=Gw|GnA0Lr3#W*o> z()T*a$?eS@LcVp=d&VL!SjqVhB^2N&u3cLq708ZU0sle*(k*S5the@!_n)qcW+3a~ zl&n@|LW56rgMY#%Bi8JVVF7x4vjXyrniWxrBfv3|;R}C9Zd+uA41sjoD!7?MTuPwk z(MxxXntSuU?~uahqR>fm>I;eGvL+7MY3IRNKD!ot*FUt!lKz>&ZsQ!VC8SXg1uN68 z89f0~U%HG5LHZY8Cs-+dI}fe4lz4ti4j9ynx1)18QVm(Nex#uHjm!PnTX4S@5NE{< zR$Y&G$aXT}TmzweQwjej6m+c#q5W`VWK2EJ&fp!;zwjE_qz~ofWnDbCJy@hdcz;C# zs3A~{i~r5h@oQ`VIGUE7O3y3sBpc!ZH!ESEB8oEe9${a3*5?chDVJkuybySpgSF7- zIb}{g&ByLl=r5FmQmGi0EMwr_%VDMs&BOrC8AoqfWa@$1jk-rp&r+5Fyx?J*4j&mN zcOMq>K5i+wcDX8c-uPR?8$>H;Wi)Uzho3^(WyA`qEva~yH)yY=41hyJg{&MBJcaBp zwNJE2uB<*z*bBAL2if&%-*%w`v;jIg0Q|4nJ-oRaC977EWK*1kFj3F9bSh(<(nA>X zK&k0FuRD|-8vm>SRA9!`%9%x*^re)3oy^@i$!y){DuH*N_eiKH8IEK6IJyw)kgn(c(ZWQuN-8oQ59B8h@Ap2wcEc8K5o3iM0b7FEeEM_? 
z#*E=M^2wNWIB*PD^6|KbTgvdFCTdV#MVl%;O=p%Vto3rpTMeP#mlY~mw524B03be4 zGY^b2P3ItzBS5>g#8kFwMiA1)&5T@W6RCaEP~5_j*;p)kFqZ>{yDeF$gN&ioE*oRrN%0?J6hIzZoM!PXCD(h zLO}gPc>p~*N$HUqfeq<3x-#h~z72e2a0jX-Zgt><$5wPLSaMDKCg-wSUyR*VwT4W$ zNQVW2%P87;9Ti2z9ktY}hp1K6fbq_M0Akc^Uj}i@pQ`Hiv1%41To@vDC!D7&XCYp8 z`VeI8yJ4oW3cNq33q*f67#DC8O4T4Cw)JHkNF-6N?L|LjPypo)Kn#g6FiP0JhyiE^cz)L@k>8fl-opm5cjs zI@BwmmbjH%Ab=j{GeN9Psh~zY$$3S}2dShDJ=)D^RNAf@@pmBgV>36(Kdusg zo*LZ@!)=+Aj%+}do8oJ1nLQzZ;yc;CzY`omxp_0mc^};;fx~oF=6#eQd#=k1TRy|@ z_Gd^{f~ZURG?3D?NzbUNK$T)C!6HS@_dR|O1doGi=ht!v>P&y{n-^XBYR(mNOBO6g zq3R8kSJ&w#LRsbm1y%PIVGhw*@>(A*sB0jKkyIdT<*FFB=yZ%arD9^xaUJe%HQ>XG zLoEY*1s1ON$A+JWb@i3qV$VZZjjDSW@|=1KqTs@MnB!C_Q%@RmdJi}9r#Nr(91N2% zJw_*pGKOvSUH_a>3HD6=PyuktB_DBDP&I#x>!MudsKQlZHQ~_1ZxO4q>4|!`jxst% zlbGw0WIvstLlP2ihT|P6U?TT&I@2KPdQi3|VDZ6x?+cSa{9v3tJR!U$9r>|RN=NI6 zE6c(ege9kPK$G7P5GLAx6;$hv_**mm@ns8rMz?4`BJ;@{_Vza^@B$H$Z&Cpp;5p=q zk^Rxo8GmOSVZ%yw;NhtS6{OW3{14Nyo;1!c-e?t^IsmC9sznKgtS3e6AISST5eQ;} zgw_4%SKleS4Lc<*faB>uQvE`2MNblPg{gt~h2j;fLOPp=Q-O&X`&?thUdbD_#jVXM zsS^!*d6_>P;ur<0Dili3m)-~}Km74peP0HMkKlg#`}E(km<|d1A4XpV z;DucDdhpgs;}c}GHrN)Ex~@3FkHJVV6K*mwK|pL2KPrO$PSIr-3YP)&Y`$BrCFk3c z%W*zNu(?M*&b#HH5o2J(AAQ9CnoA%2nk-&?Daq;nvs#SrJdriLgjQz;hvCN+Q!hnw1iX!O?9G_Qe!S)s=(S`tYh zTGSXj(Y=6w&ZP#Ueb;<_ySav|ZL&+Z-noB(^T4 z>L%A{-ku#9IjE8Lz~9CUhs26EKgfc2K9xl>*^Y*&1_RNn5|`>D3k!dtK$coGafY zBJ2vTkC+xG+|c9H(_WFfH3{-TFy?wFiCc~JpJA}Em7Iwt4+ad+f`UAXSb2YF;EPf8 zUwyw1-OBps{&{h6swVN|@e^}}UtaM|Js0+`DG~Sz-ReZ4_~CPBn|7j8DTe-V0<&k# zsP#rNrR@PvwC;9!oT`T0x9*IA64;=LsYOoy{6R=$FKG1zv; zaKjGO!OWaJBBvAH$DIPRPuCylShdH~L)L+lIMFlz`@KQP{QQ`!wNNsxe>srEB5`!w zV7Y_1lcR!+iBVl)R_xW7pVZaElzTab*WmN@1uYrV{fH{oW8TS{F7GcHbS9|^cyH+U zUE9G!Wce&v<~0^#xg&FA;B}UR#gaV#*KJ%NWTmQAhysTR^z~XBvmj zoyC!Ht0L{!T}h&o0{mhE5ZYT!R})uJ>7Q?S_SW^jdz$UXWgG9vi24a(=t@HK39DB|@f)^(yu4+x}fDLXMO@F)EyY*gp+)U;0OQ`tdyM4FD zPnI%QFBUWY`=5TYvspRx=IO6R`+X(6xWph)?N{#7hz>!J53MO&-J{N8<_V^U5VOrH z_UZ+wLsK_^9&A=Z<$r5CgvPJRW<~dmVlW@(Pe2??Zj)A|!HTBQq;At-)|_L!O~jiA 
zh%Gkoqu!MIkb5@Eu+)T_pyN*=7~l2z;eoJpSSsIHMh>7-W>+YpYy$$64^kO5kH-cL zJkNuV)}SB!FPWBztE0l2XmqtV#gXbGIB>ub!CRS{nSU#pg93aphQ6Ll@-bJ$+gOKR zJwjz#yd|HqE??f>io!u+3g2(!%bK%yYcS%2TX;oeFb>qbq22vSa4&zv2;9w|F`01~J8h~EXvq21*y?y-@E z25qH3?D{*&{kPOW8zY`O#1&(lh%c7a$`wQKg1MFePCM;$IKAtGK2n(OlV_ojx5p_D z2~RlpV6%F4N--_vH$Z z73GL34OapKlhxOTGOc++VanNdeXUneQTdXu{An$=*h_5GLuYWb&6FM(blo|PKxpJg zroI9F0qcXGGXOoPVPglzrM1^(XY1|SW=yEXv%ONsiw84xD}}DfwaH3G9b5=YDY!~A z+BH!`R6+c7%){x9R$sNt3zoa?*=$%50FE84R84;y5c$i?o%FBz#6rA~tQ9_Z6CzM? z&Qj}S-LtEC{P3we@Ljm}EkCCIgtFy2EpVubYoIrvcTI6cDW5q zl6Ff;L{Y|p$C2rsFv!_%7{~U7mH9+tA!_TvU27`GzaD*6%b=iZ@${hrV1KquM)q+p z@HGIQpAFsnpzRG|F4b6u6r*;b!RhH_XaC6=f4(sYKVro7Ixa>o8nJ{s6HvOTqSg1u zW!QVmED8jhi)w*1Ee5U$uS5Xnk5(7TIa~+~{|*G&AQ4S$F3--w2c}apCH`(}c-2~# z`E?!YsmNsZ?Z3M~SeH&R;EyoU$^LCBlVXGOUPkMk-vCZ&o+=Y;_(3^S?3KFH&iqv- z$uw@I3Uv@qSa6DcCEw2KqZf#zDlY+f)?((oD+mW6Pwpx{1rmMc;e*;OdnNcU0EhWT z8tROO5Ik0mB3rVe4_dekL00_zMFOQMO^ygQe4#R^>cuJ6YqK9gHgyz^A*J)@9BR27 zD^>6T%F(Y8KaKA^w9MB(VxW}tFf$zzf*u(s22Ls}dQhQKQ=sK9+|lZAWP1|}Uk(#v zqv4?APVg8aO6!3dZLWTbvXRtqTl&C{dQ=qp38xK^;z@pwVh-PA}?b#Ek-Q*yU_$#B76c;4}2yb4|C~ z-;4;*2{|DHY9$KWFNqSuFz{|;w=~i6oILv#p#}9i72e%FrHQb*8B5Ze#EiXT`2ZZ~ zA7PU)MucZ++vs|Ky5dijK;?6KdhI7fPz5tH?+eXoH-Zy;CYvds5ieLz^D(HFUZ#U* zRY}%p^*}L>77dTSUmOn)qHFc(sR+cM!q;U$Q587J;qND{cbp<`5`3C$|@{dd3K z0{#(U4~xNgtxs>klV41d`BLu?u`!2X|B!bBJcpf5iZs+Fj~@4WEAZk%4(zJ}V2nGt zGhlZzC$4fgTg0#A%>rE(MHpCRpk(6;t6bh!>=;IoGD@tZA2dY0mL8t?% zDAavk8gT*iykD*e#RtV`lk2s($4#mD$#MsB(6gdq>j+Nk=A>0*f``aNU^<;ip#TvdZ33QkN!yA3t3w#m9z_uynBQEN}D8v9Q<$+a1Qu$^g`#Jqe6pugh7 zd3s7LX+DY+G|8&kLJZW+nBR4wH?97@HC8Cw{xu0x3s?15X(DfH{(Qd*#8~(+{EXzq zTWM?!1y(&8GnpF#aJ<=sQ^p~@)W^~?)%76V71zc?<+3aN7Bt3Gxgd{IS&|}|7G-K% zTUt1#m2OV6qFDSb zIF&B^uIu=e&=h8lw23CW#3vH_lkm01V4yjkp9P!QRWB3$rgh`rmA%cx4btz#Y_I<& z|7#~E?fJOCS#>M-o~EU2K|~&Z0KOpCsu2Q14t?m2;D3l{GHJsu`+D;uAal^#lpv zAUn^5lS99K=)+LA5s4>_Bt_~o8}(fjskO{tZL){0wB{EERgPUE#t|VAfV6w4!bsu@ zv;RR03+6?XMZyp(YX$iF(IM&A`pK~U@gCwR_~cy{KY1WlA}fnp^Cdym$df%en2XF; zqR3PtOpe$_<crl5= 
zw@`W)x9y)KvDdD`?jYWifl;LQV9i^02Sng~wsW}|#|n2nVEgjES)E^`XdmoXOA{piQl#>9yiZGJh=GzhfR_7^|VPS!f&o>C`A2a87Tqh0( zJ;-GvZ;s`AH^PZaMfdd%;JqzPPJ#}P>liEjo90*ftJJ(r@ckXGF`(s{c*nkyb8T8l zY3SU<4DR_kJ)*^hucx|S(_`V*(}yY%y5JfhX=m(Jfm}wvsb!!$`vtiRBiv~gW5(Mp z2Ji*$4oaQQ?D5|RQ0hP;AN_s$~uki{FRN5~(SSuQX$ERp@m>UVKu3Z7XLPf%j z(YL!v!%dmWDXQ-}S_aGYcj^vR^aDI&xryFX7(4~(7w}WEq+4GHg49if_RFh#UX4P# zRr2>HIjK6w-T`_TPZjJ&e|`qsxg^y0a# zKxmnZV3O=gU&VoBbc^FZA&t_nysp88C2N%E+nVR(Or%SE~o6CS<6 z=uHD~E@K*obzpn4>$}6Ia^d@tH6}Qa!B!y82yl#ZIOjmo+M;e&{cZM>$BYM>{5-0kxu;wmhc7dPP5v=7;6-olq`mz&zFjGhv&7>eNVk$ZlFZRC! zL+@e(v?)w0Hy!{-$~yqFb;rJalWi*Oc?&tYS@om#CydpW$?@^h%m<{cU*}CjA@2J* zH^>@Qc*~Har^lZ4&)a5dpjTF!7_YtH@A`EB-2Y5-!{{SjFZz-L?V1kg4~cG^BtV=^1^@R zn}H-nqY&am(^cus3Rs@?OLf`9i@rwXd^YpOUMTl{^g=ZXp3&L7MX^Dms7+gw#wEpc zpaGn5sLCG<&Hgmym?lC}Pom~Xnfgl~R1)%8Ui4hXnUT0=lFY?8@QL`6nQ9>Cq@oXy zDsxfEWBj>eKho{zEf>_j>q{(O6(^8$AsEXwat`xCVIe^5na0zR+{vy*1zf`|?a|Jn zhl~jw8=;QWlKo&ClL@l3L+^homxBN5~PKlX^4Gf`A!Y>}i8D8Mxeq~P_#oE1_lkRs%cg2-cN$1T<<*c+16NM0KH0{rp$6b%u!Q^<2H)0b6_11JsC0_D464Y>~ z*d)Us;_vsWXmHdJPyF}M1s{W>j)0K(?|c35>;m@)Lm-rIHsEmi8jW#)wJy`H!Buex z8H3-D*+3VSXX0FXY`{v_$cFDJhd?~@oQq7GgpGK?ANDCWH4uJr01%(LD0;(1yc(T! 
z6Ybq2CMR~c~7{URK?DRZlnoECom;l0l*5cSA=P%iblb^xk-j zt~qb7mh(MgVZy`R_j>rH?~Ba!F!P2q`&rb3bvArY0<yE0@eu z+ahj>g*BN-2C8TR_^$;V%Gm$P_fqX}<pdJ0R_GWHUG<#`)(wtES^2z@mjNx-$yp<85jykgT%b?C-S43-Syz zk=`_+6FRH-#tb?ZPqapgHB)rfHnI+YL&yvbv?_YY7Gw4yYztfYJeTM8kf4sZ9IdW2 znx1%lR{7Xm5^mRUwIRPUWhx%rsnfM4UfW4QR9JrcYT5q&?|c7?Ml^24YXaqS$O+{H zomj7)j&8@Ei%(9cH=m~F&Ng5S-7N(tFZhSM@w|#pm76Td457@Z9oXZ-7Sn*fyOLz> zB_KWm)g04SdoW%8Jx(#khKK#x^~glY%Efo2y9bsk{2W0_aeW^p0wsquP{C$Tw4}Cn zkf9KwCc82M%I0c(DYe5a;*4C%IT%F<+_&Wazsh!H^U>*UI8F zC{3Ua3kNJn9}{I4SFq~1?@C6KtL=^7Dl~e+g7mH&8oRPKCbLyfUgra192g+ZrqZ=u zU`~2sFanJW5JqarknPTyXDZA->R}wH0$Gt=3C`qHc>aUX_kY)YSZMP>^d&&=t;l7l z>9|7O1Xbdazwb{8!u9@DJHwA0rWvh5r@V4-xhsYW6@z|#Imc773WIjPrJhQVxf_al zO1^Tlh3bO|8bioVC|#kc3YPV+Xu3<(7@)_UmNB7$RmD)JG%3GZg2=?LTwG{FfIeqLNAh&p7cSGVl1|v52(aQP}=jK+?jWZ z76kB-C`H`=8F@1xv?F{`_p?3z=5qi2mU4h8ZqFn$4PCC=yJ4t*K2aXl!GjnW8iHGC zGXXU$CzzBrss?ogZ|X7j`<@F3qyL#jF9IzP{uHv?xR5Km4t3EjS6tQa=^$#gr^(d# z6phu-(3RW%c#G{QOC>hvH|Zilm!Z#VOf}eQILdq;_8QEjC|Q z&qwdmwXx?}10MgL7#MH>e99K%ABFn+^GUUKglfKIdr263^2TNS==Q3k`Z37aJzoy* z*PuDNEf&h=TuPaIJ`&LXM2Cl2HmoSzH)^YId$9ocIIg}`!vmiNr7>3w!=BlfQ}S^t z3Py6qrmDDi)z^FVm$`JS-oo6*rB_!eFJ$%SdeQ%LII^RiWaBR6KHb$xDw;<$&lqHEK=!ZNaz2=DJfIQ~~lNm1esicVtzf%}GIHN1o zToV?IG)hmSsL^fIx=|d}ir3(JM}7YHz1y8)d9kNV-+QBiVubOdU?@a4I5?T&7QzK= z*c_|YkIIy>-d;miK4`9qf>koU{3qA_X68Z4^|hV@>UKOAa@;3Cmq{PC2&j&s>z<%Q zR{OSpL}a?a0&|q)R>o6EoO!}g4LVbfgW2?4l-jsbLcvH6kpVL&IFhDZMu`Fu3gvk- zRu90=YQ}t%aQ}cQ-)iJ6T}YXl3I@ZpI6K*}DO>p~NQ-R^JHj~3?kkk znbh@FQfukz&bns+&S?`Z0+~rL83yx&nJ(hX)aG+L2Goe7EXC4EZ2!0o{4!9FA@#eG z-Mqus_C5wPfgPO2W)Sk~J;V&9C2l+8O#%Q%3?js2ZxKNq>odC*B-5s1$KSiVxflsN zzPCy}7-?e{9a)nyxw3)Uh%A@F5C^0rM;cA+WUAv04|ZW|Wh#;GyS@$${_WllCxp(& z`?2<-rR5~pa1Wd0uWV;x&QLGw#>vI;SyU1tvBcZ^pcOiCO-U+3W(L=AE8Qsf-D8cY zW9gb0fKSLpuh?YsZYmnVz#l}&kJz#hwW+1MM1_;G+cmhJiC~9-d7UroXV8sQF>qF! 
zaV8{r4ok`T&)PBfJ6#u#$c*oEZwrW18!h#IB8>j0eoXsgy4xFVu~Uwryy?!{8S-Y} zGz!TbZ=B4g$1{l0PS|_L@q7yg$!vaBXRU)^2MJacPXWO1|G+zAfIe_492Ov7Prnip zWoaESXp*p;57t~K1CbjgI52eX|6!7aZG}7+LwpRHd0pOky-Tz4|4@b7YhrYSK@wR9T?NDjG6VjM!Bx(9Dq>f9YX=npVYq+W+14m_FSxxktcAtOL9QN{vo%`_>U^pN7bqj*45ttboh1 zESV1B;w35uM`T%afX$yGwmKt`{8fU(MPJuu46TlC;0@#N{ojo0jU{g~wbsecrFe}2 ze0*!~(SEeQM287^*IiSHo||RfEA6nP5(0f4!c)|}^Tl3ZR;I=Dd_`0G8xEeqfE3|S zoi-Bi)5Y#_Z_EvcxpM;IP&?l)ny8idB>dIuLs(Ok6sKXkb6x(NVU41mE*t`^1*VDs zZi>$?;pdJBkCKvQCy<;8*fs;Y{q;3*9>d~Vwl_^=xW>o9FWXKmwV-m_x+LiF};Juwj;{xU)-bzDn-q+mko!owjfq4p=Lzq zDb+u*!||9506qYD|1(P#q}8v1OFvu2R!{}OIEI_RTDNfwWY=wSY*|vebG5#@7wJ-c z-?VBWX^z<&K7Y?On^aQRIe+$+NfRa*hj2_+}U^J=(zP>9xKJc_3FF6k9 z=-?-V^6jN4dQN|9?6~t=8fU$ILBeZ)j5L8xzLb%$y8n|H0wR}ZuV)Md;JCoL3Z*Nr z1wTaZx#{AFWF?So`L{CYlXO@O9ADBAB(uQn-i(FZvv*YD&@xM3w)}SS^lnQ`oR{&} zl6VfK7#RiNn2_6v7@^4593Sk{g9r0({A=gC>I5Oqf78Dei{xnB)=v>PhLwqDE3xS* z6W2AJyiW~gC%NBWKxS1a96s)@x(E16qN@5Xqk!>Cqwem-(r_g2%57Y{Kz-9$wy4J! zIQy;ps-Qs>zG)93J9&@;DmzhXpU==^_8p|2q!&|=D6bE+7{CXTSxDp0T7~5E7_h9I z=C4k*KYcjGlK(jQyQyu1LpUcc2?bqV+6K_UJ4oXv4=FrQlf)FD;fsf>Ofc&KeAu=Wf~s@XJKhzepzFK4S)E@q%GD5^?DDn`pjyFpZy9yM z+&iPJTW19=^1aBgV$mC@xk78RvFRpJWVZj6f)2iH zP2q{$GfET1_%cIQadR%+=Sp>s6CAZuvuMqWl;nz}KtsG*6BdIL)uo|veb39qb)WI{ zlXe%(Ck-`*QkxDr7D$3x@bQ)8Bdw6C#U!-zyaxPO0KvD%|bVlS17i5XQdbwBa}z z^>0BTzxC~es)a7tOUfz3ie{>U%urUyr0AfkH}-AWXXl|V;E)Rg`1s9p?n-2CNrdD1 z@mt-8J3CfO!te(IWlUxVotXIi%e0ihfs9W{4L_Lub)$jj7+Q=EZ=Jr{;r>=-*YbIi;^>yBE=LH}qo2iehU!vo; zH3??Hd64PYTITPP&^HPJ+Je7$w1C{KSU4WD4s~47KmodNcsNPxXNzVC#)PYQ_J9^B|mk_ieGE zg%;$9mB{Iu`CJtJ#B^O7PP{pNF&(#y=@vZ#1AopRzyUIiNJedaquR1S_Ji0A3U^q- z-bY9)F1fikI*K0z@X>&*)5{$*%GC~1RN48rM$;2C*r7d7t54OD@Ax?qnMM{BNwr5Q zDfUUd%m@;TRIH2098>8=d+x_4YvLF3aaIMu(X~dsnf5yn&oPr?du;Z)6_(ypt-pW0 zKW2BZ)A9DX;`)q=r}(Yx@$br%q3#++ROdhn3x=WsN}*%e{;34isT8m;Qh5n6&)ID6 z9nN%)Fen+M4KeSZ+xH2corSFV z_-@QiQ-LC&O(M62WjfU6_)8@(F5!Jaxh&+YlrYxF?hgQ;zm6cA$R(=7*ESO}glB~Za>)}0 z1>>fCJp|JzbhroCQ=S2F8EB^v`}MP^xesvXPL^`MX#0suZ^){lGm 
z!x56q>|0U4V4_s*Eji73NsWQ&4w-nLf6~c4!~g9?ltwYWSNEBS#X5Occ5iMWiY&4< z>LzJo8iMB#1@M7?Q_>lthur6}m|*E!wa%&a!%O4eblG={^^Le{S4NrZ`j=KaoB+QjP%DgApNdFLjX zP#Dm?^Oc5sHWzryiR-yK7c)(?8R3E~OjadkH)CK~LHEYas39D=z`ixv)0XSxuxB{C zk_6ZXK#r`FH6JhVzMEW9s_hZYmL~qye5rS>{t~g^ z7lA@auU4^^XxzKOC>wEibc2$+(b9s|pVQ%*D4ySaKrG4PCz0!eeAf4wCPW}crRC+( zK}`RZt92n>8~!?6)K)E;DKk0Q?pXWr!e+D)6X-VYyU@U8$zuST)|Cph9zX-&RiqA)9o#?{nT!_4PbfHv(DSESRvFm9%#u7IP;(aLkMjC@q*t2V>>t4x95t`r zE9Q1u7PAxV4n6>mYRa2W6Csv0Fp3#H``8D!2aA}|zn^p1e$`$4R(!?h`@1Yy1ei}; zS`Ven2sRBN{q>nElPNh>Mt=ltjuUvXKLB5yO2@`4@ zY3ac-Jje~+lf?R|_>!=Sa8HJc$sG1u>C4#wni_$>N;TJ38q@a+>z{hwdywtck*39# zOC|<6K-U`jo_{E+2g`A_56{h#tMj1IxKoN(bbA&o8R13?!#KA8;foP_TpPchdx_X# zsu+QYfhYLgPd8o?S}u>VE9&q1me=U`%DUV~r2Zog;m-gyhi4#5)U40^*1Oy}W(F7E|MpM>y%ek}_F$V+j223s9G0=V8fis?no?f4>MCrbK++&bdCX zqwVvGprg=?tK#g=hrEM5SjQR@)*Czn7om}?>HYqGN)OF2t!{yk zuPsg8LT}`ifVmPs4sK#r%qC_JY9vv~vHx;X)DH#j-Nj6ky!3i_NL#!a^bz0-}8jsa<|2hKm@iW*Xddu7FzCRbQEbAl;1(8vU`%)EiCQZ!oeXlcxyu!(%0eo)i3O&qb&Y4L&yzl4M zfPWdkQRkD2u3-t6T|DSf<*Skr|KWG7f^eDq`yQjtAun6cU8|!6D(+MaBK)=p9=P}i z5I^J*DmGAmuGYfR4XWODRZ`1kQKdz52J#W9Jky*@T)J)_`Y>Y#2Ear5b@_;GOerB= z6xW(6XY4}2uQR6$Gfi#)eC)6ro%umh&6W@%lq(PPFP>mxbRjvXQEOFqA=P||Kls84{{Q)qqJU;`^jKz@QvRpx(8{bRl zGrtr6n6~$NJ|f2uI~>FwWh^TNLr-c#tuX!aAv$B5N-?!Ev?A+xJ%xAOcV9MbT+scpErFI14V{hG$YV|pL9nq zNcf{e0;=lMVzrQmBCTN_b9z;sjbFJPfP-ryV^7x-fs{-P{bpDE-*mzBB_dQ9!KAu+ zve9Xw~@e-YeCiOz;8_E4588DIZ6r4AYCZzId{_dK{1w@lw=;c7w{y~Kaq zE$Q$2mk$eKXc10U5PQTlT>-#(I4GVp9JcjVohW>5IPFDxs!xhFK%9pml3mLheb6UY zh3$k!M2;+Dq(icyfWoP{DXjKg-vXfZ zKeLp3=Pyl8GK-7|4To|hv{*!v4lAsWe^qTBdExy%M45;VCb)uCWtWyanZGc6lQd$2 zq^`3(!FG-&I+UwwBNj0P#80cYWB4iR2oeo>>vYM(?yAp0=kil;Ff{$4iz*L@xC>UJ z5cWwX79mU^c|-cpA8eNV4ow0Bo!wAsXsOWxJ;(2UhucvgHx)`P{xNQ|gQy}sHMKL*lGaJq0v*FOv2@nuL9wdqgB3WnUp!W@c zNsta@q)(y>`99aJBZ#nubFI;}`bl0rCxZJYYr3tt?r9!8@O2?4?(~VNB8n*y=%}*1 z(k)6|_3GUBO(m>hH@9dpANiN#Vp52*1MGvP4zq)|JAf&48vR-OVDB|Q7*7|2cj}b7 zJssc)C6@hWJOwd3vse@1_^N^4ytN9u7+L}HXXaM_*de!(q}(h9fCEf&0Xvu0^V{T~ 
zf0K5u!Vg$K#+sQgmN%Mg+04Wj4IQQ0|HjCR7_^-lqlvsP;-(-c&2MA_uMrw|ZBN8e zJ^b1P*r!73K92eCKapQmxJ(p94+j!hmUG5G^^>}weL|J2ALH~y&;?vfzt;5RTlgju zHHiC~aLZ-n!0M*dpfrJcykuJdd@h@F`_@all=s4xcS0vcJs7mr<-B4sFdM$K(P@WK zQR0|>8v2G7P+$S+G%gu}+fTOcx|~Rb`tV&`Wg6$uvP=VT=4Z1XS}+`7Qnt^lp7-!d zw9)(!qyCO?Lt?W##O9v}cv&HJOMd()iV08jhYv%P-6!`=elW39rJ;DnmnN7YYy;q= zXDAF6nICg#PYqx+?L64TIdpmN)#foHD(odF7lu4RYs?XP1#IInE>AgttXNTOEB#rZ zmpbq?X7=}6LEG_f0N6)KprDeqD{seU_fgD>J)%R2C44ZBY_6WL>1E$M2zeG|bM_H- zpK>44kEXva%bV9$vi_UF)1cQW+Wym= z(SOm(7ET6$!+%QSHJ4M9Elo0ymcKn3`TMMw;F=5Z*yDq~k_^Zv2HHo57wbhlJES^A4d02R+@5l3(DH2Oaa@v_nb_dxFaQeC$03!7~uV((&oCiX2tSRGys2@X~y!b4T`ho>U#Wg z2R(WT9nrI$vv8WMp3YdZdfH6UlG0iwZ?~oY&1G*LTqm1jRs{P5t)`6Qq^4rL(M-D< z3DCt$K|VjfFog`{QEFkpO5#4Avi?PHP%?NR^Un!;A%zefnWB|DeW91V5u2=0O%q{{ zr$h!}5O#j!hPB&ifU*DkJpuejx#k|MlkVT_1Z2NaA}V9Gs`GWxjslB>H^q4<+gP>( zh=7X_->thZ;lG{4`%NE|LR-MNkPbPfyiXQI7VY2f04=Hb_bP|vZ!;{wyKmvc4p6Sq z4m|!u+y|u)@{1*4C;VM~R0en8!lOCuWS`;AiK{ynK{RYYQ3OUbB!%9#_ zs9hE5J3+VDc0N78A{(ky>+_+2L)?Fy|4ZDkY5wvu9}u}AAci{w_^Ud}m~}5}zrXWF zBAQZKpqD8r_7jD5G`_JUEh6USfV7m&2S|w9SRNB)wQcqA<-bu6_7h*frq*gf_h=0k zCZ)^)d=`e}rRw8IN^C)o%(w6Q|4j?Kr|yo!O8*?R-z!Pe83nIU>o{}v9gS)h-*=E$ zl3&Tw`Pzos?u&aqh^^TuIl0^nGW zRZn~ki?h!k%kDbRQ<^j5ua?TJn$u!1?3_=M+fRT(v6-`mefkV`e$!?(*y8cs3bnSe#ryFw$@D4>67C z<_nc&XfAX@57@gSSicg~?as&M+aO+qSnBt^z!S%I%azEQn)SYoJP_7_8I{?I0DGar z@2+lH4y83^Z#h4NCBsnqVtaW5A*OMWNW3)1IMi=f+M9o-f{ke%{JWkXKgaCLpi=u$|S2Sx3$e(wId68&@Om4mT7yJ{)P~1jSTUXS-u3iHl&7h zH?{egoA?h(Ek5Of0$jMwokKr!Uq%rfzxz0FT_z{-IQgSYUrx|qC$#B=(2S|Uz%q#% z`je^0&AjF;xq9tx?A^IxqZHPQbyxFv&cJMZRXaXn9t77*T-%r`0r4EKha6)4Q}EF& zngp?eg2A`EggTzq0+Zkke40N{uH-DW`Joa26eBCs#p85wJ>046_m009VnpcJhqRIn zN`1f%Ko^dKq$-?+lRgDHRTSh~lhZEx{6 z>6O4OYqLHSJ#`}dg}co4Nz?E1;qX3~(tHLOD>&b;sHmQPJ3D!iJ*T28lVM6`}iF%lkNZ8h=>PAPl@oJs&6##x5yY=!R1sE>gA18n@M zs=%!L#!e0Bjz>FVV%Ff7*t!R}vqKo<1v)Mw#U%7{HIE_d@YDf|A)}9cPA$JFX#h^? 
z)-cs|>!4PM<4>5jWX72cH;^EWJ~_}7%grPxSRP{S(yc~+SZC145+9=(ezH&H9Nafd zxoM2Gl>fedjBmB_UAGh6;@KKKGXUA%&V_-Ng+V{OK;CPuJvF(X(!%4H)43w)zf9Ng zycuVIY2Ax^_vjF0n-^h&9wm;VfH`#$(fsEWz=w4)QWrbmKACsuv2T2~F)$w|o^%u0 z#}kXp21PO$$WwIPDArP5hxR}&ATNC4k@6?FpBr5tI!sxB)DuP>n!s+Q>9ZjCV(C2^xnOD~o0c?CGH+Wk~A>h6;YQuXm% zN~&J!ct=Q1B!fQl)Eim*X++@)(T0sGX=Yl!9pS1o{NevxHET+DbrJJdd6WzG?~T6m z4&|0bl!F?1=-VqqqiO#*3UDYV7fda@sg+}=!g=N|$6a+fk&K$_r8TWZNJ)(>#}5A~ z$I6v1iXyb>7!vQ0&wk%8T;LAagXdy2;tJ)-Ld1VXvh^#VhaN|vGm|W>P~+K%qasBQ z#AA9~yRwk?2Q$Qbg;rJ7Dm9nQAf!(spv00z9s~R}EJasG!^7dDx2-@Ro~_lMYUWCP z+s@;DmKr+}mDI{4s?eAJQ4dl`-ZtS3kvEGq4AB+h; z8bf{B#?bY*za)7j?}~o(< zlpv(~T+~lS)i25W+VbsfuJv+xSC#o6A(=0!IaWq#g|k{deLw)5od*9)X!I3aEl8JN z?|->^NPH4@MJL@qT8>Vs)}HV6F6IZ4dN|InAK^Wnc#FwFXi@|2+h4cxN0$z08;~9M z+yFRf4fXF28fZzueGK(#yrHOkdpuAnHsjgvu3_ky^;U~T!%lHGe4_ouZXPyg#*1Mh zL*jKkGU1IwBK+r!aG_(!0Do;by2a>r9R4=2)+Z*ySwrwW1!Mkg9D)uv(`~8Ar!@1^ zGoo{Hm5YtfyFV*oc&1(pR64w&48>5=4${O z7Ysg*$pgXXau^)9?7|7fW(7}wVU}&v7x#4E<%@M*QYv+|3Daxl45f;e4!;k;2Q-!a z^1+oEu5%INEEKy{fjXYFRod&XhyL1duulUrkp=1ptmBB-#hB|H3uu|GHfm9Z^Y44EjC=_z3?_o0@?qsP#v}a;aoT1D)xut!%+Ks#qklPVj&fJ_wXiMc z!kzfCQl>?Ftgg%AH*?ZbFI_K+$6VtZzuXeV`2|d;Kd*ZafdViT~8p!D+YDl<;zQ7$IDY zpr`)bU$Cyzo}j7LVBmi+=3w|Wls(9!OPj2_rLFoG%WhZV!MSYBQJ&Xsuo33`4QB?T zN~-(d^%H7|m61v4xYNJ3$@}{|;M-|7ebg5n$UsDqJ8BhZDi!MH$Wj~^_s?Mf~A{ov}MZP;PV!d?TX$NiJ zdGi|EUiX5tE0^C>B>k8RXalPS9!Eg*6iJD#Y0`9VL1pclU{n1G{*Lyh4)UZ#uZGMn5Oigz- zg7?8WhG?QH&qc(rrGTOn(a(U+GlG>m?}jh!&^HIE98~IpQ0ORkF)~a~`;CM0xPvjx zdJW8%-+a}tG|{0*mn**C%Rb>ykKRg(f<9+p{KbvN#8fk|XxD5Zzu){6(TYS8*RaSK zajOBowx)jt`2C6CEu}E}o8aJrBZYk1Mor1ZfUDZ*pc=__Z7>2#a~Ppr$*uz>sb}~D zNj;p`1C(Vi(@xbg6tBXyW8Rq8VbJI|00)LsWJ(~4Pm1&e8Qt4#mU4;#`KXvY;ih-9 z4=d>3eD&`d8pB>MqTZK%#OL784-&9`&wu`NKb;LrWj(;L-)DaT0uWlY-hVFD{Y3C% z+Q^Z-CJZzf+~mJ(6Ur!(04RTA)-_gcYMrb*agZj}MgY>sMlxYpx22=6OK>_0Y970T zI_L&~kECA3B2hsVy1RA^CQ~=E-O-j31w7U*@MFob)&hg!@|HNq7Vs#)9vGCb^DA}X0)rhYXk~sj*kK3KJ4(_}Am}4BokR-k9T-p6c|71HOY4GNN8jig^ zw7j<8=x*VjDJayNX(%j{j~y#S>(b3HU})ydKUk)(j{v$5IYVxcl1k1FTBF1eo)VHm 
zhT`zSoh+;`PoD^qhqH06g>-AKL;MDy<$;Gh{5h`99g&q?rDqc`QGT#=ydmQv1Nh_) z^AuT6L_Uo-CML2gEVmbs)xcst3N*whik2XO1vm$Uix0AdDAF60Y4 z1ym@_l>Ot5GP&_PZ(hhi4}OZnH=yBOOJh3GJq`DvoQGGG$)w#@WpAne%7*iLJ5tE5+)<9r)!=l&e+ zgFxpi1-{n2=FCZ}sZKE^WWlQMyaTe!Ch)aCd|i)FnS>zH-|YJC%Q7K<3l38_b`lBx zz5O$|FQzE@ltfJl9T}8}pUHRC%C7oamDbTriL`vUC-7+iUB4G$8O<{yO^yGg+`AeZ z32}R}4XkCp$J|TaOfKY|Uk&br)7FC1+j}EFuS*qG!`&D4a+i9Lt!x^{UMfG|DzBlnpav?b=W>HQBhfz*-|s0367)pa`Xt6XeQQwVWafC(3t z*$T#hXfBBXe17f_lHYF3#n)Y;k}w-(Np}sgtw)a7loY}DF-l#J4zH}1NtEIQ8>bX4@l4$XM+Ae#$lqhN@9y^k0U7{2zJ4VHijO6D##_%Mv_vps;Gnl|{D! zJ~wiyIuoaiRB9IgyD!+8k2@CkAc~Pb2I7zmocY;d-HfOx_wGskx!5uyg2xGoFmB`f}H_j2-~G7M*HN z=bQIf@XjqAmo| z|6LJ4b0P_NZUOkjm&^saa_2GSr}O`Tqq!5ZM#a&!^x$7xA`*p4lvVD6!ZxQ6j8jsP zNym7#9A5U;J%E-7K5+*t_PC_vv~3c~0q_C9{m(3fAztN42KTh^Z~Gpc`JEf72G}`X zNvN&(hYlhcF#97{nuj-p{DM&%wYG)>7PGzy*OZ!FyliD6h*M=tcVK80-?|2C`3kdk zx7DSuB^@2XHr*}?TeA|&IhCI!)kD?kfB8L9Pzo>y`nRk8dQ9S&7P?qM+fqgl%xtU8 zlFj`P?}-9fBt+aL{7XYXoo>q5Oyc%##X-h<3X+GV`^RN!JPvZ5g_e^gY;R~3oFc)Z z*s@h2sfNR9MEb3W`AncvI5%ZE!S`IOx9(i5q%GD?!wR53&+wc74W|huY~;)W6d{}}owxKJg$tTk5U&@2$9hsKoG z_j{&rR5-GtWP{Lg8s$_b@qnCh!AbQ)Vvi^><2crfcnT%>j;2E?gVR#0oeBsXjBoIt z#9M82*=pN|!Vw)Vy^R5BfSrAE_TZU*)EyzTDck+Vo-?&Pp0!-F^Ei9}`#BybfKj=} zg@hCp53QZ`uMpXd1u7h;NL=;J2%og1ee-xW>Xa)KfOGt4#EEDyrg{_xKWBk~h)`9z zin^3dGM#-9F>px}IX`8l7ldM?4b;gB+9L zpHxG#gl)HUZr^#C5(Z-<;PNH8{1uok-!~Pn!=K#!XJxUABdgDAT*N$kulY?RD}j|m ze;0yhm&eV3{$*XaO_;E+%*r1G;2kAi|-gEdm^ z$QxstORDn*_1e(|IR3TJ8`hy(mdVgMrLnhP5SubEk@}>zv*z8FSVV3>d@CCLIlBpj z)HSf*=P`qE<5iX2!N<}+5kTl1{UAB%-0U#IY{A4BP?Y{#R-g|2RgoMb7i$Gn5G?W^ zb2G^Ol#SVoS5uF4y$1nX$jsmMQL2F&a*{I-SK>KG>?3rpG;x=$<)oq7ZfZUfVG4}A7QC#%Y-UaR;qI9dkq`@5eY&$xiK7xN`AME=Yp z_&v|feZYvP^av-K*B2pH(u^IFx|IB@1R>GWs~-EGGVI|p|VlEYk0CWqAC7y)*^&I*UW^Qy%XN2&`L_${+R_PXyJ9;|ckR|CqF#GYtR+7E-O z|I3`*rHYVK{d2-uP5u_9kV!Xy^S`ex^dSlGe(U1;j14@c-gky8S$5!^3s&!5W^8o8 zY-6pbni2{!IyI-64#*25Hn37-;U$`*(TT(ZXRbcaRL$#fC}M2m)SvsF7e}iO*?x_& 
z23a(MwzWR^(hMt{*(~4@el&9Bqwd)Ei6#?cXv;cpwwwl@@|)Z5$iX_daP&;5;U?!DaVKod%hvt~_7R18>vr8|S+rVvnh3ZNma_w!&eTUbRZrbICEjiQUJ}13E@EB4@jbY!3mD~NqWH4=_4WXg$ zIuHW)R9!?{=$+FTk`xD)KJbju(eVngU-e8Y<;~+Pbdgo*m1Qa#>TJYaLwx6o}TciK$yAk^V5M&Wt? z9GI(RB=EP+w7e>}B%u;33zhCy_{{lTk4yVu6;*ZXNQ&rJJj7mh>uMg){#cRlviX&ZQdGVN#ziq) z(y&kOp-lBm?hC*vR%jvzOR1!oJPR)F`nzbB_@ZWiLO7BKJiqkWVrjTeYm~hC*X~K# zGAb10#*k=g5{2vQH&Moq(4K)a8c6yLOn~2A1>?3XNYEa;ymr6?3cp;k(?+`V2w73_ zW3kALz=ok_bU8}RD05B5lCyd2lRmW#(~gK+DK6=zkr?$A!w znnq0|PvsnFa*#&aSpm#+`g{h^1)%jmvz)bg8GMAifruR>oPt$y=55ma@e!g=hmTT= zk$B-Et5~rcA)t#I?x$wtp-&HH=TLQFR$QR^L*C}au{U0-;{9C@IVCQszbXAMS)T;i z>7-;XRNNc~dZqkkyzGh|$(1cS6R(uku)H8=Z(#=?)!&w~$U&Fb0CP6JJ*aFSk{_W+ z3j5n%_{UgAkWAy0))8RhmUF;aOT9kc_~9fkWA!rQ zA%C%m)qIXGBSh^yLuWRYEES=Su^ODUDGzZ@Fh%D8IE%y&ZnPzISF%{Dv8~Cnf^nGq z3rxXJesk9UrQu&JE;&+f#9AKNmZ`OWbw1q~hKK$JMf~$;bmt*)u{^Ph4 zO7u|QQ7{o3v+f0C7joUI@Tf41y)P0K1rUb{#W-V;2s+c#vLSjbZm)3mRQ+Zd*j->! zdth@fi2Ra`WCLJ-rk*nsA$5EB*L3#-0zL2U^|Yc!ACBLkT1Z^K&l`=Q45uQ7T4-St z=}fl;WBbfPKtCt1#GxG$)%0Ng-j^t+&S$>eKvLVt7}xi4na{OAOMtS#8#JtpnguKv zpTdX&_#j*oFBbmT;k%4JApuI;(LJ*ESN3IhAnq%Y(D^zY^+`IH{KIok7$As6&#`30 zHU5XYu#*RNy&p(nIo%`Y_Urqc0QFmubn;HKSp&T-Lb3X3;d9mLpws&9mvn_;WEd$X zr1BPdyu2X)BhuLTm;f(0c-=PK>5XHhTBPDpfSKbWIXQrjl6Wb0h~kl6+UN^z&wAcp zWr48pkr-g%QWX(_r@|KMy33Bh-(iBdN#auBPrZTUD5nF6g~440&W?{l46 zKnae_xYC4sMuqLbcAwg5to~ayHjq3A^wNy&bJ)`QFgK&}xXhb4j0Qs`q-{gSZK+ljy0Yr=E%Yq zVUFkc3#P0Vyq4=BSjpWF!(bN|?hH`pm?aNWvbE{x7m}-1R~ts~CQJZ6Pnku%4K48S z(>T4;;;>p&zS^0!IxQSZ+<|WSa^Y5nQh{|e&=knw88b&#K=Hks?JsD+!=mio1k}PO zgi!QVNC2GC*~LDOEzQcp3lj>VG^=AX|`wVb23>C$Y-$Q>4! 
zJN!5T{fO-j_8Abb18D0H*VnjSz`l@TFuUg06sLUX$Vg!lt0#n+=VNEy>C!jq!AhcM6l$`|;1pFS4;&f=zf^=gszh_s-a zboJTMg7Me|-fW<9MOZZ32y+rqlchN$tKEk-c$quA9r9!3fm9IQ_iZJp;{y_{*!a$) zNrS{x#p2A$j-ZU02Kh03DhFu%u=SMo z;Agr-V3=XvS5d5B5XcYuuD3xtyU-o}sjRGP>t+v@A3QS{L#s%N zN4s8bpx8tt)?Di7m+7h&vayo-5U`7`V*p=$Qr3KY9_Zb*s$mT>v}@-N;FIL~yPg{3 zxTES*8_&5n&?2@ygq1SPYZ1=oln?>w7zDwLN|9V4mhP-NvqQeOz(lY*_TMvH`=A$o zLK;MAXv%lpGFMWlT=R)*QOSmXp<+xG*@P&r#N<+A?9Q^q<{fiMMQv z`!1|#DCqDj^l1h&yaPK--n~j3pG&|Kn))c>0D^K1aO7%!BE3PTxTB;|Vr$;72qR(fPBx2Q zesAroZ8^VhB(X0nj&fZ2?suSZS~kk82QlV<=8vHAQUUL_qn_wJopLgMgU67^^#Nmo z7l=VUxi%t&KTGjNAm^8Us3cpu*8%`*NdwGiujQstcus)Sb`4IL{9FObgxLZe9@^SfFcjDb8b_3 zts9a7dQYu0J#5|#XG4H`b}|Sm~rQ9vAWcJH$H=HO(1z#LN2w3xT<5JADiq; zjFESDY6IRw;jc za5duzbZF5nRmjet2O944eISX5Kx&y=SYEWpmew0rj+*SmSrrOy?JS?f^5|ps^+aY)~SS((UI24SdA@1?PV6)K&G)gjb}Y&AwA;a zfs{F$L@A{kNA3^`gl*@_+Zdg75?1`)?*e0y5TInBTOc4GXn#IJ#JE0XM#7kjx|Dtu zlvn=MK!ENq=oJgin~c{r{eVn1W<`x_q*R}DheOQSH2uLGB4-1zPlgE2>Fp*Sl{IHK z=OX$>l}3^Ny$I>u9U);3)>+W!ODCl^tn`Kk+ ztM7dQ_TX&R2S&4c>a!KvPM^`FVw9h!Le)~&`1$mcqce>PDV|s45FCqXdl92)Eb3B+ zzv;k1c&&QG@?b)erPfaB+s=6I&d`c89d@{8@)C}#?k3o0wush=Fuq_9XE0L=(@$-T zaF{lbdsE_J|Hs@lJ=Xm^?Iewz#dwr#tyZQD*`+eu?Jww=bd*(C4x;+*$y_+9PY zb7r2I&z_wf>@zooWC%JriT@s?pYuMkgx52fCtM&-sgbA>V1Q0h>W%+CxAdca*|mMa@H)}N5q5HXlH?P`PLF5tolH^Pug0&&=Fhs z9ikL^Hms5YyAqfmd@k5=?ZSQy^aZt=TG88zoUyMMQwmu{`$?^11CIZ6A1TBwmhFc- zFW%caDmQY9bwrS4q9Bm)a)5NYyKXtof3HHA0{J%V3&*NjJ8a?3F zhPHg+lNj$)7Q!Ax_Ewx4-IPZ@Q;_VQ2g1U-^eUR+NWL_13JzZB47T@c;)X7xl&Cn( zpDMt2e0uKqLFbyuQ&Nq0cD)$oubNC;I}R7eOXGKLk64FH)NxrQ;I+74;&%y5tl5-Jid-x9BB) zm#hk^u-!~TZQS!L9@MS&OUf?Ya2GvsP9~2b@zNfPi+ash?EHR{J01KabhMmq1CP3 zv=|FXe*IG>Z*eOnK(7-2zr7Lsi}_mDRwWb-AMNep80Srw_WLwC^Ns!uC4U^QC|r?{ zHs*fs=Q$6H+bM&Bu#|=}zv-S#V`4f51tq8ga3bt-?fk7SjkI%5Oa~X$!Dsz;UsuNr z&c4&DPSkCW(C6GH#OS^XjyqB5a^%7Lv(4MQf;D;y&{52bpI8AW5(uEdSrEo zVgDH`u`azghN?+_u>BNIjWNWG{)jj6#=heXvlZ{2Mck;SWOjyGX=+F@s^M1?pXbW1 zmhV|V!423qo)!PiHP(`L!OLQ0aWtA}hY5NfA%2N;ae@+lGv;wLt@d3H?5=O-(|liM 
zx6ZVZqd96Oxs`*o?#Z_H$4mYWISRnh?V=T@Vo_9^%#i6qhXw;;rD5kn^6trhG-9X$xK`3SvA7(*<6zw*^uY z0B80)O`0u6jDdlk==?fCGl(jz<>_=bIc>Mmr!{_c_km77{$m%~3{#dX$gnf{vB)U0 z{R^ST8CXXB(vZDVM8=KUx_Lt4+#p<<;cRq)gJUg#Gf(nmX6CH^V)~ zZQHmnu&p}FoH^;(6R$PK!_Qroq6SPSsm%M-_FVuR&xK?nYI!oJ^o*Q~d<=N40Y$xo zT#+Bcu1$hj?#6051e~)XX`yRLk zZRmdmutm&!AytGla_Gqj?Z0_W<=QtzsO8w?GUnVy>oRF-diOty8S`;oR>7ODzF1}d z)!K-1LYzBU0`L>#kQw)Sm!PWpyzrr)=B#qpdlbpazEiV#S{modxT@7;P!WTczQMP^ z?-=@vr;EJp79Iv74h1f03TkAgLVt}4AkR%$#s%I`r?WQlm>Yd?qAkJ}oBJ216gQHR zV;&`+kp=5_qU^l-(TgOgJ1_)mVcK)A7gnVz@5Ga1;&w}6P-0--{BF7^F`C<&9lg90 zIrGw_mo=9~^pqPd!=7R6d%N$0_tBmA){i)9{@6+ccO@%drIDn)P(<1MfczaBZlP)# z@*IFX|7(lJW`_X(bAj;>`i=$*nclPG?t3>Ae|*O#0lcPQB}R;AjFpv7^C^Ti42uJg zPJFxPQr4F?W-67L*-zOCJZ#ehfW2zSJZ!m4I5u#(tQxMPQmpwwPoL_wo7649O+{6M$ ze5IGlsxwZ7Bu~7GHr4fUC%;GJJwG0GdX9qSx{9d{B@*gowL-8ni-nudt#%y=hZUHj ze$)*|WyBJ2PEM2CW+$A{??5n1ZuP&RKLvx8-7jqnTz6n?8&o7lm!Yi^6wDl2&Lw#{ zWUyL|;RUSR<(Pjbb}pBRN|12BXH_u*QFfP$bY<}D4aeTA zO6!ZvJ}W1C!vbgZ)Gcy?P|PYZU#3zU)QG7AbLT*md#k-m5sbNcAqWw0n3xt-9kqZ8 zj8kej45-+iV|W+|eTrTh%(8EojWh3UQ*6%_H14~q35Z-TblzUWjp=C4*jvT+?%{g7 zz~CD$(IaPeTW5Y7V_pH{%1-?(zo5KrS7^gf*FQl-#Qm!!Gc>kb_NUtu)^&Ky(0F;B z`8;aToaZqJU|9-3V`7b9(&wYh>1Oy&2)#wJP=Ne=s4;OI_cu}Xj@dwN8E8k7aT3dB zgx%kc|AIVz`u?(@6bC0HV*KO74%gEbLn%Z zue3wdb(i`W0B{by{pg7uy0zWL@#>f9kF~{2-ZE1#X*#kdW?M3sNxIvIPycOOv2L-O z2EOwS(i@ulHL%7rhvmc?|J`)GdJzHU^_aLw_&RuQaoIqudA@gguZ(uE^$Zf96-T1~ zt|Y!YOr^pT%4wCCYA$#?ns^*0E&g$QPlitodH+9^E2 zwll!TUs}&k3v;8p(x0pJnE6J_RiMv$|D?Y8+B(h(lUJRX`Xg5|?z{+MDr9P`n$~He zxdQx9=lN}vzLdP*KU%5wyq=i6393jI6RH?r56SHCM-I;Kg?l2Gum|15sT_z-5n~I4 z3Mb64XQjmRtNdD)?(@DC3jjZ4u|@N0hYzQ0MG)`a{A^+A#x-E@iwCwpqj6i&L)jhQ-ER zOi6WXi!U<7J@gA!m0i{FlU#=9Bu9g>UUYf;ioyc+h#+H^p#~iAf7!BnWrp0gYV0Q( zi-B~;$Ni(%)1t7eUnz^tQZdp|SpAOL1DhmjbbZ|R+JEWYt<|IMHJZbP>!Z=_XFbvP z!A@&{JabpA14FHB*&L=?2NLVFo$xBy&t6!y1r*(tO|Z?MT*q^h>BAsd+x#RjCvLc8 z#U>}p5C;rhFM74!;xrK9!WRIX?LuS-@8g?oIq~nU^c-1EIQ@oPi|Pd4zF~$nnzMwC z`vhHNqyu?I&fF79i!vQ$X*!-R6a!&?gyb}j;a}ZhQve*wEm@{ZDjJF+Xc4%wz>K;b 
z0@k80tHuvPbXnaF9Ju?c;-q&=ReB#QI^K1!=I$SjYqThQ21uw12vHC4#3vfSxM@hd zoee=Ktur~;_W6p*(uaU^l`)UZnSx1oty95Ko}G(G7GIH~>YFYM_El0|CzT;ie$ES% zgOV5$i{9rp0yyx0x#B!`sRl2#eTPyW4lQ^VLgE3D#qZ=E#6#8`e4F@a0V&L|aR2)= z=`h8_Iu^1?$_Tq^BbJx+pVv@HlLV`Ij6Q%~#8qb5EIhxX+O!TGMsFwsNOoQHG=E0y zT02a_wcv%BHa%n`C>+|!6sLs4l9?ci70$23^XlVnMG+dPziccP1LG8cTmR22CJfD} zqY}7gzxR4DP(>k*`*6y_ir}9%!=F3$HJbib(Qgz6&M_}%vbBwk`tuS9tUS~hVZAZ& zSU7tnF*z@?R7(f`^cVmI=#;;OTHX?4ys4s>ZuTK~*r3`S;S7#zBST{TwVgP zw@WamLgUe4wP3sOl6&*_w|LN9XSZ3`97f3E6fr;X6PUwmOkv^Ii?cL z8v%Uh(Gi7lPx_eG^-Aj%-JgTHc2j10eQ0bo;opp0)id&)m^m}mF0wu@tO?QITQyWrjSZ&?%uJP2j&^=f*uc}J9{^?(p0JY=H8h_ z?Nngo$mzuJf~MGc_?@j^4TIAbTXdou` z!gX4sx;QAv-RlHHV2PyTixb8bQTSZrSANLu3wc}^?%>8dH>9do2z)Qw0LRm>o7?W6 zS%v~vYq?=`2=;xwEQbHh($xEpU2i0wiyfx{73st`spq6`G$6u8E#q{lJe$7p`1&U$ z+rItN1pq%(G=1k`z_R0DaB>L;tkOspv`1NBclh31P4iv^xsy(R( z4Jv-kv0d~HDeh~l$~Ly_n?`aqk6EUEou|_;hiac1;Kza{cB(UT+LM zhLX?Knu{6CK3ZspRZOum=BaCpcIozUUW$5O2ozndI+(NGd~{feOU!b!+H;Ym9tPMC z0>heoniaT%)9XS*8nO|+{TOKJS}i=aBizpR6N@U)%wVSap>Wp3+E|EM&1-c?F6vho zQzK;soQKu^D#1?W6c2zO19if$=(zl}( z2Mzx&SqtC$@rjWFNpA*Ro|GA472R0-tFpOG$Vqg1`6isHk`|ZFnR!ZJ-SR6BZ;hHV z9a#FZ=yP*91=DolZT z?pLkJ0F562d#UkN9at|2-q}m@YTf8ecy(*sH1PIuigVB?i-N>pU$ti2z#V zTpY4#n#23y1y8jR<8#VMIxF&VTP>sOJj?#X&5@VJZ7Y3>_))rMaxCfi;-A(IU(NM{ z3IL}wzFfr$Pqn7S)91ba%qMV|wbHj*F2C4idH&x15$4TucI1lrS|u-e3MJ`5o1kXa zJtuXu^aknc*QMJvJ?R7kfS=(N+qBVjwnUAsUZ)8du9ls*;`u~J`~0P?D11$Fsmp0;2>hv6E%?~h4E^y~W^*f@7c>LvF-5u#dF7lJZ-O>H6J5xDgz;z7%hsFE_;e0k8 zwf2~Z`jy9DyAb@{WnGPj#Z%0BhJLCb9N4FDt1E!ZtNrTx&mH9KZik77ZL$isC4kcz znF$+|vP+SO|1&`M+IE3{nQHHZR%Z9En<2m@l#Tng!RHb4x*_@|9H5t#m~rXsdZ0o? 
z(_?6Zj%kdSfFd^H%&($M7?l-uY&qh}d9Hyi<9B*#3UIH&Th(mTeV5J(LjA3u)l;^_ zWc32|032NWsuKIziC)O=S~$W?IYD zffPKP<0Sr0euCRNHo~U0a?=i+_nuVar_O`+&|yBB{jR}!tshPEed53AbAox5ZD=R3 zhedGK_8h{pU`~73d+YR9CN}8(mP8()1)aBBY5sD-35)qkrRXLfyWv-)x2X(0%MIdljkfJ0svd5PnyHx8;$0gK{Qqh% zek+1=>q8g!TsF4nr>IsWM?#z-&=(l73;opFdtEkUIEZ{uXWj_-3!Sq!DVu^=5VoWj zj!QOCqaF&<;&TApR~e@>70P~*XFKJ7Vg?0ayI`i}OowSUs}Obzr4F6?b@4A3B^oPb zz*V_M|9H7~wcDN)BtzV+W0MRkkWe6wMbZWM&njpXv~VuAQo4~1BDTX=#WqOZMN~)h zB&@tj(SH}H8;8bnt>B2L3M#!8-#V%&_yt@RGfNy@UqQ&vN5Z+o{{LOz42szzHnL9m zT9$JsHB}B?rFO^&4Fk0o*dF?6HdSI2&UXj!o3fRgxCtmqiJF~3{%qbpkh@yh19~HI z&d4I2Mu0r)rp=z9Dq!0`MP(b}k`~VgL6UH`tP`wk7d)f*vTO0UW9dG?xkZ z^Bf44(P=!}=bW=$wSI)zusOIH)Kr!r)0}&ADV)apPZE$`rEgJuCqGj9)=^3`A67t9aBOl zJzc=fvlJZO37U20_XP{yeSxopc?>bOw; z-QJm!89%~L+cssqLC~E0Jppbml}0*y+Jo#LdS5UOnPs3&KozcBM1)}T0N|(M+i}4F z53fRWt{l?nx;*L>rNn1|>bhEE8HcQ9>}xrV*6R>2W*21g6_GM8^4(k~=#1CXr(J4f zbhwSMjf+|#0LMG)?+;ZjN^DwkBO1vkg0>WUQmi(*c*P`zdS=^EeY08-Oirt5$V~~- zQx5iA2=P^Fs~&^EYTJkmb~vXQ1(`qqPN%~*Gch;DAtxW#n8ftIzrFl_z}oohbXcyu z@z63OV`_8BVL5#@SqjulF8<{7gJxh}hu3~bu-bhepzZ`+F$CtDlt-HpdX0JBUErDU zehu;Z`$?}j8rlUr#85WdmiJ(Fdx@WwBnA#A6Rt5Ob!kq;R&$xOAnURg4CZbLuTY}I zN&);3hBPge1{}RuYR*0NU|lLHc28qdiVFpjOaFdV z#>!2t6uogb%PsM!m*9OebsBOICN%i1ZrxrB4@EGyz`s4 ztu61--1d}5yNHqpbG}0GS_`w&U3V^W3Sj5)%xh7V#Uo|0S++lxjBx8 zWu_{lPdJ_O{3`jF~)9<`|ko?21Q5>4rT_@e4SYmxg<-|5Srsl?Dh7 zk%(U@v6u)xzEdc(l2^g}SuLpamh_5w@&Vx7=KVv{+;lgmE=#rQ1oJ*j8OCx$|4sB* zM^ta9fZ1%s455b=JaLyVD}rQNspnJri6i{8^CiuWs~jxmV(ZY z)U&LnLpUKtP1h5%Q-zkVIfFf_&mtu6#N1m9qK;LuRXKm}$P5F~8IFq4GL008vmzb+ z$W0_*e0o2IA+k1a`)P21E|nvN^i4|W;pT@lQ*q>%7Mxc#?Xlk#rZ!#CXQX!jYv}54 z1i#>!e7-?EVzrU%Mb5^^qXZ8R{=Vr7umE+Z%R=d||$begKMm8tC zJyeOvUmr4c3~y_$!xqNPGt0&^)i#AhIHVI(txN z8WuN4cci5t5(Gl1h_H@t&}-l*Yt_{!c2Y`6{4%d*1N#?{s!{KEAmGNwLyu`N0KGb5 zGT8s#M>;>0(7727wn#@t&a?#0q-A-^MxnPLyfHqI@p0qe2DQwZ4!*SFxnsio8Zs7l z`^XC=#n?0`x<=du;1rVUeCvjTczaK}Vz+}rqrnnZVmvQP{zPWH$R7V)b5Sb>7v@_OYG^N zz%*ELPlhA36Hx_^lCBI4>X!St$i32dOmI@0p)ca+^_FYGcCUrWw;X^xJquiC6Sp^v 
zI@tFkb`|PnsFclH+gtDWUh3h}J-h8b&KGnDML4Ra^_1UWElDooi2Tvv`*FR~b}S?5 z1##?YfcyMJpW~pczpC)x18QIu?o}~NsyUDv)9#4iV5=wNKu9JM+cmC-RBl!7r zas%Ck+d~;bp_d?L2iy-Gs*qO~S%yC>%vE1`pDpOCBm*&{uOS~tNa--PJvLI+YByNh zZC>R1F9UsB$vqD2q;}O4ggkmK(jO$b-=J2Y0d_BrRd&D9zW7t(%g~@`AHxg&apEcg zXS+WX3F>$9uTF|>&wQ6YiIk3USlN&J+oP!OGCrat4i7$e-dDg6OQs=U-l`&1{585^ z<}D*BhC88%VI7t^rU5di@I`FV8l3B-2e~U*5GG$FtZW>qzc3AR$~|*rSljAy#-P?G zA*i*5Ik*fUk47Xm8a8guFAJB(Hg;M{7=6oCkCeuma;26ds#m|V#%`Vr#{w<7b_^7A zjsRlH+W1h-guhLfQF30^j@>;a3=sgH1*+Lv_-DIB0<^(msZF_I^Ytq?T_COaRW7}0 zlEjA41~T6c5LdY{Fj$>Ad~-o zInCpT*0CYiy!e5PJaBS9ppg?a2Pb0|t1}PgKMIWjaO`HBAu!LGz-RAUDI1k1UAs|7 z^Fhs-^2c;=dKXiuh)|c0vu&}%E2~3AP0`+qNHYaPPr385^wygO6FE2++<@neAoYaadR|Yt&`PC zQ#3X;=u%$Cp^041C_AOF3J$XJlW_1gAfSdm2yEM0^f4Zex*bMr->-VA|qZ~oY&YBcPL<%ZNhH)E+=Kf#+ zw&iLZ^>kOkn_xt?K4MOx9tOav)0+neL78X~A!nC%DK!{Og~ zLM;EIZ}ZUXh-zn;VBhTdE1jw=|E?d->67EX2@mnP1puC*USoc5aIG^_r=QLlCNR`P zp?5~6X)o0jIH<~4g^pYhPPCmFvXu*JCDPHN@Dk#AeFb~Ox1@WQjqi3#dz~Hw^A~e{ z7j26b_-{=@Zg#iy#79>4P3OaEF+oP<%P<%$~G0CmJ6L9HTyF2L(`*(nOfw-7FMr zHbiiw2-B?{XB&qd1A%+>aJ1sw%dKJ7gCA$oO)(3m*dhU{jae9G{lNcI#V2j4Td0av zy#a2oy{D$G8pxl9NVC&8IWxP3AawgN)p)NE>FJ0B+;thb)~Cc(=!5&68%GaGb<|_x zZh~1K=>U5P2%lW#5asKkoyWng?`W zKzpzK{UykBUTFL1I^t`M5S7l$WeZjK#OSi0M>fvH*AbaF$2Mjv0D39WzO0Uh-@UJo zBl@Qqj<=dqkfFDt4C1F<4)fX2znn|aR?Oy*1jTD>R_Bxzm%2ugagFL9bBhj)mEq>K zUCXrqaMU%DoUlLlh*&(PSy*e)C0pg=&j%9tu-3!^Xda`W5s^H>*VU;>U;3~>&svPc z|CEyb7eP>mG0IPKgdCS-thDbk;EGET)5<1_P-tJ;l9hENFIm)p^R3+M2 zvlbRL-A?c|Mv|MpM*vQ;X%ZJTv_Lk~Y9|;j(#onGZFhJ7 zN&P|iEmsoMt;1QV&jRJTG}1!mdfaH!ljb*g0FGF7=zjW{MkFTRSF@OlpTmq%k>IGD zt@OrmB`VL4uvyP$kK(Zh^;O>|A2}q!@F#0l<>}ubOm8VsZL=#{ssX_J4RTjfN%VPI zmBVI=7kU@8&P0hkInkb)zuQxyCM$ueopR5? zPD;wCh=F~hG!TE*a*){um#p3T-!i z>VP`8ZM!qGq;%#zOTo@;Ue9{RzhU%Ga!Zkf35fy9-FSwQ7yy2Jm>*OB=8oq z3xLCTwGHdI4;~m@2g|J>f&*q2kWff>QaCRsgFui2f#mL#Pn))PE$7{%^_N*fD>yJ9 z-ekGt)N*}6O9G{|>qb)*fHRW+oU2^ffN)>+fUycE)>sLKP(vr>07*MIv}ee+VFO9? 
z@Rwzt$NZ+p?s*AOjWl~AJ2r%67)kV+d{KZI?i|=pbo<)e<6U5UVkQI6Y>Dci@A2f$ zN^X7)YIGjG4&#olF0}rf3yxdpQyO*6r%05{5eNcE<*3Ra5NE^+COh`>QLDuWGplWEtm)R8n`$0vCqb(bOIoCQMjLnln$IH)1*-9kmR$n|y?bO6O4yL%|G=|fQdRNo{S zL0jfnE~|{!U&G50Z8s%hUf}Xo#denM037$g3+)E>HBf!2FKViI%rV#+R|`n(kkGS@ zBL1@vGg>j+%$E~%-om1WgoeYvE}-U1*R|o-+Q|bnT0ddfk3pC zaZzVTVdRs4j&@=~zw>#^&9r?TPh1p8C9x&Q>V8SsZlh z+GRxm&JpX_14nOLL)VX2?Y}$-Fgq?eHS?;HaW9W>`RA*gI%E+N1`inO3M=y5Zz=99W`h2xxl(YU|N`Xe8_)^*MAd#s>5k^8i!Eq(9V8 zL)IR&F${SpchfpxK9|stZI^h79joXvrex&NESM4Pv-AnAe~?EGM?98ONnFx1&jj{X z7I)tagZ(2H32mfJWTc+RUZk`S-jUuJUmnx*9JsK3@cXXK z>H?IAb%>8O8~rZyGUVw6*&CY7;`=+8394Post+g3hsVdtKF?J~x;ucMRVS}T==I@z z1UeirMaiO*!U-R!I5Z=lI6s0o`W+-oWiIX0%8dYh*aU3nI)}Y6nGVQ*!XaXHs*kkYF&`ay@>A^T!aFaJpgLBj+LjP9_WI^!HSKz4_VrP@%NN zc=|(z<^ zmW}wyu*Pb|w}*^4z&?6_SpPE%(MtetdYOZ;+Q$BW^ESnH+0O_DTEKn(#b1=@YjSrL$+3e3=g=@urdk1J@ANZ;K$avf#!J++2Iakj+ zsx-26vc}I#suYfu>(3Ted^gj%Z0=wkdRcz}1|>N8T9RGwSDm(;!lb+FyHZ^Wa35>% zGBGrU=T?Gl8cN#fA(t-xy=s=U#W%I+aAue_9XFllu2-kwnng9EW%^Z2eS;rO1v9-g z3_F{ollLn$E$^2-K%Q0J-z_9C6IjG}9~8F&u8U*q0mvHnbHxJ@TV&Pu2mZQdhWzV4 z%Vfm#N7!9VrRxk7?^21+RVA-kc|K>v(Sm_>!UETD{ERj@air0pX#9fctsL+IMwA(2 z*G#aOsOsJ#Ng(?cDe*nBCk|XyGPlrKN6V+0=!=~n`hfE@huH!#EJ_8w zG8ramM=npqA$xnn@EXrMS^k3GX&L!i!yb$fUW9hKhWDe@h#>PLb%eo!U^-%nUgylB zb`qJYxdZlj_?NnhO|8z$Nhl>I{5-~{m$}8vxqSq$Tqh35P;gv_I%u1=LETl<<9d;) za$}iKN(#`Z`=aQMr)~>kII_qRV1Jyt_3bK3ObnQ$d7w%ejr~{EQYUp)hWF1qOhS@2 zRC1}xl8cg#;`pGnKm>JKo!{v6jGyiz+dj?#lcIfCW4IH*IEATDk!LmG?FK0^^}`?n z{jX`%H_6>%2%))uP_>w=mW*x+lCuIo*dZ!qmfNlxdYX`jYBV4H?->c>o~07o-PM8h z68e;FBGb3Dd(6~bt>E$f`b4FKH#-X|6Xb{CsREuGQuGQDx<#@C{$y?bW zv-J^Nky8&1v+Qz3v&oT#mw+Y9ExU;N657lriT}G>TMa#h`=DAA_9?h@iDlt2a?yX)d6ylw6oZhFd-&2>VBs?veFjiV2oPHLrD zBgZ?Ek1~+SZ$8G(ocB`?Nun8Cg|;pMaBkQTH)M%uduP&>Tfh$dFmp5^U}&76k;z~5 z(^MsTl1kmK|9dC+R2C+HS-}yxT`j&n1kI?nCrvA??Fahr5It=G&sJ+5`1`UWA{{F3 z1)5!isMWtbHlJWc0!j$S5b~Lyr5d_l-P>(H zostTjG)&7sB4aN}{3{W;8w>Kc_^%h(Aw7uf@?P(R8-QbmZDmEKEg$3$@)Jbq*8x@L z2=7esaK5dGltEJucTXYV+C4KXWN6`ZSp6cb03tsIL+5a0{a+_ZsTV`?4y%bt06#)^ 
zXSKH4JNsT4&QSBsRictkpxx5hAGJ6vsjS#JN;8>7<};ftU8msgoVUiz8|6(>j0Cnr zI1@g^3s7Nx`J%2sc}fgN18sR1nc?qQ$Nb87X434xtyzJ5{#%<(0zsCYN-&siEK<21 z(Qazf2^g#}g3fl4B}RQ!6Xw^ax>)`$51cO{!%RA50k;t_Vz%zdgAlP?j@ui!@!u1q zlpzxT>R zw^$gb{EH!V5icHbVMwfY-gIO0&W;fqmtb!VseAA%$erB8JF<>KiGGN*A9>nWLr#Hy z!0xoD0)S)hZS?JtyfN4jMVpyxOgelE9{(eOrrFSe6r*6PDl+UY@~Z?5#s}`ZeabP2@lK{+KH>hbpLlBVPEAE z%43$1x;3q_24gJ?)WgL*V_TGOp)LF_Ho1~ACH`8J~fpOe! z0AwFJ!S=G|yp;fzx1v(T!ua)@bFt6AHD!3^$pz64n&uzZDS8GIbVIMP9UpbUjke3! z1RoyG6c!>_-_+QF{@h)7>;U5s*8kH{gB$+e!*;1B`QOIc*%@yQndAlrGo@R0%o__P z#PSuk!amIf2oLQ8%_%4(x$o;_>L9keJ4V3&(T@PT=d)9k2;EV4x9Tn)O`_Zw~A|}bzO=ruV44$csp`P>8CMQr4s;urq)lj6|iM> z(>RACqmdwbx*;egjG!5qOB(&tvu?4T{xePIKvXE~@{C%i$2(C7)1l>WUQl(8%rXim zO7pBi0&t!c!2JI+i)v?C&QiI~zlryeGR%{E>VFHc#otAXnCnq6=`EAJcOlHKFfhAD zgvQ(O`95Z@4CHrS!8};oCkUGHVWPc%*8=CEp{rZU=Ijs}P1^FXJh3@LZ_e;6bt-i~ zH%~=tC+SvYSlLfaI?taTpA|KeVK)YmLx|Gd?dN~KfR-IDfIRmJ+X5IVwJ@3Ca(OzX z3d57dL3Vi5t56<}+?~9C+?aN?t}6)b`kJ-iQ}U1QNijiv_rM%+$;(~~d_A7!5p^_u z*par11K7)!PJqzliPk}Db$xn^m$d)PWjG^P`mB72bnu>yHJWvR4|(&sv#>`tsvF5z zI(xBzuU%uy{6|?cr^REs*gPz7o=ls?gPVQ*A*XG5-bjL5^O^M2i`|wMVy4z?h{cJA zP0%wDF54#yX6n&;j_ARg)Lk7kY|&xEAd9zq4|)~dyC<;jeG%qq09VKi&+je8h@(GF zI?@2IczjI{Or~<_6f`zIoJWowiD7q)?fbJV+w3<{g8{=couF3hBkrGwi=v1_a=>%= zHvGg32fRG8zztjbpbT5ba)mZ5`9G7_2}=g^T)W=rhKhd}V`7;)dCqhe3sn#nEFrf8 z|1~f0MJ66W=(a#hoB;UA9_2?{SPM#1NNj2JivFcEY?AYKo8I8RI+r(hWh7_A=gF}g zCrQJNVEm5|2e%Vn^t+mja{ZZkzm2TOhaw{N48UG5!^FnO>*!)n<80lBs|LLVmgOVZ zH7<#_4CorDJ}mqPopdD3)@GX5i9&d2NmwZV{cJ0L&0)#M9=q6^mFdnY0Ozm+!&Z`v zuOXmGqmB(%M0nXBMjuq@bKF0<{%~2=(?AiHQjITgSB|#J@HdPu?08s)hMP6g9W)c( zI*mmf1#traXO?R)BicPVJ#6}gMl0UKQvu%gzb19%ek~nqt#uP*RY%i~TmtSO34tV1 z{Kz76|BVR-F{Uw6nH~+u{l5z%g24K*ksnL$(&>D^8IzgES*kG@8cf4z=XlpGVkHiz z2}@H)nXCJR6RJDe^!+J{5LPyHnQP0MdtBwB!?JlF?#I>8xd8G!bIkTNDL5)(K|;jn zyx~IK?xE*UWMKNgF&$He%zx6(vEx#`2AYMr*b1Z<+2cfDC z%nw?T3JXY&3H-1@81(*wB5mPn7o=QQM2F=c53x9TLFVmk&l%LwUSo4M`}0T3R@c>D zObmhUl0pJUw1)Kt(T51Io(=Z|j#*fPA>@u(EwH*|Ih*n3m7~ow5Rb>PT7mj48^n{Y 
zavoHP5N23C={gmX-GQcX_6>!8Y4uqDOYrV9^}Y(=Kc`cZ;wyWz?>{WrHw~$0sB>A~`G|}|SCR6@pF7ub%@h+>-^($mO&RSLxrGPib2rHlTQqqw7<%>53mCwb2$VY)-u%NLWt z>@~NP_xa}z>QfJEf2u(D5%N0*pTr@{f@y%@Zc_F|_Aaz!eT5AvT7*{f&m`P5-K;*R zpe3*$gv*TTdD9@nZu^%d1w^fAFJ%F~YmUH4@4O?`eBa}pZyE9(%z8+HQP-(@E)Gbu zWe{e-qvEEd05oIQPGP+Q3N647Pi*l)lj6@s$pW{DMD~#QxzXDkcYDm^{hIMpnCQ{Dz|1R7I(iDQIzGS<&aKxm`hN^lL9=A(MvEq?tTUd3rZUTo=g;5BE72iJ_`0JZW1R_a zQrzC|_xNV?4EoER0GtSntK|B^vlG?U6SKy%GTv?eJ_fyW2>{tp3o5F&9wnI;CzW493$4B z_dl0U`pVQ*J_0L-BPmxL%csTgCns6GM>U@3X$~we(h#&tv?59PbKQGPDkFvZwA(_) zK~wt|p&DHhJ76CilTO^;ml*fes;o3|iu9 zhS$PxW$%Ick5{%o&xijcO?_Ab!JoB|BMdgB-|wjY(T_<3z~N8#dh4;~KUo)?jZ3%S zt0b{P%S-l#7 z^S`#Z^%NGRjnXq`A+H+^^ttEuB*XOPa11s49qej7X?IFe^<{-2f8tODQ*oX#@`}`r zXru=jCyry6zLLi3_rYeZ1Nfn(!NB2n=L@$*7mq&eQWlG`6XJgDs#q>gpXT(@!l;+~ zn4eI6yYBxYv$WZeDk%SJf*_O1$8@0}rcV4X4^zYq%sV(t9VPw4bw^>-9yarr?0%ff zz=bAi<;9KXum&5{mt;Xj%~sCD8QF)rrugBzr89cIu^$qRs#{*x!c|xoc5(!qd-*9y zWLF}n{1l2&hXH+?hMvt0*Ulf?MC7$CkbRC+6xch!%<*5R0JxnkZB3KFXUz`e6%r{s z57`mMEP2xQFs%x}P}~$n8#C|=)U(+4nh(QUd|Qzbm0CSo77^};90J<7^EZd;ad3}+ z;B0AQXb;G2TkV&bH8h5rR|*snqwKgBw1D;VaQd4XaT^zM9$fpYwa6d4EH-zTaB~h_ zdgSvJL_fjM-wcf3MUM(BL_5jg^2Yi8_R%K4B+z4XqlQzsH4`}^0ps`YT)w{KqFQgM zL%xp*Pfhx+6saKb*lrOA9`KG7;8JD&=1F<6;2x@CihldZTRu_Ar zSdRF!JbZgM;?cEOK8E_YF-PFJj#TF)rqr^Hj1aWNyuc?TNPe|tv@=?|_!A** zcsg}6&-!96Sj^6xUY{QRUZ*h0wv5l^9PaL>1=TjpE9G(G6<@r3dVfu0SFsupWSF zWEZT#e^T^;x{M8>RHMSc_0P7n2;60yT2^YlKDPW|8m8IL7GtH3(gWNhe|?!n$fA}X z`2md~a{j80_xLPC0K5NQ6LM1pwH@tc?*jL2H4+X9k(qFulYwa4)hhUlw(cce>{{aG z{KP=I<{Q7w0~f<9g;!rzs9q|7paL)CzY+|Ln^ancj#ytgRjaE5@QTYp?O*EabF;e1 zwaIc*zw7o$Y*(wc9mcb+A%ENs97~*wg+?J1E_vVYVpVtrd#O9nkf;H^uK=2`!WecQ z;!(|T^EaQ;pEcgRhz(W7jrr1nmvIru3$(&p)-WODgxe=et7d;7Tthw1zsvXgboH$Y zWnq}cm8AppatsxZc%94dM1QzM(_=xG;ZSAPVW<)8sNX_UZYG^+%{&krM)amHI5gFw9#JPXeUwkUK$&QyvQO%9#f}t0w88rPFYHE>rls@?d z?i;!90DJgx`{q5z2IVCTz~LaWkj+4-=cRI!A}tbq;9OnS{=fFV1Rmc$feLx(fgAoZBb)$?{JNF5#t|Gbn| zw`}B)kwAQ%_3E{e#aWq8Gd;iN>sJ$YZt1z$A46|?)MT2}tr>-8JId!jed}7AIia}k 
zhQ!t_#a~0B*BFIO{POi@y~&9tl85ujYSPg~6@iVi*8BaY5At^s{jOGId?NFq#iFy@ zX6qGoZq`bpJbuD&E@#f{PRxQ_DQ^J_SZQ^KUu99Uiy`P`rT#Jr$cf9 zQ;oYKReqSXb5LP|_Dgf?8UfBh*k?Bb@c z^1S)euX`VAdG;zvKvR6k%iz-Pw}mPD6Xwh`jGcSr>E`{; z*B{^T_xb2KVbYHaj8Ag<8q?Aj|H>SCv@zOUHdyS5URgWE$)UbFm-GLGI>h!`3uYZW zCqVghWRUuCVOiAW!}g#2bdMJJ&->y=-zSl()KD}rZPygNET?Vp!=kT+D<>^9srt}a z^hK>B-saA1qTkJAGS-M_h|C;TR{!Yp$=!|D+w<1Rv-_fhjG|^7Ydz+0^C{I}z3gU+ zt!EFt@m}*P5gqxDcGB+Kh8Hf@J$p#|TrOd!)A2D+LQ`{V{Gie1`R$iKJ9Mire9Ze| zZPr}B`Ar_oO<$;sLxL;sZ&4C4YNxm?7x=n&<$9hjOKXQoFQt6*&t0l0T2*o#V)XciqW61)KNfv&37i#j92Bxfe;kNXA zhFaN%@+Y%ralWTXnEQ``{aY#mng**T9xbIOignBq7F^+epLzYAjf}YQ#lbwg9IMpwTKMKmLPpG1(G6}jd`gT1!OixbWiQKBKBP4UtD@dI^Qf)iMFmj&h1j!4-4fAH^pv)l_aQ+$Z2ZPZvkE-=%gwuZ+6bVxzF;*FBRTI;E4d?(iyg@^F4HE-1IC z(TP8>N8Nhq?J3QhlVc?Gbx;T=o%uQRKy_GqrX<)8UcReDL$@xvNb)y3*PksM|(|9-Xe7Wkjxw5(qU{ezxvI zc6Q;5N!Py|nlq>)+om5kxg+&M>pH)byP846wb!WENRL+T zx7!>aTWY>d`*l>ukKU5IjZc1>#k{gyyJhW_z+yXH)ztg7W*$^|;hYQWx4+LJ>~!`f zcZg*i(!5JA(|qb%^8R-Fw0z-B54%pv>{RSBv%2+d>-2yJ znLpA->20dl%KSeSaem)#_nX9p?=LqvuB5BHu#8>`d;M9{>#Ksp4=ZbB)ii%TZo1iA z;I#9;a3wvF4XoO4d-xk=RsSZ!5-N;8!rx3WL=i5J04q*eA1X=Sw++< z=%bNJTR7TXuxpOd!Xu`&pLm3)-F~ifO8n&N#7FL>wzFt~?MZ5b4>zs6pt@JWo2Qhw zs7~6~c6Z2gowD`G4zsoqc4X3beLsFMFRMch(XLeJSRV8ryxq3vyYGv0#b$c(FMTT? 
zf6#jQv#GLV#OWpNkdDD?fA8cn%usSNe|G4#!Lhs7Q8Zp7yitF^>nhN_*QP5j)Mu2l} zbM;D~j-t!E(|Qe84W%SjP;JhysCi=+f8;~()r``aX*Hi~R|N~S1Ox}Y>L@zBQ-8QN z`JKp<1hvp}rIigKM>@aVBg*4EOY{;?j7*vQbn357kE1_t7+9+)GH}5lP~p+-C)Z!b zQuCIt`zUlsyFL4Mpz`HQE)lv7&QB@jxqPeqcd8n>N^|^ZPF)A4zAQWz6`DJ^MDo_e zm@V=fwt8!a=FL!b?YR`_z2f!4ZxKemWx_A2kMCMgIpO#tm*#zq_bq*QmRgn1ICLtT zbB-;pt4D%|>%`$~=fG`JkEzAqc2B+|@F3&a#YERi+a|a4eDlJWp1L73?i%d5w*9RB z*=##OXS0J5K{M*l1szmd`;F)?%hZ*FqtExwysT;7pxI}qY;enVVantLgCUdZZMK4P z^DF0(>o%1a&AxiFbx7*ROqNtzghEi&z$mq3w&eEGrU^8{PRfc-r*g{z$<1GqM<2Ch z4x!gp`Y03qet~C|ZqimEQ_Dw6i+4B6 zQr7xhESYm>(c*NPcBJi@B?0Uy2qw|Jtm|+0UO>_S|S5tKWEB;z;g- zE`C400zQGFd^rWu)v4V}_^Ljg6n{|j?$)^$%ezLFf`;N^U2V48jCYIpjyxGY=)Zh+ zEvH`BUw>+Te4@d!sGHTMddj;c=271<^(YgtDMT6!~M;n^aKC85g= z8?{bN+pTzcewc7-)zct1FWzXPUjJ!u>je4XjRt4cEp=TNy_95b@%`w%c`&rm$@+UU zb+llc#~~m8?rRjCD?-bxRy?eJRrIwsS*4u6`DNkTGJUd9>bGuU{4DWa%1oFu@dLTA zO49XUp{=9RCh6DL1k}39!c~m4wU)HF1WKrq4mtSKc;i$#fMTp_04a;8MiLkcG}qH%$}=LUIzy#i&E6R zvjXfCM`!pwESerz;dWfReU;dW8;_>kP+AdVA$Q(uL#eh&e^-SG$Dcdm&CqSYJQSx%ra3{nO?86$jlN55C*2`NzR8S2lGd z{4k8!*fK=4kNk7{!S9p@er?^^Ti;(2Q~kzzWLxp=RnU5y^pL`-R&S&}ml`c{eqZ-= z#9zgI`txuvvwLms!=4Ro8E2$V?RT^MO4y;xNbkwBo;+{ivbK8#Yppf1n5~& ztC#jXIXidyJuEo4+%Cp&1}4%&rYm(>#JpyA*>KwUha;$Go^z zkUy!ge0BHS=Z05@*N{yYd^vo_`J|r9m-Xr+G#z!W`@Vw0m&&)YrpVzhTkk0hpmKtt7IZ)D<5|0B-8}zfrK)R8p^eX}kq?#M&oy;>n(u0olD+oY96dF48bjo|U@x8zn}pgEsFC-a_wwVYSh6*@44*yklJFbS=m9u`Z$i;iE6@y|&tOYX`TRF0ixY{7$LVNiKf# zysooKskvC+`dL_vA9KZlKHke8Gy~Rmd?E+S&#hIUn5f@7oo2AI;8VqUQ_lD7j1)II zJ}`EBdq(^FS=U?n%h!KBv?a-6vufaUQ=5IvX)j;QNQzNwci;NDdV^)_e7@RQf7DHr zT2Z0)#APE>%WQKj!r&hw4->%}<67v`N@ zUET9;R!NKXBgW!6x{rcpjtD&5P>|mCX2Q?7_2Oj3u%Gna-aa#9*QoUI*XvKEb)W>f0qKjQuhyS=QbjW4sLurh^?_7-J(-+ux=75h-o}z%v{Jqf+5;vG_calq+J`fw z-l+GmrkqooIXT_ZhVN14qgvskuCY;P755t&rmQ*N=E&5Pe0S6tH zyi%6C5IrzSWumIM!2yG{3O4aH-oZuRbmH?bTD%fWV=49)haB&ZU+=WhmofhV+EPuF zhj}eiH1w^;(#IjnuA|RpdPEOtSScEddU%LOtWO#`sr26UMDJJenK{3#6p|9&d8>GR zdEo!uXI^k*{LQVeo<_^fAliAi%zE97FCm`ADVq)MFVe4iq^&wQb>pJm#yM9V{HziT 
z^138g{ng&bGrMlPZhZLql;+WlW6>8zZx-a=Nh8Ve)L$a(c&rXR_mc7|^||U}f7)rA z$0gdccAdMUw#&77YeD%3JtN`g8Q%sZPQNR=t|SuG_i5md z0HR)=Ij-biLO*5d5>D)zG%&FvU(0iQrKY2e-m%z1cg>*rv75XTRrkzLmRMG3Reo1> zZU*0-%KgSJGnM1!$6Gu1>NXSQ(J6d#n71qSPM?v;P?t~JlCAFgF|^2(!1#hsUI|5w z{U*a(Jwn`GH*8bq6=PWQqGXq4Z|XebND;3_ibgCmCs7DHM6v#3Kz{e8Tg+WqVd*75 zd#yKX81coq`0U@?lr0h1xv%V1{g3?b0VehhYGv-Knu`V(I9{A(9si{K*XWa}Ry^X_ zJl%&m_qq$0byzCC2x9J%j5=^KWnxRvkNE23$187}pWR>3^pzIAZcoJ=_kNo6nrcl0 zKhtBif>+Du*X$27d%3RLU*^!T(oUj9qHR4FJ9zhI$vW2A25lj)qN*6}SivK2uuEh7Zb?GJQP2b4sPjAgm-fX4l)ANH@U*8oSy0AC${ODxDPVVM!FF)mUo15AB zRf@e9wY_XQhjBR|JG;37=2BfX5$pKd~_f*C*GA?hjzu7DvZ$0JV&u4w7r2Bhlc1Gfp#KL#d z?APo)7q|JFNM2(|&dJyBb`%%#IC~Rz%GW#Q55E;!zCz<#VS`oIXZ# zf4Wa`U>Y$FCH&^TF_ADCt*Pt0V(mBhG;;J}`_99u%P(9ByDYWv=z@naO~-;_mX>Ip zQxMmQ{*}5=i(H?taP<4z0S~oDOUhp-5O#i;Jyh15IzM+`*RB$osu9}<3fbkgGbl4O z4<8N5T}QVax_q2hI{nLtgGmml<<`EPwTm4}-h6C~zhq!l@+;)y)KQ{d^7^Y3=7}m? z&)(E9X={APtaW;o^sS9^o}CR;34boUY|aVB@fO8h!LzKbJET_!i-rhhcFPXm)Oy?9 z9j|8?GMd4;=dfIH!kW@Mi;TBq96Tdo{z>>-#Q}$_K3kd=y9ui$E-<36eJh5kdi z$xZJ~O?2aRJU-{Y5dEMZx&K$$y;La&$tHSTPO@vhKuV?XnZ2s9>(*Z++DAOxX}3#( z+>R3CZ$CEhX-?VTxm@m!sZHJ@E321bGan7Vm(hvbVb~vVYi4tX)`mUSoux1HeDv?f z==Y^PzH6`U%DIQ;@$-80tLaX!&(5|KHkT(&Y5(Y=Zh7FVqDyllvw2_jLXFr{f0!LL zOjJ0@*SJ=Fd5dymP4_mx%(zea^RmiLSor!9?L1iarRG^~YfppP*GzR@Nuv-newD|8^n>TVy;Ikq*KM1ZU1bs-C>?e*oHWnSWu+8f`GU-O zLKTU9l;MqOPyMT-4-{v`P-cX7MNOFUIe9bZ_dKWddif^tZ4(cquOiz%lP{247REyg%jm@t+VLh z)B*G3Vxg|#D`M4-Z(DtIId#%~PtnDFlQI{Ub?aU}=lXqQEiLq_rjVg+o#O4GFRD`F z5yCGctS9^4diTqjXy?`Qp12>LCZ%QC?q9d%h~&aUq1!H)l--J+TJp*?+S{Oen0nK& zBi!&50R?=JQC+_6)xmh!bC+flF)(u2?M6SuVQFTiNtIkZ?zA8~Pkx!gY zX|KwcA0O8j?&GU6T&%TZ)U@z=M@RTIe&$Lm{lK+dvTk_>d`~@x;%2HH$T_r%u#?@e z#KvKholIiJE?J8^TAQp*K5bV#UZ=JpZirXmRp*C;rpbc$#d|gct)S?6yzx)Y+ZuhL zJLiTDOL{JLA%YUcAv-hXDWU`<5GMPHQ}yRL*~otD|S zKP^pq^~XYH`t>&c&;Hu^tZr%Y>0hm~bMmS-zj;Hn&*I@{w|Ku_6d5^p;jDp_#Hk-2 zL}u#6m5k<&^5s3zuub&Fn$B#Fp=;~!Tt7jb$dZF|q{7FWb=>zIv!p^KZ6*Idf=Nez1_qcXfJF`qN 
z*-1=8-f>mKA1jYEip!NaaxO`C#*QVFM%~z7ySLmtbM?XnJNu#g9vb%~3fzJOHa+M%n-yob zOzr6;jizHC7D@2!v&?BC%AkJR_NNkMVib}w3FHc72}uA=niL_-Th(HXakeJ)QV z%0nhF;y^Lcc!&qTjQ*areeG&f6cVko2h$uJ&^nx(<^%*<*_-V~^w8uL0 z#>?1O>8RsmZfw1n@iMj^W1P&3t(P)h#@5RiC-Y*a%Ns8a|TGjScp#w8w~ zV_~gj%a%okuQ!~6$h?2*;ckS!0(<-* z8#9l*mO_1SI6QIeC@F)2QbZukAQbW`hziM!nTgB@EmQWgNd+0+w-YjUnIQNtKO_-; zBw{W u?E@_O-@YVh$OJ{J5h1u*8kfp9ypW$hyDa1h9bII@0Ggu~5}2$5%l9^~|U z7~A+W4K4|UId(}>WF%!S)+~h7U3X$HyN)c+TL@FW`mb{t+he z^B{h69xq8@G7^R|3H$Ifa9X@xI)Yv}&`%<*CGg9DzK z`6=-uB@1z+#FXomVMtT4eQcotbj#!s(g%AO*=6zJ@gj-yBXOvUIMhYloWdZ%kDUpH zL%bjv2{OqHUMi0%QEpR`C>e=b2q95(elH=M-$aBs_WM*2idRL53idIQcu0uH3s6Q7 z$wUJA2_PBBL&jW!L6YTzw!462NmYm+>#*B{9|@cDGsemTWwGEx;!+d_ZpX0}7{foy zA}xrd&Bc2~TPZa{3_+@Z7ysBe8xcc8P-kl>o73E}@xvWvkB?JgH6jd(mxc;IQt1~$ zQjst(EF@4Xq|wksXgNC_OcaPi5Qlz`!|gC4i1^xt(PPjb1u7x(`aR5TlQfZxCbIbP z7%*kmlf{dKBVjCBkdUGZ8Ht?|=;bpdd+EU#mNdl=jKGY!;XoNYWS{-PWtNnRxR zRJ4}@qwtkbV2qKjcAtVj`C{|BFe37E)t0h84 zGY~q$w#6yKZ|R_{+{6pj#Sob91=b!q?U>DY5YiMJj9&A^VA4#_JjkUj@ zFk5~E;s)4xj_Lg)4C^M?K%mcV4?G?r%{Y5d9_}znln;rLR1kkJxs?ZIkcJ=+jBjOR zX9LeVAgT|ioqm&Hy+}n=3OdJtvaue8j3^_pVEit}Yiqp%67*9-cR_aqr(xSg{}krV zS5Zw6iQlCR3AGB;@G-`o4}Xq#JZ@n9r_M#_HP8>j9(S1JDSIufPZH_Ey1;*zhe{IVN1{xjA(*|b|9IYsBfDUPQh{1=es~N< zia{U#CTEXxC_(+ z*gWJ%j06}nKsT^mDSjjc>!&aa)=60Vu{;;3mB{n=Fn3y#2pNfNVb{e}1M~IJe|}=f zbP41Sr0x&)+3S%YJDdwLLoAafBWcJ}uw4RSSs9QMmP5IJ4`Vxjrb95tU`(;sdq|5Q zKlFQkV1qk=)(vU*0T&qlt6ho3K0nZtd4h88zoDRfI7*g$3 zZdI(2XUI`yah!%Qq*F#3AI7I5X$c}Nk|a_cQtnl3Wow&i;C%y(EkQpClmTA`iIOLw zexj`7!>~57$Kb!#Bj%qed%d#|VDF#U^CFTT_A(^&3&?98Za=8tZ!$#%KG^4RR$$mi zz@7uY!?Dx%!(4_hqy7ZSW~YZ&iE&}D_iuC*72)|74o@HkUJ-w@4`re1lF%)ne!~9v z@K|}r%V4~6rTNQ!rG^O7h?GPc(7qZL;z)x@w0?~&gT#&l8O&=93sI!u^FJA zq>v(0c1XHcvQ@lBlp#W;c#$M{kc5wjDSJN#&*LI+j#UBG2SIzQ2_vNQ~ z>lk08Fv_1vLg#>7=Yw7Lvl`Y#s0Wty0?os6u=jhII~@tHr+%Tw=2 zWO(O;7vO)A!#iNV2=uKOj0bTP3Hlp>`iXM=9_F@565&H4S?pHxBEy=Zp&*D9`UQ~$ zJhSm0fsFT1aA^8!QU~ zLLnf>y6tc#0^vL$87u=EzlX=Af%tK@@hKMAMqnF(ExflGA3t87eU^{~&o(l=<3n?> 
z%V5EShTvk=goQBgaT)OX#X{_`5y&cta^ZOHFn5|i_p5krL)(d4P>?v2!eHBnwZSU{ z)m^%&oECcLve@5Yh!|7BCf)XRxF|ug@>}i3FiV2>Xcwegy`)mA;f#1>a zd!rq^U*^LLKHkf#`@nMz-h_cdIR(gNC3_P zC4n@7CIdMFLC?VxpBLUN_z*u5fZm6FIHDjCIEfcSp|pt5FiK)VrW=oB$fjg5tl%xark%!hf83j5^Q8bQg|FkBbvqv zP2t4ECWbP?VhJ@mE&5L`nLBUDgHB5h<<6a+z>E)zi(SUCnivrl7sf>RJe2cs!SBZm znE!Ad%J~$4ECXaQT(XTc3aS8EK9{TXRsx?B$UbWV`zp8&@G&Ql9e_W6L^zJXJOW#x-1zf=Kz0EBv=i}H39KUU2!R|u z9wMC#fyM+55PES$IGex`Lf%857a`}^yGQn6#W{vCSV>HpBQcnIu*@?dVQ~_}C5axv zicO$9(^djbSTv3Z zgk;Ev?43korh3tsQFy@7BcK*ZFejKuoObsfjFFykBfQ z%{~z-K*Jo;COO3>hQZ_*Q}&DvXNECT9pS$eNJI&|cuK~yN4c+OY$D4Q%%L18trIOg zDLR_Q)<-lfnYl329RaS?KmAJ#)Cj1g-TpJ)UF&nSY)GK!&vF^uBOjLeLPR`@NR2_peI zky~X3Zo>#z1CceGgC+1^ep(i)-;HV?9W~$o*l3ploF5a<61?)j;})5y6Uo1f;qNPw zBeFul$R8yl8e+oVVECJa=$P7YDo_Xvafr zFi z&W8oEWXO>TmJ*RVjJ28YHw>h7kgFpDAahX0C1S$AR0z*x>x_TchJ&^dqz;F?SYRm> z@}r?B;IU10DB0L&tYjuovINMHhEkwpp-@U(#yCjFDI0^GAC}^l7;ghx`p;-z8njX* zyNtMB6WQgDVONxd2h9K0XRN*cjfr14eoXTpV-$x8If)a;k#X#C7z+#{(;7b5Ufc)lA*eDEcpi3YZ0n7(N zAjen>E}~%I>j6m$2;~7!2DAjK03M@|BKQe_7Xe%WR04bkprI1v1w6*NKsc}EfIA3& zC*U1|ZwC}q9@D|7O7QA{t^|*9A;D7tYk&s9CPw_@!q`4WvdS2baW)Vxrx##1!S?{N z;O;^_$T98&!a5jh3BDarZ346d`1{oXgMlP~$GDT=_W;h+fcH3%R{%Z(vId^}9s<6f za2#kJ$T405iUXeezJP`%_{V|Pf}Hza0lsh0tp)W2ImV|zoxo!p0vZ4wHvXsy=nL?@ zfU9(%{@{n_z8AoKX8_+F_y}YTIv9Dt8Q=vxMrR-@@HK$*VZX`%o(i}O2;UjNcmXII zc#NF;0o?xm#~|n21;CgD=ODeHk8wND2=Ewp1ChZGj##V_*40Z)gkfGh-C>f${*4&ypnJ+{qevZXY-Hfo}(j13AXU z^Pum5$CwVZ2Y7rwWe^NLecq4jl_y!<|*i790FnhkC7)C`WSeObfA3TF{S~X z0X`p4G!6O@cy+)v=};al2mG7~;~e~<+@dPzTQRL5}eZPzCT9XM(@626&9#K<&VD`yjFZ@)ghzkYgmlrA!I%<6+dB1my%C zqcxBP@Kk-c7i)me{GlBHUm1cw2KXVs_Q@o~0=^eeYC4=t03YVxF1LXc#J(jdcb4+3}gvBx37=e4~YGP{x;B7po5VHv=(@bn}K!$ z&+YTWzCd;G>74;N#u%Vx;JJN!*yopRM?!-j$CwW!xB%<`O4>s?fma9I1*C=LfPN0p zZoqT<=CEI{9B3ZMG1ddc0grLE6A3K`9%C3#KJeUrH|(?f22=%djFQe!PT(pF-03Kt%CzK6%j9-9? 
zfM>vuVhsUR0MG67!u~Yu6Ke%Iw{Hyl%wGFKIY5q45qw?>;77t}0Hg;zx9!oar>gU{Z!b0 zbrQ%Bd_@>90@(qNu?ENsc#IuDRNygQTL9w_c#N$;`M~!AS_ebB0nhDU;`TjZAJkT$ zdeFhx2-FEY#y+3{oEFd~1o{AYZod-tK{iLuCLCK3_=__BmllDK5%;-Xv1zeov0bri@y_C+Vi4Gk deb-}NDWf!{RN<)lQN5$4NB>K=)c=>2{~xkJ>+=8r literal 0 HcmV?d00001 diff --git a/env/Lib/site-packages/Crypto/PublicKey/_openssh.py b/env/Lib/site-packages/Crypto/PublicKey/_openssh.py new file mode 100644 index 0000000..88dacfc --- /dev/null +++ b/env/Lib/site-packages/Crypto/PublicKey/_openssh.py @@ -0,0 +1,135 @@ +# =================================================================== +# +# Copyright (c) 2019, Helder Eijs +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import struct + +from Crypto.Cipher import AES +from Crypto.Hash import SHA512 +from Crypto.Protocol.KDF import _bcrypt_hash +from Crypto.Util.strxor import strxor +from Crypto.Util.py3compat import tostr, bchr, bord + + +def read_int4(data): + if len(data) < 4: + raise ValueError("Insufficient data") + value = struct.unpack(">I", data[:4])[0] + return value, data[4:] + + +def read_bytes(data): + size, data = read_int4(data) + if len(data) < size: + raise ValueError("Insufficient data (V)") + return data[:size], data[size:] + + +def read_string(data): + s, d = read_bytes(data) + return tostr(s), d + + +def check_padding(pad): + for v, x in enumerate(pad): + if bord(x) != ((v + 1) & 0xFF): + raise ValueError("Incorrect padding") + + +def import_openssh_private_generic(data, password): + # https://cvsweb.openbsd.org/cgi-bin/cvsweb/src/usr.bin/ssh/PROTOCOL.key?annotate=HEAD + # https://github.com/openssh/openssh-portable/blob/master/sshkey.c + # https://coolaj86.com/articles/the-openssh-private-key-format/ + # https://coolaj86.com/articles/the-ssh-public-key-format/ + + if not data.startswith(b'openssh-key-v1\x00'): + raise ValueError("Incorrect magic value") + data = data[15:] + + ciphername, data = read_string(data) + kdfname, data = read_string(data) + kdfoptions, data = read_bytes(data) + number_of_keys, data = read_int4(data) + + if number_of_keys != 1: + raise ValueError("We only handle 1 key at a time") + + _, data = read_string(data) # Public key + encrypted, data = read_bytes(data) + if data: + raise ValueError("Too much data") + + if len(encrypted) % 8 != 0: + raise ValueError("Incorrect payload length") + + # Decrypt if necessary + if ciphername == 'none': + decrypted = encrypted + else: + if (ciphername, kdfname) != ('aes256-ctr', 'bcrypt'): + raise ValueError("Unsupported encryption scheme %s/%s" % (ciphername, kdfname)) + + salt, kdfoptions = read_bytes(kdfoptions) + iterations, 
kdfoptions = read_int4(kdfoptions) + + if len(salt) != 16: + raise ValueError("Incorrect salt length") + if kdfoptions: + raise ValueError("Too much data in kdfoptions") + + pwd_sha512 = SHA512.new(password).digest() + # We need 32+16 = 48 bytes, therefore 2 bcrypt outputs are sufficient + stripes = [] + constant = b"OxychromaticBlowfishSwatDynamite" + for count in range(1, 3): + salt_sha512 = SHA512.new(salt + struct.pack(">I", count)).digest() + out_le = _bcrypt_hash(pwd_sha512, 6, salt_sha512, constant, False) + out = struct.pack("IIIIIIII", out_le)) + acc = bytearray(out) + for _ in range(1, iterations): + out_le = _bcrypt_hash(pwd_sha512, 6, SHA512.new(out).digest(), constant, False) + out = struct.pack("IIIIIIII", out_le)) + strxor(acc, out, output=acc) + stripes.append(acc[:24]) + + result = b"".join([bchr(a)+bchr(b) for (a, b) in zip(*stripes)]) + + cipher = AES.new(result[:32], + AES.MODE_CTR, + nonce=b"", + initial_value=result[32:32+16]) + decrypted = cipher.decrypt(encrypted) + + checkint1, decrypted = read_int4(decrypted) + checkint2, decrypted = read_int4(decrypted) + if checkint1 != checkint2: + raise ValueError("Incorrect checksum") + ssh_name, decrypted = read_string(decrypted) + + return ssh_name, decrypted diff --git a/env/Lib/site-packages/Crypto/PublicKey/_openssh.pyi b/env/Lib/site-packages/Crypto/PublicKey/_openssh.pyi new file mode 100644 index 0000000..15f3677 --- /dev/null +++ b/env/Lib/site-packages/Crypto/PublicKey/_openssh.pyi @@ -0,0 +1,7 @@ +from typing import Tuple + +def read_int4(data: bytes) -> Tuple[int, bytes]: ... +def read_bytes(data: bytes) -> Tuple[bytes, bytes]: ... +def read_string(data: bytes) -> Tuple[str, bytes]: ... +def check_padding(pad: bytes) -> None: ... +def import_openssh_private_generic(data: bytes, password: bytes) -> Tuple[str, bytes]: ... 
diff --git a/env/Lib/site-packages/Crypto/Random/__init__.py b/env/Lib/site-packages/Crypto/Random/__init__.py new file mode 100644 index 0000000..0f83a07 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Random/__init__.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# +# Random/__init__.py : PyCrypto random number generation +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__all__ = ['new', 'get_random_bytes'] + +from os import urandom + +class _UrandomRNG(object): + + def read(self, n): + """Return a random byte string of the desired size.""" + return urandom(n) + + def flush(self): + """Method provided for backward compatibility only.""" + pass + + def reinit(self): + """Method provided for backward compatibility only.""" + pass + + def close(self): + """Method provided for backward compatibility only.""" + pass + + +def new(*args, **kwargs): + """Return a file-like object that outputs cryptographically random bytes.""" + return _UrandomRNG() + + +def atfork(): + pass + + +#: Function that returns a random byte string of the desired size. 
+get_random_bytes = urandom + diff --git a/env/Lib/site-packages/Crypto/Random/__init__.pyi b/env/Lib/site-packages/Crypto/Random/__init__.pyi new file mode 100644 index 0000000..ddc5b9b --- /dev/null +++ b/env/Lib/site-packages/Crypto/Random/__init__.pyi @@ -0,0 +1,19 @@ +from typing import Any + +__all__ = ['new', 'get_random_bytes'] + +from os import urandom + +class _UrandomRNG(object): + + def read(self, n: int) -> bytes:... + def flush(self) -> None: ... + def reinit(self) -> None: ... + def close(self) -> None: ... + +def new(*args: Any, **kwargs: Any) -> _UrandomRNG: ... + +def atfork() -> None: ... + +get_random_bytes = urandom + diff --git a/env/Lib/site-packages/Crypto/Random/random.py b/env/Lib/site-packages/Crypto/Random/random.py new file mode 100644 index 0000000..5389b3b --- /dev/null +++ b/env/Lib/site-packages/Crypto/Random/random.py @@ -0,0 +1,138 @@ +# -*- coding: utf-8 -*- +# +# Random/random.py : Strong alternative for the standard 'random' module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +__all__ = ['StrongRandom', 'getrandbits', 'randrange', 'randint', 'choice', 'shuffle', 'sample'] + +from Crypto import Random + +from Crypto.Util.py3compat import is_native_int + +class StrongRandom(object): + def __init__(self, rng=None, randfunc=None): + if randfunc is None and rng is None: + self._randfunc = None + elif randfunc is not None and rng is None: + self._randfunc = randfunc + elif randfunc is None and rng is not None: + self._randfunc = rng.read + else: + raise ValueError("Cannot specify both 'rng' and 'randfunc'") + + def getrandbits(self, k): + """Return an integer with k random bits.""" + + if self._randfunc is None: + self._randfunc = Random.new().read + mask = (1 << k) - 1 + return mask & bytes_to_long(self._randfunc(ceil_div(k, 8))) + + def randrange(self, *args): + """randrange([start,] stop[, step]): + Return a randomly-selected element from range(start, stop, step).""" + if len(args) == 3: + (start, stop, step) = args + elif len(args) == 2: + (start, stop) = args + step = 1 + elif len(args) == 1: + (stop,) = args + start = 0 + step = 1 + else: + raise TypeError("randrange expected at most 3 arguments, got %d" % (len(args),)) + if (not is_native_int(start) or not is_native_int(stop) or not + is_native_int(step)): + raise TypeError("randrange requires integer arguments") + if step == 0: + raise ValueError("randrange step argument must not be zero") + + num_choices = ceil_div(stop - start, step) + if num_choices < 0: + num_choices = 0 + if num_choices < 1: + raise ValueError("empty range for randrange(%r, %r, %r)" % (start, stop, step)) + + # Pick a random number in the range of possible numbers + r = num_choices + while r >= num_choices: + r = self.getrandbits(size(num_choices)) + + return start + (step * r) + + def randint(self, a, b): + """Return a random integer N such that a <= N <= b.""" + if not is_native_int(a) or not is_native_int(b): + raise TypeError("randint 
requires integer arguments") + N = self.randrange(a, b+1) + assert a <= N <= b + return N + + def choice(self, seq): + """Return a random element from a (non-empty) sequence. + + If the seqence is empty, raises IndexError. + """ + if len(seq) == 0: + raise IndexError("empty sequence") + return seq[self.randrange(len(seq))] + + def shuffle(self, x): + """Shuffle the sequence in place.""" + # Fisher-Yates shuffle. O(n) + # See http://en.wikipedia.org/wiki/Fisher-Yates_shuffle + # Working backwards from the end of the array, we choose a random item + # from the remaining items until all items have been chosen. + for i in range(len(x)-1, 0, -1): # iterate from len(x)-1 downto 1 + j = self.randrange(0, i+1) # choose random j such that 0 <= j <= i + x[i], x[j] = x[j], x[i] # exchange x[i] and x[j] + + def sample(self, population, k): + """Return a k-length list of unique elements chosen from the population sequence.""" + + num_choices = len(population) + if k > num_choices: + raise ValueError("sample larger than population") + + retval = [] + selected = {} # we emulate a set using a dict here + for i in range(k): + r = None + while r is None or r in selected: + r = self.randrange(num_choices) + retval.append(population[r]) + selected[r] = 1 + return retval + +_r = StrongRandom() +getrandbits = _r.getrandbits +randrange = _r.randrange +randint = _r.randint +choice = _r.choice +shuffle = _r.shuffle +sample = _r.sample + +# These are at the bottom to avoid problems with recursive imports +from Crypto.Util.number import ceil_div, bytes_to_long, long_to_bytes, size + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/Random/random.pyi b/env/Lib/site-packages/Crypto/Random/random.pyi new file mode 100644 index 0000000..f873c4a --- /dev/null +++ b/env/Lib/site-packages/Crypto/Random/random.pyi @@ -0,0 +1,20 @@ +from typing import Callable, Tuple, Union, Sequence, Any, Optional + +__all__ = ['StrongRandom', 'getrandbits', 'randrange', 'randint', 
'choice', 'shuffle', 'sample'] + +class StrongRandom(object): + def __init__(self, rng: Optional[Any]=None, randfunc: Optional[Callable]=None) -> None: ... # TODO What is rng? + def getrandbits(self, k: int) -> int: ... + def randrange(self, start: int, stop: int = ..., step: int = ...) -> int: ... + def randint(self, a: int, b: int) -> int: ... + def choice(self, seq: Sequence) -> object: ... + def shuffle(self, x: Sequence) -> None: ... + def sample(self, population: Sequence, k: int) -> list: ... + +_r = StrongRandom() +getrandbits = _r.getrandbits +randrange = _r.randrange +randint = _r.randint +choice = _r.choice +shuffle = _r.shuffle +sample = _r.sample diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/__init__.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/__init__.py new file mode 100644 index 0000000..05fc139 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/__init__.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/__init__.py: Self-test for cipher modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test for cipher modules""" + +__revision__ = "$Id$" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Cipher import test_AES; tests += test_AES.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_ARC2; tests += test_ARC2.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_ARC4; tests += test_ARC4.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_Blowfish; tests += test_Blowfish.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_CAST; tests += test_CAST.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_DES3; tests += test_DES3.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_DES; tests += test_DES.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_Salsa20; tests += test_Salsa20.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_ChaCha20; tests += test_ChaCha20.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_ChaCha20_Poly1305; tests += test_ChaCha20_Poly1305.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_pkcs1_15; tests += test_pkcs1_15.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_pkcs1_oaep; tests += test_pkcs1_oaep.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_OCB; tests += test_OCB.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_CBC; tests += test_CBC.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_CFB; tests += test_CFB.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_OpenPGP; tests += test_OpenPGP.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_OFB; tests += test_OFB.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_CTR; tests += test_CTR.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_CCM; tests += test_CCM.get_tests(config=config) + from 
Crypto.SelfTest.Cipher import test_EAX; tests += test_EAX.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_GCM; tests += test_GCM.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_SIV; tests += test_SIV.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/common.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/common.py new file mode 100644 index 0000000..4ba2c33 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/common.py @@ -0,0 +1,512 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/common.py: Common code for Crypto.SelfTest.Hash +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-testing for PyCrypto hash modules""" + +import unittest +from binascii import a2b_hex, b2a_hex, hexlify + +from Crypto.Util.py3compat import b +from Crypto.Util.strxor import strxor_c + +class _NoDefault: pass # sentinel object +def _extract(d, k, default=_NoDefault): + """Get an item from a dictionary, and remove it from the dictionary.""" + try: + retval = d[k] + except KeyError: + if default is _NoDefault: + raise + return default + del d[k] + return retval + +# Generic cipher test case +class CipherSelfTest(unittest.TestCase): + + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + + # Extract the parameters + params = params.copy() + self.description = _extract(params, 'description') + self.key = b(_extract(params, 'key')) + self.plaintext = b(_extract(params, 'plaintext')) + self.ciphertext = b(_extract(params, 'ciphertext')) + self.module_name = _extract(params, 'module_name', None) + self.assoc_data = _extract(params, 'assoc_data', None) + self.mac = _extract(params, 'mac', None) + if self.assoc_data: + self.mac = b(self.mac) + + mode = _extract(params, 'mode', None) + self.mode_name = str(mode) + + if mode is not None: + # Block cipher + self.mode = getattr(self.module, "MODE_" + mode) + + self.iv = _extract(params, 'iv', None) + if self.iv is None: + self.iv = _extract(params, 'nonce', None) + if self.iv is not None: + self.iv = b(self.iv) + + else: + # Stream cipher + self.mode = None + self.iv = _extract(params, 'iv', None) + if self.iv is not None: + self.iv = b(self.iv) + + self.extra_params = params + + def shortDescription(self): + return self.description + + def _new(self): + params = self.extra_params.copy() + key = a2b_hex(self.key) + + old_style = [] + if self.mode is not None: + old_style = [ self.mode ] + if self.iv is not None: + old_style += [ a2b_hex(self.iv) ] + + return self.module.new(key, *old_style, **params) + 
+ def isMode(self, name): + if not hasattr(self.module, "MODE_"+name): + return False + return self.mode == getattr(self.module, "MODE_"+name) + + def runTest(self): + plaintext = a2b_hex(self.plaintext) + ciphertext = a2b_hex(self.ciphertext) + assoc_data = [] + if self.assoc_data: + assoc_data = [ a2b_hex(b(x)) for x in self.assoc_data] + + ct = None + pt = None + + # + # Repeat the same encryption or decryption twice and verify + # that the result is always the same + # + for i in range(2): + cipher = self._new() + decipher = self._new() + + # Only AEAD modes + for comp in assoc_data: + cipher.update(comp) + decipher.update(comp) + + ctX = b2a_hex(cipher.encrypt(plaintext)) + ptX = b2a_hex(decipher.decrypt(ciphertext)) + + if ct: + self.assertEqual(ct, ctX) + self.assertEqual(pt, ptX) + ct, pt = ctX, ptX + + self.assertEqual(self.ciphertext, ct) # encrypt + self.assertEqual(self.plaintext, pt) # decrypt + + if self.mac: + mac = b2a_hex(cipher.digest()) + self.assertEqual(self.mac, mac) + decipher.verify(a2b_hex(self.mac)) + +class CipherStreamingSelfTest(CipherSelfTest): + + def shortDescription(self): + desc = self.module_name + if self.mode is not None: + desc += " in %s mode" % (self.mode_name,) + return "%s should behave like a stream cipher" % (desc,) + + def runTest(self): + plaintext = a2b_hex(self.plaintext) + ciphertext = a2b_hex(self.ciphertext) + + # The cipher should work like a stream cipher + + # Test counter mode encryption, 3 bytes at a time + ct3 = [] + cipher = self._new() + for i in range(0, len(plaintext), 3): + ct3.append(cipher.encrypt(plaintext[i:i+3])) + ct3 = b2a_hex(b("").join(ct3)) + self.assertEqual(self.ciphertext, ct3) # encryption (3 bytes at a time) + + # Test counter mode decryption, 3 bytes at a time + pt3 = [] + cipher = self._new() + for i in range(0, len(ciphertext), 3): + pt3.append(cipher.encrypt(ciphertext[i:i+3])) + # PY3K: This is meant to be text, do not change to bytes (data) + pt3 = b2a_hex(b("").join(pt3)) + 
self.assertEqual(self.plaintext, pt3) # decryption (3 bytes at a time) + + +class RoundtripTest(unittest.TestCase): + def __init__(self, module, params): + from Crypto import Random + unittest.TestCase.__init__(self) + self.module = module + self.iv = Random.get_random_bytes(module.block_size) + self.key = b(params['key']) + self.plaintext = 100 * b(params['plaintext']) + self.module_name = params.get('module_name', None) + + def shortDescription(self): + return """%s .decrypt() output of .encrypt() should not be garbled""" % (self.module_name,) + + def runTest(self): + + ## ECB mode + mode = self.module.MODE_ECB + encryption_cipher = self.module.new(a2b_hex(self.key), mode) + ciphertext = encryption_cipher.encrypt(self.plaintext) + decryption_cipher = self.module.new(a2b_hex(self.key), mode) + decrypted_plaintext = decryption_cipher.decrypt(ciphertext) + self.assertEqual(self.plaintext, decrypted_plaintext) + + +class IVLengthTest(unittest.TestCase): + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + self.key = b(params['key']) + + def shortDescription(self): + return "Check that all modes except MODE_ECB and MODE_CTR require an IV of the proper length" + + def runTest(self): + self.assertRaises(TypeError, self.module.new, a2b_hex(self.key), + self.module.MODE_ECB, b("")) + + def _dummy_counter(self): + return "\0" * self.module.block_size + + +class NoDefaultECBTest(unittest.TestCase): + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + self.key = b(params['key']) + + def runTest(self): + self.assertRaises(TypeError, self.module.new, a2b_hex(self.key)) + + +class BlockSizeTest(unittest.TestCase): + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + self.key = a2b_hex(b(params['key'])) + + def runTest(self): + cipher = self.module.new(self.key, self.module.MODE_ECB) + self.assertEqual(cipher.block_size, self.module.block_size) + 
+ +class ByteArrayTest(unittest.TestCase): + """Verify we can use bytearray's for encrypting and decrypting""" + + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + + # Extract the parameters + params = params.copy() + self.description = _extract(params, 'description') + self.key = b(_extract(params, 'key')) + self.plaintext = b(_extract(params, 'plaintext')) + self.ciphertext = b(_extract(params, 'ciphertext')) + self.module_name = _extract(params, 'module_name', None) + self.assoc_data = _extract(params, 'assoc_data', None) + self.mac = _extract(params, 'mac', None) + if self.assoc_data: + self.mac = b(self.mac) + + mode = _extract(params, 'mode', None) + self.mode_name = str(mode) + + if mode is not None: + # Block cipher + self.mode = getattr(self.module, "MODE_" + mode) + + self.iv = _extract(params, 'iv', None) + if self.iv is None: + self.iv = _extract(params, 'nonce', None) + if self.iv is not None: + self.iv = b(self.iv) + else: + # Stream cipher + self.mode = None + self.iv = _extract(params, 'iv', None) + if self.iv is not None: + self.iv = b(self.iv) + + self.extra_params = params + + def _new(self): + params = self.extra_params.copy() + key = a2b_hex(self.key) + + old_style = [] + if self.mode is not None: + old_style = [ self.mode ] + if self.iv is not None: + old_style += [ a2b_hex(self.iv) ] + + return self.module.new(key, *old_style, **params) + + def runTest(self): + + plaintext = a2b_hex(self.plaintext) + ciphertext = a2b_hex(self.ciphertext) + assoc_data = [] + if self.assoc_data: + assoc_data = [ bytearray(a2b_hex(b(x))) for x in self.assoc_data] + + cipher = self._new() + decipher = self._new() + + # Only AEAD modes + for comp in assoc_data: + cipher.update(comp) + decipher.update(comp) + + ct = b2a_hex(cipher.encrypt(bytearray(plaintext))) + pt = b2a_hex(decipher.decrypt(bytearray(ciphertext))) + + self.assertEqual(self.ciphertext, ct) # encrypt + self.assertEqual(self.plaintext, pt) # decrypt + + 
if self.mac: + mac = b2a_hex(cipher.digest()) + self.assertEqual(self.mac, mac) + decipher.verify(bytearray(a2b_hex(self.mac))) + + +class MemoryviewTest(unittest.TestCase): + """Verify we can use memoryviews for encrypting and decrypting""" + + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + + # Extract the parameters + params = params.copy() + self.description = _extract(params, 'description') + self.key = b(_extract(params, 'key')) + self.plaintext = b(_extract(params, 'plaintext')) + self.ciphertext = b(_extract(params, 'ciphertext')) + self.module_name = _extract(params, 'module_name', None) + self.assoc_data = _extract(params, 'assoc_data', None) + self.mac = _extract(params, 'mac', None) + if self.assoc_data: + self.mac = b(self.mac) + + mode = _extract(params, 'mode', None) + self.mode_name = str(mode) + + if mode is not None: + # Block cipher + self.mode = getattr(self.module, "MODE_" + mode) + + self.iv = _extract(params, 'iv', None) + if self.iv is None: + self.iv = _extract(params, 'nonce', None) + if self.iv is not None: + self.iv = b(self.iv) + else: + # Stream cipher + self.mode = None + self.iv = _extract(params, 'iv', None) + if self.iv is not None: + self.iv = b(self.iv) + + self.extra_params = params + + def _new(self): + params = self.extra_params.copy() + key = a2b_hex(self.key) + + old_style = [] + if self.mode is not None: + old_style = [ self.mode ] + if self.iv is not None: + old_style += [ a2b_hex(self.iv) ] + + return self.module.new(key, *old_style, **params) + + def runTest(self): + + plaintext = a2b_hex(self.plaintext) + ciphertext = a2b_hex(self.ciphertext) + assoc_data = [] + if self.assoc_data: + assoc_data = [ memoryview(a2b_hex(b(x))) for x in self.assoc_data] + + cipher = self._new() + decipher = self._new() + + # Only AEAD modes + for comp in assoc_data: + cipher.update(comp) + decipher.update(comp) + + ct = b2a_hex(cipher.encrypt(memoryview(plaintext))) + pt = 
b2a_hex(decipher.decrypt(memoryview(ciphertext))) + + self.assertEqual(self.ciphertext, ct) # encrypt + self.assertEqual(self.plaintext, pt) # decrypt + + if self.mac: + mac = b2a_hex(cipher.digest()) + self.assertEqual(self.mac, mac) + decipher.verify(memoryview(a2b_hex(self.mac))) + + +def make_block_tests(module, module_name, test_data, additional_params=dict()): + tests = [] + extra_tests_added = False + for i in range(len(test_data)): + row = test_data[i] + + # Build the "params" dictionary with + # - plaintext + # - ciphertext + # - key + # - mode (default is ECB) + # - (optionally) description + # - (optionally) any other parameter that this cipher mode requires + params = {} + if len(row) == 3: + (params['plaintext'], params['ciphertext'], params['key']) = row + elif len(row) == 4: + (params['plaintext'], params['ciphertext'], params['key'], params['description']) = row + elif len(row) == 5: + (params['plaintext'], params['ciphertext'], params['key'], params['description'], extra_params) = row + params.update(extra_params) + else: + raise AssertionError("Unsupported tuple size %d" % (len(row),)) + + if not "mode" in params: + params["mode"] = "ECB" + + # Build the display-name for the test + p2 = params.copy() + p_key = _extract(p2, 'key') + p_plaintext = _extract(p2, 'plaintext') + p_ciphertext = _extract(p2, 'ciphertext') + p_mode = _extract(p2, 'mode') + p_description = _extract(p2, 'description', None) + + if p_description is not None: + description = p_description + elif p_mode == 'ECB' and not p2: + description = "p=%s, k=%s" % (p_plaintext, p_key) + else: + description = "p=%s, k=%s, %r" % (p_plaintext, p_key, p2) + name = "%s #%d: %s" % (module_name, i+1, description) + params['description'] = name + params['module_name'] = module_name + params.update(additional_params) + + # Add extra test(s) to the test suite before the current test + if not extra_tests_added: + tests += [ + RoundtripTest(module, params), + IVLengthTest(module, params), + 
NoDefaultECBTest(module, params), + ByteArrayTest(module, params), + BlockSizeTest(module, params), + ] + extra_tests_added = True + + # Add the current test to the test suite + tests.append(CipherSelfTest(module, params)) + + return tests + +def make_stream_tests(module, module_name, test_data): + tests = [] + extra_tests_added = False + for i in range(len(test_data)): + row = test_data[i] + + # Build the "params" dictionary + params = {} + if len(row) == 3: + (params['plaintext'], params['ciphertext'], params['key']) = row + elif len(row) == 4: + (params['plaintext'], params['ciphertext'], params['key'], params['description']) = row + elif len(row) == 5: + (params['plaintext'], params['ciphertext'], params['key'], params['description'], extra_params) = row + params.update(extra_params) + else: + raise AssertionError("Unsupported tuple size %d" % (len(row),)) + + # Build the display-name for the test + p2 = params.copy() + p_key = _extract(p2, 'key') + p_plaintext = _extract(p2, 'plaintext') + p_ciphertext = _extract(p2, 'ciphertext') + p_description = _extract(p2, 'description', None) + + if p_description is not None: + description = p_description + elif not p2: + description = "p=%s, k=%s" % (p_plaintext, p_key) + else: + description = "p=%s, k=%s, %r" % (p_plaintext, p_key, p2) + name = "%s #%d: %s" % (module_name, i+1, description) + params['description'] = name + params['module_name'] = module_name + + # Add extra test(s) to the test suite before the current test + if not extra_tests_added: + tests += [ + ByteArrayTest(module, params), + ] + + import sys + if sys.version[:3] != '2.6': + tests.append(MemoryviewTest(module, params)) + extra_tests_added = True + + # Add the test to the test suite + tests.append(CipherSelfTest(module, params)) + tests.append(CipherStreamingSelfTest(module, params)) + return tests + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_AES.py 
b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_AES.py new file mode 100644 index 0000000..116deec --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_AES.py @@ -0,0 +1,1351 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/AES.py: Self-test for the AES cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.AES""" + +from __future__ import print_function + +import unittest +from Crypto.Hash import SHA256 +from Crypto.Cipher import AES +from Crypto.Util.py3compat import * +from binascii import hexlify + +# This is a list of (plaintext, ciphertext, key[, description[, params]]) tuples. 
+test_data = [ + # FIPS PUB 197 test vectors + # http://csrc.nist.gov/publications/fips/fips197/fips-197.pdf + + ('00112233445566778899aabbccddeeff', '69c4e0d86a7b0430d8cdb78070b4c55a', + '000102030405060708090a0b0c0d0e0f', 'FIPS 197 C.1 (AES-128)'), + + ('00112233445566778899aabbccddeeff', 'dda97ca4864cdfe06eaf70a0ec0d7191', + '000102030405060708090a0b0c0d0e0f1011121314151617', + 'FIPS 197 C.2 (AES-192)'), + + ('00112233445566778899aabbccddeeff', '8ea2b7ca516745bfeafc49904b496089', + '000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', + 'FIPS 197 C.3 (AES-256)'), + + # Rijndael128 test vectors + # Downloaded 2008-09-13 from + # http://www.iaik.tugraz.at/Research/krypto/AES/old/~rijmen/rijndael/testvalues.tar.gz + + # ecb_tbl.txt, KEYSIZE=128 + ('506812a45f08c889b97f5980038b8359', 'd8f532538289ef7d06b506a4fd5be9c9', + '00010203050607080a0b0c0d0f101112', + 'ecb-tbl-128: I=1'), + ('5c6d71ca30de8b8b00549984d2ec7d4b', '59ab30f4d4ee6e4ff9907ef65b1fb68c', + '14151617191a1b1c1e1f202123242526', + 'ecb-tbl-128: I=2'), + ('53f3f4c64f8616e4e7c56199f48f21f6', 'bf1ed2fcb2af3fd41443b56d85025cb1', + '28292a2b2d2e2f30323334353738393a', + 'ecb-tbl-128: I=3'), + ('a1eb65a3487165fb0f1c27ff9959f703', '7316632d5c32233edcb0780560eae8b2', + '3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-128: I=4'), + ('3553ecf0b1739558b08e350a98a39bfa', '408c073e3e2538072b72625e68b8364b', + '50515253555657585a5b5c5d5f606162', + 'ecb-tbl-128: I=5'), + ('67429969490b9711ae2b01dc497afde8', 'e1f94dfa776597beaca262f2f6366fea', + '64656667696a6b6c6e6f707173747576', + 'ecb-tbl-128: I=6'), + ('93385c1f2aec8bed192f5a8e161dd508', 'f29e986c6a1c27d7b29ffd7ee92b75f1', + '78797a7b7d7e7f80828384858788898a', + 'ecb-tbl-128: I=7'), + ('b5bf946be19beb8db3983b5f4c6e8ddb', '131c886a57f8c2e713aba6955e2b55b5', + '8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-128: I=8'), + ('41321ee10e21bd907227c4450ff42324', 'd2ab7662df9b8c740210e5eeb61c199d', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-128: I=9'), + 
('00a82f59c91c8486d12c0a80124f6089', '14c10554b2859c484cab5869bbe7c470', + 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-128: I=10'), + ('7ce0fd076754691b4bbd9faf8a1372fe', 'db4d498f0a49cf55445d502c1f9ab3b5', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9da', + 'ecb-tbl-128: I=11'), + ('23605a8243d07764541bc5ad355b3129', '6d96fef7d66590a77a77bb2056667f7f', + 'dcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-128: I=12'), + ('12a8cfa23ea764fd876232b4e842bc44', '316fb68edba736c53e78477bf913725c', + 'f0f1f2f3f5f6f7f8fafbfcfdfe010002', + 'ecb-tbl-128: I=13'), + ('bcaf32415e8308b3723e5fdd853ccc80', '6936f2b93af8397fd3a771fc011c8c37', + '04050607090a0b0c0e0f101113141516', + 'ecb-tbl-128: I=14'), + ('89afae685d801ad747ace91fc49adde0', 'f3f92f7a9c59179c1fcc2c2ba0b082cd', + '2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-128: I=15'), + ('f521d07b484357c4a69e76124a634216', '6a95ea659ee3889158e7a9152ff04ebc', + '40414243454647484a4b4c4d4f505152', + 'ecb-tbl-128: I=16'), + ('3e23b3bc065bcc152407e23896d77783', '1959338344e945670678a5d432c90b93', + '54555657595a5b5c5e5f606163646566', + 'ecb-tbl-128: I=17'), + ('79f0fba002be1744670e7e99290d8f52', 'e49bddd2369b83ee66e6c75a1161b394', + '68696a6b6d6e6f70727374757778797a', + 'ecb-tbl-128: I=18'), + ('da23fe9d5bd63e1d72e3dafbe21a6c2a', 'd3388f19057ff704b70784164a74867d', + '7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-128: I=19'), + ('e3f5698ba90b6a022efd7db2c7e6c823', '23aa03e2d5e4cd24f3217e596480d1e1', + 'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-128: I=20'), + ('bdc2691d4f1b73d2700679c3bcbf9c6e', 'c84113d68b666ab2a50a8bdb222e91b9', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2', + 'ecb-tbl-128: I=21'), + ('ba74e02093217ee1ba1b42bd5624349a', 'ac02403981cd4340b507963db65cb7b6', + '08090a0b0d0e0f10121314151718191a', + 'ecb-tbl-128: I=22'), + ('b5c593b5851c57fbf8b3f57715e8f680', '8d1299236223359474011f6bf5088414', + '6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-128: I=23'), + ('3da9bd9cec072381788f9387c3bbf4ee', '5a1d6ab8605505f7977e55b9a54d9b90', + 
'80818283858687888a8b8c8d8f909192', + 'ecb-tbl-128: I=24'), + ('4197f3051121702ab65d316b3c637374', '72e9c2d519cf555e4208805aabe3b258', + '94959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-128: I=25'), + ('9f46c62ec4f6ee3f6e8c62554bc48ab7', 'a8f3e81c4a23a39ef4d745dffe026e80', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9ba', + 'ecb-tbl-128: I=26'), + ('0220673fe9e699a4ebc8e0dbeb6979c8', '546f646449d31458f9eb4ef5483aee6c', + 'bcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-128: I=27'), + ('b2b99171337ded9bc8c2c23ff6f18867', '4dbe4bc84ac797c0ee4efb7f1a07401c', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2', + 'ecb-tbl-128: I=28'), + ('a7facf4e301e984e5efeefd645b23505', '25e10bfb411bbd4d625ac8795c8ca3b3', + 'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-128: I=29'), + ('f7c762e4a9819160fd7acfb6c4eedcdd', '315637405054ec803614e43def177579', + 'f8f9fafbfdfefe00020304050708090a', + 'ecb-tbl-128: I=30'), + ('9b64fc21ea08709f4915436faa70f1be', '60c5bc8a1410247295c6386c59e572a8', + '0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-128: I=31'), + ('52af2c3de07ee6777f55a4abfc100b3f', '01366fc8ca52dfe055d6a00a76471ba6', + '20212223252627282a2b2c2d2f303132', + 'ecb-tbl-128: I=32'), + ('2fca001224386c57aa3f968cbe2c816f', 'ecc46595516ec612449c3f581e7d42ff', + '34353637393a3b3c3e3f404143444546', + 'ecb-tbl-128: I=33'), + ('4149c73658a4a9c564342755ee2c132f', '6b7ffe4c602a154b06ee9c7dab5331c9', + '48494a4b4d4e4f50525354555758595a', + 'ecb-tbl-128: I=34'), + ('af60005a00a1772f7c07a48a923c23d2', '7da234c14039a240dd02dd0fbf84eb67', + '5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-128: I=35'), + ('6fccbc28363759914b6f0280afaf20c6', 'c7dc217d9e3604ffe7e91f080ecd5a3a', + '70717273757677787a7b7c7d7f808182', + 'ecb-tbl-128: I=36'), + ('7d82a43ddf4fefa2fc5947499884d386', '37785901863f5c81260ea41e7580cda5', + '84858687898a8b8c8e8f909193949596', + 'ecb-tbl-128: I=37'), + ('5d5a990eaab9093afe4ce254dfa49ef9', 'a07b9338e92ed105e6ad720fccce9fe4', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aa', + 'ecb-tbl-128: I=38'), + 
('4cd1e2fd3f4434b553aae453f0ed1a02', 'ae0fb9722418cc21a7da816bbc61322c', + 'acadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-128: I=39'), + ('5a2c9a9641d4299125fa1b9363104b5e', 'c826a193080ff91ffb21f71d3373c877', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2', + 'ecb-tbl-128: I=40'), + ('b517fe34c0fa217d341740bfd4fe8dd4', '1181b11b0e494e8d8b0aa6b1d5ac2c48', + 'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-128: I=41'), + ('014baf2278a69d331d5180103643e99a', '6743c3d1519ab4f2cd9a78ab09a511bd', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fa', + 'ecb-tbl-128: I=42'), + ('b529bd8164f20d0aa443d4932116841c', 'dc55c076d52bacdf2eefd952946a439d', + 'fcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-128: I=43'), + ('2e596dcbb2f33d4216a1176d5bd1e456', '711b17b590ffc72b5c8e342b601e8003', + '10111213151617181a1b1c1d1f202122', + 'ecb-tbl-128: I=44'), + ('7274a1ea2b7ee2424e9a0e4673689143', '19983bb0950783a537e1339f4aa21c75', + '24252627292a2b2c2e2f303133343536', + 'ecb-tbl-128: I=45'), + ('ae20020bd4f13e9d90140bee3b5d26af', '3ba7762e15554169c0f4fa39164c410c', + '38393a3b3d3e3f40424344454748494a', + 'ecb-tbl-128: I=46'), + ('baac065da7ac26e855e79c8849d75a02', 'a0564c41245afca7af8aa2e0e588ea89', + '4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-128: I=47'), + ('7c917d8d1d45fab9e2540e28832540cc', '5e36a42a2e099f54ae85ecd92e2381ed', + '60616263656667686a6b6c6d6f707172', + 'ecb-tbl-128: I=48'), + ('bde6f89e16daadb0e847a2a614566a91', '770036f878cd0f6ca2268172f106f2fe', + '74757677797a7b7c7e7f808183848586', + 'ecb-tbl-128: I=49'), + ('c9de163725f1f5be44ebb1db51d07fbc', '7e4e03908b716116443ccf7c94e7c259', + '88898a8b8d8e8f90929394959798999a', + 'ecb-tbl-128: I=50'), + ('3af57a58f0c07dffa669572b521e2b92', '482735a48c30613a242dd494c7f9185d', + '9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-128: I=51'), + ('3d5ebac306dde4604f1b4fbbbfcdae55', 'b4c0f6c9d4d7079addf9369fc081061d', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2', + 'ecb-tbl-128: I=52'), + ('c2dfa91bceb76a1183c995020ac0b556', 'd5810fe0509ac53edcd74f89962e6270', + 
'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-128: I=53'), + ('c70f54305885e9a0746d01ec56c8596b', '03f17a16b3f91848269ecdd38ebb2165', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9ea', + 'ecb-tbl-128: I=54'), + ('c4f81b610e98012ce000182050c0c2b2', 'da1248c3180348bad4a93b4d9856c9df', + 'ecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-128: I=55'), + ('eaab86b1d02a95d7404eff67489f97d4', '3d10d7b63f3452c06cdf6cce18be0c2c', + '00010203050607080a0b0c0d0f101112', + 'ecb-tbl-128: I=56'), + ('7c55bdb40b88870b52bec3738de82886', '4ab823e7477dfddc0e6789018fcb6258', + '14151617191a1b1c1e1f202123242526', + 'ecb-tbl-128: I=57'), + ('ba6eaa88371ff0a3bd875e3f2a975ce0', 'e6478ba56a77e70cfdaa5c843abde30e', + '28292a2b2d2e2f30323334353738393a', + 'ecb-tbl-128: I=58'), + ('08059130c4c24bd30cf0575e4e0373dc', '1673064895fbeaf7f09c5429ff75772d', + '3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-128: I=59'), + ('9a8eab004ef53093dfcf96f57e7eda82', '4488033ae9f2efd0ca9383bfca1a94e9', + '50515253555657585a5b5c5d5f606162', + 'ecb-tbl-128: I=60'), + ('0745b589e2400c25f117b1d796c28129', '978f3b8c8f9d6f46626cac3c0bcb9217', + '64656667696a6b6c6e6f707173747576', + 'ecb-tbl-128: I=61'), + ('2f1777781216cec3f044f134b1b92bbe', 'e08c8a7e582e15e5527f1d9e2eecb236', + '78797a7b7d7e7f80828384858788898a', + 'ecb-tbl-128: I=62'), + ('353a779ffc541b3a3805d90ce17580fc', 'cec155b76ac5ffda4cf4f9ca91e49a7a', + '8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-128: I=63'), + ('1a1eae4415cefcf08c4ac1c8f68bea8f', 'd5ac7165763225dd2a38cdc6862c29ad', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-128: I=64'), + ('e6e7e4e5b0b3b2b5d4d5aaab16111013', '03680fe19f7ce7275452020be70e8204', + 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-128: I=65'), + ('f8f9fafbfbf8f9e677767170efe0e1e2', '461df740c9781c388e94bb861ceb54f6', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9da', + 'ecb-tbl-128: I=66'), + ('63626160a1a2a3a445444b4a75727370', '451bd60367f96483042742219786a074', + 'dcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-128: I=67'), + 
('717073720605040b2d2c2b2a05fafbf9', 'e4dfa42671a02e57ef173b85c0ea9f2b', + 'f0f1f2f3f5f6f7f8fafbfcfdfe010002', + 'ecb-tbl-128: I=68'), + ('78797a7beae9e8ef3736292891969794', 'ed11b89e76274282227d854700a78b9e', + '04050607090a0b0c0e0f101113141516', + 'ecb-tbl-128: I=69'), + ('838281803231300fdddcdbdaa0afaead', '433946eaa51ea47af33895f2b90b3b75', + '18191a1b1d1e1f20222324252728292a', + 'ecb-tbl-128: I=70'), + ('18191a1bbfbcbdba75747b7a7f78797a', '6bc6d616a5d7d0284a5910ab35022528', + '2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-128: I=71'), + ('848586879b989996a3a2a5a4849b9a99', 'd2a920ecfe919d354b5f49eae9719c98', + '40414243454647484a4b4c4d4f505152', + 'ecb-tbl-128: I=72'), + ('0001020322212027cacbf4f551565754', '3a061b17f6a92885efbd0676985b373d', + '54555657595a5b5c5e5f606163646566', + 'ecb-tbl-128: I=73'), + ('cecfcccdafacadb2515057564a454447', 'fadeec16e33ea2f4688499d157e20d8f', + '68696a6b6d6e6f70727374757778797a', + 'ecb-tbl-128: I=74'), + ('92939091cdcecfc813121d1c80878685', '5cdefede59601aa3c3cda36fa6b1fa13', + '7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-128: I=75'), + ('d2d3d0d16f6c6d6259585f5ed1eeefec', '9574b00039844d92ebba7ee8719265f8', + '90919293959697989a9b9c9d9fa0a1a2', + 'ecb-tbl-128: I=76'), + ('acadaeaf878485820f0e1110d5d2d3d0', '9a9cf33758671787e5006928188643fa', + 'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-128: I=77'), + ('9091929364676619e6e7e0e1757a7b78', '2cddd634c846ba66bb46cbfea4a674f9', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9ca', + 'ecb-tbl-128: I=78'), + ('babbb8b98a89888f74757a7b92959497', 'd28bae029393c3e7e26e9fafbbb4b98f', + 'cccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-128: I=79'), + ('8d8c8f8e6e6d6c633b3a3d3ccad5d4d7', 'ec27529b1bee0a9ab6a0d73ebc82e9b7', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2', + 'ecb-tbl-128: I=80'), + ('86878485010203040808f7f767606162', '3cb25c09472aff6ee7e2b47ccd7ccb17', + 'f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-128: I=81'), + ('8e8f8c8d656667788a8b8c8d010e0f0c', 'dee33103a7283370d725e44ca38f8fe5', + 
'08090a0b0d0e0f10121314151718191a', + 'ecb-tbl-128: I=82'), + ('c8c9cacb858687807a7b7475e7e0e1e2', '27f9bcd1aac64bffc11e7815702c1a69', + '1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-128: I=83'), + ('6d6c6f6e5053525d8c8d8a8badd2d3d0', '5df534ffad4ed0749a9988e9849d0021', + '30313233353637383a3b3c3d3f404142', + 'ecb-tbl-128: I=84'), + ('28292a2b393a3b3c0607181903040506', 'a48bee75db04fb60ca2b80f752a8421b', + '44454647494a4b4c4e4f505153545556', + 'ecb-tbl-128: I=85'), + ('a5a4a7a6b0b3b28ddbdadddcbdb2b3b0', '024c8cf70bc86ee5ce03678cb7af45f9', + '58595a5b5d5e5f60626364656768696a', + 'ecb-tbl-128: I=86'), + ('323330316467666130313e3f2c2b2a29', '3c19ac0f8a3a3862ce577831301e166b', + '6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-128: I=87'), + ('27262524080b0a05171611100b141516', 'c5e355b796a57421d59ca6be82e73bca', + '80818283858687888a8b8c8d8f909192', + 'ecb-tbl-128: I=88'), + ('040506074142434435340b0aa3a4a5a6', 'd94033276417abfb05a69d15b6e386e2', + '94959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-128: I=89'), + ('242526271112130c61606766bdb2b3b0', '24b36559ea3a9b9b958fe6da3e5b8d85', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9ba', + 'ecb-tbl-128: I=90'), + ('4b4a4948252627209e9f9091cec9c8cb', '20fd4feaa0e8bf0cce7861d74ef4cb72', + 'bcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-128: I=91'), + ('68696a6b6665646b9f9e9998d9e6e7e4', '350e20d5174277b9ec314c501570a11d', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2', + 'ecb-tbl-128: I=92'), + ('34353637c5c6c7c0f0f1eeef7c7b7a79', '87a29d61b7c604d238fe73045a7efd57', + 'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-128: I=93'), + ('32333031c2c1c13f0d0c0b0a050a0b08', '2c3164c1cc7d0064816bdc0faa362c52', + 'f8f9fafbfdfefe00020304050708090a', + 'ecb-tbl-128: I=94'), + ('cdcccfcebebdbcbbabaaa5a4181f1e1d', '195fe5e8a05a2ed594f6e4400eee10b3', + '0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-128: I=95'), + ('212023223635343ba0a1a6a7445b5a59', 'e4663df19b9a21a5a284c2bd7f905025', + '20212223252627282a2b2c2d2f303132', + 'ecb-tbl-128: I=96'), + 
('0e0f0c0da8abaaad2f2e515002050407', '21b88714cfb4e2a933bd281a2c4743fd', + '34353637393a3b3c3e3f404143444546', + 'ecb-tbl-128: I=97'), + ('070605042a2928378e8f8889bdb2b3b0', 'cbfc3980d704fd0fc54378ab84e17870', + '48494a4b4d4e4f50525354555758595a', + 'ecb-tbl-128: I=98'), + ('cbcac9c893909196a9a8a7a6a5a2a3a0', 'bc5144baa48bdeb8b63e22e03da418ef', + '5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-128: I=99'), + ('80818283c1c2c3cc9c9d9a9b0cf3f2f1', '5a1dbaef1ee2984b8395da3bdffa3ccc', + '70717273757677787a7b7c7d7f808182', + 'ecb-tbl-128: I=100'), + ('1213101125262720fafbe4e5b1b6b7b4', 'f0b11cd0729dfcc80cec903d97159574', + '84858687898a8b8c8e8f909193949596', + 'ecb-tbl-128: I=101'), + ('7f7e7d7c3033320d97969190222d2c2f', '9f95314acfddc6d1914b7f19a9cc8209', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aa', + 'ecb-tbl-128: I=102'), + ('4e4f4c4d484b4a4d81808f8e53545556', '595736f6f0f70914a94e9e007f022519', + 'acadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-128: I=103'), + ('dcdddedfb0b3b2bd15141312a1bebfbc', '1f19f57892cae586fcdfb4c694deb183', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2', + 'ecb-tbl-128: I=104'), + ('93929190282b2a2dc4c5fafb92959497', '540700ee1f6f3dab0b3eddf6caee1ef5', + 'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-128: I=105'), + ('f5f4f7f6c4c7c6d9373631307e717073', '14a342a91019a331687a2254e6626ca2', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fa', + 'ecb-tbl-128: I=106'), + ('93929190b6b5b4b364656a6b05020300', '7b25f3c3b2eea18d743ef283140f29ff', + 'fcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-128: I=107'), + ('babbb8b90d0e0f00a4a5a2a3043b3a39', '46c2587d66e5e6fa7f7ca6411ad28047', + '10111213151617181a1b1c1d1f202122', + 'ecb-tbl-128: I=108'), + ('d8d9dadb7f7c7d7a10110e0f787f7e7d', '09470e72229d954ed5ee73886dfeeba9', + '24252627292a2b2c2e2f303133343536', + 'ecb-tbl-128: I=109'), + ('fefffcfdefeced923b3a3d3c6768696a', 'd77c03de92d4d0d79ef8d4824ef365eb', + '38393a3b3d3e3f40424344454748494a', + 'ecb-tbl-128: I=110'), + ('d6d7d4d58a89888f96979899a5a2a3a0', 
'1d190219f290e0f1715d152d41a23593', + '4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-128: I=111'), + ('18191a1ba8abaaa5303136379b848586', 'a2cd332ce3a0818769616292e87f757b', + '60616263656667686a6b6c6d6f707172', + 'ecb-tbl-128: I=112'), + ('6b6a6968a4a7a6a1d6d72829b0b7b6b5', 'd54afa6ce60fbf9341a3690e21385102', + '74757677797a7b7c7e7f808183848586', + 'ecb-tbl-128: I=113'), + ('000102038a89889755545352a6a9a8ab', '06e5c364ded628a3f5e05e613e356f46', + '88898a8b8d8e8f90929394959798999a', + 'ecb-tbl-128: I=114'), + ('2d2c2f2eb3b0b1b6b6b7b8b9f2f5f4f7', 'eae63c0e62556dac85d221099896355a', + '9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-128: I=115'), + ('979695943536373856575051e09f9e9d', '1fed060e2c6fc93ee764403a889985a2', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2', + 'ecb-tbl-128: I=116'), + ('a4a5a6a7989b9a9db1b0afae7a7d7c7f', 'c25235c1a30fdec1c7cb5c5737b2a588', + 'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-128: I=117'), + ('c1c0c3c2686b6a55a8a9aeafeae5e4e7', '796dbef95147d4d30873ad8b7b92efc0', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9ea', + 'ecb-tbl-128: I=118'), + ('c1c0c3c2141716118c8d828364636261', 'cbcf0fb34d98d0bd5c22ce37211a46bf', + 'ecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-128: I=119'), + ('93929190cccfcec196979091e0fffefd', '94b44da6466126cafa7c7fd09063fc24', + '00010203050607080a0b0c0d0f101112', + 'ecb-tbl-128: I=120'), + ('b4b5b6b7f9fafbfc25241b1a6e69686b', 'd78c5b5ebf9b4dbda6ae506c5074c8fe', + '14151617191a1b1c1e1f202123242526', + 'ecb-tbl-128: I=121'), + ('868784850704051ac7c6c1c08788898a', '6c27444c27204b043812cf8cf95f9769', + '28292a2b2d2e2f30323334353738393a', + 'ecb-tbl-128: I=122'), + ('f4f5f6f7aaa9a8affdfcf3f277707172', 'be94524ee5a2aa50bba8b75f4c0aebcf', + '3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-128: I=123'), + ('d3d2d1d00605040bc3c2c5c43e010003', 'a0aeaae91ba9f31f51aeb3588cf3a39e', + '50515253555657585a5b5c5d5f606162', + 'ecb-tbl-128: I=124'), + ('73727170424140476a6b74750d0a0b08', '275297779c28266ef9fe4c6a13c08488', + 
'64656667696a6b6c6e6f707173747576', + 'ecb-tbl-128: I=125'), + ('c2c3c0c10a0908f754555253a1aeafac', '86523d92bb8672cb01cf4a77fd725882', + '78797a7b7d7e7f80828384858788898a', + 'ecb-tbl-128: I=126'), + ('6d6c6f6ef8fbfafd82838c8df8fffefd', '4b8327640e9f33322a04dd96fcbf9a36', + '8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-128: I=127'), + ('f5f4f7f684878689a6a7a0a1d2cdcccf', 'ce52af650d088ca559425223f4d32694', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-128: I=128'), + + # ecb_tbl.txt, KEYSIZE=192 + ('2d33eef2c0430a8a9ebf45e809c40bb6', 'dff4945e0336df4c1c56bc700eff837f', + '00010203050607080a0b0c0d0f10111214151617191a1b1c', + 'ecb-tbl-192: I=1'), + ('6aa375d1fa155a61fb72353e0a5a8756', 'b6fddef4752765e347d5d2dc196d1252', + '1e1f20212324252628292a2b2d2e2f30323334353738393a', + 'ecb-tbl-192: I=2'), + ('bc3736518b9490dcb8ed60eb26758ed4', 'd23684e3d963b3afcf1a114aca90cbd6', + '3c3d3e3f41424344464748494b4c4d4e5051525355565758', + 'ecb-tbl-192: I=3'), + ('aa214402b46cffb9f761ec11263a311e', '3a7ac027753e2a18c2ceab9e17c11fd0', + '5a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-192: I=4'), + ('02aea86e572eeab66b2c3af5e9a46fd6', '8f6786bd007528ba26603c1601cdd0d8', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394', + 'ecb-tbl-192: I=5'), + ('e2aef6acc33b965c4fa1f91c75ff6f36', 'd17d073b01e71502e28b47ab551168b3', + '969798999b9c9d9ea0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-192: I=6'), + ('0659df46427162b9434865dd9499f91d', 'a469da517119fab95876f41d06d40ffa', + 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6c8c9cacbcdcecfd0', + 'ecb-tbl-192: I=7'), + ('49a44239c748feb456f59c276a5658df', '6091aa3b695c11f5c0b6ad26d3d862ff', + 'd2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-192: I=8'), + ('66208f6e9d04525bdedb2733b6a6be37', '70f9e67f9f8df1294131662dc6e69364', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c', + 'ecb-tbl-192: I=9'), + ('3393f8dfc729c97f5480b950bc9666b0', 'd154dcafad8b207fa5cbc95e9996b559', + 
'0e0f10111314151618191a1b1d1e1f20222324252728292a', + 'ecb-tbl-192: I=10'), + ('606834c8ce063f3234cf1145325dbd71', '4934d541e8b46fa339c805a7aeb9e5da', + '2c2d2e2f31323334363738393b3c3d3e4041424345464748', + 'ecb-tbl-192: I=11'), + ('fec1c04f529bbd17d8cecfcc4718b17f', '62564c738f3efe186e1a127a0c4d3c61', + '4a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-192: I=12'), + ('32df99b431ed5dc5acf8caf6dc6ce475', '07805aa043986eb23693e23bef8f3438', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384', + 'ecb-tbl-192: I=13'), + ('7fdc2b746f3f665296943b83710d1f82', 'df0b4931038bade848dee3b4b85aa44b', + '868788898b8c8d8e90919293959697989a9b9c9d9fa0a1a2', + 'ecb-tbl-192: I=14'), + ('8fba1510a3c5b87e2eaa3f7a91455ca2', '592d5fded76582e4143c65099309477c', + 'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6b8b9babbbdbebfc0', + 'ecb-tbl-192: I=15'), + ('2c9b468b1c2eed92578d41b0716b223b', 'c9b8d6545580d3dfbcdd09b954ed4e92', + 'c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-192: I=16'), + ('0a2bbf0efc6bc0034f8a03433fca1b1a', '5dccd5d6eb7c1b42acb008201df707a0', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfc', + 'ecb-tbl-192: I=17'), + ('25260e1f31f4104d387222e70632504b', 'a2a91682ffeb6ed1d34340946829e6f9', + 'fefe01010304050608090a0b0d0e0f10121314151718191a', + 'ecb-tbl-192: I=18'), + ('c527d25a49f08a5228d338642ae65137', 'e45d185b797000348d9267960a68435d', + '1c1d1e1f21222324262728292b2c2d2e3031323335363738', + 'ecb-tbl-192: I=19'), + ('3b49fc081432f5890d0e3d87e884a69e', '45e060dae5901cda8089e10d4f4c246b', + '3a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-192: I=20'), + ('d173f9ed1e57597e166931df2754a083', 'f6951afacc0079a369c71fdcff45df50', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374', + 'ecb-tbl-192: I=21'), + ('8c2b7cafa5afe7f13562daeae1adede0', '9e95e00f351d5b3ac3d0e22e626ddad6', + '767778797b7c7d7e80818283858687888a8b8c8d8f909192', + 'ecb-tbl-192: I=22'), + ('aaf4ec8c1a815aeb826cab741339532c', '9cb566ff26d92dad083b51fdc18c173c', + 
'94959697999a9b9c9e9fa0a1a3a4a5a6a8a9aaabadaeafb0', + 'ecb-tbl-192: I=23'), + ('40be8c5d9108e663f38f1a2395279ecf', 'c9c82766176a9b228eb9a974a010b4fb', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebec', + 'ecb-tbl-192: I=24'), + ('0c8ad9bc32d43e04716753aa4cfbe351', 'd8e26aa02945881d5137f1c1e1386e88', + '2a2b2c2d2f30313234353637393a3b3c3e3f404143444546', + 'ecb-tbl-192: I=25'), + ('1407b1d5f87d63357c8dc7ebbaebbfee', 'c0e024ccd68ff5ffa4d139c355a77c55', + '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364', + 'ecb-tbl-192: I=26'), + ('e62734d1ae3378c4549e939e6f123416', '0b18b3d16f491619da338640df391d43', + '84858687898a8b8c8e8f90919394959698999a9b9d9e9fa0', + 'ecb-tbl-192: I=27'), + ('5a752cff2a176db1a1de77f2d2cdee41', 'dbe09ac8f66027bf20cb6e434f252efc', + 'a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-192: I=28'), + ('a9c8c3a4eabedc80c64730ddd018cd88', '6d04e5e43c5b9cbe05feb9606b6480fe', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdc', + 'ecb-tbl-192: I=29'), + ('ee9b3dbbdb86180072130834d305999a', 'dd1d6553b96be526d9fee0fbd7176866', + '1a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-192: I=30'), + ('a7fa8c3586b8ebde7568ead6f634a879', '0260ca7e3f979fd015b0dd4690e16d2a', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354', + 'ecb-tbl-192: I=31'), + ('37e0f4a87f127d45ac936fe7ad88c10a', '9893734de10edcc8a67c3b110b8b8cc6', + '929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-192: I=32'), + ('3f77d8b5d92bac148e4e46f697a535c5', '93b30b750516b2d18808d710c2ee84ef', + '464748494b4c4d4e50515253555657585a5b5c5d5f606162', + 'ecb-tbl-192: I=33'), + ('d25ebb686c40f7e2c4da1014936571ca', '16f65fa47be3cb5e6dfe7c6c37016c0e', + '828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-192: I=34'), + ('4f1c769d1e5b0552c7eca84dea26a549', 'f3847210d5391e2360608e5acb560581', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbc', + 'ecb-tbl-192: I=35'), + ('8548e2f882d7584d0fafc54372b6633a', '8754462cd223366d0753913e6af2643d', + 
'bebfc0c1c3c4c5c6c8c9cacbcdcecfd0d2d3d4d5d7d8d9da', + 'ecb-tbl-192: I=36'), + ('87d7a336cb476f177cd2a51af2a62cdf', '1ea20617468d1b806a1fd58145462017', + 'dcdddedfe1e2e3e4e6e7e8e9ebecedeef0f1f2f3f5f6f7f8', + 'ecb-tbl-192: I=37'), + ('03b1feac668c4e485c1065dfc22b44ee', '3b155d927355d737c6be9dda60136e2e', + 'fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-192: I=38'), + ('bda15e66819fa72d653a6866aa287962', '26144f7b66daa91b6333dbd3850502b3', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334', + 'ecb-tbl-192: I=39'), + ('4d0c7a0d2505b80bf8b62ceb12467f0a', 'e4f9a4ab52ced8134c649bf319ebcc90', + '363738393b3c3d3e40414243454647484a4b4c4d4f505152', + 'ecb-tbl-192: I=40'), + ('626d34c9429b37211330986466b94e5f', 'b9ddd29ac6128a6cab121e34a4c62b36', + '54555657595a5b5c5e5f60616364656668696a6b6d6e6f70', + 'ecb-tbl-192: I=41'), + ('333c3e6bf00656b088a17e5ff0e7f60a', '6fcddad898f2ce4eff51294f5eaaf5c9', + '727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-192: I=42'), + ('687ed0cdc0d2a2bc8c466d05ef9d2891', 'c9a6fe2bf4028080bea6f7fc417bd7e3', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabac', + 'ecb-tbl-192: I=43'), + ('487830e78cc56c1693e64b2a6660c7b6', '6a2026846d8609d60f298a9c0673127f', + 'aeafb0b1b3b4b5b6b8b9babbbdbebfc0c2c3c4c5c7c8c9ca', + 'ecb-tbl-192: I=44'), + ('7a48d6b7b52b29392aa2072a32b66160', '2cb25c005e26efea44336c4c97a4240b', + 'cccdcecfd1d2d3d4d6d7d8d9dbdcdddee0e1e2e3e5e6e7e8', + 'ecb-tbl-192: I=45'), + ('907320e64c8c5314d10f8d7a11c8618d', '496967ab8680ddd73d09a0e4c7dcc8aa', + 'eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-192: I=46'), + ('b561f2ca2d6e65a4a98341f3ed9ff533', 'd5af94de93487d1f3a8c577cb84a66a4', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324', + 'ecb-tbl-192: I=47'), + ('df769380d212792d026f049e2e3e48ef', '84bdac569cae2828705f267cc8376e90', + '262728292b2c2d2e30313233353637383a3b3c3d3f404142', + 'ecb-tbl-192: I=48'), + ('79f374bc445bdabf8fccb8843d6054c6', 'f7401dda5ad5ab712b7eb5d10c6f99b6', + 
'44454647494a4b4c4e4f50515354555658595a5b5d5e5f60', + 'ecb-tbl-192: I=49'), + ('4e02f1242fa56b05c68dbae8fe44c9d6', '1c9d54318539ebd4c3b5b7e37bf119f0', + '626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-192: I=50'), + ('cf73c93cbff57ac635a6f4ad2a4a1545', 'aca572d65fb2764cffd4a6eca090ea0d', + '80818283858687888a8b8c8d8f90919294959697999a9b9c', + 'ecb-tbl-192: I=51'), + ('9923548e2875750725b886566784c625', '36d9c627b8c2a886a10ccb36eae3dfbb', + '9e9fa0a1a3a4a5a6a8a9aaabadaeafb0b2b3b4b5b7b8b9ba', + 'ecb-tbl-192: I=52'), + ('4888336b723a022c9545320f836a4207', '010edbf5981e143a81d646e597a4a568', + 'bcbdbebfc1c2c3c4c6c7c8c9cbcccdced0d1d2d3d5d6d7d8', + 'ecb-tbl-192: I=53'), + ('f84d9a5561b0608b1160dee000c41ba8', '8db44d538dc20cc2f40f3067fd298e60', + 'dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-192: I=54'), + ('c23192a0418e30a19b45ae3e3625bf22', '930eb53bc71e6ac4b82972bdcd5aafb3', + 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314', + 'ecb-tbl-192: I=55'), + ('b84e0690b28b0025381ad82a15e501a7', '6c42a81edcbc9517ccd89c30c95597b4', + '161718191b1c1d1e20212223252627282a2b2c2d2f303132', + 'ecb-tbl-192: I=56'), + ('acef5e5c108876c4f06269f865b8f0b0', 'da389847ad06df19d76ee119c71e1dd3', + '34353637393a3b3c3e3f40414344454648494a4b4d4e4f50', + 'ecb-tbl-192: I=57'), + ('0f1b3603e0f5ddea4548246153a5e064', 'e018fdae13d3118f9a5d1a647a3f0462', + '525354555758595a5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-192: I=58'), + ('fbb63893450d42b58c6d88cd3c1809e3', '2aa65db36264239d3846180fabdfad20', + '70717273757677787a7b7c7d7f80818284858687898a8b8c', + 'ecb-tbl-192: I=59'), + ('4bef736df150259dae0c91354e8a5f92', '1472163e9a4f780f1ceb44b07ecf4fdb', + '8e8f90919394959698999a9b9d9e9fa0a2a3a4a5a7a8a9aa', + 'ecb-tbl-192: I=60'), + ('7d2d46242056ef13d3c3fc93c128f4c7', 'c8273fdc8f3a9f72e91097614b62397c', + 'acadaeafb1b2b3b4b6b7b8b9bbbcbdbec0c1c2c3c5c6c7c8', + 'ecb-tbl-192: I=61'), + ('e9c1ba2df415657a256edb33934680fd', '66c8427dcd733aaf7b3470cb7d976e3f', + 
'cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-192: I=62'), + ('e23ee277b0aa0a1dfb81f7527c3514f1', '146131cb17f1424d4f8da91e6f80c1d0', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304', + 'ecb-tbl-192: I=63'), + ('3e7445b0b63caaf75e4a911e12106b4c', '2610d0ad83659081ae085266a88770dc', + '060708090b0c0d0e10111213151617181a1b1c1d1f202122', + 'ecb-tbl-192: I=64'), + ('767774752023222544455a5be6e1e0e3', '38a2b5a974b0575c5d733917fb0d4570', + '24252627292a2b2c2e2f30313334353638393a3b3d3e3f40', + 'ecb-tbl-192: I=65'), + ('72737475717e7f7ce9e8ebea696a6b6c', 'e21d401ebc60de20d6c486e4f39a588b', + '424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-192: I=66'), + ('dfdedddc25262728c9c8cfcef1eeefec', 'e51d5f88c670b079c0ca1f0c2c4405a2', + '60616263656667686a6b6c6d6f70717274757677797a7b7c', + 'ecb-tbl-192: I=67'), + ('fffe0100707776755f5e5d5c7675746b', '246a94788a642fb3d1b823c8762380c8', + '7e7f80818384858688898a8b8d8e8f90929394959798999a', + 'ecb-tbl-192: I=68'), + ('e0e1e2e3424140479f9e9190292e2f2c', 'b80c391c5c41a4c3b30c68e0e3d7550f', + '9c9d9e9fa1a2a3a4a6a7a8a9abacadaeb0b1b2b3b5b6b7b8', + 'ecb-tbl-192: I=69'), + ('2120272690efeeed3b3a39384e4d4c4b', 'b77c4754fc64eb9a1154a9af0bb1f21c', + 'babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-192: I=70'), + ('ecedeeef5350516ea1a0a7a6a3acadae', 'fb554de520d159a06bf219fc7f34a02f', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4', + 'ecb-tbl-192: I=71'), + ('32333c3d25222320e9e8ebeacecdccc3', 'a89fba152d76b4927beed160ddb76c57', + 'f6f7f8f9fbfcfdfe00010203050607080a0b0c0d0f101112', + 'ecb-tbl-192: I=72'), + ('40414243626160678a8bb4b511161714', '5676eab4a98d2e8473b3f3d46424247c', + '14151617191a1b1c1e1f20212324252628292a2b2d2e2f30', + 'ecb-tbl-192: I=73'), + ('94959293f5fafbf81f1e1d1c7c7f7e79', '4e8f068bd7ede52a639036ec86c33568', + '323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-192: I=74'), + ('bebfbcbd191a1b14cfcec9c8546b6a69', 'f0193c4d7aff1791ee4c07eb4a1824fc', + 
'50515253555657585a5b5c5d5f60616264656667696a6b6c', + 'ecb-tbl-192: I=75'), + ('2c2d3233898e8f8cbbbab9b8333031ce', 'ac8686eeca9ba761afe82d67b928c33f', + '6e6f70717374757678797a7b7d7e7f80828384858788898a', + 'ecb-tbl-192: I=76'), + ('84858687bfbcbdba37363938fdfafbf8', '5faf8573e33b145b6a369cd3606ab2c9', + '8c8d8e8f91929394969798999b9c9d9ea0a1a2a3a5a6a7a8', + 'ecb-tbl-192: I=77'), + ('828384857669686b909192930b08090e', '31587e9944ab1c16b844ecad0df2e7da', + 'aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-192: I=78'), + ('bebfbcbd9695948b707176779e919093', 'd017fecd91148aba37f6f3068aa67d8a', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4', + 'ecb-tbl-192: I=79'), + ('8b8a85846067666521202322d0d3d2dd', '788ef2f021a73cba2794b616078a8500', + 'e6e7e8e9ebecedeef0f1f2f3f5f6f7f8fafbfcfdfe010002', + 'ecb-tbl-192: I=80'), + ('76777475f1f2f3f4f8f9e6e777707172', '5d1ef20dced6bcbc12131ac7c54788aa', + '04050607090a0b0c0e0f10111314151618191a1b1d1e1f20', + 'ecb-tbl-192: I=81'), + ('a4a5a2a34f404142b4b5b6b727242522', 'b3c8cf961faf9ea05fdde6d1e4d8f663', + '222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-192: I=82'), + ('94959697e1e2e3ec16171011839c9d9e', '143075c70605861c7fac6526199e459f', + '40414243454647484a4b4c4d4f50515254555657595a5b5c', + 'ecb-tbl-192: I=83'), + ('03023d3c06010003dedfdcddfffcfde2', 'a5ae12eade9a87268d898bfc8fc0252a', + '5e5f60616364656668696a6b6d6e6f70727374757778797a', + 'ecb-tbl-192: I=84'), + ('10111213f1f2f3f4cecfc0c1dbdcddde', '0924f7cf2e877a4819f5244a360dcea9', + '7c7d7e7f81828384868788898b8c8d8e9091929395969798', + 'ecb-tbl-192: I=85'), + ('67666160724d4c4f1d1c1f1e73707176', '3d9e9635afcc3e291cc7ab3f27d1c99a', + '9a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-192: I=86'), + ('e6e7e4e5a8abaad584858283909f9e9d', '9d80feebf87510e2b8fb98bb54fd788c', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4', + 'ecb-tbl-192: I=87'), + ('71707f7e565150537d7c7f7e6162636c', '5f9d1a082a1a37985f174002eca01309', + 
'd6d7d8d9dbdcdddee0e1e2e3e5e6e7e8eaebecedeff0f1f2', + 'ecb-tbl-192: I=88'), + ('64656667212223245555aaaa03040506', 'a390ebb1d1403930184a44b4876646e4', + 'f4f5f6f7f9fafbfcfefe01010304050608090a0b0d0e0f10', + 'ecb-tbl-192: I=89'), + ('9e9f9899aba4a5a6cfcecdcc2b28292e', '700fe918981c3195bb6c4bcb46b74e29', + '121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-192: I=90'), + ('c7c6c5c4d1d2d3dc626364653a454447', '907984406f7bf2d17fb1eb15b673d747', + '30313233353637383a3b3c3d3f40414244454647494a4b4c', + 'ecb-tbl-192: I=91'), + ('f6f7e8e9e0e7e6e51d1c1f1e5b585966', 'c32a956dcfc875c2ac7c7cc8b8cc26e1', + '4e4f50515354555658595a5b5d5e5f60626364656768696a', + 'ecb-tbl-192: I=92'), + ('bcbdbebf5d5e5f5868696667f4f3f2f1', '02646e2ebfa9b820cf8424e9b9b6eb51', + '6c6d6e6f71727374767778797b7c7d7e8081828385868788', + 'ecb-tbl-192: I=93'), + ('40414647b0afaead9b9a99989b98999e', '621fda3a5bbd54c6d3c685816bd4ead8', + '8a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-192: I=94'), + ('69686b6a0201001f0f0e0908b4bbbab9', 'd4e216040426dfaf18b152469bc5ac2f', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4', + 'ecb-tbl-192: I=95'), + ('c7c6c9c8d8dfdedd5a5b5859bebdbcb3', '9d0635b9d33b6cdbd71f5d246ea17cc8', + 'c6c7c8c9cbcccdced0d1d2d3d5d6d7d8dadbdcdddfe0e1e2', + 'ecb-tbl-192: I=96'), + ('dedfdcdd787b7a7dfffee1e0b2b5b4b7', '10abad1bd9bae5448808765583a2cc1a', + 'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6f8f9fafbfdfefe00', + 'ecb-tbl-192: I=97'), + ('4d4c4b4a606f6e6dd0d1d2d3fbf8f9fe', '6891889e16544e355ff65a793c39c9a8', + '020304050708090a0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-192: I=98'), + ('b7b6b5b4d7d4d5dae5e4e3e2e1fefffc', 'cc735582e68072c163cd9ddf46b91279', + '20212223252627282a2b2c2d2f30313234353637393a3b3c', + 'ecb-tbl-192: I=99'), + ('cecfb0b1f7f0f1f2aeafacad3e3d3c23', 'c5c68b9aeeb7f878df578efa562f9574', + '3e3f40414344454648494a4b4d4e4f50525354555758595a', + 'ecb-tbl-192: I=100'), + ('cacbc8c9cdcecfc812131c1d494e4f4c', '5f4764395a667a47d73452955d0d2ce8', + 
'5c5d5e5f61626364666768696b6c6d6e7071727375767778', + 'ecb-tbl-192: I=101'), + ('9d9c9b9ad22d2c2fb1b0b3b20c0f0e09', '701448331f66106cefddf1eb8267c357', + '7a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-192: I=102'), + ('7a7b787964676659959493924f404142', 'cb3ee56d2e14b4e1941666f13379d657', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4', + 'ecb-tbl-192: I=103'), + ('aaaba4a5cec9c8cb1f1e1d1caba8a9a6', '9fe16efd18ab6e1981191851fedb0764', + 'b6b7b8b9bbbcbdbec0c1c2c3c5c6c7c8cacbcccdcfd0d1d2', + 'ecb-tbl-192: I=104'), + ('93929190282b2a2dc4c5fafb92959497', '3dc9ba24e1b223589b147adceb4c8e48', + 'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6e8e9eaebedeeeff0', + 'ecb-tbl-192: I=105'), + ('efeee9e8ded1d0d339383b3a888b8a8d', '1c333032682e7d4de5e5afc05c3e483c', + 'f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-192: I=106'), + ('7f7e7d7ca2a1a0af78797e7f112e2f2c', 'd593cc99a95afef7e92038e05a59d00a', + '10111213151617181a1b1c1d1f20212224252627292a2b2c', + 'ecb-tbl-192: I=107'), + ('84859a9b2b2c2d2e868784852625245b', '51e7f96f53b4353923452c222134e1ec', + '2e2f30313334353638393a3b3d3e3f40424344454748494a', + 'ecb-tbl-192: I=108'), + ('b0b1b2b3070405026869666710171615', '4075b357a1a2b473400c3b25f32f81a4', + '4c4d4e4f51525354565758595b5c5d5e6061626365666768', + 'ecb-tbl-192: I=109'), + ('acadaaabbda2a3a00d0c0f0e595a5b5c', '302e341a3ebcd74f0d55f61714570284', + '6a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-192: I=110'), + ('121310115655544b5253545569666764', '57abdd8231280da01c5042b78cf76522', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4', + 'ecb-tbl-192: I=111'), + ('dedfd0d166616063eaebe8e94142434c', '17f9ea7eea17ac1adf0e190fef799e92', + 'a6a7a8a9abacadaeb0b1b2b3b5b6b7b8babbbcbdbfc0c1c2', + 'ecb-tbl-192: I=112'), + ('dbdad9d81417161166677879e0e7e6e5', '2e1bdd563dd87ee5c338dd6d098d0a7a', + 'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6d8d9dadbdddedfe0', + 'ecb-tbl-192: I=113'), + ('6a6b6c6de0efeeed2b2a2928c0c3c2c5', 'eb869996e6f8bfb2bfdd9e0c4504dbb2', 
+ 'e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-192: I=114'), + ('b1b0b3b21714151a1a1b1c1d5649484b', 'c2e01549e9decf317468b3e018c61ba8', + '00010203050607080a0b0c0d0f10111214151617191a1b1c', + 'ecb-tbl-192: I=115'), + ('39380706a3a4a5a6c4c5c6c77271706f', '8da875d033c01dd463b244a1770f4a22', + '1e1f20212324252628292a2b2d2e2f30323334353738393a', + 'ecb-tbl-192: I=116'), + ('5c5d5e5f1013121539383736e2e5e4e7', '8ba0dcf3a186844f026d022f8839d696', + '3c3d3e3f41424344464748494b4c4d4e5051525355565758', + 'ecb-tbl-192: I=117'), + ('43424544ead5d4d72e2f2c2d64676661', 'e9691ff9a6cc6970e51670a0fd5b88c1', + '5a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-192: I=118'), + ('55545756989b9a65f8f9feff18171615', 'f2baec06faeed30f88ee63ba081a6e5b', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394', + 'ecb-tbl-192: I=119'), + ('05040b0a525554573c3d3e3f4a494847', '9c39d4c459ae5753394d6094adc21e78', + '969798999b9c9d9ea0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-192: I=120'), + ('14151617595a5b5c8584fbfa8e89888b', '6345b532a11904502ea43ba99c6bd2b2', + 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6c8c9cacbcdcecfd0', + 'ecb-tbl-192: I=121'), + ('7c7d7a7bfdf2f3f029282b2a51525354', '5ffae3061a95172e4070cedce1e428c8', + 'd2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-192: I=122'), + ('38393a3b1e1d1c1341404746c23d3c3e', '0a4566be4cdf9adce5dec865b5ab34cd', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c', + 'ecb-tbl-192: I=123'), + ('8d8c939240474645818083827c7f7e41', 'ca17fcce79b7404f2559b22928f126fb', + '0e0f10111314151618191a1b1d1e1f20222324252728292a', + 'ecb-tbl-192: I=124'), + ('3b3a39381a19181f32333c3d45424340', '97ca39b849ed73a6470a97c821d82f58', + '2c2d2e2f31323334363738393b3c3d3e4041424345464748', + 'ecb-tbl-192: I=125'), + ('f0f1f6f738272625828380817f7c7d7a', '8198cb06bc684c6d3e9b7989428dcf7a', + '4a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-192: I=126'), + ('89888b8a0407061966676061141b1a19', 
'f53c464c705ee0f28d9a4c59374928bd', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384', + 'ecb-tbl-192: I=127'), + ('d3d2dddcaaadacaf9c9d9e9fe8ebeae5', '9adb3d4cca559bb98c3e2ed73dbf1154', + '868788898b8c8d8e90919293959697989a9b9c9d9fa0a1a2', + 'ecb-tbl-192: I=128'), + + # ecb_tbl.txt, KEYSIZE=256 + ('834eadfccac7e1b30664b1aba44815ab', '1946dabf6a03a2a2c3d0b05080aed6fc', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=1'), + ('d9dc4dba3021b05d67c0518f72b62bf1', '5ed301d747d3cc715445ebdec62f2fb4', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=2'), + ('a291d86301a4a739f7392173aa3c604c', '6585c8f43d13a6beab6419fc5935b9d0', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=3'), + ('4264b2696498de4df79788a9f83e9390', '2a5b56a596680fcc0e05f5e0f151ecae', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-256: I=4'), + ('ee9932b3721804d5a83ef5949245b6f6', 'f5d6ff414fd2c6181494d20c37f2b8c4', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-256: I=5'), + ('e6248f55c5fdcbca9cbbb01c88a2ea77', '85399c01f59fffb5204f19f8482f00b8', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-256: I=6'), + ('b8358e41b9dff65fd461d55a99266247', '92097b4c88a041ddf98144bc8d22e8e7', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-256: I=7'), + ('f0e2d72260af58e21e015ab3a4c0d906', '89bd5b73b356ab412aef9f76cea2d65c', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-256: I=8'), + ('475b8b823ce8893db3c44a9f2a379ff7', '2536969093c55ff9454692f2fac2f530', + '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-256: I=9'), + ('688f5281945812862f5f3076cf80412f', '07fc76a872843f3f6e0081ee9396d637', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-256: I=10'), + 
('08d1d2bc750af553365d35e75afaceaa', 'e38ba8ec2aa741358dcc93e8f141c491', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-256: I=11'), + ('8707121f47cc3efceca5f9a8474950a1', 'd028ee23e4a89075d0b03e868d7d3a42', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-256: I=12'), + ('e51aa0b135dba566939c3b6359a980c5', '8cd9423dfc459e547155c5d1d522e540', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-256: I=13'), + ('069a007fc76a459f98baf917fedf9521', '080e9517eb1677719acf728086040ae3', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-256: I=14'), + ('726165c1723fbcf6c026d7d00b091027', '7c1700211a3991fc0ecded0ab3e576b0', + '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-256: I=15'), + ('d7c544de91d55cfcde1f84ca382200ce', 'dabcbcc855839251db51e224fbe87435', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-256: I=16'), + ('fed3c9a161b9b5b2bd611b41dc9da357', '68d56fad0406947a4dd27a7448c10f1d', + '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-256: I=17'), + ('4f634cdc6551043409f30b635832cf82', 'da9a11479844d1ffee24bbf3719a9925', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-256: I=18'), + ('109ce98db0dfb36734d9f3394711b4e6', '5e4ba572f8d23e738da9b05ba24b8d81', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-256: I=19'), + ('4ea6dfaba2d8a02ffdffa89835987242', 'a115a2065d667e3f0b883837a6e903f8', + '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-256: I=20'), + ('5ae094f54af58e6e3cdbf976dac6d9ef', '3e9e90dc33eac2437d86ad30b137e66e', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-256: I=21'), + ('764d8e8e0f29926dbe5122e66354fdbe', '01ce82d8fbcdae824cb3c48e495c3692', + 
'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-256: I=22'), + ('3f0418f888cdf29a982bf6b75410d6a9', '0c9cff163ce936faaf083cfd3dea3117', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-256: I=23'), + ('e4a3e7cb12cdd56aa4a75197a9530220', '5131ba9bd48f2bba85560680df504b52', + '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-256: I=24'), + ('211677684aac1ec1a160f44c4ebf3f26', '9dc503bbf09823aec8a977a5ad26ccb2', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-256: I=25'), + ('d21e439ff749ac8f18d6d4b105e03895', '9a6db0c0862e506a9e397225884041d7', + '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-256: I=26'), + ('d9f6ff44646c4725bd4c0103ff5552a7', '430bf9570804185e1ab6365fc6a6860c', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-256: I=27'), + ('0b1256c2a00b976250cfc5b0c37ed382', '3525ebc02f4886e6a5a3762813e8ce8a', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-256: I=28'), + ('b056447ffc6dc4523a36cc2e972a3a79', '07fa265c763779cce224c7bad671027b', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-256: I=29'), + ('5e25ca78f0de55802524d38da3fe4456', 'e8b72b4e8be243438c9fff1f0e205872', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=30'), + ('a5bcf4728fa5eaad8567c0dc24675f83', '109d4f999a0e11ace1f05e6b22cbcb50', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=31'), + ('814e59f97ed84646b78b2ca022e9ca43', '45a5e8d4c3ed58403ff08d68a0cc4029', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=32'), + ('15478beec58f4775c7a7f5d4395514d7', '196865964db3d417b6bd4d586bcb7634', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-256: I=33'), + ('253548ffca461c67c8cbc78cd59f4756', 
'60436ad45ac7d30d99195f815d98d2ae', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-256: I=34'), + ('fd7ad8d73b9b0f8cc41600640f503d65', 'bb07a23f0b61014b197620c185e2cd75', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-256: I=35'), + ('06199de52c6cbf8af954cd65830bcd56', '5bc0b2850129c854423aff0751fe343b', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-256: I=36'), + ('f17c4ffe48e44c61bd891e257e725794', '7541a78f96738e6417d2a24bd2beca40', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-256: I=37'), + ('9a5b4a402a3e8a59be6bf5cd8154f029', 'b0a303054412882e464591f1546c5b9e', + '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-256: I=38'), + ('79bd40b91a7e07dc939d441782ae6b17', '778c06d8a355eeee214fcea14b4e0eef', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-256: I=39'), + ('d8ceaaf8976e5fbe1012d8c84f323799', '09614206d15cbace63227d06db6beebb', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-256: I=40'), + ('3316e2751e2e388b083da23dd6ac3fbe', '41b97fb20e427a9fdbbb358d9262255d', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-256: I=41'), + ('8b7cfbe37de7dca793521819242c5816', 'c1940f703d845f957652c2d64abd7adf', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-256: I=42'), + ('f23f033c0eebf8ec55752662fd58ce68', 'd2d44fcdae5332343366db297efcf21b', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-256: I=43'), + ('59eb34f6c8bdbacc5fc6ad73a59a1301', 'ea8196b79dbe167b6aa9896e287eed2b', + '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-256: I=44'), + ('dcde8b6bd5cf7cc22d9505e3ce81261a', 'd6b0b0c4ba6c7dbe5ed467a1e3f06c2d', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-256: I=45'), + 
('e33cf7e524fed781e7042ff9f4b35dc7', 'ec51eb295250c22c2fb01816fb72bcae', + '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-256: I=46'), + ('27963c8facdf73062867d164df6d064c', 'aded6630a07ce9c7408a155d3bd0d36f', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-256: I=47'), + ('77b1ce386b551b995f2f2a1da994eef8', '697c9245b9937f32f5d1c82319f0363a', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-256: I=48'), + ('f083388b013679efcf0bb9b15d52ae5c', 'aad5ad50c6262aaec30541a1b7b5b19c', + 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-256: I=49'), + ('c5009e0dab55db0abdb636f2600290c8', '7d34b893855341ec625bd6875ac18c0d', + '20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546', + 'ecb-tbl-256: I=50'), + ('7804881e26cd532d8514d3683f00f1b9', '7ef05105440f83862f5d780e88f02b41', + '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-256: I=51'), + ('46cddcd73d1eb53e675ca012870a92a3', 'c377c06403382061af2c9c93a8e70df6', + '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-256: I=52'), + ('a9fb44062bb07fe130a8e8299eacb1ab', '1dbdb3ffdc052dacc83318853abc6de5', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-256: I=53'), + ('2b6ff8d7a5cc3a28a22d5a6f221af26b', '69a6eab00432517d0bf483c91c0963c7', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-256: I=54'), + ('1a9527c29b8add4b0e3e656dbb2af8b4', '0797f41dc217c80446e1d514bd6ab197', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-256: I=55'), + ('7f99cf2c75244df015eb4b0c1050aeae', '9dfd76575902a637c01343c58e011a03', + '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-256: I=56'), + ('e84ff85b0d9454071909c1381646c4ed', 'acf4328ae78f34b9fa9b459747cc2658', + 
'38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-256: I=57'), + ('89afd40f99521280d5399b12404f6db4', 'b0479aea12bac4fe2384cf98995150c6', + '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-256: I=58'), + ('a09ef32dbc5119a35ab7fa38656f0329', '9dd52789efe3ffb99f33b3da5030109a', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-256: I=59'), + ('61773457f068c376c7829b93e696e716', 'abbb755e4621ef8f1214c19f649fb9fd', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-256: I=60'), + ('a34f0cae726cce41dd498747d891b967', 'da27fb8174357bce2bed0e7354f380f9', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-256: I=61'), + ('856f59496c7388ee2d2b1a27b7697847', 'c59a0663f0993838f6e5856593bdc5ef', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=62'), + ('cb090c593ef7720bd95908fb93b49df4', 'ed60b264b5213e831607a99c0ce5e57e', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=63'), + ('a0ac75cd2f1923d460fc4d457ad95baf', 'e50548746846f3eb77b8c520640884ed', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=64'), + ('2a2b282974777689e8e9eeef525d5c5f', '28282cc7d21d6a2923641e52d188ef0c', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-256: I=65'), + ('909192939390919e0f0e09089788898a', '0dfa5b02abb18e5a815305216d6d4f8e', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-256: I=66'), + ('777675748d8e8f907170777649464744', '7359635c0eecefe31d673395fb46fb99', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-256: I=67'), + ('717073720605040b2d2c2b2a05fafbf9', '73c679f7d5aef2745c9737bb4c47fb36', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-256: I=68'), + ('64656667fefdfcc31b1a1d1ca5aaaba8', 
'b192bd472a4d2eafb786e97458967626', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-256: I=69'), + ('dbdad9d86a696867b5b4b3b2c8d7d6d5', '0ec327f6c8a2b147598ca3fde61dc6a4', + '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-256: I=70'), + ('5c5d5e5fe3e0e1fe31303736333c3d3e', 'fc418eb3c41b859b38d4b6f646629729', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-256: I=71'), + ('545556574b48494673727574546b6a69', '30249e5ac282b1c981ea64b609f3a154', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-256: I=72'), + ('ecedeeefc6c5c4bb56575051f5fafbf8', '5e6e08646d12150776bb43c2d78a9703', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-256: I=73'), + ('464744452724252ac9c8cfced2cdcccf', 'faeb3d5de652cd3447dceb343f30394a', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-256: I=74'), + ('e6e7e4e54142435c878681801c131211', 'a8e88706823f6993ef80d05c1c7b2cf0', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-256: I=75'), + ('72737071cfcccdc2f9f8fffe710e0f0c', '8ced86677e6e00a1a1b15968f2d3cce6', + '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-256: I=76'), + ('505152537370714ec3c2c5c4010e0f0c', '9fc7c23858be03bdebb84e90db6786a9', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-256: I=77'), + ('a8a9aaab5c5f5e51aeafa8a93d222320', 'b4fbd65b33f70d8cf7f1111ac4649c36', + '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-256: I=78'), + ('dedfdcddf6f5f4eb10111617fef1f0f3', 'c5c32d5ed03c4b53cc8c1bd0ef0dbbf6', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-256: I=79'), + ('bdbcbfbe5e5d5c530b0a0d0cfac5c4c7', 'd1a7f03b773e5c212464b63709c6a891', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-256: I=80'), + 
('8a8b8889050606f8f4f5f2f3636c6d6e', '6b7161d8745947ac6950438ea138d028', + 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-256: I=81'), + ('a6a7a4a54d4e4f40b2b3b4b539262724', 'fd47a9f7e366ee7a09bc508b00460661', + '20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546', + 'ecb-tbl-256: I=82'), + ('9c9d9e9fe9eaebf40e0f08099b949596', '00d40b003dc3a0d9310b659b98c7e416', + '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-256: I=83'), + ('2d2c2f2e1013121dcccdcacbed121310', 'eea4c79dcc8e2bda691f20ac48be0717', + '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-256: I=84'), + ('f4f5f6f7edeeefd0eaebecedf7f8f9fa', 'e78f43b11c204403e5751f89d05a2509', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-256: I=85'), + ('3d3c3f3e282b2a2573727574150a0b08', 'd0f0e3d1f1244bb979931e38dd1786ef', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-256: I=86'), + ('b6b7b4b5f8fbfae5b4b5b2b3a0afaead', '042e639dc4e1e4dde7b75b749ea6f765', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-256: I=87'), + ('b7b6b5b4989b9a95878681809ba4a5a6', 'bc032fdd0efe29503a980a7d07ab46a8', + '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-256: I=88'), + ('a8a9aaabe5e6e798e9e8efee4748494a', '0c93ac949c0da6446effb86183b6c910', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-256: I=89'), + ('ecedeeefd9dadbd4b9b8bfbe657a7b78', 'e0d343e14da75c917b4a5cec4810d7c2', + '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-256: I=90'), + ('7f7e7d7c696a6b74cacbcccd929d9c9f', '0eafb821748408279b937b626792e619', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-256: I=91'), + ('08090a0b0605040bfffef9f8b9c6c7c4', 'fa1ac6e02d23b106a1fef18b274a553f', + 
'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-256: I=92'), + ('08090a0bf1f2f3ccfcfdfafb68676665', '0dadfe019cd12368075507df33c1a1e9', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-256: I=93'), + ('cacbc8c93a393837050403020d121310', '3a0879b414465d9ffbaf86b33a63a1b9', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=94'), + ('e9e8ebea8281809f8f8e8988343b3a39', '62199fadc76d0be1805d3ba0b7d914bf', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=95'), + ('515053524645444bd0d1d6d7340b0a09', '1b06d6c5d333e742730130cf78e719b4', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=96'), + ('42434041ecefee1193929594c6c9c8cb', 'f1f848824c32e9dcdcbf21580f069329', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-256: I=97'), + ('efeeedecc2c1c0cf76777071455a5b58', '1a09050cbd684f784d8e965e0782f28a', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-256: I=98'), + ('5f5e5d5c3f3c3d221d1c1b1a19161714', '79c2969e7ded2ba7d088f3f320692360', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-256: I=99'), + ('000102034142434c1c1d1a1b8d727371', '091a658a2f7444c16accb669450c7b63', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-256: I=100'), + ('8e8f8c8db1b2b38c56575051050a0b08', '97c1e3a72cca65fa977d5ed0e8a7bbfc', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-256: I=101'), + ('a7a6a5a4e8ebeae57f7e7978cad5d4d7', '70c430c6db9a17828937305a2df91a2a', + '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-256: I=102'), + ('8a8b888994979689454443429f909192', '629553457fbe2479098571c7c903fde8', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-256: I=103'), + ('8c8d8e8fe0e3e2ed45444342f1cecfcc', 
'a25b25a61f612669e7d91265c7d476ba', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-256: I=104'), + ('fffefdfc4c4f4e31d8d9dedfb6b9b8bb', 'eb7e4e49b8ae0f024570dda293254fed', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-256: I=105'), + ('fdfcfffecccfcec12f2e29286679787b', '38fe15d61cca84516e924adce5014f67', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-256: I=106'), + ('67666564bab9b8a77071767719161714', '3ad208492249108c9f3ebeb167ad0583', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-256: I=107'), + ('9a9b98992d2e2f2084858283245b5a59', '299ba9f9bf5ab05c3580fc26edd1ed12', + '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-256: I=108'), + ('a4a5a6a70b0809365c5d5a5b2c232221', '19dc705b857a60fb07717b2ea5717781', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-256: I=109'), + ('464744455754555af3f2f5f4afb0b1b2', 'ffc8aeb885b5efcad06b6dbebf92e76b', + '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-256: I=110'), + ('323330317675746b7273747549464744', 'f58900c5e0b385253ff2546250a0142b', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-256: I=111'), + ('a8a9aaab181b1a15808186872b141516', '2ee67b56280bc462429cee6e3370cbc1', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-256: I=112'), + ('e7e6e5e4202323ddaaabacad343b3a39', '20db650a9c8e9a84ab4d25f7edc8f03f', + 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-256: I=113'), + ('a8a9aaab2221202fedecebea1e010003', '3c36da169525cf818843805f25b78ae5', + '20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546', + 'ecb-tbl-256: I=114'), + ('f9f8fbfa5f5c5d42424344450e010003', '9a781d960db9e45e37779042fea51922', + '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-256: 
I=115'), + ('57565554f5f6f7f89697909120dfdedd', '6560395ec269c672a3c288226efdba77', + '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-256: I=116'), + ('f8f9fafbcccfcef1dddcdbda0e010003', '8c772b7a189ac544453d5916ebb27b9a', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-256: I=117'), + ('d9d8dbda7073727d80818687c2dddcdf', '77ca5468cc48e843d05f78eed9d6578f', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-256: I=118'), + ('c5c4c7c6080b0a1588898e8f68676665', '72cdcc71dc82c60d4429c9e2d8195baa', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-256: I=119'), + ('83828180dcdfded186878081f0cfcecd', '8080d68ce60e94b40b5b8b69eeb35afa', + '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-256: I=120'), + ('98999a9bdddedfa079787f7e0a050407', '44222d3cde299c04369d58ac0eba1e8e', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-256: I=121'), + ('cecfcccd4f4c4d429f9e9998dfc0c1c2', '9b8721b0a8dfc691c5bc5885dbfcb27a', + '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-256: I=122'), + ('404142436665647b29282f2eaba4a5a6', '0dc015ce9a3a3414b5e62ec643384183', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-256: I=123'), + ('33323130e6e5e4eb23222524dea1a0a3', '705715448a8da412025ce38345c2a148', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-256: I=124'), + ('cfcecdccf6f5f4cbe6e7e0e199969794', 'c32b5b0b6fbae165266c569f4b6ecf0b', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-256: I=125'), + ('babbb8b97271707fdcdddadb29363734', '4dca6c75192a01ddca9476af2a521e87', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=126'), + ('c9c8cbca4447465926272021545b5a59', '058691e627ecbc36ac07b6db423bd698', + 
'28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=127'), + ('050407067477767956575051221d1c1f', '7444527095838fe080fc2bcdd30847eb', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=128'), + + # FIPS PUB 800-38A test vectors, 2001 edition. Annex F. + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + '3ad77bb40d7a3660a89ecaf32466ef97'+'f5d3d58503b9699de785895a96fdbaaf'+ + '43b1cd7f598ece23881b00e3ed030688'+'7b0c785e27e8ad3f8223207104725dd4', + '2b7e151628aed2a6abf7158809cf4f3c', + 'NIST 800-38A, F.1.1, ECB and AES-128'), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 'bd334f1d6e45f25ff712a214571fa5cc'+'974104846d0ad3ad7734ecb3ecee4eef'+ + 'ef7afd2270e2e60adce0ba2face6444e'+'9a4b41ba738d6c72fb16691603c18e0e', + '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', + 'NIST 800-38A, F.1.3, ECB and AES-192'), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 'f3eed1bdb5d2a03c064b5a7e3db181f8'+'591ccb10d410ed26dc5ba74a31362870'+ + 'b6ed21b99ca6f4f9f153e7b1beafed1d'+'23304b7a39f9f3ff067d8d8f9e24ecc7', + '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', + 'NIST 800-38A, F.1.3, ECB and AES-256'), + +] + +test_data_8_lanes = [] +for td in test_data: + test_data_8_lanes.append((td[0] * 8, td[1] * 8, td[2], td[3])) +test_data += test_data_8_lanes + +class TestMultipleBlocks(unittest.TestCase): + + def __init__(self, use_aesni): + unittest.TestCase.__init__(self) + self.use_aesni = use_aesni + + def runTest(self): + # Encrypt data which is 8*2+4 bytes long, so as to trigger (for the + # AESNI variant) both the path that parallelizes 8 lanes and the one + # that processes data serially + + 
tvs = [ + (b'a' * 16, 'c0b27011eb15bf144d2fc9fae80ea16d4c231cb230416c5fac02e6835ad9d7d0'), + (b'a' * 24, 'df8435ce361a78c535b41dcb57da952abbf9ee5954dc6fbcd75fd00fa626915d'), + (b'a' * 32, '211402de6c80db1f92ba255881178e1f70783b8cfd3b37808205e48b80486cd8') + ] + + for key, expected in tvs: + + cipher = AES.new(key, AES.MODE_ECB, use_aesni=self.use_aesni) + h = SHA256.new() + + pt = b"".join([ tobytes('{0:016x}'.format(x)) for x in range(20) ]) + ct = cipher.encrypt(pt) + self.assertEqual(SHA256.new(ct).hexdigest(), expected) + + +class TestIncompleteBlocks(unittest.TestCase): + + def __init__(self, use_aesni): + unittest.TestCase.__init__(self) + self.use_aesni = use_aesni + + def runTest(self): + # Encrypt data with length not multiple of 16 bytes + + cipher = AES.new(b'4'*16, AES.MODE_ECB, use_aesni=self.use_aesni) + + for msg_len in range(1, 16): + self.assertRaises(ValueError, cipher.encrypt, b'1' * msg_len) + self.assertRaises(ValueError, cipher.encrypt, b'1' * (msg_len+16)) + self.assertRaises(ValueError, cipher.decrypt, b'1' * msg_len) + self.assertRaises(ValueError, cipher.decrypt, b'1' * (msg_len+16)) + + self.assertEqual(cipher.encrypt(b''), b'') + self.assertEqual(cipher.decrypt(b''), b'') + + +class TestOutput(unittest.TestCase): + + def __init__(self, use_aesni): + unittest.TestCase.__init__(self) + self.use_aesni = use_aesni + + def runTest(self): + # Encrypt/Decrypt data and test output parameter + + cipher = AES.new(b'4'*16, AES.MODE_ECB, use_aesni=self.use_aesni) + + pt = b'5' * 16 + ct = cipher.encrypt(pt) + + output = bytearray(16) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + output = memoryview(bytearray(16)) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + self.assertRaises(TypeError, 
cipher.encrypt, pt, output=b'0'*16) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(15) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +def get_tests(config={}): + from Crypto.Util import _cpu_features + from .common import make_block_tests + + tests = make_block_tests(AES, "AES", test_data, {'use_aesni': False}) + tests += [ TestMultipleBlocks(False) ] + tests += [ TestIncompleteBlocks(False) ] + if _cpu_features.have_aes_ni(): + # Run tests with AES-NI instructions if they are available. + tests += make_block_tests(AES, "AESNI", test_data, {'use_aesni': True}) + tests += [ TestMultipleBlocks(True) ] + tests += [ TestIncompleteBlocks(True) ] + tests += [ TestOutput(True) ] + else: + print("Skipping AESNI tests") + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_ARC2.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_ARC2.py new file mode 100644 index 0000000..fd9448c --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_ARC2.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/ARC2.py: Self-test for the Alleged-RC2 cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.ARC2""" + +import unittest + +from Crypto.Util.py3compat import b, bchr + +from Crypto.Cipher import ARC2 + +# This is a list of (plaintext, ciphertext, key[, description[, extra_params]]) tuples. +test_data = [ + # Test vectors from RFC 2268 + + # 63-bit effective key length + ('0000000000000000', 'ebb773f993278eff', '0000000000000000', + 'RFC2268-1', dict(effective_keylen=63)), + + # 64-bit effective key length + ('ffffffffffffffff', '278b27e42e2f0d49', 'ffffffffffffffff', + 'RFC2268-2', dict(effective_keylen=64)), + ('1000000000000001', '30649edf9be7d2c2', '3000000000000000', + 'RFC2268-3', dict(effective_keylen=64)), + #('0000000000000000', '61a8a244adacccf0', '88', + # 'RFC2268-4', dict(effective_keylen=64)), + ('0000000000000000', '6ccf4308974c267f', '88bca90e90875a', + 'RFC2268-5', dict(effective_keylen=64)), + ('0000000000000000', '1a807d272bbe5db1', '88bca90e90875a7f0f79c384627bafb2', + 'RFC2268-6', dict(effective_keylen=64)), + + # 128-bit effective key length + ('0000000000000000', '2269552ab0f85ca6', '88bca90e90875a7f0f79c384627bafb2', + "RFC2268-7", dict(effective_keylen=128)), + ('0000000000000000', '5b78d3a43dfff1f1', + '88bca90e90875a7f0f79c384627bafb216f80a6f85920584c42fceb0be255daf1e', + "RFC2268-8", dict(effective_keylen=129)), + + # Test vectors from PyCrypto 2.0.1's testdata.py + # 1024-bit effective key length + ('0000000000000000', '624fb3e887419e48', 
'5068696c6970476c617373', + 'PCTv201-0'), + ('ffffffffffffffff', '79cadef44c4a5a85', '5068696c6970476c617373', + 'PCTv201-1'), + ('0001020304050607', '90411525b34e4c2c', '5068696c6970476c617373', + 'PCTv201-2'), + ('0011223344556677', '078656aaba61cbfb', '5068696c6970476c617373', + 'PCTv201-3'), + ('0000000000000000', 'd7bcc5dbb4d6e56a', 'ffffffffffffffff', + 'PCTv201-4'), + ('ffffffffffffffff', '7259018ec557b357', 'ffffffffffffffff', + 'PCTv201-5'), + ('0001020304050607', '93d20a497f2ccb62', 'ffffffffffffffff', + 'PCTv201-6'), + ('0011223344556677', 'cb15a7f819c0014d', 'ffffffffffffffff', + 'PCTv201-7'), + ('0000000000000000', '63ac98cdf3843a7a', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', + 'PCTv201-8'), + ('ffffffffffffffff', '3fb49e2fa12371dd', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', + 'PCTv201-9'), + ('0001020304050607', '46414781ab387d5f', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', + 'PCTv201-10'), + ('0011223344556677', 'be09dc81feaca271', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', + 'PCTv201-11'), + ('0000000000000000', 'e64221e608be30ab', '53e5ffe553', + 'PCTv201-12'), + ('ffffffffffffffff', '862bc60fdcd4d9a9', '53e5ffe553', + 'PCTv201-13'), + ('0001020304050607', '6a34da50fa5e47de', '53e5ffe553', + 'PCTv201-14'), + ('0011223344556677', '584644c34503122c', '53e5ffe553', + 'PCTv201-15'), +] + +class BufferOverflowTest(unittest.TestCase): + # Test a buffer overflow found in older versions of PyCrypto + + def runTest(self): + """ARC2 with keylength > 128""" + key = b("x") * 16384 + self.assertRaises(ValueError, ARC2.new, key, ARC2.MODE_ECB) + +class KeyLength(unittest.TestCase): + + def runTest(self): + ARC2.new(b'\x00' * 16, ARC2.MODE_ECB, effective_keylen=40) + self.assertRaises(ValueError, ARC2.new, bchr(0) * 4, ARC2.MODE_ECB) + self.assertRaises(ValueError, ARC2.new, bchr(0) * 129, ARC2.MODE_ECB) + + self.assertRaises(ValueError, ARC2.new, bchr(0) * 16, ARC2.MODE_ECB, + effective_keylen=39) + 
self.assertRaises(ValueError, ARC2.new, bchr(0) * 16, ARC2.MODE_ECB, + effective_keylen=1025) + + +class TestOutput(unittest.TestCase): + + def runTest(self): + # Encrypt/Decrypt data and test output parameter + + cipher = ARC2.new(b'4'*16, ARC2.MODE_ECB) + + pt = b'5' * 16 + ct = cipher.encrypt(pt) + + output = bytearray(16) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + output = memoryview(bytearray(16)) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(7) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +def get_tests(config={}): + from Crypto.Cipher import ARC2 + from .common import make_block_tests + + tests = make_block_tests(ARC2, "ARC2", test_data) + tests.append(BufferOverflowTest()) + tests.append(KeyLength()) + tests += [TestOutput()] + + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_ARC4.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_ARC4.py new file mode 100644 index 0000000..856cf4a --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_ARC4.py @@ -0,0 +1,466 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/ARC4.py: Self-test for the Alleged-RC4 cipher +# +# Written in 2008 by Dwayne C. 
Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.ARC4""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * +from Crypto.SelfTest.st_common import * +from binascii import unhexlify + +from Crypto.Cipher import ARC4 + +# This is a list of (plaintext, ciphertext, key[, description]) tuples. +test_data = [ + # Test vectors from Eric Rescorla's message with the subject + # "RC4 compatibility testing", sent to the cipherpunks mailing list on + # September 13, 1994. 
+ # http://cypherpunks.venona.com/date/1994/09/msg00420.html + + ('0123456789abcdef', '75b7878099e0c596', '0123456789abcdef', + 'Test vector 0'), + + ('0000000000000000', '7494c2e7104b0879', '0123456789abcdef', + 'Test vector 1'), + + ('0000000000000000', 'de188941a3375d3a', '0000000000000000', + 'Test vector 2'), + + #('00000000000000000000', 'd6a141a7ec3c38dfbd61', 'ef012345', + # 'Test vector 3'), + + ('01' * 512, + '7595c3e6114a09780c4ad452338e1ffd9a1be9498f813d76533449b6778dcad8' + + 'c78a8d2ba9ac66085d0e53d59c26c2d1c490c1ebbe0ce66d1b6b1b13b6b919b8' + + '47c25a91447a95e75e4ef16779cde8bf0a95850e32af9689444fd377108f98fd' + + 'cbd4e726567500990bcc7e0ca3c4aaa304a387d20f3b8fbbcd42a1bd311d7a43' + + '03dda5ab078896ae80c18b0af66dff319616eb784e495ad2ce90d7f772a81747' + + 'b65f62093b1e0db9e5ba532fafec47508323e671327df9444432cb7367cec82f' + + '5d44c0d00b67d650a075cd4b70dedd77eb9b10231b6b5b741347396d62897421' + + 'd43df9b42e446e358e9c11a9b2184ecbef0cd8e7a877ef968f1390ec9b3d35a5' + + '585cb009290e2fcde7b5ec66d9084be44055a619d9dd7fc3166f9487f7cb2729' + + '12426445998514c15d53a18c864ce3a2b7555793988126520eacf2e3066e230c' + + '91bee4dd5304f5fd0405b35bd99c73135d3d9bc335ee049ef69b3867bf2d7bd1' + + 'eaa595d8bfc0066ff8d31509eb0c6caa006c807a623ef84c3d33c195d23ee320' + + 'c40de0558157c822d4b8c569d849aed59d4e0fd7f379586b4b7ff684ed6a189f' + + '7486d49b9c4bad9ba24b96abf924372c8a8fffb10d55354900a77a3db5f205e1' + + 'b99fcd8660863a159ad4abe40fa48934163ddde542a6585540fd683cbfd8c00f' + + '12129a284deacc4cdefe58be7137541c047126c8d49e2755ab181ab7e940b0c0', + '0123456789abcdef', + "Test vector 4"), +] + +class RFC6229_Tests(unittest.TestCase): + # Test vectors from RFC 6229. Each test vector is a tuple with two items: + # the ARC4 key and a dictionary. The dictionary has keystream offsets as keys + # and the 16-byte keystream starting at the relevant offset as value. 
+ rfc6229_data = [ + # Page 3 + ( + '0102030405', + { + 0: 'b2 39 63 05 f0 3d c0 27 cc c3 52 4a 0a 11 18 a8', + 16: '69 82 94 4f 18 fc 82 d5 89 c4 03 a4 7a 0d 09 19', + 240: '28 cb 11 32 c9 6c e2 86 42 1d ca ad b8 b6 9e ae', + 256: '1c fc f6 2b 03 ed db 64 1d 77 df cf 7f 8d 8c 93', + 496: '42 b7 d0 cd d9 18 a8 a3 3d d5 17 81 c8 1f 40 41', + 512: '64 59 84 44 32 a7 da 92 3c fb 3e b4 98 06 61 f6', + 752: 'ec 10 32 7b de 2b ee fd 18 f9 27 76 80 45 7e 22', + 768: 'eb 62 63 8d 4f 0b a1 fe 9f ca 20 e0 5b f8 ff 2b', + 1008:'45 12 90 48 e6 a0 ed 0b 56 b4 90 33 8f 07 8d a5', + 1024:'30 ab bc c7 c2 0b 01 60 9f 23 ee 2d 5f 6b b7 df', + 1520:'32 94 f7 44 d8 f9 79 05 07 e7 0f 62 e5 bb ce ea', + 1536:'d8 72 9d b4 18 82 25 9b ee 4f 82 53 25 f5 a1 30', + 2032:'1e b1 4a 0c 13 b3 bf 47 fa 2a 0b a9 3a d4 5b 8b', + 2048:'cc 58 2f 8b a9 f2 65 e2 b1 be 91 12 e9 75 d2 d7', + 3056:'f2 e3 0f 9b d1 02 ec bf 75 aa ad e9 bc 35 c4 3c', + 3072:'ec 0e 11 c4 79 dc 32 9d c8 da 79 68 fe 96 56 81', + 4080:'06 83 26 a2 11 84 16 d2 1f 9d 04 b2 cd 1c a0 50', + 4096:'ff 25 b5 89 95 99 67 07 e5 1f bd f0 8b 34 d8 75' + } + ), + # Page 4 + ( + '01020304050607', + { + 0: '29 3f 02 d4 7f 37 c9 b6 33 f2 af 52 85 fe b4 6b', + 16: 'e6 20 f1 39 0d 19 bd 84 e2 e0 fd 75 20 31 af c1', + 240: '91 4f 02 53 1c 92 18 81 0d f6 0f 67 e3 38 15 4c', + 256: 'd0 fd b5 83 07 3c e8 5a b8 39 17 74 0e c0 11 d5', + 496: '75 f8 14 11 e8 71 cf fa 70 b9 0c 74 c5 92 e4 54', + 512: '0b b8 72 02 93 8d ad 60 9e 87 a5 a1 b0 79 e5 e4', + 752: 'c2 91 12 46 b6 12 e7 e7 b9 03 df ed a1 da d8 66', + 768: '32 82 8f 91 50 2b 62 91 36 8d e8 08 1d e3 6f c2', + 1008:'f3 b9 a7 e3 b2 97 bf 9a d8 04 51 2f 90 63 ef f1', + 1024:'8e cb 67 a9 ba 1f 55 a5 a0 67 e2 b0 26 a3 67 6f', + 1520:'d2 aa 90 2b d4 2d 0d 7c fd 34 0c d4 58 10 52 9f', + 1536:'78 b2 72 c9 6e 42 ea b4 c6 0b d9 14 e3 9d 06 e3', + 2032:'f4 33 2f d3 1a 07 93 96 ee 3c ee 3f 2a 4f f0 49', + 2048:'05 45 97 81 d4 1f da 7f 30 c1 be 7e 12 46 c6 23', + 3056:'ad fd 38 68 b8 e5 14 85 d5 e6 10 01 7e 
3d d6 09', + 3072:'ad 26 58 1c 0c 5b e4 5f 4c ea 01 db 2f 38 05 d5', + 4080:'f3 17 2c ef fc 3b 3d 99 7c 85 cc d5 af 1a 95 0c', + 4096:'e7 4b 0b 97 31 22 7f d3 7c 0e c0 8a 47 dd d8 b8' + } + ), + ( + '0102030405060708', + { + 0: '97 ab 8a 1b f0 af b9 61 32 f2 f6 72 58 da 15 a8', + 16: '82 63 ef db 45 c4 a1 86 84 ef 87 e6 b1 9e 5b 09', + 240: '96 36 eb c9 84 19 26 f4 f7 d1 f3 62 bd df 6e 18', + 256: 'd0 a9 90 ff 2c 05 fe f5 b9 03 73 c9 ff 4b 87 0a', + 496: '73 23 9f 1d b7 f4 1d 80 b6 43 c0 c5 25 18 ec 63', + 512: '16 3b 31 99 23 a6 bd b4 52 7c 62 61 26 70 3c 0f', + 752: '49 d6 c8 af 0f 97 14 4a 87 df 21 d9 14 72 f9 66', + 768: '44 17 3a 10 3b 66 16 c5 d5 ad 1c ee 40 c8 63 d0', + 1008:'27 3c 9c 4b 27 f3 22 e4 e7 16 ef 53 a4 7d e7 a4', + 1024:'c6 d0 e7 b2 26 25 9f a9 02 34 90 b2 61 67 ad 1d', + 1520:'1f e8 98 67 13 f0 7c 3d 9a e1 c1 63 ff 8c f9 d3', + 1536:'83 69 e1 a9 65 61 0b e8 87 fb d0 c7 91 62 aa fb', + 2032:'0a 01 27 ab b4 44 84 b9 fb ef 5a bc ae 1b 57 9f', + 2048:'c2 cd ad c6 40 2e 8e e8 66 e1 f3 7b db 47 e4 2c', + 3056:'26 b5 1e a3 7d f8 e1 d6 f7 6f c3 b6 6a 74 29 b3', + 3072:'bc 76 83 20 5d 4f 44 3d c1 f2 9d da 33 15 c8 7b', + 4080:'d5 fa 5a 34 69 d2 9a aa f8 3d 23 58 9d b8 c8 5b', + 4096:'3f b4 6e 2c 8f 0f 06 8e dc e8 cd cd 7d fc 58 62' + } + ), + # Page 5 + ( + '0102030405060708090a', + { + 0: 'ed e3 b0 46 43 e5 86 cc 90 7d c2 18 51 70 99 02', + 16: '03 51 6b a7 8f 41 3b eb 22 3a a5 d4 d2 df 67 11', + 240: '3c fd 6c b5 8e e0 fd de 64 01 76 ad 00 00 04 4d', + 256: '48 53 2b 21 fb 60 79 c9 11 4c 0f fd 9c 04 a1 ad', + 496: '3e 8c ea 98 01 71 09 97 90 84 b1 ef 92 f9 9d 86', + 512: 'e2 0f b4 9b db 33 7e e4 8b 8d 8d c0 f4 af ef fe', + 752: '5c 25 21 ea cd 79 66 f1 5e 05 65 44 be a0 d3 15', + 768: 'e0 67 a7 03 19 31 a2 46 a6 c3 87 5d 2f 67 8a cb', + 1008:'a6 4f 70 af 88 ae 56 b6 f8 75 81 c0 e2 3e 6b 08', + 1024:'f4 49 03 1d e3 12 81 4e c6 f3 19 29 1f 4a 05 16', + 1520:'bd ae 85 92 4b 3c b1 d0 a2 e3 3a 30 c6 d7 95 99', + 1536:'8a 0f ed db ac 86 5a 09 bc d1 27 fb 56 
2e d6 0a', + 2032:'b5 5a 0a 5b 51 a1 2a 8b e3 48 99 c3 e0 47 51 1a', + 2048:'d9 a0 9c ea 3c e7 5f e3 96 98 07 03 17 a7 13 39', + 3056:'55 22 25 ed 11 77 f4 45 84 ac 8c fa 6c 4e b5 fc', + 3072:'7e 82 cb ab fc 95 38 1b 08 09 98 44 21 29 c2 f8', + 4080:'1f 13 5e d1 4c e6 0a 91 36 9d 23 22 be f2 5e 3c', + 4096:'08 b6 be 45 12 4a 43 e2 eb 77 95 3f 84 dc 85 53' + } + ), + ( + '0102030405060708090a0b0c0d0e0f10', + { + 0: '9a c7 cc 9a 60 9d 1e f7 b2 93 28 99 cd e4 1b 97', + 16: '52 48 c4 95 90 14 12 6a 6e 8a 84 f1 1d 1a 9e 1c', + 240: '06 59 02 e4 b6 20 f6 cc 36 c8 58 9f 66 43 2f 2b', + 256: 'd3 9d 56 6b c6 bc e3 01 07 68 15 15 49 f3 87 3f', + 496: 'b6 d1 e6 c4 a5 e4 77 1c ad 79 53 8d f2 95 fb 11', + 512: 'c6 8c 1d 5c 55 9a 97 41 23 df 1d bc 52 a4 3b 89', + 752: 'c5 ec f8 8d e8 97 fd 57 fe d3 01 70 1b 82 a2 59', + 768: 'ec cb e1 3d e1 fc c9 1c 11 a0 b2 6c 0b c8 fa 4d', + 1008:'e7 a7 25 74 f8 78 2a e2 6a ab cf 9e bc d6 60 65', + 1024:'bd f0 32 4e 60 83 dc c6 d3 ce dd 3c a8 c5 3c 16', + 1520:'b4 01 10 c4 19 0b 56 22 a9 61 16 b0 01 7e d2 97', + 1536:'ff a0 b5 14 64 7e c0 4f 63 06 b8 92 ae 66 11 81', + 2032:'d0 3d 1b c0 3c d3 3d 70 df f9 fa 5d 71 96 3e bd', + 2048:'8a 44 12 64 11 ea a7 8b d5 1e 8d 87 a8 87 9b f5', + 3056:'fa be b7 60 28 ad e2 d0 e4 87 22 e4 6c 46 15 a3', + 3072:'c0 5d 88 ab d5 03 57 f9 35 a6 3c 59 ee 53 76 23', + 4080:'ff 38 26 5c 16 42 c1 ab e8 d3 c2 fe 5e 57 2b f8', + 4096:'a3 6a 4c 30 1a e8 ac 13 61 0c cb c1 22 56 ca cc' + } + ), + # Page 6 + ( + '0102030405060708090a0b0c0d0e0f101112131415161718', + { + 0: '05 95 e5 7f e5 f0 bb 3c 70 6e da c8 a4 b2 db 11', + 16: 'df de 31 34 4a 1a f7 69 c7 4f 07 0a ee 9e 23 26', + 240: 'b0 6b 9b 1e 19 5d 13 d8 f4 a7 99 5c 45 53 ac 05', + 256: '6b d2 37 8e c3 41 c9 a4 2f 37 ba 79 f8 8a 32 ff', + 496: 'e7 0b ce 1d f7 64 5a db 5d 2c 41 30 21 5c 35 22', + 512: '9a 57 30 c7 fc b4 c9 af 51 ff da 89 c7 f1 ad 22', + 752: '04 85 05 5f d4 f6 f0 d9 63 ef 5a b9 a5 47 69 82', + 768: '59 1f c6 6b cd a1 0e 45 2b 03 d4 55 1f 6b 62 ac', + 
1008:'27 53 cc 83 98 8a fa 3e 16 88 a1 d3 b4 2c 9a 02', + 1024:'93 61 0d 52 3d 1d 3f 00 62 b3 c2 a3 bb c7 c7 f0', + 1520:'96 c2 48 61 0a ad ed fe af 89 78 c0 3d e8 20 5a', + 1536:'0e 31 7b 3d 1c 73 b9 e9 a4 68 8f 29 6d 13 3a 19', + 2032:'bd f0 e6 c3 cc a5 b5 b9 d5 33 b6 9c 56 ad a1 20', + 2048:'88 a2 18 b6 e2 ec e1 e6 24 6d 44 c7 59 d1 9b 10', + 3056:'68 66 39 7e 95 c1 40 53 4f 94 26 34 21 00 6e 40', + 3072:'32 cb 0a 1e 95 42 c6 b3 b8 b3 98 ab c3 b0 f1 d5', + 4080:'29 a0 b8 ae d5 4a 13 23 24 c6 2e 42 3f 54 b4 c8', + 4096:'3c b0 f3 b5 02 0a 98 b8 2a f9 fe 15 44 84 a1 68' + } + ), + ( + '0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20', + { + 0: 'ea a6 bd 25 88 0b f9 3d 3f 5d 1e 4c a2 61 1d 91', + 16: 'cf a4 5c 9f 7e 71 4b 54 bd fa 80 02 7c b1 43 80', + 240: '11 4a e3 44 de d7 1b 35 f2 e6 0f eb ad 72 7f d8', + 256: '02 e1 e7 05 6b 0f 62 39 00 49 64 22 94 3e 97 b6', + 496: '91 cb 93 c7 87 96 4e 10 d9 52 7d 99 9c 6f 93 6b', + 512: '49 b1 8b 42 f8 e8 36 7c be b5 ef 10 4b a1 c7 cd', + 752: '87 08 4b 3b a7 00 ba de 95 56 10 67 27 45 b3 74', + 768: 'e7 a7 b9 e9 ec 54 0d 5f f4 3b db 12 79 2d 1b 35', + 1008:'c7 99 b5 96 73 8f 6b 01 8c 76 c7 4b 17 59 bd 90', + 1024:'7f ec 5b fd 9f 9b 89 ce 65 48 30 90 92 d7 e9 58', + 1520:'40 f2 50 b2 6d 1f 09 6a 4a fd 4c 34 0a 58 88 15', + 1536:'3e 34 13 5c 79 db 01 02 00 76 76 51 cf 26 30 73', + 2032:'f6 56 ab cc f8 8d d8 27 02 7b 2c e9 17 d4 64 ec', + 2048:'18 b6 25 03 bf bc 07 7f ba bb 98 f2 0d 98 ab 34', + 3056:'8a ed 95 ee 5b 0d cb fb ef 4e b2 1d 3a 3f 52 f9', + 3072:'62 5a 1a b0 0e e3 9a 53 27 34 6b dd b0 1a 9c 18', + 4080:'a1 3a 7c 79 c7 e1 19 b5 ab 02 96 ab 28 c3 00 b9', + 4096:'f3 e4 c0 a2 e0 2d 1d 01 f7 f0 a7 46 18 af 2b 48' + } + ), + # Page 7 + ( + '833222772a', + { + 0: '80 ad 97 bd c9 73 df 8a 2e 87 9e 92 a4 97 ef da', + 16: '20 f0 60 c2 f2 e5 12 65 01 d3 d4 fe a1 0d 5f c0', + 240: 'fa a1 48 e9 90 46 18 1f ec 6b 20 85 f3 b2 0e d9', + 256: 'f0 da f5 ba b3 d5 96 83 98 57 84 6f 73 fb fe 5a', + 496: '1c 7e 2f c4 63 
92 32 fe 29 75 84 b2 96 99 6b c8', + 512: '3d b9 b2 49 40 6c c8 ed ff ac 55 cc d3 22 ba 12', + 752: 'e4 f9 f7 e0 06 61 54 bb d1 25 b7 45 56 9b c8 97', + 768: '75 d5 ef 26 2b 44 c4 1a 9c f6 3a e1 45 68 e1 b9', + 1008:'6d a4 53 db f8 1e 82 33 4a 3d 88 66 cb 50 a1 e3', + 1024:'78 28 d0 74 11 9c ab 5c 22 b2 94 d7 a9 bf a0 bb', + 1520:'ad b8 9c ea 9a 15 fb e6 17 29 5b d0 4b 8c a0 5c', + 1536:'62 51 d8 7f d4 aa ae 9a 7e 4a d5 c2 17 d3 f3 00', + 2032:'e7 11 9b d6 dd 9b 22 af e8 f8 95 85 43 28 81 e2', + 2048:'78 5b 60 fd 7e c4 e9 fc b6 54 5f 35 0d 66 0f ab', + 3056:'af ec c0 37 fd b7 b0 83 8e b3 d7 0b cd 26 83 82', + 3072:'db c1 a7 b4 9d 57 35 8c c9 fa 6d 61 d7 3b 7c f0', + 4080:'63 49 d1 26 a3 7a fc ba 89 79 4f 98 04 91 4f dc', + 4096:'bf 42 c3 01 8c 2f 7c 66 bf de 52 49 75 76 81 15' + } + ), + ( + '1910833222772a', + { + 0: 'bc 92 22 db d3 27 4d 8f c6 6d 14 cc bd a6 69 0b', + 16: '7a e6 27 41 0c 9a 2b e6 93 df 5b b7 48 5a 63 e3', + 240: '3f 09 31 aa 03 de fb 30 0f 06 01 03 82 6f 2a 64', + 256: 'be aa 9e c8 d5 9b b6 81 29 f3 02 7c 96 36 11 81', + 496: '74 e0 4d b4 6d 28 64 8d 7d ee 8a 00 64 b0 6c fe', + 512: '9b 5e 81 c6 2f e0 23 c5 5b e4 2f 87 bb f9 32 b8', + 752: 'ce 17 8f c1 82 6e fe cb c1 82 f5 79 99 a4 61 40', + 768: '8b df 55 cd 55 06 1c 06 db a6 be 11 de 4a 57 8a', + 1008:'62 6f 5f 4d ce 65 25 01 f3 08 7d 39 c9 2c c3 49', + 1024:'42 da ac 6a 8f 9a b9 a7 fd 13 7c 60 37 82 56 82', + 1520:'cc 03 fd b7 91 92 a2 07 31 2f 53 f5 d4 dc 33 d9', + 1536:'f7 0f 14 12 2a 1c 98 a3 15 5d 28 b8 a0 a8 a4 1d', + 2032:'2a 3a 30 7a b2 70 8a 9c 00 fe 0b 42 f9 c2 d6 a1', + 2048:'86 26 17 62 7d 22 61 ea b0 b1 24 65 97 ca 0a e9', + 3056:'55 f8 77 ce 4f 2e 1d db bf 8e 13 e2 cd e0 fd c8', + 3072:'1b 15 56 cb 93 5f 17 33 37 70 5f bb 5d 50 1f c1', + 4080:'ec d0 e9 66 02 be 7f 8d 50 92 81 6c cc f2 c2 e9', + 4096:'02 78 81 fa b4 99 3a 1c 26 20 24 a9 4f ff 3f 61' + } + ), + # Page 8 + ( + '641910833222772a', + { + 0: 'bb f6 09 de 94 13 17 2d 07 66 0c b6 80 71 69 26', + 16: '46 10 1a 6d ab 43 11 
5d 6c 52 2b 4f e9 36 04 a9', + 240: 'cb e1 ff f2 1c 96 f3 ee f6 1e 8f e0 54 2c bd f0', + 256: '34 79 38 bf fa 40 09 c5 12 cf b4 03 4b 0d d1 a7', + 496: '78 67 a7 86 d0 0a 71 47 90 4d 76 dd f1 e5 20 e3', + 512: '8d 3e 9e 1c ae fc cc b3 fb f8 d1 8f 64 12 0b 32', + 752: '94 23 37 f8 fd 76 f0 fa e8 c5 2d 79 54 81 06 72', + 768: 'b8 54 8c 10 f5 16 67 f6 e6 0e 18 2f a1 9b 30 f7', + 1008:'02 11 c7 c6 19 0c 9e fd 12 37 c3 4c 8f 2e 06 c4', + 1024:'bd a6 4f 65 27 6d 2a ac b8 f9 02 12 20 3a 80 8e', + 1520:'bd 38 20 f7 32 ff b5 3e c1 93 e7 9d 33 e2 7c 73', + 1536:'d0 16 86 16 86 19 07 d4 82 e3 6c da c8 cf 57 49', + 2032:'97 b0 f0 f2 24 b2 d2 31 71 14 80 8f b0 3a f7 a0', + 2048:'e5 96 16 e4 69 78 79 39 a0 63 ce ea 9a f9 56 d1', + 3056:'c4 7e 0d c1 66 09 19 c1 11 01 20 8f 9e 69 aa 1f', + 3072:'5a e4 f1 28 96 b8 37 9a 2a ad 89 b5 b5 53 d6 b0', + 4080:'6b 6b 09 8d 0c 29 3b c2 99 3d 80 bf 05 18 b6 d9', + 4096:'81 70 cc 3c cd 92 a6 98 62 1b 93 9d d3 8f e7 b9' + } + ), + ( + '8b37641910833222772a', + { + 0: 'ab 65 c2 6e dd b2 87 60 0d b2 fd a1 0d 1e 60 5c', + 16: 'bb 75 90 10 c2 96 58 f2 c7 2d 93 a2 d1 6d 29 30', + 240: 'b9 01 e8 03 6e d1 c3 83 cd 3c 4c 4d d0 a6 ab 05', + 256: '3d 25 ce 49 22 92 4c 55 f0 64 94 33 53 d7 8a 6c', + 496: '12 c1 aa 44 bb f8 7e 75 e6 11 f6 9b 2c 38 f4 9b', + 512: '28 f2 b3 43 4b 65 c0 98 77 47 00 44 c6 ea 17 0d', + 752: 'bd 9e f8 22 de 52 88 19 61 34 cf 8a f7 83 93 04', + 768: '67 55 9c 23 f0 52 15 84 70 a2 96 f7 25 73 5a 32', + 1008:'8b ab 26 fb c2 c1 2b 0f 13 e2 ab 18 5e ab f2 41', + 1024:'31 18 5a 6d 69 6f 0c fa 9b 42 80 8b 38 e1 32 a2', + 1520:'56 4d 3d ae 18 3c 52 34 c8 af 1e 51 06 1c 44 b5', + 1536:'3c 07 78 a7 b5 f7 2d 3c 23 a3 13 5c 7d 67 b9 f4', + 2032:'f3 43 69 89 0f cf 16 fb 51 7d ca ae 44 63 b2 dd', + 2048:'02 f3 1c 81 e8 20 07 31 b8 99 b0 28 e7 91 bf a7', + 3056:'72 da 64 62 83 22 8c 14 30 08 53 70 17 95 61 6f', + 3072:'4e 0a 8c 6f 79 34 a7 88 e2 26 5e 81 d6 d0 c8 f4', + 4080:'43 8d d5 ea fe a0 11 1b 6f 36 b4 b9 38 da 2a 68', + 4096:'5f 6b fc 
73 81 58 74 d9 71 00 f0 86 97 93 57 d8' + } + ), + # Page 9 + ( + 'ebb46227c6cc8b37641910833222772a', + { + 0: '72 0c 94 b6 3e df 44 e1 31 d9 50 ca 21 1a 5a 30', + 16: 'c3 66 fd ea cf 9c a8 04 36 be 7c 35 84 24 d2 0b', + 240: 'b3 39 4a 40 aa bf 75 cb a4 22 82 ef 25 a0 05 9f', + 256: '48 47 d8 1d a4 94 2d bc 24 9d ef c4 8c 92 2b 9f', + 496: '08 12 8c 46 9f 27 53 42 ad da 20 2b 2b 58 da 95', + 512: '97 0d ac ef 40 ad 98 72 3b ac 5d 69 55 b8 17 61', + 752: '3c b8 99 93 b0 7b 0c ed 93 de 13 d2 a1 10 13 ac', + 768: 'ef 2d 67 6f 15 45 c2 c1 3d c6 80 a0 2f 4a db fe', + 1008:'b6 05 95 51 4f 24 bc 9f e5 22 a6 ca d7 39 36 44', + 1024:'b5 15 a8 c5 01 17 54 f5 90 03 05 8b db 81 51 4e', + 1520:'3c 70 04 7e 8c bc 03 8e 3b 98 20 db 60 1d a4 95', + 1536:'11 75 da 6e e7 56 de 46 a5 3e 2b 07 56 60 b7 70', + 2032:'00 a5 42 bb a0 21 11 cc 2c 65 b3 8e bd ba 58 7e', + 2048:'58 65 fd bb 5b 48 06 41 04 e8 30 b3 80 f2 ae de', + 3056:'34 b2 1a d2 ad 44 e9 99 db 2d 7f 08 63 f0 d9 b6', + 3072:'84 a9 21 8f c3 6e 8a 5f 2c cf be ae 53 a2 7d 25', + 4080:'a2 22 1a 11 b8 33 cc b4 98 a5 95 40 f0 54 5f 4a', + 4096:'5b be b4 78 7d 59 e5 37 3f db ea 6c 6f 75 c2 9b' + } + ), + ( + 'c109163908ebe51debb46227c6cc8b37641910833222772a', + { + 0: '54 b6 4e 6b 5a 20 b5 e2 ec 84 59 3d c7 98 9d a7', + 16: 'c1 35 ee e2 37 a8 54 65 ff 97 dc 03 92 4f 45 ce', + 240: 'cf cc 92 2f b4 a1 4a b4 5d 61 75 aa bb f2 d2 01', + 256: '83 7b 87 e2 a4 46 ad 0e f7 98 ac d0 2b 94 12 4f', + 496: '17 a6 db d6 64 92 6a 06 36 b3 f4 c3 7a 4f 46 94', + 512: '4a 5f 9f 26 ae ee d4 d4 a2 5f 63 2d 30 52 33 d9', + 752: '80 a3 d0 1e f0 0c 8e 9a 42 09 c1 7f 4e eb 35 8c', + 768: 'd1 5e 7d 5f fa aa bc 02 07 bf 20 0a 11 77 93 a2', + 1008:'34 96 82 bf 58 8e aa 52 d0 aa 15 60 34 6a ea fa', + 1024:'f5 85 4c db 76 c8 89 e3 ad 63 35 4e 5f 72 75 e3', + 1520:'53 2c 7c ec cb 39 df 32 36 31 84 05 a4 b1 27 9c', + 1536:'ba ef e6 d9 ce b6 51 84 22 60 e0 d1 e0 5e 3b 90', + 2032:'e8 2d 8c 6d b5 4e 3c 63 3f 58 1c 95 2b a0 42 07', + 2048:'4b 16 e5 0a bd 38 1b d7 
09 00 a9 cd 9a 62 cb 23', + 3056:'36 82 ee 33 bd 14 8b d9 f5 86 56 cd 8f 30 d9 fb', + 3072:'1e 5a 0b 84 75 04 5d 9b 20 b2 62 86 24 ed fd 9e', + 4080:'63 ed d6 84 fb 82 62 82 fe 52 8f 9c 0e 92 37 bc', + 4096:'e4 dd 2e 98 d6 96 0f ae 0b 43 54 54 56 74 33 91' + } + ), + # Page 10 + ( + '1ada31d5cf688221c109163908ebe51debb46227c6cc8b37641910833222772a', + { + 0: 'dd 5b cb 00 18 e9 22 d4 94 75 9d 7c 39 5d 02 d3', + 16: 'c8 44 6f 8f 77 ab f7 37 68 53 53 eb 89 a1 c9 eb', + 240: 'af 3e 30 f9 c0 95 04 59 38 15 15 75 c3 fb 90 98', + 256: 'f8 cb 62 74 db 99 b8 0b 1d 20 12 a9 8e d4 8f 0e', + 496: '25 c3 00 5a 1c b8 5d e0 76 25 98 39 ab 71 98 ab', + 512: '9d cb c1 83 e8 cb 99 4b 72 7b 75 be 31 80 76 9c', + 752: 'a1 d3 07 8d fa 91 69 50 3e d9 d4 49 1d ee 4e b2', + 768: '85 14 a5 49 58 58 09 6f 59 6e 4b cd 66 b1 06 65', + 1008:'5f 40 d5 9e c1 b0 3b 33 73 8e fa 60 b2 25 5d 31', + 1024:'34 77 c7 f7 64 a4 1b ac ef f9 0b f1 4f 92 b7 cc', + 1520:'ac 4e 95 36 8d 99 b9 eb 78 b8 da 8f 81 ff a7 95', + 1536:'8c 3c 13 f8 c2 38 8b b7 3f 38 57 6e 65 b7 c4 46', + 2032:'13 c4 b9 c1 df b6 65 79 ed dd 8a 28 0b 9f 73 16', + 2048:'dd d2 78 20 55 01 26 69 8e fa ad c6 4b 64 f6 6e', + 3056:'f0 8f 2e 66 d2 8e d1 43 f3 a2 37 cf 9d e7 35 59', + 3072:'9e a3 6c 52 55 31 b8 80 ba 12 43 34 f5 7b 0b 70', + 4080:'d5 a3 9e 3d fc c5 02 80 ba c4 a6 b5 aa 0d ca 7d', + 4096:'37 0b 1c 1f e6 55 91 6d 97 fd 0d 47 ca 1d 72 b8' + } + ) + ] + + def test_keystream(self): + for tv in self.rfc6229_data: + key = unhexlify(b((tv[0]))) + cipher = ARC4.new(key) + count = 0 + for offset in range(0,4096+1,16): + ct = cipher.encrypt(b('\x00')*16) + expected = tv[1].get(offset) + if expected: + expected = unhexlify(b(expected.replace(" ",''))) + self.assertEquals(ct, expected) + count += 1 + self.assertEqual(count, len(tv[1])) + +class Drop_Tests(unittest.TestCase): + key = b('\xAA')*16 + data = b('\x00')*5000 + + def setUp(self): + self.cipher = ARC4.new(self.key) + + def test_drop256_encrypt(self): + cipher_drop = 
ARC4.new(self.key, 256) + ct_drop = cipher_drop.encrypt(self.data[:16]) + ct = self.cipher.encrypt(self.data)[256:256+16] + self.assertEquals(ct_drop, ct) + + def test_drop256_decrypt(self): + cipher_drop = ARC4.new(self.key, 256) + pt_drop = cipher_drop.decrypt(self.data[:16]) + pt = self.cipher.decrypt(self.data)[256:256+16] + self.assertEquals(pt_drop, pt) + + +class KeyLength(unittest.TestCase): + + def runTest(self): + self.assertRaises(ValueError, ARC4.new, bchr(0) * 4) + self.assertRaises(ValueError, ARC4.new, bchr(0) * 257) + + +def get_tests(config={}): + from .common import make_stream_tests + tests = make_stream_tests(ARC4, "ARC4", test_data) + tests += list_test_cases(RFC6229_Tests) + tests += list_test_cases(Drop_Tests) + tests.append(KeyLength()) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_Blowfish.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_Blowfish.py new file mode 100644 index 0000000..4ce3a41 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_Blowfish.py @@ -0,0 +1,160 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/test_Blowfish.py: Self-test for the Blowfish cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.Blowfish""" + +import unittest + +from Crypto.Util.py3compat import bchr + +from Crypto.Cipher import Blowfish + +# This is a list of (plaintext, ciphertext, key) tuples. +test_data = [ + # Test vectors from http://www.schneier.com/code/vectors.txt + ('0000000000000000', '4ef997456198dd78', '0000000000000000'), + ('ffffffffffffffff', '51866fd5b85ecb8a', 'ffffffffffffffff'), + ('1000000000000001', '7d856f9a613063f2', '3000000000000000'), + ('1111111111111111', '2466dd878b963c9d', '1111111111111111'), + ('1111111111111111', '61f9c3802281b096', '0123456789abcdef'), + ('0123456789abcdef', '7d0cc630afda1ec7', '1111111111111111'), + ('0000000000000000', '4ef997456198dd78', '0000000000000000'), + ('0123456789abcdef', '0aceab0fc6a0a28d', 'fedcba9876543210'), + ('01a1d6d039776742', '59c68245eb05282b', '7ca110454a1a6e57'), + ('5cd54ca83def57da', 'b1b8cc0b250f09a0', '0131d9619dc1376e'), + ('0248d43806f67172', '1730e5778bea1da4', '07a1133e4a0b2686'), + ('51454b582ddf440a', 'a25e7856cf2651eb', '3849674c2602319e'), + ('42fd443059577fa2', '353882b109ce8f1a', '04b915ba43feb5b6'), + ('059b5e0851cf143a', '48f4d0884c379918', '0113b970fd34f2ce'), + ('0756d8e0774761d2', '432193b78951fc98', '0170f175468fb5e6'), + ('762514b829bf486a', '13f04154d69d1ae5', '43297fad38e373fe'), + ('3bdd119049372802', '2eedda93ffd39c79', '07a7137045da2a16'), + ('26955f6835af609a', 'd887e0393c2da6e3', '04689104c2fd3b2f'), 
+ ('164d5e404f275232', '5f99d04f5b163969', '37d06bb516cb7546'), + ('6b056e18759f5cca', '4a057a3b24d3977b', '1f08260d1ac2465e'), + ('004bd6ef09176062', '452031c1e4fada8e', '584023641aba6176'), + ('480d39006ee762f2', '7555ae39f59b87bd', '025816164629b007'), + ('437540c8698f3cfa', '53c55f9cb49fc019', '49793ebc79b3258f'), + ('072d43a077075292', '7a8e7bfa937e89a3', '4fb05e1515ab73a7'), + ('02fe55778117f12a', 'cf9c5d7a4986adb5', '49e95d6d4ca229bf'), + ('1d9d5c5018f728c2', 'd1abb290658bc778', '018310dc409b26d6'), + ('305532286d6f295a', '55cb3774d13ef201', '1c587f1c13924fef'), + ('0123456789abcdef', 'fa34ec4847b268b2', '0101010101010101'), + ('0123456789abcdef', 'a790795108ea3cae', '1f1f1f1f0e0e0e0e'), + ('0123456789abcdef', 'c39e072d9fac631d', 'e0fee0fef1fef1fe'), + ('ffffffffffffffff', '014933e0cdaff6e4', '0000000000000000'), + ('0000000000000000', 'f21e9a77b71c49bc', 'ffffffffffffffff'), + ('0000000000000000', '245946885754369a', '0123456789abcdef'), + ('ffffffffffffffff', '6b5c5a9c5d9e0a5a', 'fedcba9876543210'), + #('fedcba9876543210', 'f9ad597c49db005e', 'f0'), + #('fedcba9876543210', 'e91d21c1d961a6d6', 'f0e1'), + #('fedcba9876543210', 'e9c2b70a1bc65cf3', 'f0e1d2'), + ('fedcba9876543210', 'be1e639408640f05', 'f0e1d2c3'), + ('fedcba9876543210', 'b39e44481bdb1e6e', 'f0e1d2c3b4'), + ('fedcba9876543210', '9457aa83b1928c0d', 'f0e1d2c3b4a5'), + ('fedcba9876543210', '8bb77032f960629d', 'f0e1d2c3b4a596'), + ('fedcba9876543210', 'e87a244e2cc85e82', 'f0e1d2c3b4a59687'), + ('fedcba9876543210', '15750e7a4f4ec577', 'f0e1d2c3b4a5968778'), + ('fedcba9876543210', '122ba70b3ab64ae0', 'f0e1d2c3b4a596877869'), + ('fedcba9876543210', '3a833c9affc537f6', 'f0e1d2c3b4a5968778695a'), + ('fedcba9876543210', '9409da87a90f6bf2', 'f0e1d2c3b4a5968778695a4b'), + ('fedcba9876543210', '884f80625060b8b4', 'f0e1d2c3b4a5968778695a4b3c'), + ('fedcba9876543210', '1f85031c19e11968', 'f0e1d2c3b4a5968778695a4b3c2d'), + ('fedcba9876543210', '79d9373a714ca34f', 'f0e1d2c3b4a5968778695a4b3c2d1e'), + 
('fedcba9876543210', '93142887ee3be15c', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f'), + ('fedcba9876543210', '03429e838ce2d14b', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f00'), + ('fedcba9876543210', 'a4299e27469ff67b', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f0011'), + ('fedcba9876543210', 'afd5aed1c1bc96a8', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f001122'), + ('fedcba9876543210', '10851c0e3858da9f', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f00112233'), + ('fedcba9876543210', 'e6f51ed79b9db21f', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f0011223344'), + ('fedcba9876543210', '64a6e14afd36b46f', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f001122334455'), + ('fedcba9876543210', '80c7d7d45a5479ad', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f00112233445566'), + ('fedcba9876543210', '05044b62fa52d080', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f0011223344556677'), +] + + +class KeyLength(unittest.TestCase): + + def runTest(self): + self.assertRaises(ValueError, Blowfish.new, bchr(0) * 3, + Blowfish.MODE_ECB) + self.assertRaises(ValueError, Blowfish.new, bchr(0) * 57, + Blowfish.MODE_ECB) + + +class TestOutput(unittest.TestCase): + + def runTest(self): + # Encrypt/Decrypt data and test output parameter + + cipher = Blowfish.new(b'4'*16, Blowfish.MODE_ECB) + + pt = b'5' * 16 + ct = cipher.encrypt(pt) + + output = bytearray(16) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + output = memoryview(bytearray(16)) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(7) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +def 
get_tests(config={}): + from .common import make_block_tests + tests = make_block_tests(Blowfish, "Blowfish", test_data) + tests.append(KeyLength()) + tests += [TestOutput()] + return tests + + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CAST.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CAST.py new file mode 100644 index 0000000..ff13bd4 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CAST.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/CAST.py: Self-test for the CAST-128 (CAST5) cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.CAST""" + +import unittest + +from Crypto.Util.py3compat import bchr + +from Crypto.Cipher import CAST + +# This is a list of (plaintext, ciphertext, key) tuples. 
+test_data = [ + # Test vectors from RFC 2144, B.1 + ('0123456789abcdef', '238b4fe5847e44b2', + '0123456712345678234567893456789a', + '128-bit key'), + + ('0123456789abcdef', 'eb6a711a2c02271b', + '01234567123456782345', + '80-bit key'), + + ('0123456789abcdef', '7ac816d16e9b302e', + '0123456712', + '40-bit key'), +] + + +class KeyLength(unittest.TestCase): + + def runTest(self): + self.assertRaises(ValueError, CAST.new, bchr(0) * 4, CAST.MODE_ECB) + self.assertRaises(ValueError, CAST.new, bchr(0) * 17, CAST.MODE_ECB) + + +class TestOutput(unittest.TestCase): + + def runTest(self): + # Encrypt/Decrypt data and test output parameter + + cipher = CAST.new(b'4'*16, CAST.MODE_ECB) + + pt = b'5' * 16 + ct = cipher.encrypt(pt) + + output = bytearray(16) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + output = memoryview(bytearray(16)) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(7) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +def get_tests(config={}): + from .common import make_block_tests + + tests = make_block_tests(CAST, "CAST", test_data) + tests.append(KeyLength()) + tests.append(TestOutput()) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CBC.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CBC.py new file mode 100644 index 0000000..c82e8e5 --- /dev/null +++ 
b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CBC.py @@ -0,0 +1,555 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Util.py3compat import tobytes, is_string +from Crypto.Cipher import AES, DES3, DES +from Crypto.Hash import SHAKE128 + + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + +class BlockChainingTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + key_192 = get_tag_random("key_192", 24) + iv_128 = get_tag_random("iv_128", 16) + iv_64 = get_tag_random("iv_64", 8) + data_128 = get_tag_random("data_128", 16) + + def test_loopback_128(self): + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + pt = get_tag_random("plaintext", 16 * 100) + ct = cipher.encrypt(pt) + + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_loopback_64(self): + cipher = DES3.new(self.key_192, self.des3_mode, self.iv_64) + pt = get_tag_random("plaintext", 8 * 100) + ct = cipher.encrypt(pt) + + cipher = DES3.new(self.key_192, self.des3_mode, self.iv_64) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_iv(self): + # If not passed, the iv is created randomly + cipher = AES.new(self.key_128, self.aes_mode) + iv1 = cipher.iv + cipher = AES.new(self.key_128, self.aes_mode) + iv2 = cipher.iv + self.assertNotEqual(iv1, iv2) + self.assertEqual(len(iv1), 16) + + # IV can be passed in uppercase or lowercase + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + ct = cipher.encrypt(self.data_128) + + cipher = AES.new(self.key_128, self.aes_mode, iv=self.iv_128) + self.assertEquals(ct, cipher.encrypt(self.data_128)) + + cipher = AES.new(self.key_128, self.aes_mode, IV=self.iv_128) + self.assertEquals(ct, cipher.encrypt(self.data_128)) + + def test_iv_must_be_bytes(self): + self.assertRaises(TypeError, 
AES.new, self.key_128, self.aes_mode, + iv = u'test1234567890-*') + + def test_only_one_iv(self): + # Only one IV/iv keyword allowed + self.assertRaises(TypeError, AES.new, self.key_128, self.aes_mode, + iv=self.iv_128, IV=self.iv_128) + + def test_iv_with_matching_length(self): + self.assertRaises(ValueError, AES.new, self.key_128, self.aes_mode, + b"") + self.assertRaises(ValueError, AES.new, self.key_128, self.aes_mode, + self.iv_128[:15]) + self.assertRaises(ValueError, AES.new, self.key_128, self.aes_mode, + self.iv_128 + b"0") + + def test_block_size_128(self): + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + self.assertEqual(cipher.block_size, AES.block_size) + + def test_block_size_64(self): + cipher = DES3.new(self.key_192, self.des3_mode, self.iv_64) + self.assertEqual(cipher.block_size, DES3.block_size) + + def test_unaligned_data_128(self): + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + for wrong_length in range(1,16): + self.assertRaises(ValueError, cipher.encrypt, b"5" * wrong_length) + + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + for wrong_length in range(1,16): + self.assertRaises(ValueError, cipher.decrypt, b"5" * wrong_length) + + def test_unaligned_data_64(self): + cipher = DES3.new(self.key_192, self.des3_mode, self.iv_64) + for wrong_length in range(1,8): + self.assertRaises(ValueError, cipher.encrypt, b"5" * wrong_length) + + cipher = DES3.new(self.key_192, self.des3_mode, self.iv_64) + for wrong_length in range(1,8): + self.assertRaises(ValueError, cipher.decrypt, b"5" * wrong_length) + + def test_IV_iv_attributes(self): + data = get_tag_random("data", 16 * 100) + for func in "encrypt", "decrypt": + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + getattr(cipher, func)(data) + self.assertEqual(cipher.iv, self.iv_128) + self.assertEqual(cipher.IV, self.iv_128) + + def test_unknown_parameters(self): + self.assertRaises(TypeError, AES.new, self.key_128, self.aes_mode, + self.iv_128, 7) 
+ self.assertRaises(TypeError, AES.new, self.key_128, self.aes_mode, + iv=self.iv_128, unknown=7) + # But some are only known by the base cipher (e.g. use_aesni consumed by the AES module) + AES.new(self.key_128, self.aes_mode, iv=self.iv_128, use_aesni=False) + + def test_null_encryption_decryption(self): + for func in "encrypt", "decrypt": + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + result = getattr(cipher, func)(b"") + self.assertEqual(result, b"") + + def test_either_encrypt_or_decrypt(self): + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + cipher.encrypt(b"") + self.assertRaises(TypeError, cipher.decrypt, b"") + + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + cipher.decrypt(b"") + self.assertRaises(TypeError, cipher.encrypt, b"") + + def test_data_must_be_bytes(self): + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') + + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + self.assertRaises(TypeError, cipher.decrypt, u'test1234567890-*') + + def test_bytearray(self): + data = b"1" * 16 + data_ba = bytearray(data) + + # Encrypt + key_ba = bytearray(self.key_128) + iv_ba = bytearray(self.iv_128) + + cipher1 = AES.new(self.key_128, self.aes_mode, self.iv_128) + ref1 = cipher1.encrypt(data) + + cipher2 = AES.new(key_ba, self.aes_mode, iv_ba) + key_ba[:3] = b'\xFF\xFF\xFF' + iv_ba[:3] = b'\xFF\xFF\xFF' + ref2 = cipher2.encrypt(data_ba) + + self.assertEqual(ref1, ref2) + self.assertEqual(cipher1.iv, cipher2.iv) + + # Decrypt + key_ba = bytearray(self.key_128) + iv_ba = bytearray(self.iv_128) + + cipher3 = AES.new(self.key_128, self.aes_mode, self.iv_128) + ref3 = cipher3.decrypt(data) + + cipher4 = AES.new(key_ba, self.aes_mode, iv_ba) + key_ba[:3] = b'\xFF\xFF\xFF' + iv_ba[:3] = b'\xFF\xFF\xFF' + ref4 = cipher4.decrypt(data_ba) + + self.assertEqual(ref3, ref4) + + def test_memoryview(self): + data = b"1" * 16 + data_mv = 
memoryview(bytearray(data)) + + # Encrypt + key_mv = memoryview(bytearray(self.key_128)) + iv_mv = memoryview(bytearray(self.iv_128)) + + cipher1 = AES.new(self.key_128, self.aes_mode, self.iv_128) + ref1 = cipher1.encrypt(data) + + cipher2 = AES.new(key_mv, self.aes_mode, iv_mv) + key_mv[:3] = b'\xFF\xFF\xFF' + iv_mv[:3] = b'\xFF\xFF\xFF' + ref2 = cipher2.encrypt(data_mv) + + self.assertEqual(ref1, ref2) + self.assertEqual(cipher1.iv, cipher2.iv) + + # Decrypt + key_mv = memoryview(bytearray(self.key_128)) + iv_mv = memoryview(bytearray(self.iv_128)) + + cipher3 = AES.new(self.key_128, self.aes_mode, self.iv_128) + ref3 = cipher3.decrypt(data) + + cipher4 = AES.new(key_mv, self.aes_mode, iv_mv) + key_mv[:3] = b'\xFF\xFF\xFF' + iv_mv[:3] = b'\xFF\xFF\xFF' + ref4 = cipher4.decrypt(data_mv) + + self.assertEqual(ref3, ref4) + + def test_output_param(self): + + pt = b'5' * 16 + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + ct = cipher.encrypt(pt) + + output = bytearray(16) + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + + def test_output_param_same_buffer(self): + + pt = b'5' * 16 + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + ct = cipher.encrypt(pt) + + pt_ba = bytearray(pt) + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + res = cipher.encrypt(pt_ba, output=pt_ba) + self.assertEqual(ct, pt_ba) + self.assertEqual(res, None) + + ct_ba = bytearray(ct) + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + res = cipher.decrypt(ct_ba, output=ct_ba) + self.assertEqual(pt, ct_ba) + self.assertEqual(res, None) + + + def test_output_param_memoryview(self): + + pt = b'5' * 16 + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + ct = 
cipher.encrypt(pt) + + output = memoryview(bytearray(16)) + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + def test_output_param_neg(self): + + pt = b'5' * 16 + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + ct = cipher.encrypt(pt) + + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) + + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(15) + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +class CbcTests(BlockChainingTests): + aes_mode = AES.MODE_CBC + des3_mode = DES3.MODE_CBC + + +class NistBlockChainingVectors(unittest.TestCase): + + def _do_kat_aes_test(self, file_name): + + test_vectors = load_test_vectors(("Cipher", "AES"), + file_name, + "AES CBC KAT", + { "count" : lambda x: int(x) } ) + if test_vectors is None: + return + + direction = None + for tv in test_vectors: + + # The test vector file contains some directive lines + if is_string(tv): + direction = tv + continue + + self.description = tv.desc + + cipher = AES.new(tv.key, self.aes_mode, tv.iv) + if direction == "[ENCRYPT]": + self.assertEqual(cipher.encrypt(tv.plaintext), tv.ciphertext) + elif direction == "[DECRYPT]": + self.assertEqual(cipher.decrypt(tv.ciphertext), tv.plaintext) + else: + assert False + + # See Section 6.4.2 in AESAVS + def _do_mct_aes_test(self, file_name): + + test_vectors = load_test_vectors(("Cipher", "AES"), + file_name, + "AES CBC Montecarlo", + { "count" : 
lambda x: int(x) } ) + if test_vectors is None: + return + + direction = None + for tv in test_vectors: + + # The test vector file contains some directive lines + if is_string(tv): + direction = tv + continue + + self.description = tv.desc + cipher = AES.new(tv.key, self.aes_mode, tv.iv) + + if direction == '[ENCRYPT]': + cts = [ tv.iv ] + for count in range(1000): + cts.append(cipher.encrypt(tv.plaintext)) + tv.plaintext = cts[-2] + self.assertEqual(cts[-1], tv.ciphertext) + elif direction == '[DECRYPT]': + pts = [ tv.iv] + for count in range(1000): + pts.append(cipher.decrypt(tv.ciphertext)) + tv.ciphertext = pts[-2] + self.assertEqual(pts[-1], tv.plaintext) + else: + assert False + + def _do_tdes_test(self, file_name): + + test_vectors = load_test_vectors(("Cipher", "TDES"), + file_name, + "TDES CBC KAT", + { "count" : lambda x: int(x) } ) + if test_vectors is None: + return + + direction = None + for tv in test_vectors: + + # The test vector file contains some directive lines + if is_string(tv): + direction = tv + continue + + self.description = tv.desc + if hasattr(tv, "keys"): + cipher = DES.new(tv.keys, self.des_mode, tv.iv) + else: + if tv.key1 != tv.key3: + key = tv.key1 + tv.key2 + tv.key3 # Option 3 + else: + key = tv.key1 + tv.key2 # Option 2 + cipher = DES3.new(key, self.des3_mode, tv.iv) + + if direction == "[ENCRYPT]": + self.assertEqual(cipher.encrypt(tv.plaintext), tv.ciphertext) + elif direction == "[DECRYPT]": + self.assertEqual(cipher.decrypt(tv.ciphertext), tv.plaintext) + else: + assert False + + +class NistCbcVectors(NistBlockChainingVectors): + aes_mode = AES.MODE_CBC + des_mode = DES.MODE_CBC + des3_mode = DES3.MODE_CBC + + +# Create one test method per file +nist_aes_kat_mmt_files = ( + # KAT + "CBCGFSbox128.rsp", + "CBCGFSbox192.rsp", + "CBCGFSbox256.rsp", + "CBCKeySbox128.rsp", + "CBCKeySbox192.rsp", + "CBCKeySbox256.rsp", + "CBCVarKey128.rsp", + "CBCVarKey192.rsp", + "CBCVarKey256.rsp", + "CBCVarTxt128.rsp", + "CBCVarTxt192.rsp", + 
"CBCVarTxt256.rsp", + # MMT + "CBCMMT128.rsp", + "CBCMMT192.rsp", + "CBCMMT256.rsp", + ) +nist_aes_mct_files = ( + "CBCMCT128.rsp", + "CBCMCT192.rsp", + "CBCMCT256.rsp", + ) + +for file_name in nist_aes_kat_mmt_files: + def new_func(self, file_name=file_name): + self._do_kat_aes_test(file_name) + setattr(NistCbcVectors, "test_AES_" + file_name, new_func) + +for file_name in nist_aes_mct_files: + def new_func(self, file_name=file_name): + self._do_mct_aes_test(file_name) + setattr(NistCbcVectors, "test_AES_" + file_name, new_func) +del file_name, new_func + +nist_tdes_files = ( + "TCBCMMT2.rsp", # 2TDES + "TCBCMMT3.rsp", # 3TDES + "TCBCinvperm.rsp", # Single DES + "TCBCpermop.rsp", + "TCBCsubtab.rsp", + "TCBCvarkey.rsp", + "TCBCvartext.rsp", + ) + +for file_name in nist_tdes_files: + def new_func(self, file_name=file_name): + self._do_tdes_test(file_name) + setattr(NistCbcVectors, "test_TDES_" + file_name, new_func) + +# END OF NIST CBC TEST VECTORS + + +class SP800TestVectors(unittest.TestCase): + """Class exercising the CBC test vectors found in Section F.2 + of NIST SP 800-3A""" + + def test_aes_128(self): + key = '2b7e151628aed2a6abf7158809cf4f3c' + iv = '000102030405060708090a0b0c0d0e0f' + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = '7649abac8119b246cee98e9b12e9197d' +\ + '5086cb9b507219ee95db113a917678b2' +\ + '73bed6b8e3c1743b7116e69e22229516' +\ + '3ff1caa1681fac09120eca307586e1a7' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CBC, iv) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CBC, iv) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + def test_aes_192(self): + key = '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b' + iv = '000102030405060708090a0b0c0d0e0f' 
+ plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = '4f021db243bc633d7178183a9fa071e8' +\ + 'b4d9ada9ad7dedf4e5e738763f69145a' +\ + '571b242012fb7ae07fa9baac3df102e0' +\ + '08b0e27988598881d920a9e64f5615cd' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CBC, iv) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CBC, iv) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + def test_aes_256(self): + key = '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4' + iv = '000102030405060708090a0b0c0d0e0f' + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = 'f58c4c04d6e5f1ba779eabfb5f7bfbd6' +\ + '9cfc4e967edb808d679f777bc6702c7d' +\ + '39f23369a9d9bacfa530e26304231461' +\ + 'b2eb05e2c39be9fcda6c19078c6a9d1b' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CBC, iv) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CBC, iv) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(CbcTests) + if config.get('slow_tests'): + tests += list_test_cases(NistCbcVectors) + tests += list_test_cases(SP800TestVectors) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CCM.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CCM.py new file mode 100644 index 0000000..5c84d44 --- /dev/null +++ 
b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CCM.py @@ -0,0 +1,930 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors_wycheproof +from Crypto.Util.py3compat import tobytes, bchr +from Crypto.Cipher import AES +from Crypto.Hash import SHAKE128 + +from Crypto.Util.strxor import strxor + + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + + +class CcmTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + nonce_96 = get_tag_random("nonce_128", 12) + data_128 = get_tag_random("data_128", 16) + + def test_loopback_128(self): + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + pt = get_tag_random("plaintext", 16 * 100) + ct = cipher.encrypt(pt) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_nonce(self): + # If not passed, the nonce is created randomly + cipher = AES.new(self.key_128, AES.MODE_CCM) + nonce1 = cipher.nonce + cipher = AES.new(self.key_128, AES.MODE_CCM) + nonce2 = cipher.nonce + self.assertEqual(len(nonce1), 11) + self.assertNotEqual(nonce1, nonce2) + + cipher = AES.new(self.key_128, AES.MODE_CCM, self.nonce_96) + ct = cipher.encrypt(self.data_128) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertEquals(ct, cipher.encrypt(self.data_128)) + + def test_nonce_must_be_bytes(self): + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CCM, + nonce=u'test12345678') + + def test_nonce_length(self): + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CCM, + nonce=b"") + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CCM, + nonce=bchr(1) * 6) + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CCM, + nonce=bchr(1) * 14) + for x in range(7, 13 + 1): + AES.new(self.key_128, AES.MODE_CCM, nonce=bchr(1) * x) + + 
def test_block_size(self): + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertEqual(cipher.block_size, AES.block_size) + + def test_nonce_attribute(self): + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertEqual(cipher.nonce, self.nonce_96) + + # By default, a 11 bytes long nonce is randomly generated + nonce1 = AES.new(self.key_128, AES.MODE_CCM).nonce + nonce2 = AES.new(self.key_128, AES.MODE_CCM).nonce + self.assertEqual(len(nonce1), 11) + self.assertNotEqual(nonce1, nonce2) + + def test_unknown_parameters(self): + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CCM, + self.nonce_96, 7) + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, unknown=7) + + # But some are only known by the base cipher + # (e.g. use_aesni consumed by the AES module) + AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + use_aesni=False) + + def test_null_encryption_decryption(self): + for func in "encrypt", "decrypt": + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + result = getattr(cipher, func)(b"") + self.assertEqual(result, b"") + + def test_either_encrypt_or_decrypt(self): + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.encrypt(b"") + self.assertRaises(TypeError, cipher.decrypt, b"") + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.decrypt(b"") + self.assertRaises(TypeError, cipher.encrypt, b"") + + def test_data_must_be_bytes(self): + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt, u'test1234567890-*') + + def test_mac_len(self): + # Invalid MAC length + for mac_len in range(3, 17 + 1, 2): + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, 
mac_len=mac_len) + + # Valid MAC length + for mac_len in range(4, 16 + 1, 2): + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + mac_len=mac_len) + _, mac = cipher.encrypt_and_digest(self.data_128) + self.assertEqual(len(mac), mac_len) + + # Default MAC length + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + _, mac = cipher.encrypt_and_digest(self.data_128) + self.assertEqual(len(mac), 16) + + def test_invalid_mac(self): + from Crypto.Util.strxor import strxor_c + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + ct, mac = cipher.encrypt_and_digest(self.data_128) + + invalid_mac = strxor_c(mac, 0x01) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, + invalid_mac) + + def test_hex_mac(self): + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + mac_hex = cipher.hexdigest() + self.assertEqual(cipher.digest(), unhexlify(mac_hex)) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.hexverify(mac_hex) + + def test_longer_assoc_data_than_declared(self): + # More than zero + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + assoc_len=0) + self.assertRaises(ValueError, cipher.update, b"1") + + # Too large + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + assoc_len=15) + self.assertRaises(ValueError, cipher.update, self.data_128) + + def test_shorter_assoc_data_than_expected(self): + # With plaintext + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + assoc_len=17) + cipher.update(self.data_128) + self.assertRaises(ValueError, cipher.encrypt, self.data_128) + + # With empty plaintext + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + assoc_len=17) + cipher.update(self.data_128) + self.assertRaises(ValueError, cipher.digest) + + # With ciphertext + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + 
assoc_len=17) + cipher.update(self.data_128) + self.assertRaises(ValueError, cipher.decrypt, self.data_128) + + # With empty ciphertext + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.update(self.data_128) + mac = cipher.digest() + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + assoc_len=17) + cipher.update(self.data_128) + self.assertRaises(ValueError, cipher.verify, mac) + + def test_shorter_and_longer_plaintext_than_declared(self): + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + msg_len=17) + cipher.encrypt(self.data_128) + self.assertRaises(ValueError, cipher.digest) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + msg_len=15) + self.assertRaises(ValueError, cipher.encrypt, self.data_128) + + def test_shorter_ciphertext_than_declared(self): + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + ct, mac = cipher.encrypt_and_digest(self.data_128) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + msg_len=17) + cipher.decrypt(ct) + self.assertRaises(ValueError, cipher.verify, mac) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + msg_len=15) + self.assertRaises(ValueError, cipher.decrypt, ct) + + def test_message_chunks(self): + # Validate that both associated data and plaintext/ciphertext + # can be broken up in chunks of arbitrary length + + auth_data = get_tag_random("authenticated data", 127) + plaintext = get_tag_random("plaintext", 127) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.update(auth_data) + ciphertext, ref_mac = cipher.encrypt_and_digest(plaintext) + + def break_up(data, chunk_length): + return [data[i:i+chunk_length] for i in range(0, len(data), + chunk_length)] + + # Encryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + msg_len=127, assoc_len=127) + + for chunk in 
break_up(auth_data, chunk_length): + cipher.update(chunk) + pt2 = b"" + for chunk in break_up(ciphertext, chunk_length): + pt2 += cipher.decrypt(chunk) + self.assertEqual(plaintext, pt2) + cipher.verify(ref_mac) + + # Decryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + msg_len=127, assoc_len=127) + + for chunk in break_up(auth_data, chunk_length): + cipher.update(chunk) + ct2 = b"" + for chunk in break_up(plaintext, chunk_length): + ct2 += cipher.encrypt(chunk) + self.assertEqual(ciphertext, ct2) + self.assertEquals(cipher.digest(), ref_mac) + + def test_bytearray(self): + + # Encrypt + key_ba = bytearray(self.key_128) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data_128) + data_ba = bytearray(self.data_128) + + cipher1 = AES.new(self.key_128, + AES.MODE_CCM, + nonce=self.nonce_96) + cipher1.update(self.data_128) + ct = cipher1.encrypt(self.data_128) + tag = cipher1.digest() + + cipher2 = AES.new(key_ba, + AES.MODE_CCM, + nonce=nonce_ba) + key_ba[:3] = b"\xFF\xFF\xFF" + nonce_ba[:3] = b"\xFF\xFF\xFF" + cipher2.update(header_ba) + header_ba[:3] = b"\xFF\xFF\xFF" + ct_test = cipher2.encrypt(data_ba) + data_ba[:3] = b"\xFF\xFF\xFF" + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_ba = bytearray(self.key_128) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data_128) + del data_ba + + cipher4 = AES.new(key_ba, + AES.MODE_CCM, + nonce=nonce_ba) + key_ba[:3] = b"\xFF\xFF\xFF" + nonce_ba[:3] = b"\xFF\xFF\xFF" + cipher4.update(header_ba) + header_ba[:3] = b"\xFF\xFF\xFF" + pt_test = cipher4.decrypt_and_verify(bytearray(ct_test), bytearray(tag_test)) + + self.assertEqual(self.data_128, pt_test) + + def test_memoryview(self): + + # Encrypt + key_mv = memoryview(bytearray(self.key_128)) + nonce_mv = 
memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data_128)) + data_mv = memoryview(bytearray(self.data_128)) + + cipher1 = AES.new(self.key_128, + AES.MODE_CCM, + nonce=self.nonce_96) + cipher1.update(self.data_128) + ct = cipher1.encrypt(self.data_128) + tag = cipher1.digest() + + cipher2 = AES.new(key_mv, + AES.MODE_CCM, + nonce=nonce_mv) + key_mv[:3] = b"\xFF\xFF\xFF" + nonce_mv[:3] = b"\xFF\xFF\xFF" + cipher2.update(header_mv) + header_mv[:3] = b"\xFF\xFF\xFF" + ct_test = cipher2.encrypt(data_mv) + data_mv[:3] = b"\xFF\xFF\xFF" + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_mv = memoryview(bytearray(self.key_128)) + nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data_128)) + del data_mv + + cipher4 = AES.new(key_mv, + AES.MODE_CCM, + nonce=nonce_mv) + key_mv[:3] = b"\xFF\xFF\xFF" + nonce_mv[:3] = b"\xFF\xFF\xFF" + cipher4.update(header_mv) + header_mv[:3] = b"\xFF\xFF\xFF" + pt_test = cipher4.decrypt_and_verify(memoryview(ct_test), memoryview(tag_test)) + + self.assertEqual(self.data_128, pt_test) + + def test_output_param(self): + + pt = b'5' * 16 + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + ct = cipher.encrypt(pt) + tag = cipher.digest() + + output = bytearray(16) + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + res, tag_out = cipher.encrypt_and_digest(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + self.assertEqual(tag, tag_out) + + cipher = 
AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + res = cipher.decrypt_and_verify(ct, tag, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + def test_output_param_memoryview(self): + + pt = b'5' * 16 + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + ct = cipher.encrypt(pt) + + output = memoryview(bytearray(16)) + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + def test_output_param_neg(self): + + pt = b'5' * 16 + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + ct = cipher.encrypt(pt) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(15) + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +class CcmFSMTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + nonce_96 = get_tag_random("nonce_128", 12) + data_128 = get_tag_random("data_128", 16) + + def test_valid_init_encrypt_decrypt_digest_verify(self): + # No authenticated data, fixed plaintext + for assoc_len in (None, 0): + for msg_len in (None, len(self.data_128)): + # Verify path INIT->ENCRYPT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, + assoc_len=assoc_len, + msg_len=msg_len) + ct = cipher.encrypt(self.data_128) + mac = cipher.digest() + + # Verify path INIT->DECRYPT->VERIFY + 
cipher = AES.new(self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, + assoc_len=assoc_len, + msg_len=msg_len) + cipher.decrypt(ct) + cipher.verify(mac) + + def test_valid_init_update_digest_verify(self): + # No plaintext, fixed authenticated data + for assoc_len in (None, len(self.data_128)): + for msg_len in (None, 0): + # Verify path INIT->UPDATE->DIGEST + cipher = AES.new(self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, + assoc_len=assoc_len, + msg_len=msg_len) + cipher.update(self.data_128) + mac = cipher.digest() + + # Verify path INIT->UPDATE->VERIFY + cipher = AES.new(self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, + assoc_len=assoc_len, + msg_len=msg_len) + cipher.update(self.data_128) + cipher.verify(mac) + + def test_valid_full_path(self): + # Fixed authenticated data, fixed plaintext + for assoc_len in (None, len(self.data_128)): + for msg_len in (None, len(self.data_128)): + # Verify path INIT->UPDATE->ENCRYPT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, + assoc_len=assoc_len, + msg_len=msg_len) + cipher.update(self.data_128) + ct = cipher.encrypt(self.data_128) + mac = cipher.digest() + + # Verify path INIT->UPDATE->DECRYPT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, + assoc_len=assoc_len, + msg_len=msg_len) + cipher.update(self.data_128) + cipher.decrypt(ct) + cipher.verify(mac) + + def test_valid_init_digest(self): + # Verify path INIT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.digest() + + def test_valid_init_verify(self): + # Verify path INIT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + mac = cipher.digest() + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.verify(mac) + + def test_valid_multiple_encrypt_or_decrypt(self): + # Only possible if msg_len is declared in advance + for method_name in "encrypt", "decrypt": + for auth_data in (None, b"333", self.data_128, + 
self.data_128 + b"3"): + if auth_data is None: + assoc_len = None + else: + assoc_len = len(auth_data) + cipher = AES.new(self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, + msg_len=64, + assoc_len=assoc_len) + if auth_data is not None: + cipher.update(auth_data) + method = getattr(cipher, method_name) + method(self.data_128) + method(self.data_128) + method(self.data_128) + method(self.data_128) + + def test_valid_multiple_digest_or_verify(self): + # Multiple calls to digest + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.update(self.data_128) + first_mac = cipher.digest() + for x in range(4): + self.assertEqual(first_mac, cipher.digest()) + + # Multiple calls to verify + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.update(self.data_128) + for x in range(5): + cipher.verify(first_mac) + + def test_valid_encrypt_and_digest_decrypt_and_verify(self): + # encrypt_and_digest + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.update(self.data_128) + ct, mac = cipher.encrypt_and_digest(self.data_128) + + # decrypt_and_verify + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.update(self.data_128) + pt = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(self.data_128, pt) + + def test_invalid_multiple_encrypt_decrypt_without_msg_len(self): + # Once per method, with or without assoc. data + for method_name in "encrypt", "decrypt": + for assoc_data_present in (True, False): + cipher = AES.new(self.key_128, AES.MODE_CCM, + nonce=self.nonce_96) + if assoc_data_present: + cipher.update(self.data_128) + method = getattr(cipher, method_name) + method(self.data_128) + self.assertRaises(TypeError, method, self.data_128) + + def test_invalid_mixing_encrypt_decrypt(self): + # Once per method, with or without assoc. 
data + for method1_name, method2_name in (("encrypt", "decrypt"), + ("decrypt", "encrypt")): + for assoc_data_present in (True, False): + cipher = AES.new(self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, + msg_len=32) + if assoc_data_present: + cipher.update(self.data_128) + getattr(cipher, method1_name)(self.data_128) + self.assertRaises(TypeError, getattr(cipher, method2_name), + self.data_128) + + def test_invalid_encrypt_or_update_after_digest(self): + for method_name in "encrypt", "update": + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.encrypt(self.data_128) + cipher.digest() + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.encrypt_and_digest(self.data_128) + + def test_invalid_decrypt_or_update_after_verify(self): + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + ct = cipher.encrypt(self.data_128) + mac = cipher.digest() + + for method_name in "decrypt", "update": + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.verify(mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.decrypt_and_verify(ct, mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + +class TestVectors(unittest.TestCase): + """Class exercising the CCM test vectors found in Appendix C + of NIST SP 800-38C and in RFC 3610""" + + # List of test vectors, each made up of: + # - authenticated data + # - plaintext + # - ciphertext + # - MAC + # - AES key + # - nonce + test_vectors_hex = [ + # NIST SP 800 38C + ( '0001020304050607', + '20212223', + '7162015b', + '4dac255d', + '404142434445464748494a4b4c4d4e4f', + '10111213141516'), + ( '000102030405060708090a0b0c0d0e0f', + '202122232425262728292a2b2c2d2e2f', + 'd2a1f0e051ea5f62081a7792073d593d', + 
'1fc64fbfaccd', + '404142434445464748494a4b4c4d4e4f', + '1011121314151617'), + ( '000102030405060708090a0b0c0d0e0f10111213', + '202122232425262728292a2b2c2d2e2f3031323334353637', + 'e3b201a9f5b71a7a9b1ceaeccd97e70b6176aad9a4428aa5', + '484392fbc1b09951', + '404142434445464748494a4b4c4d4e4f', + '101112131415161718191a1b'), + ( (''.join(["%02X" % (x*16+y) for x in range(0,16) for y in range(0,16)]))*256, + '202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f', + '69915dad1e84c6376a68c2967e4dab615ae0fd1faec44cc484828529463ccf72', + 'b4ac6bec93e8598e7f0dadbcea5b', + '404142434445464748494a4b4c4d4e4f', + '101112131415161718191a1b1c'), + # RFC3610 + ( '0001020304050607', + '08090a0b0c0d0e0f101112131415161718191a1b1c1d1e', + '588c979a61c663d2f066d0c2c0f989806d5f6b61dac384', + '17e8d12cfdf926e0', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '00000003020100a0a1a2a3a4a5'), + ( + '0001020304050607', + '08090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', + '72c91a36e135f8cf291ca894085c87e3cc15c439c9e43a3b', + 'a091d56e10400916', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '00000004030201a0a1a2a3a4a5'), + ( '0001020304050607', + '08090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20', + '51b1e5f44a197d1da46b0f8e2d282ae871e838bb64da859657', + '4adaa76fbd9fb0c5', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '00000005040302A0A1A2A3A4A5'), + ( '000102030405060708090a0b', + '0c0d0e0f101112131415161718191a1b1c1d1e', + 'a28c6865939a9a79faaa5c4c2a9d4a91cdac8c', + '96c861b9c9e61ef1', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '00000006050403a0a1a2a3a4a5'), + ( '000102030405060708090a0b', + '0c0d0e0f101112131415161718191a1b1c1d1e1f', + 'dcf1fb7b5d9e23fb9d4e131253658ad86ebdca3e', + '51e83f077d9c2d93', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '00000007060504a0a1a2a3a4a5'), + ( '000102030405060708090a0b', + '0c0d0e0f101112131415161718191a1b1c1d1e1f20', + '6fc1b011f006568b5171a42d953d469b2570a4bd87', + '405a0443ac91cb94', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '00000008070605a0a1a2a3a4a5'), + ( 
'0001020304050607', + '08090a0b0c0d0e0f101112131415161718191a1b1c1d1e', + '0135d1b2c95f41d5d1d4fec185d166b8094e999dfed96c', + '048c56602c97acbb7490', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '00000009080706a0a1a2a3a4a5'), + ( '0001020304050607', + '08090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', + '7b75399ac0831dd2f0bbd75879a2fd8f6cae6b6cd9b7db24', + 'c17b4433f434963f34b4', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '0000000a090807a0a1a2a3a4a5'), + ( '0001020304050607', + '08090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20', + '82531a60cc24945a4b8279181ab5c84df21ce7f9b73f42e197', + 'ea9c07e56b5eb17e5f4e', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '0000000b0a0908a0a1a2a3a4a5'), + ( '000102030405060708090a0b', + '0c0d0e0f101112131415161718191a1b1c1d1e', + '07342594157785152b074098330abb141b947b', + '566aa9406b4d999988dd', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '0000000c0b0a09a0a1a2a3a4a5'), + ( '000102030405060708090a0b', + '0c0d0e0f101112131415161718191a1b1c1d1e1f', + '676bb20380b0e301e8ab79590a396da78b834934', + 'f53aa2e9107a8b6c022c', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '0000000d0c0b0aa0a1a2a3a4a5'), + ( '000102030405060708090a0b', + '0c0d0e0f101112131415161718191a1b1c1d1e1f20', + 'c0ffa0d6f05bdb67f24d43a4338d2aa4bed7b20e43', + 'cd1aa31662e7ad65d6db', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '0000000e0d0c0ba0a1a2a3a4a5'), + ( '0be1a88bace018b1', + '08e8cf97d820ea258460e96ad9cf5289054d895ceac47c', + '4cb97f86a2a4689a877947ab8091ef5386a6ffbdd080f8', + 'e78cf7cb0cddd7b3', + 'd7828d13b2b0bdc325a76236df93cc6b', + '00412b4ea9cdbe3c9696766cfa'), + ( '63018f76dc8a1bcb', + '9020ea6f91bdd85afa0039ba4baff9bfb79c7028949cd0ec', + '4ccb1e7ca981befaa0726c55d378061298c85c92814abc33', + 'c52ee81d7d77c08a', + 'd7828d13b2b0bdc325a76236df93cc6b', + '0033568ef7b2633c9696766cfa'), + ( 'aa6cfa36cae86b40', + 'b916e0eacc1c00d7dcec68ec0b3bbb1a02de8a2d1aa346132e', + 'b1d23a2220ddc0ac900d9aa03c61fcf4a559a4417767089708', + 'a776796edb723506', + 'd7828d13b2b0bdc325a76236df93cc6b', + 
'00103fe41336713c9696766cfa'), + ( 'd0d0735c531e1becf049c244', + '12daac5630efa5396f770ce1a66b21f7b2101c', + '14d253c3967b70609b7cbb7c49916028324526', + '9a6f49975bcadeaf', + 'd7828d13b2b0bdc325a76236df93cc6b', + '00764c63b8058e3c9696766cfa'), + ( '77b60f011c03e1525899bcae', + 'e88b6a46c78d63e52eb8c546efb5de6f75e9cc0d', + '5545ff1a085ee2efbf52b2e04bee1e2336c73e3f', + '762c0c7744fe7e3c', + 'd7828d13b2b0bdc325a76236df93cc6b', + '00f8b678094e3b3c9696766cfa'), + ( 'cd9044d2b71fdb8120ea60c0', + '6435acbafb11a82e2f071d7ca4a5ebd93a803ba87f', + '009769ecabdf48625594c59251e6035722675e04c8', + '47099e5ae0704551', + 'd7828d13b2b0bdc325a76236df93cc6b', + '00d560912d3f703c9696766cfa'), + ( 'd85bc7e69f944fb8', + '8a19b950bcf71a018e5e6701c91787659809d67dbedd18', + 'bc218daa947427b6db386a99ac1aef23ade0b52939cb6a', + '637cf9bec2408897c6ba', + 'd7828d13b2b0bdc325a76236df93cc6b', + '0042fff8f1951c3c9696766cfa'), + ( '74a0ebc9069f5b37', + '1761433c37c5a35fc1f39f406302eb907c6163be38c98437', + '5810e6fd25874022e80361a478e3e9cf484ab04f447efff6', + 'f0a477cc2fc9bf548944', + 'd7828d13b2b0bdc325a76236df93cc6b', + '00920f40e56cdc3c9696766cfa'), + ( '44a3aa3aae6475ca', + 'a434a8e58500c6e41530538862d686ea9e81301b5ae4226bfa', + 'f2beed7bc5098e83feb5b31608f8e29c38819a89c8e776f154', + '4d4151a4ed3a8b87b9ce', + 'd7828d13b2b0bdc325a76236df93cc6b', + '0027ca0c7120bc3c9696766cfa'), + ( 'ec46bb63b02520c33c49fd70', + 'b96b49e21d621741632875db7f6c9243d2d7c2', + '31d750a09da3ed7fddd49a2032aabf17ec8ebf', + '7d22c8088c666be5c197', + 'd7828d13b2b0bdc325a76236df93cc6b', + '005b8ccbcd9af83c9696766cfa'), + ( '47a65ac78b3d594227e85e71', + 'e2fcfbb880442c731bf95167c8ffd7895e337076', + 'e882f1dbd38ce3eda7c23f04dd65071eb41342ac', + 'df7e00dccec7ae52987d', + 'd7828d13b2b0bdc325a76236df93cc6b', + '003ebe94044b9a3c9696766cfa'), + ( '6e37a6ef546d955d34ab6059', + 'abf21c0b02feb88f856df4a37381bce3cc128517d4', + 'f32905b88a641b04b9c9ffb58cc390900f3da12ab1', + '6dce9e82efa16da62059', + 'd7828d13b2b0bdc325a76236df93cc6b', 
+ '008d493b30ae8b3c9696766cfa'), + ] + + test_vectors = [[unhexlify(x) for x in tv] for tv in test_vectors_hex] + + def runTest(self): + for assoc_data, pt, ct, mac, key, nonce in self.test_vectors: + # Encrypt + cipher = AES.new(key, AES.MODE_CCM, nonce, mac_len=len(mac)) + cipher.update(assoc_data) + ct2, mac2 = cipher.encrypt_and_digest(pt) + self.assertEqual(ct, ct2) + self.assertEqual(mac, mac2) + + # Decrypt + cipher = AES.new(key, AES.MODE_CCM, nonce, mac_len=len(mac)) + cipher.update(assoc_data) + pt2 = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(pt, pt2) + + +class TestVectorsWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings, **extra_params): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._extra_params = extra_params + self._id = "None" + + def setUp(self): + + def filter_tag(group): + return group['tagSize'] // 8 + + self.tv = load_test_vectors_wycheproof(("Cipher", "wycheproof"), + "aes_ccm_test.json", + "Wycheproof AES CCM", + group_tag={'tag_size': filter_tag}) + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_encrypt(self, tv): + self._id = "Wycheproof Encrypt CCM Test #" + str(tv.id) + + try: + cipher = AES.new(tv.key, AES.MODE_CCM, tv.iv, mac_len=tv.tag_size, + **self._extra_params) + except ValueError as e: + if len(tv.iv) not in range(7, 13 + 1, 2) and "Length of parameter 'nonce'" in str(e): + assert not tv.valid + return + if tv.tag_size not in range(4, 16 + 1, 2) and "Parameter 'mac_len'" in str(e): + assert not tv.valid + return + raise e + + cipher.update(tv.aad) + ct, tag = cipher.encrypt_and_digest(tv.msg) + if tv.valid: + self.assertEqual(ct, tv.ct) + self.assertEqual(tag, tv.tag) + self.warn(tv) + + def test_decrypt(self, tv): + self._id = "Wycheproof Decrypt CCM Test #" + str(tv.id) + + 
try: + cipher = AES.new(tv.key, AES.MODE_CCM, tv.iv, mac_len=tv.tag_size, + **self._extra_params) + except ValueError as e: + if len(tv.iv) not in range(7, 13 + 1, 2) and "Length of parameter 'nonce'" in str(e): + assert not tv.valid + return + if tv.tag_size not in range(4, 16 + 1, 2) and "Parameter 'mac_len'" in str(e): + assert not tv.valid + return + raise e + + cipher.update(tv.aad) + try: + pt = cipher.decrypt_and_verify(tv.ct, tv.tag) + except ValueError: + assert not tv.valid + else: + assert tv.valid + self.assertEqual(pt, tv.msg) + self.warn(tv) + + def test_corrupt_decrypt(self, tv): + self._id = "Wycheproof Corrupt Decrypt CCM Test #" + str(tv.id) + if len(tv.iv) not in range(7, 13 + 1, 2) or len(tv.ct) == 0: + return + cipher = AES.new(tv.key, AES.MODE_CCM, tv.iv, mac_len=tv.tag_size, + **self._extra_params) + cipher.update(tv.aad) + ct_corrupt = strxor(tv.ct, b"\x00" * (len(tv.ct) - 1) + b"\x01") + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct_corrupt, tv.tag) + + def runTest(self): + + for tv in self.tv: + self.test_encrypt(tv) + self.test_decrypt(tv) + self.test_corrupt_decrypt(tv) + + +def get_tests(config={}): + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(CcmTests) + tests += list_test_cases(CcmFSMTests) + tests += [TestVectors()] + tests += [TestVectorsWycheproof(wycheproof_warnings)] + + return tests + + +if __name__ == '__main__': + def suite(): + unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CFB.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CFB.py new file mode 100644 index 0000000..cb0c352 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CFB.py @@ -0,0 +1,411 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Util.py3compat import tobytes, is_string +from Crypto.Cipher import AES, DES3, DES +from Crypto.Hash import SHAKE128 + +from Crypto.SelfTest.Cipher.test_CBC import BlockChainingTests + + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + + +class CfbTests(BlockChainingTests): + + aes_mode = AES.MODE_CFB + des3_mode = DES3.MODE_CFB + + # Redefine test_unaligned_data_128/64 + + def test_unaligned_data_128(self): + plaintexts = [ b"7777777" ] * 100 + + cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=8) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=8) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=128) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=128) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + def test_unaligned_data_64(self): + plaintexts = [ b"7777777" ] * 100 + cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=8) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=8) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=64) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=64) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + # Extra + 
+ def test_segment_size_128(self): + for bits in range(8, 129, 8): + cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, + segment_size=bits) + + for bits in 0, 7, 9, 127, 129: + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CFB, + self.iv_128, + segment_size=bits) + + def test_segment_size_64(self): + for bits in range(8, 65, 8): + cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, + segment_size=bits) + + for bits in 0, 7, 9, 63, 65: + self.assertRaises(ValueError, DES3.new, self.key_192, AES.MODE_CFB, + self.iv_64, + segment_size=bits) + + +class NistCfbVectors(unittest.TestCase): + + def _do_kat_aes_test(self, file_name, segment_size): + + test_vectors = load_test_vectors(("Cipher", "AES"), + file_name, + "AES CFB%d KAT" % segment_size, + { "count" : lambda x: int(x) } ) + if test_vectors is None: + return + + direction = None + for tv in test_vectors: + + # The test vector file contains some directive lines + if is_string(tv): + direction = tv + continue + + self.description = tv.desc + cipher = AES.new(tv.key, AES.MODE_CFB, tv.iv, + segment_size=segment_size) + if direction == "[ENCRYPT]": + self.assertEqual(cipher.encrypt(tv.plaintext), tv.ciphertext) + elif direction == "[DECRYPT]": + self.assertEqual(cipher.decrypt(tv.ciphertext), tv.plaintext) + else: + assert False + + # See Section 6.4.5 in AESAVS + def _do_mct_aes_test(self, file_name, segment_size): + + test_vectors = load_test_vectors(("Cipher", "AES"), + file_name, + "AES CFB%d Montecarlo" % segment_size, + { "count" : lambda x: int(x) } ) + if test_vectors is None: + return + + assert(segment_size in (8, 128)) + + direction = None + for tv in test_vectors: + + # The test vector file contains some directive lines + if is_string(tv): + direction = tv + continue + + self.description = tv.desc + cipher = AES.new(tv.key, AES.MODE_CFB, tv.iv, + segment_size=segment_size) + + def get_input(input_text, output_seq, j): + # CFB128 + if segment_size == 128: + if j >= 2: + return 
output_seq[-2] + return [input_text, tv.iv][j] + # CFB8 + if j == 0: + return input_text + elif j <= 16: + return tv.iv[j - 1:j] + return output_seq[j - 17] + + if direction == '[ENCRYPT]': + cts = [] + for j in range(1000): + plaintext = get_input(tv.plaintext, cts, j) + cts.append(cipher.encrypt(plaintext)) + self.assertEqual(cts[-1], tv.ciphertext) + elif direction == '[DECRYPT]': + pts = [] + for j in range(1000): + ciphertext = get_input(tv.ciphertext, pts, j) + pts.append(cipher.decrypt(ciphertext)) + self.assertEqual(pts[-1], tv.plaintext) + else: + assert False + + def _do_tdes_test(self, file_name, segment_size): + + test_vectors = load_test_vectors(("Cipher", "TDES"), + file_name, + "TDES CFB%d KAT" % segment_size, + { "count" : lambda x: int(x) } ) + if test_vectors is None: + return + + direction = None + for tv in test_vectors: + + # The test vector file contains some directive lines + if is_string(tv): + direction = tv + continue + + self.description = tv.desc + if hasattr(tv, "keys"): + cipher = DES.new(tv.keys, DES.MODE_CFB, tv.iv, + segment_size=segment_size) + else: + if tv.key1 != tv.key3: + key = tv.key1 + tv.key2 + tv.key3 # Option 3 + else: + key = tv.key1 + tv.key2 # Option 2 + cipher = DES3.new(key, DES3.MODE_CFB, tv.iv, + segment_size=segment_size) + if direction == "[ENCRYPT]": + self.assertEqual(cipher.encrypt(tv.plaintext), tv.ciphertext) + elif direction == "[DECRYPT]": + self.assertEqual(cipher.decrypt(tv.ciphertext), tv.plaintext) + else: + assert False + + +# Create one test method per file +nist_aes_kat_mmt_files = ( + # KAT + "CFB?GFSbox128.rsp", + "CFB?GFSbox192.rsp", + "CFB?GFSbox256.rsp", + "CFB?KeySbox128.rsp", + "CFB?KeySbox192.rsp", + "CFB?KeySbox256.rsp", + "CFB?VarKey128.rsp", + "CFB?VarKey192.rsp", + "CFB?VarKey256.rsp", + "CFB?VarTxt128.rsp", + "CFB?VarTxt192.rsp", + "CFB?VarTxt256.rsp", + # MMT + "CFB?MMT128.rsp", + "CFB?MMT192.rsp", + "CFB?MMT256.rsp", + ) +nist_aes_mct_files = ( + "CFB?MCT128.rsp", + "CFB?MCT192.rsp", 
+ "CFB?MCT256.rsp", + ) + +for file_gen_name in nist_aes_kat_mmt_files: + for bits in "8", "128": + file_name = file_gen_name.replace("?", bits) + def new_func(self, file_name=file_name, bits=bits): + self._do_kat_aes_test(file_name, int(bits)) + setattr(NistCfbVectors, "test_AES_" + file_name, new_func) + +for file_gen_name in nist_aes_mct_files: + for bits in "8", "128": + file_name = file_gen_name.replace("?", bits) + def new_func(self, file_name=file_name, bits=bits): + self._do_mct_aes_test(file_name, int(bits)) + setattr(NistCfbVectors, "test_AES_" + file_name, new_func) +del file_name, new_func + +nist_tdes_files = ( + "TCFB?MMT2.rsp", # 2TDES + "TCFB?MMT3.rsp", # 3TDES + "TCFB?invperm.rsp", # Single DES + "TCFB?permop.rsp", + "TCFB?subtab.rsp", + "TCFB?varkey.rsp", + "TCFB?vartext.rsp", + ) + +for file_gen_name in nist_tdes_files: + for bits in "8", "64": + file_name = file_gen_name.replace("?", bits) + def new_func(self, file_name=file_name, bits=bits): + self._do_tdes_test(file_name, int(bits)) + setattr(NistCfbVectors, "test_TDES_" + file_name, new_func) + +# END OF NIST CBC TEST VECTORS + + +class SP800TestVectors(unittest.TestCase): + """Class exercising the CFB test vectors found in Section F.3 + of NIST SP 800-3A""" + + def test_aes_128_cfb8(self): + plaintext = '6bc1bee22e409f96e93d7e117393172aae2d' + ciphertext = '3b79424c9c0dd436bace9e0ed4586a4f32b9' + key = '2b7e151628aed2a6abf7158809cf4f3c' + iv = '000102030405060708090a0b0c0d0e0f' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=8) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=8) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + def test_aes_192_cfb8(self): + plaintext = '6bc1bee22e409f96e93d7e117393172aae2d' + ciphertext = 'cda2521ef0a905ca44cd057cbf0d47a0678a' + key = 
'8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b' + iv = '000102030405060708090a0b0c0d0e0f' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=8) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=8) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + def test_aes_256_cfb8(self): + plaintext = '6bc1bee22e409f96e93d7e117393172aae2d' + ciphertext = 'dc1f1a8520a64db55fcc8ac554844e889700' + key = '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4' + iv = '000102030405060708090a0b0c0d0e0f' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=8) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=8) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + def test_aes_128_cfb128(self): + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = '3b3fd92eb72dad20333449f8e83cfb4a' +\ + 'c8a64537a0b3a93fcde3cdad9f1ce58b' +\ + '26751f67a3cbb140b1808cf187a4f4df' +\ + 'c04b05357c5d1c0eeac4c66f9ff7f2e6' + key = '2b7e151628aed2a6abf7158809cf4f3c' + iv = '000102030405060708090a0b0c0d0e0f' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=128) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=128) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + def test_aes_192_cfb128(self): + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + 
'30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = 'cdc80d6fddf18cab34c25909c99a4174' +\ + '67ce7f7f81173621961a2b70171d3d7a' +\ + '2e1e8a1dd59b88b1c8e60fed1efac4c9' +\ + 'c05f9f9ca9834fa042ae8fba584b09ff' + key = '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b' + iv = '000102030405060708090a0b0c0d0e0f' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=128) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=128) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + def test_aes_256_cfb128(self): + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + + ciphertext = 'dc7e84bfda79164b7ecd8486985d3860' +\ + '39ffed143b28b1c832113c6331e5407b' +\ + 'df10132415e54b92a13ed0a8267ae2f9' +\ + '75a385741ab9cef82031623d55b1e471' + key = '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4' + iv = '000102030405060708090a0b0c0d0e0f' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=128) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=128) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(CfbTests) + if config.get('slow_tests'): + tests += list_test_cases(NistCfbVectors) + tests += list_test_cases(SP800TestVectors) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CTR.py 
b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CTR.py new file mode 100644 index 0000000..ed367f8 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_CTR.py @@ -0,0 +1,471 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import unittest +from binascii import hexlify, unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Util.py3compat import tobytes, bchr +from Crypto.Cipher import AES, DES3 +from Crypto.Hash import SHAKE128, SHA256 +from Crypto.Util import Counter + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + +class CtrTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + key_192 = get_tag_random("key_192", 24) + nonce_32 = get_tag_random("nonce_32", 4) + nonce_64 = get_tag_random("nonce_64", 8) + ctr_64 = Counter.new(32, prefix=nonce_32) + ctr_128 = Counter.new(64, prefix=nonce_64) + + def test_loopback_128(self): + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + pt = get_tag_random("plaintext", 16 * 100) + ct = cipher.encrypt(pt) + + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_loopback_64(self): + cipher = DES3.new(self.key_192, DES3.MODE_CTR, counter=self.ctr_64) + pt = get_tag_random("plaintext", 8 * 100) + ct = cipher.encrypt(pt) + + cipher = DES3.new(self.key_192, DES3.MODE_CTR, counter=self.ctr_64) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_invalid_counter_parameter(self): + # Counter object is required for ciphers with short block size + self.assertRaises(TypeError, DES3.new, self.key_192, AES.MODE_CTR) + # Positional arguments are not allowed (Counter must be passed as + # keyword) + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CTR, self.ctr_128) + + def test_nonce_attribute(self): + # Nonce attribute is the prefix passed to Counter (DES3) + cipher = DES3.new(self.key_192, DES3.MODE_CTR, counter=self.ctr_64) + self.assertEqual(cipher.nonce, self.nonce_32) + + # Nonce attribute is the prefix passed to Counter (AES) + cipher = AES.new(self.key_128, AES.MODE_CTR, 
counter=self.ctr_128) + self.assertEqual(cipher.nonce, self.nonce_64) + + # Nonce attribute is not defined if suffix is used in Counter + counter = Counter.new(64, prefix=self.nonce_32, suffix=self.nonce_32) + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=counter) + self.failIf(hasattr(cipher, "nonce")) + + def test_nonce_parameter(self): + # Nonce parameter becomes nonce attribute + cipher1 = AES.new(self.key_128, AES.MODE_CTR, nonce=self.nonce_64) + self.assertEqual(cipher1.nonce, self.nonce_64) + + counter = Counter.new(64, prefix=self.nonce_64, initial_value=0) + cipher2 = AES.new(self.key_128, AES.MODE_CTR, counter=counter) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + pt = get_tag_random("plaintext", 65536) + self.assertEqual(cipher1.encrypt(pt), cipher2.encrypt(pt)) + + # Nonce is implicitly created (for AES) when no parameters are passed + nonce1 = AES.new(self.key_128, AES.MODE_CTR).nonce + nonce2 = AES.new(self.key_128, AES.MODE_CTR).nonce + self.assertNotEqual(nonce1, nonce2) + self.assertEqual(len(nonce1), 8) + + # Nonce can be zero-length + cipher = AES.new(self.key_128, AES.MODE_CTR, nonce=b"") + self.assertEqual(b"", cipher.nonce) + cipher.encrypt(b'0'*300) + + # Nonce and Counter are mutually exclusive + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CTR, + counter=self.ctr_128, nonce=self.nonce_64) + + def test_initial_value_parameter(self): + # Test with nonce parameter + cipher1 = AES.new(self.key_128, AES.MODE_CTR, + nonce=self.nonce_64, initial_value=0xFFFF) + counter = Counter.new(64, prefix=self.nonce_64, initial_value=0xFFFF) + cipher2 = AES.new(self.key_128, AES.MODE_CTR, counter=counter) + pt = get_tag_random("plaintext", 65536) + self.assertEqual(cipher1.encrypt(pt), cipher2.encrypt(pt)) + + # Test without nonce parameter + cipher1 = AES.new(self.key_128, AES.MODE_CTR, + initial_value=0xFFFF) + counter = Counter.new(64, prefix=cipher1.nonce, initial_value=0xFFFF) + cipher2 = AES.new(self.key_128, AES.MODE_CTR, 
counter=counter) + pt = get_tag_random("plaintext", 65536) + self.assertEqual(cipher1.encrypt(pt), cipher2.encrypt(pt)) + + # Initial_value and Counter are mutually exclusive + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CTR, + counter=self.ctr_128, initial_value=0) + + def test_initial_value_bytes_parameter(self): + # Same result as when passing an integer + cipher1 = AES.new(self.key_128, AES.MODE_CTR, + nonce=self.nonce_64, + initial_value=b"\x00"*6+b"\xFF\xFF") + cipher2 = AES.new(self.key_128, AES.MODE_CTR, + nonce=self.nonce_64, initial_value=0xFFFF) + pt = get_tag_random("plaintext", 65536) + self.assertEqual(cipher1.encrypt(pt), cipher2.encrypt(pt)) + + # Fail if the iv is too large + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CTR, + initial_value=b"5"*17) + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CTR, + nonce=self.nonce_64, initial_value=b"5"*9) + + # Fail if the iv is too short + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CTR, + initial_value=b"5"*15) + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CTR, + nonce=self.nonce_64, initial_value=b"5"*7) + + def test_iv_with_matching_length(self): + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CTR, + counter=Counter.new(120)) + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CTR, + counter=Counter.new(136)) + + def test_block_size_128(self): + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + self.assertEqual(cipher.block_size, AES.block_size) + + def test_block_size_64(self): + cipher = DES3.new(self.key_192, DES3.MODE_CTR, counter=self.ctr_64) + self.assertEqual(cipher.block_size, DES3.block_size) + + def test_unaligned_data_128(self): + plaintexts = [ b"7777777" ] * 100 + + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + 
self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + def test_unaligned_data_64(self): + plaintexts = [ b"7777777" ] * 100 + cipher = DES3.new(self.key_192, AES.MODE_CTR, counter=self.ctr_64) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = DES3.new(self.key_192, AES.MODE_CTR, counter=self.ctr_64) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + cipher = DES3.new(self.key_192, AES.MODE_CTR, counter=self.ctr_64) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = DES3.new(self.key_192, AES.MODE_CTR, counter=self.ctr_64) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + def test_unknown_parameters(self): + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CTR, + 7, counter=self.ctr_128) + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CTR, + counter=self.ctr_128, unknown=7) + # But some are only known by the base cipher (e.g. 
use_aesni consumed by the AES module) + AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128, use_aesni=False) + + def test_null_encryption_decryption(self): + for func in "encrypt", "decrypt": + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + result = getattr(cipher, func)(b"") + self.assertEqual(result, b"") + + def test_either_encrypt_or_decrypt(self): + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + cipher.encrypt(b"") + self.assertRaises(TypeError, cipher.decrypt, b"") + + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + cipher.decrypt(b"") + self.assertRaises(TypeError, cipher.encrypt, b"") + + def test_wrap_around(self): + # Counter is only 8 bits, so we can only encrypt/decrypt 256 blocks (=4096 bytes) + counter = Counter.new(8, prefix=bchr(9) * 15) + max_bytes = 4096 + + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=counter) + cipher.encrypt(b'9' * max_bytes) + self.assertRaises(OverflowError, cipher.encrypt, b'9') + + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=counter) + self.assertRaises(OverflowError, cipher.encrypt, b'9' * (max_bytes + 1)) + + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=counter) + cipher.decrypt(b'9' * max_bytes) + self.assertRaises(OverflowError, cipher.decrypt, b'9') + + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=counter) + self.assertRaises(OverflowError, cipher.decrypt, b'9' * (max_bytes + 1)) + + def test_bytearray(self): + data = b"1" * 16 + iv = b"\x00" * 6 + b"\xFF\xFF" + + # Encrypt + cipher1 = AES.new(self.key_128, AES.MODE_CTR, + nonce=self.nonce_64, + initial_value=iv) + ref1 = cipher1.encrypt(data) + + cipher2 = AES.new(self.key_128, AES.MODE_CTR, + nonce=bytearray(self.nonce_64), + initial_value=bytearray(iv)) + ref2 = cipher2.encrypt(bytearray(data)) + + self.assertEqual(ref1, ref2) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + cipher3 = AES.new(self.key_128, AES.MODE_CTR, + nonce=self.nonce_64, + 
initial_value=iv) + ref3 = cipher3.decrypt(data) + + cipher4 = AES.new(self.key_128, AES.MODE_CTR, + nonce=bytearray(self.nonce_64), + initial_value=bytearray(iv)) + ref4 = cipher4.decrypt(bytearray(data)) + + self.assertEqual(ref3, ref4) + + def test_very_long_data(self): + cipher = AES.new(b'A' * 32, AES.MODE_CTR, nonce=b'') + ct = cipher.encrypt(b'B' * 1000000) + digest = SHA256.new(ct).hexdigest() + self.assertEqual(digest, "96204fc470476561a3a8f3b6fe6d24be85c87510b638142d1d0fb90989f8a6a6") + + def test_output_param(self): + + pt = b'5' * 16 + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + ct = cipher.encrypt(pt) + + output = bytearray(16) + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + def test_output_param_memoryview(self): + + pt = b'5' * 16 + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + ct = cipher.encrypt(pt) + + output = memoryview(bytearray(16)) + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + def test_output_param_neg(self): + + pt = b'5' * 16 + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + ct = cipher.encrypt(pt) + + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) + + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(15) + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + self.assertRaises(ValueError, 
cipher.encrypt, pt, output=shorter_output) + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +class SP800TestVectors(unittest.TestCase): + """Class exercising the CTR test vectors found in Section F.5 + of NIST SP 800-38A""" + + def test_aes_128(self): + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = '874d6191b620e3261bef6864990db6ce' +\ + '9806f66b7970fdff8617187bb9fffdff' +\ + '5ae4df3edbd5d35e5b4f09020db03eab' +\ + '1e031dda2fbe03d1792170a0f3009cee' + key = '2b7e151628aed2a6abf7158809cf4f3c' + counter = Counter.new(nbits=16, + prefix=unhexlify('f0f1f2f3f4f5f6f7f8f9fafbfcfd'), + initial_value=0xfeff) + + key = unhexlify(key) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CTR, counter=counter) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CTR, counter=counter) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + def test_aes_192(self): + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = '1abc932417521ca24f2b0459fe7e6e0b' +\ + '090339ec0aa6faefd5ccc2c6f4ce8e94' +\ + '1e36b26bd1ebc670d1bd1d665620abf7' +\ + '4f78a7f6d29809585a97daec58c6b050' + key = '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b' + counter = Counter.new(nbits=16, + prefix=unhexlify('f0f1f2f3f4f5f6f7f8f9fafbfcfd'), + initial_value=0xfeff) + + key = unhexlify(key) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CTR, counter=counter) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CTR, counter=counter) + 
self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + def test_aes_256(self): + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = '601ec313775789a5b7a7f504bbf3d228' +\ + 'f443e3ca4d62b59aca84e990cacaf5c5' +\ + '2b0930daa23de94ce87017ba2d84988d' +\ + 'dfc9c58db67aada613c2dd08457941a6' + key = '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4' + counter = Counter.new(nbits=16, + prefix=unhexlify('f0f1f2f3f4f5f6f7f8f9fafbfcfd'), + initial_value=0xfeff) + key = unhexlify(key) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CTR, counter=counter) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CTR, counter=counter) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + +class RFC3686TestVectors(unittest.TestCase): + + # Each item is a test vector with: + # - plaintext + # - ciphertext + # - key (AES 128, 192 or 256 bits) + # - counter prefix (4 byte nonce + 8 byte nonce) + data = ( + ('53696e676c6520626c6f636b206d7367', + 'e4095d4fb7a7b3792d6175a3261311b8', + 'ae6852f8121067cc4bf7a5765577f39e', + '000000300000000000000000'), + ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', + '5104a106168a72d9790d41ee8edad388eb2e1efc46da57c8fce630df9141be28', + '7e24067817fae0d743d6ce1f32539163', + '006cb6dbc0543b59da48d90b'), + ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223', + 'c1cf48a89f2ffdd9cf4652e9efdb72d74540a42bde6d7836d59a5ceaaef3105325b2072f', + '7691be035e5020a8ac6e618529f9a0dc', + '00e0017b27777f3f4a1786f0'), + ('53696e676c6520626c6f636b206d7367', + '4b55384fe259c9c84e7935a003cbe928', + '16af5b145fc9f579c175f93e3bfb0eed863d06ccfdb78515', + '0000004836733c147d6d93cb'), + ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', + 
'453243fc609b23327edfaafa7131cd9f8490701c5ad4a79cfc1fe0ff42f4fb00', + '7c5cb2401b3dc33c19e7340819e0f69c678c3db8e6f6a91a', + '0096b03b020c6eadc2cb500d'), + ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223', + '96893fc55e5c722f540b7dd1ddf7e758d288bc95c69165884536c811662f2188abee0935', + '02bf391ee8ecb159b959617b0965279bf59b60a786d3e0fe', + '0007bdfd5cbd60278dcc0912'), + ('53696e676c6520626c6f636b206d7367', + '145ad01dbf824ec7560863dc71e3e0c0', + '776beff2851db06f4c8a0542c8696f6c6a81af1eec96b4d37fc1d689e6c1c104', + '00000060db5672c97aa8f0b2'), + ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', + 'f05e231b3894612c49ee000b804eb2a9b8306b508f839d6a5530831d9344af1c', + 'f6d66d6bd52d59bb0796365879eff886c66dd51a5b6a99744b50590c87a23884', + '00faac24c1585ef15a43d875'), + ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223', + 'eb6c52821d0bbbf7ce7594462aca4faab407df866569fd07f48cc0b583d6071f1ec0e6b8', + 'ff7a617ce69148e4f1726e2f43581de2aa62d9f805532edff1eed687fb54153d', + '001cc5b751a51d70a1c11148') + ) + + bindata = [] + for tv in data: + bindata.append([unhexlify(x) for x in tv]) + + def runTest(self): + for pt, ct, key, prefix in self.bindata: + counter = Counter.new(32, prefix=prefix) + cipher = AES.new(key, AES.MODE_CTR, counter=counter) + result = cipher.encrypt(pt) + self.assertEqual(hexlify(ct), hexlify(result)) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(CtrTests) + tests += list_test_cases(SP800TestVectors) + tests += [ RFC3686TestVectors() ] + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_ChaCha20.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_ChaCha20.py new file mode 100644 index 0000000..2b8de20 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_ChaCha20.py @@ -0,0 +1,529 @@ +# 
=================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import os +import re +import unittest +from binascii import hexlify, unhexlify + +from Crypto.Util.py3compat import b, tobytes, bchr +from Crypto.Util.strxor import strxor_c +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Cipher import ChaCha20 + + +class ChaCha20Test(unittest.TestCase): + + def test_new_positive(self): + cipher = ChaCha20.new(key=b("0")*32, nonce=b"0"*8) + self.assertEqual(cipher.nonce, b"0" * 8) + cipher = ChaCha20.new(key=b("0")*32, nonce=b"0"*12) + self.assertEqual(cipher.nonce, b"0" * 12) + + def test_new_negative(self): + new = ChaCha20.new + self.assertRaises(TypeError, new) + self.assertRaises(TypeError, new, nonce=b("0")) + self.assertRaises(ValueError, new, nonce=b("0")*8, key=b("0")) + self.assertRaises(ValueError, new, nonce=b("0"), key=b("0")*32) + + def test_default_nonce(self): + cipher1 = ChaCha20.new(key=bchr(1) * 32) + cipher2 = ChaCha20.new(key=bchr(1) * 32) + self.assertEquals(len(cipher1.nonce), 8) + self.assertNotEqual(cipher1.nonce, cipher2.nonce) + + def test_nonce(self): + key = b'A' * 32 + + nonce1 = b'P' * 8 + cipher1 = ChaCha20.new(key=key, nonce=nonce1) + self.assertEqual(nonce1, cipher1.nonce) + + nonce2 = b'Q' * 12 + cipher2 = ChaCha20.new(key=key, nonce=nonce2) + self.assertEqual(nonce2, cipher2.nonce) + + def test_eiter_encrypt_or_decrypt(self): + """Verify that a cipher cannot be used for both decrypting and encrypting""" + + c1 = ChaCha20.new(key=b("5") * 32, nonce=b("6") * 8) + c1.encrypt(b("8")) + self.assertRaises(TypeError, c1.decrypt, b("9")) + + c2 = ChaCha20.new(key=b("5") * 32, nonce=b("6") * 8) + c2.decrypt(b("8")) + self.assertRaises(TypeError, c2.encrypt, b("9")) + + def test_round_trip(self): + pt = b("A") * 1024 + c1 = ChaCha20.new(key=b("5") * 32, nonce=b("6") * 8) + c2 = ChaCha20.new(key=b("5") * 32, nonce=b("6") * 8) + ct = c1.encrypt(pt) + self.assertEqual(c2.decrypt(ct), pt) + + 
self.assertEqual(c1.encrypt(b("")), b("")) + self.assertEqual(c2.decrypt(b("")), b("")) + + def test_streaming(self): + """Verify that an arbitrary number of bytes can be encrypted/decrypted""" + from Crypto.Hash import SHA1 + + segments = (1, 3, 5, 7, 11, 17, 23) + total = sum(segments) + + pt = b("") + while len(pt) < total: + pt += SHA1.new(pt).digest() + + cipher1 = ChaCha20.new(key=b("7") * 32, nonce=b("t") * 8) + ct = cipher1.encrypt(pt) + + cipher2 = ChaCha20.new(key=b("7") * 32, nonce=b("t") * 8) + cipher3 = ChaCha20.new(key=b("7") * 32, nonce=b("t") * 8) + idx = 0 + for segment in segments: + self.assertEqual(cipher2.decrypt(ct[idx:idx+segment]), pt[idx:idx+segment]) + self.assertEqual(cipher3.encrypt(pt[idx:idx+segment]), ct[idx:idx+segment]) + idx += segment + + def test_seek(self): + cipher1 = ChaCha20.new(key=b("9") * 32, nonce=b("e") * 8) + + offset = 64 * 900 + 7 + pt = b("1") * 64 + + cipher1.encrypt(b("0") * offset) + ct1 = cipher1.encrypt(pt) + + cipher2 = ChaCha20.new(key=b("9") * 32, nonce=b("e") * 8) + cipher2.seek(offset) + ct2 = cipher2.encrypt(pt) + + self.assertEquals(ct1, ct2) + + def test_seek_tv(self): + # Test Vector #4, A.1 from + # http://tools.ietf.org/html/draft-nir-cfrg-chacha20-poly1305-04 + key = bchr(0) + bchr(255) + bchr(0) * 30 + nonce = bchr(0) * 8 + cipher = ChaCha20.new(key=key, nonce=nonce) + cipher.seek(64 * 2) + expected_key_stream = unhexlify(b( + "72d54dfbf12ec44b362692df94137f32" + "8fea8da73990265ec1bbbea1ae9af0ca" + "13b25aa26cb4a648cb9b9d1be65b2c09" + "24a66c54d545ec1b7374f4872e99f096" + )) + ct = cipher.encrypt(bchr(0) * len(expected_key_stream)) + self.assertEqual(expected_key_stream, ct) + + def test_rfc7539(self): + # from https://tools.ietf.org/html/rfc7539 Annex A.1 + # Each item is: key, nonce, block #, plaintext, ciphertext + tvs = [ + # Test Vector #1 + ( + "00"*32, + "00"*12, + 0, + "00"*16*4, + "76b8e0ada0f13d90405d6ae55386bd28" + "bdd219b8a08ded1aa836efcc8b770dc7" + "da41597c5157488d7724e03fb8d84a37" + 
"6a43b8f41518a11cc387b669b2ee6586" + ), + # Test Vector #2 + ( + "00"*31 + "01", + "00"*11 + "02", + 1, + "416e79207375626d697373696f6e2074" + "6f20746865204945544620696e74656e" + "6465642062792074686520436f6e7472" + "696275746f7220666f72207075626c69" + "636174696f6e20617320616c6c206f72" + "2070617274206f6620616e2049455446" + "20496e7465726e65742d447261667420" + "6f722052464320616e6420616e792073" + "746174656d656e74206d616465207769" + "7468696e2074686520636f6e74657874" + "206f6620616e20494554462061637469" + "7669747920697320636f6e7369646572" + "656420616e20224945544620436f6e74" + "7269627574696f6e222e205375636820" + "73746174656d656e747320696e636c75" + "6465206f72616c2073746174656d656e" + "747320696e2049455446207365737369" + "6f6e732c2061732077656c6c20617320" + "7772697474656e20616e6420656c6563" + "74726f6e696320636f6d6d756e696361" + "74696f6e73206d61646520617420616e" + "792074696d65206f7220706c6163652c" + "20776869636820617265206164647265" + "7373656420746f", + "a3fbf07df3fa2fde4f376ca23e827370" + "41605d9f4f4f57bd8cff2c1d4b7955ec" + "2a97948bd3722915c8f3d337f7d37005" + "0e9e96d647b7c39f56e031ca5eb6250d" + "4042e02785ececfa4b4bb5e8ead0440e" + "20b6e8db09d881a7c6132f420e527950" + "42bdfa7773d8a9051447b3291ce1411c" + "680465552aa6c405b7764d5e87bea85a" + "d00f8449ed8f72d0d662ab052691ca66" + "424bc86d2df80ea41f43abf937d3259d" + "c4b2d0dfb48a6c9139ddd7f76966e928" + "e635553ba76c5c879d7b35d49eb2e62b" + "0871cdac638939e25e8a1e0ef9d5280f" + "a8ca328b351c3c765989cbcf3daa8b6c" + "cc3aaf9f3979c92b3720fc88dc95ed84" + "a1be059c6499b9fda236e7e818b04b0b" + "c39c1e876b193bfe5569753f88128cc0" + "8aaa9b63d1a16f80ef2554d7189c411f" + "5869ca52c5b83fa36ff216b9c1d30062" + "bebcfd2dc5bce0911934fda79a86f6e6" + "98ced759c3ff9b6477338f3da4f9cd85" + "14ea9982ccafb341b2384dd902f3d1ab" + "7ac61dd29c6f21ba5b862f3730e37cfd" + "c4fd806c22f221" + ), + # Test Vector #3 + ( + "1c9240a5eb55d38af333888604f6b5f0" + "473917c1402b80099dca5cbc207075c0", + "00"*11 + "02", + 42, + 
"2754776173206272696c6c69672c2061" + "6e642074686520736c6974687920746f" + "7665730a446964206779726520616e64" + "2067696d626c6520696e207468652077" + "6162653a0a416c6c206d696d73792077" + "6572652074686520626f726f676f7665" + "732c0a416e6420746865206d6f6d6520" + "7261746873206f757467726162652e", + "62e6347f95ed87a45ffae7426f27a1df" + "5fb69110044c0d73118effa95b01e5cf" + "166d3df2d721caf9b21e5fb14c616871" + "fd84c54f9d65b283196c7fe4f60553eb" + "f39c6402c42234e32a356b3e764312a6" + "1a5532055716ead6962568f87d3f3f77" + "04c6a8d1bcd1bf4d50d6154b6da731b1" + "87b58dfd728afa36757a797ac188d1" + ) + ] + + for tv in tvs: + key = unhexlify(tv[0]) + nonce = unhexlify(tv[1]) + offset = tv[2] * 64 + pt = unhexlify(tv[3]) + ct_expect = unhexlify(tv[4]) + + cipher = ChaCha20.new(key=key, nonce=nonce) + if offset != 0: + cipher.seek(offset) + ct = cipher.encrypt(pt) + assert(ct == ct_expect) + + +class XChaCha20Test(unittest.TestCase): + + # From https://tools.ietf.org/html/draft-arciszewski-xchacha-03 + + def test_hchacha20(self): + # Section 2.2.1 + + from Crypto.Cipher.ChaCha20 import _HChaCha20 + + key = b"00:01:02:03:04:05:06:07:08:09:0a:0b:0c:0d:0e:0f:10:11:12:13:14:15:16:17:18:19:1a:1b:1c:1d:1e:1f" + key = unhexlify(key.replace(b":", b"")) + + nonce = b"00:00:00:09:00:00:00:4a:00:00:00:00:31:41:59:27" + nonce = unhexlify(nonce.replace(b":", b"")) + + subkey = _HChaCha20(key, nonce) + + expected = b"82413b42 27b27bfe d30e4250 8a877d73 a0f9e4d5 8a74a853 c12ec413 26d3ecdc" + expected = unhexlify(expected.replace(b" ", b"")) + + self.assertEqual(subkey, expected) + + def test_nonce(self): + key = b'A' * 32 + nonce = b'P' * 24 + cipher = ChaCha20.new(key=key, nonce=nonce) + self.assertEqual(nonce, cipher.nonce) + + def test_encrypt(self): + # Section A.3.2 + + pt = b""" + 5468652064686f6c65202870726f6e6f756e6365642022646f6c652229206973 + 20616c736f206b6e6f776e2061732074686520417369617469632077696c6420 + 646f672c2072656420646f672c20616e642077686973746c696e6720646f672e + 
2049742069732061626f7574207468652073697a65206f662061204765726d61 + 6e20736865706865726420627574206c6f6f6b73206d6f7265206c696b652061 + 206c6f6e672d6c656767656420666f782e205468697320686967686c7920656c + 757369766520616e6420736b696c6c6564206a756d70657220697320636c6173 + 736966696564207769746820776f6c7665732c20636f796f7465732c206a6163 + 6b616c732c20616e6420666f78657320696e20746865207461786f6e6f6d6963 + 2066616d696c792043616e696461652e""" + pt = unhexlify(pt.replace(b"\n", b"").replace(b" ", b"")) + + key = unhexlify(b"808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9f") + iv = unhexlify(b"404142434445464748494a4b4c4d4e4f5051525354555658") + + ct = b""" + 7d0a2e6b7f7c65a236542630294e063b7ab9b555a5d5149aa21e4ae1e4fbce87 + ecc8e08a8b5e350abe622b2ffa617b202cfad72032a3037e76ffdcdc4376ee05 + 3a190d7e46ca1de04144850381b9cb29f051915386b8a710b8ac4d027b8b050f + 7cba5854e028d564e453b8a968824173fc16488b8970cac828f11ae53cabd201 + 12f87107df24ee6183d2274fe4c8b1485534ef2c5fbc1ec24bfc3663efaa08bc + 047d29d25043532db8391a8a3d776bf4372a6955827ccb0cdd4af403a7ce4c63 + d595c75a43e045f0cce1f29c8b93bd65afc5974922f214a40b7c402cdb91ae73 + c0b63615cdad0480680f16515a7ace9d39236464328a37743ffc28f4ddb324f4 + d0f5bbdc270c65b1749a6efff1fbaa09536175ccd29fb9e6057b307320d31683 + 8a9c71f70b5b5907a66f7ea49aadc409""" + ct = unhexlify(ct.replace(b"\n", b"").replace(b" ", b"")) + + cipher = ChaCha20.new(key=key, nonce=iv) + cipher.seek(64) # Counter = 1 + ct_test = cipher.encrypt(pt) + self.assertEqual(ct, ct_test) + + +class ByteArrayTest(unittest.TestCase): + """Verify we can encrypt or decrypt bytearrays""" + + def runTest(self): + + data = b"0123" + key = b"9" * 32 + nonce = b"t" * 8 + + # Encryption + data_ba = bytearray(data) + key_ba = bytearray(key) + nonce_ba = bytearray(nonce) + + cipher1 = ChaCha20.new(key=key, nonce=nonce) + ct = cipher1.encrypt(data) + + cipher2 = ChaCha20.new(key=key_ba, nonce=nonce_ba) + key_ba[:1] = b'\xFF' + nonce_ba[:1] = b'\xFF' + ct_test = 
cipher2.encrypt(data_ba) + + self.assertEqual(ct, ct_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decryption + key_ba = bytearray(key) + nonce_ba = bytearray(nonce) + ct_ba = bytearray(ct) + + cipher3 = ChaCha20.new(key=key_ba, nonce=nonce_ba) + key_ba[:1] = b'\xFF' + nonce_ba[:1] = b'\xFF' + pt_test = cipher3.decrypt(ct_ba) + + self.assertEqual(data, pt_test) + + +class MemoryviewTest(unittest.TestCase): + """Verify we can encrypt or decrypt bytearrays""" + + def runTest(self): + + data = b"0123" + key = b"9" * 32 + nonce = b"t" * 8 + + # Encryption + data_mv = memoryview(bytearray(data)) + key_mv = memoryview(bytearray(key)) + nonce_mv = memoryview(bytearray(nonce)) + + cipher1 = ChaCha20.new(key=key, nonce=nonce) + ct = cipher1.encrypt(data) + + cipher2 = ChaCha20.new(key=key_mv, nonce=nonce_mv) + key_mv[:1] = b'\xFF' + nonce_mv[:1] = b'\xFF' + ct_test = cipher2.encrypt(data_mv) + + self.assertEqual(ct, ct_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decryption + key_mv = memoryview(bytearray(key)) + nonce_mv = memoryview(bytearray(nonce)) + ct_mv = memoryview(bytearray(ct)) + + cipher3 = ChaCha20.new(key=key_mv, nonce=nonce_mv) + key_mv[:1] = b'\xFF' + nonce_mv[:1] = b'\xFF' + pt_test = cipher3.decrypt(ct_mv) + + self.assertEqual(data, pt_test) + + +class ChaCha20_AGL_NIR(unittest.TestCase): + + # From http://tools.ietf.org/html/draft-agl-tls-chacha20poly1305-04 + # and http://tools.ietf.org/html/draft-nir-cfrg-chacha20-poly1305-04 + tv = [ + ( "00" * 32, + "00" * 8, + "76b8e0ada0f13d90405d6ae55386bd28bdd219b8a08ded1aa836efcc" + "8b770dc7da41597c5157488d7724e03fb8d84a376a43b8f41518a11c" + "c387b669b2ee6586" + "9f07e7be5551387a98ba977c732d080d" + "cb0f29a048e3656912c6533e32ee7aed" + "29b721769ce64e43d57133b074d839d5" + "31ed1f28510afb45ace10a1f4b794d6f" + ), + ( "00" * 31 + "01", + "00" * 8, + "4540f05a9f1fb296d7736e7b208e3c96eb4fe1834688d2604f450952" + "ed432d41bbe2a0b6ea7566d2a5d1e7e20d42af2c53d792b1c43fea81" + 
"7e9ad275ae546963" + "3aeb5224ecf849929b9d828db1ced4dd" + "832025e8018b8160b82284f3c949aa5a" + "8eca00bbb4a73bdad192b5c42f73f2fd" + "4e273644c8b36125a64addeb006c13a0" + ), + ( "00" * 32, + "00" * 7 + "01", + "de9cba7bf3d69ef5e786dc63973f653a0b49e015adbff7134fcb7df1" + "37821031e85a050278a7084527214f73efc7fa5b5277062eb7a0433e" + "445f41e3" + ), + ( "00" * 32, + "01" + "00" * 7, + "ef3fdfd6c61578fbf5cf35bd3dd33b8009631634d21e42ac33960bd1" + "38e50d32111e4caf237ee53ca8ad6426194a88545ddc497a0b466e7d" + "6bbdb0041b2f586b" + ), + ( "000102030405060708090a0b0c0d0e0f101112131415161718191a1b" + "1c1d1e1f", + "0001020304050607", + "f798a189f195e66982105ffb640bb7757f579da31602fc93ec01ac56" + "f85ac3c134a4547b733b46413042c9440049176905d3be59ea1c53f1" + "5916155c2be8241a38008b9a26bc35941e2444177c8ade6689de9526" + "4986d95889fb60e84629c9bd9a5acb1cc118be563eb9b3a4a472f82e" + "09a7e778492b562ef7130e88dfe031c79db9d4f7c7a899151b9a4750" + "32b63fc385245fe054e3dd5a97a5f576fe064025d3ce042c566ab2c5" + "07b138db853e3d6959660996546cc9c4a6eafdc777c040d70eaf46f7" + "6dad3979e5c5360c3317166a1c894c94a371876a94df7628fe4eaaf2" + "ccb27d5aaae0ad7ad0f9d4b6ad3b54098746d4524d38407a6deb3ab7" + "8fab78c9" + ), + ( "00" * 32, + "00" * 7 + "02", + "c2c64d378cd536374ae204b9ef933fcd" + "1a8b2288b3dfa49672ab765b54ee27c7" + "8a970e0e955c14f3a88e741b97c286f7" + "5f8fc299e8148362fa198a39531bed6d" + ), + ] + + def runTest(self): + for (key, nonce, stream) in self.tv: + c = ChaCha20.new(key=unhexlify(b(key)), nonce=unhexlify(b(nonce))) + ct = unhexlify(b(stream)) + pt = b("\x00") * len(ct) + self.assertEqual(c.encrypt(pt), ct) + + +class TestOutput(unittest.TestCase): + + def runTest(self): + # Encrypt/Decrypt data and test output parameter + + key = b'4' * 32 + nonce = b'5' * 8 + cipher = ChaCha20.new(key=key, nonce=nonce) + + pt = b'5' * 16 + ct = cipher.encrypt(pt) + + output = bytearray(16) + cipher = ChaCha20.new(key=key, nonce=nonce) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, 
output) + self.assertEqual(res, None) + + cipher = ChaCha20.new(key=key, nonce=nonce) + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + output = memoryview(bytearray(16)) + cipher = ChaCha20.new(key=key, nonce=nonce) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher = ChaCha20.new(key=key, nonce=nonce) + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + cipher = ChaCha20.new(key=key, nonce=nonce) + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) + + cipher = ChaCha20.new(key=key, nonce=nonce) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(7) + + cipher = ChaCha20.new(key=key, nonce=nonce) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + + cipher = ChaCha20.new(key=key, nonce=nonce) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(ChaCha20Test) + tests += list_test_cases(XChaCha20Test) + tests.append(ChaCha20_AGL_NIR()) + tests.append(ByteArrayTest()) + tests.append(MemoryviewTest()) + tests.append(TestOutput()) + + return tests + + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_ChaCha20_Poly1305.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_ChaCha20_Poly1305.py new file mode 100644 index 0000000..f7baad5 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_ChaCha20_Poly1305.py @@ -0,0 +1,770 @@ +# =================================================================== +# +# Copyright (c) 2018, Helder Eijs +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. 
Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors_wycheproof +from Crypto.Util.py3compat import tobytes +from Crypto.Cipher import ChaCha20_Poly1305 +from Crypto.Hash import SHAKE128 + +from Crypto.Util._file_system import pycryptodome_filename +from Crypto.Util.strxor import strxor + + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + + +class ChaCha20Poly1305Tests(unittest.TestCase): + + key_256 = get_tag_random("key_256", 32) + nonce_96 = get_tag_random("nonce_96", 12) + data_128 = get_tag_random("data_128", 16) + + def test_loopback(self): + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + pt = get_tag_random("plaintext", 16 * 100) + ct = cipher.encrypt(pt) + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_nonce(self): + # Nonce can only be 8 or 12 bytes + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=b'H' * 8) + self.assertEqual(len(cipher.nonce), 8) + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=b'H' * 12) + self.assertEqual(len(cipher.nonce), 12) + + # If not passed, the nonce is created randomly + cipher = ChaCha20_Poly1305.new(key=self.key_256) + nonce1 = cipher.nonce + cipher = ChaCha20_Poly1305.new(key=self.key_256) + nonce2 = cipher.nonce + self.assertEqual(len(nonce1), 12) + self.assertNotEqual(nonce1, nonce2) + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + ct = cipher.encrypt(self.data_128) + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + self.assertEquals(ct, cipher.encrypt(self.data_128)) + + def test_nonce_must_be_bytes(self): + self.assertRaises(TypeError, + ChaCha20_Poly1305.new, + key=self.key_256, + nonce=u'test12345678') + + 
def test_nonce_length(self): + # nonce can only be 8 or 12 bytes long + self.assertRaises(ValueError, + ChaCha20_Poly1305.new, + key=self.key_256, + nonce=b'0' * 7) + self.assertRaises(ValueError, + ChaCha20_Poly1305.new, + key=self.key_256, + nonce=b'') + + def test_block_size(self): + # Not based on block ciphers + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + self.failIf(hasattr(cipher, 'block_size')) + + def test_nonce_attribute(self): + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + self.assertEqual(cipher.nonce, self.nonce_96) + + # By default, a 12 bytes long nonce is randomly generated + nonce1 = ChaCha20_Poly1305.new(key=self.key_256).nonce + nonce2 = ChaCha20_Poly1305.new(key=self.key_256).nonce + self.assertEqual(len(nonce1), 12) + self.assertNotEqual(nonce1, nonce2) + + def test_unknown_parameters(self): + self.assertRaises(TypeError, + ChaCha20_Poly1305.new, + key=self.key_256, + param=9) + + def test_null_encryption_decryption(self): + for func in "encrypt", "decrypt": + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + result = getattr(cipher, func)(b"") + self.assertEqual(result, b"") + + def test_either_encrypt_or_decrypt(self): + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.encrypt(b"") + self.assertRaises(TypeError, cipher.decrypt, b"") + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.decrypt(b"") + self.assertRaises(TypeError, cipher.encrypt, b"") + + def test_data_must_be_bytes(self): + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt, u'test1234567890-*') + + def test_mac_len(self): + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + _, mac = 
cipher.encrypt_and_digest(self.data_128) + self.assertEqual(len(mac), 16) + + def test_invalid_mac(self): + from Crypto.Util.strxor import strxor_c + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + ct, mac = cipher.encrypt_and_digest(self.data_128) + + invalid_mac = strxor_c(mac, 0x01) + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, + invalid_mac) + + def test_hex_mac(self): + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + mac_hex = cipher.hexdigest() + self.assertEqual(cipher.digest(), unhexlify(mac_hex)) + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.hexverify(mac_hex) + + def test_message_chunks(self): + # Validate that both associated data and plaintext/ciphertext + # can be broken up in chunks of arbitrary length + + auth_data = get_tag_random("authenticated data", 127) + plaintext = get_tag_random("plaintext", 127) + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.update(auth_data) + ciphertext, ref_mac = cipher.encrypt_and_digest(plaintext) + + def break_up(data, chunk_length): + return [data[i:i+chunk_length] for i in range(0, len(data), + chunk_length)] + + # Encryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + + for chunk in break_up(auth_data, chunk_length): + cipher.update(chunk) + pt2 = b"" + for chunk in break_up(ciphertext, chunk_length): + pt2 += cipher.decrypt(chunk) + self.assertEqual(plaintext, pt2) + cipher.verify(ref_mac) + + # Decryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + + for chunk in break_up(auth_data, chunk_length): + cipher.update(chunk) + ct2 = b"" + for chunk in break_up(plaintext, chunk_length): + ct2 += cipher.encrypt(chunk) + 
self.assertEqual(ciphertext, ct2) + self.assertEquals(cipher.digest(), ref_mac) + + def test_bytearray(self): + + # Encrypt + key_ba = bytearray(self.key_256) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data_128) + data_ba = bytearray(self.data_128) + + cipher1 = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher1.update(self.data_128) + ct = cipher1.encrypt(self.data_128) + tag = cipher1.digest() + + cipher2 = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + key_ba[:3] = b'\xFF\xFF\xFF' + nonce_ba[:3] = b'\xFF\xFF\xFF' + cipher2.update(header_ba) + header_ba[:3] = b'\xFF\xFF\xFF' + ct_test = cipher2.encrypt(data_ba) + data_ba[:3] = b'\x99\x99\x99' + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_ba = bytearray(self.key_256) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data_128) + ct_ba = bytearray(ct) + tag_ba = bytearray(tag) + del data_ba + + cipher3 = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + key_ba[:3] = b'\xFF\xFF\xFF' + nonce_ba[:3] = b'\xFF\xFF\xFF' + cipher3.update(header_ba) + header_ba[:3] = b'\xFF\xFF\xFF' + pt_test = cipher3.decrypt(ct_ba) + ct_ba[:3] = b'\xFF\xFF\xFF' + cipher3.verify(tag_ba) + + self.assertEqual(pt_test, self.data_128) + + def test_memoryview(self): + + # Encrypt + key_mv = memoryview(bytearray(self.key_256)) + nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data_128)) + data_mv = memoryview(bytearray(self.data_128)) + + cipher1 = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher1.update(self.data_128) + ct = cipher1.encrypt(self.data_128) + tag = cipher1.digest() + + cipher2 = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + key_mv[:3] = b'\xFF\xFF\xFF' + nonce_mv[:3] = b'\xFF\xFF\xFF' + cipher2.update(header_mv) + header_mv[:3] = 
b'\xFF\xFF\xFF' + ct_test = cipher2.encrypt(data_mv) + data_mv[:3] = b'\x99\x99\x99' + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_mv = memoryview(bytearray(self.key_256)) + nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data_128)) + ct_mv = memoryview(bytearray(ct)) + tag_mv = memoryview(bytearray(tag)) + del data_mv + + cipher3 = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + key_mv[:3] = b'\xFF\xFF\xFF' + nonce_mv[:3] = b'\xFF\xFF\xFF' + cipher3.update(header_mv) + header_mv[:3] = b'\xFF\xFF\xFF' + pt_test = cipher3.decrypt(ct_mv) + ct_mv[:3] = b'\x99\x99\x99' + cipher3.verify(tag_mv) + + self.assertEqual(pt_test, self.data_128) + + +class XChaCha20Poly1305Tests(unittest.TestCase): + + def test_encrypt(self): + # From https://tools.ietf.org/html/draft-arciszewski-xchacha-03 + # Section A.3.1 + + pt = b""" + 4c616469657320616e642047656e746c656d656e206f662074686520636c6173 + 73206f66202739393a204966204920636f756c64206f6666657220796f75206f + 6e6c79206f6e652074697020666f7220746865206675747572652c2073756e73 + 637265656e20776f756c642062652069742e""" + pt = unhexlify(pt.replace(b"\n", b"").replace(b" ", b"")) + + aad = unhexlify(b"50515253c0c1c2c3c4c5c6c7") + key = unhexlify(b"808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9f") + iv = unhexlify(b"404142434445464748494a4b4c4d4e4f5051525354555657") + + ct = b""" + bd6d179d3e83d43b9576579493c0e939572a1700252bfaccbed2902c21396cbb + 731c7f1b0b4aa6440bf3a82f4eda7e39ae64c6708c54c216cb96b72e1213b452 + 2f8c9ba40db5d945b11b69b982c1bb9e3f3fac2bc369488f76b2383565d3fff9 + 21f9664c97637da9768812f615c68b13b52e""" + ct = unhexlify(ct.replace(b"\n", b"").replace(b" ", b"")) + + tag = unhexlify(b"c0875924c1c7987947deafd8780acf49") + + cipher = ChaCha20_Poly1305.new(key=key, nonce=iv) + cipher.update(aad) + ct_test, tag_test = 
cipher.encrypt_and_digest(pt) + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + + cipher = ChaCha20_Poly1305.new(key=key, nonce=iv) + cipher.update(aad) + cipher.decrypt_and_verify(ct, tag) + + +class ChaCha20Poly1305FSMTests(unittest.TestCase): + + key_256 = get_tag_random("key_256", 32) + nonce_96 = get_tag_random("nonce_96", 12) + data_128 = get_tag_random("data_128", 16) + + def test_valid_init_encrypt_decrypt_digest_verify(self): + # No authenticated data, fixed plaintext + # Verify path INIT->ENCRYPT->DIGEST + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + ct = cipher.encrypt(self.data_128) + mac = cipher.digest() + + # Verify path INIT->DECRYPT->VERIFY + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.verify(mac) + + def test_valid_init_update_digest_verify(self): + # No plaintext, fixed authenticated data + # Verify path INIT->UPDATE->DIGEST + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.update(self.data_128) + mac = cipher.digest() + + # Verify path INIT->UPDATE->VERIFY + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.update(self.data_128) + cipher.verify(mac) + + def test_valid_full_path(self): + # Fixed authenticated data, fixed plaintext + # Verify path INIT->UPDATE->ENCRYPT->DIGEST + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.update(self.data_128) + ct = cipher.encrypt(self.data_128) + mac = cipher.digest() + + # Verify path INIT->UPDATE->DECRYPT->VERIFY + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.update(self.data_128) + cipher.decrypt(ct) + cipher.verify(mac) + + def test_valid_init_digest(self): + # Verify path INIT->DIGEST + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.digest() + + def test_valid_init_verify(self): + # Verify path INIT->VERIFY + cipher = 
ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + mac = cipher.digest() + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.verify(mac) + + def test_valid_multiple_encrypt_or_decrypt(self): + for method_name in "encrypt", "decrypt": + for auth_data in (None, b"333", self.data_128, + self.data_128 + b"3"): + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + if auth_data is not None: + cipher.update(auth_data) + method = getattr(cipher, method_name) + method(self.data_128) + method(self.data_128) + method(self.data_128) + method(self.data_128) + + def test_valid_multiple_digest_or_verify(self): + # Multiple calls to digest + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.update(self.data_128) + first_mac = cipher.digest() + for x in range(4): + self.assertEqual(first_mac, cipher.digest()) + + # Multiple calls to verify + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.update(self.data_128) + for x in range(5): + cipher.verify(first_mac) + + def test_valid_encrypt_and_digest_decrypt_and_verify(self): + # encrypt_and_digest + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.update(self.data_128) + ct, mac = cipher.encrypt_and_digest(self.data_128) + + # decrypt_and_verify + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.update(self.data_128) + pt = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(self.data_128, pt) + + def test_invalid_mixing_encrypt_decrypt(self): + # Once per method, with or without assoc. 
data + for method1_name, method2_name in (("encrypt", "decrypt"), + ("decrypt", "encrypt")): + for assoc_data_present in (True, False): + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + if assoc_data_present: + cipher.update(self.data_128) + getattr(cipher, method1_name)(self.data_128) + self.assertRaises(TypeError, getattr(cipher, method2_name), + self.data_128) + + def test_invalid_encrypt_or_update_after_digest(self): + for method_name in "encrypt", "update": + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.encrypt(self.data_128) + cipher.digest() + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.encrypt_and_digest(self.data_128) + + def test_invalid_decrypt_or_update_after_verify(self): + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + ct = cipher.encrypt(self.data_128) + mac = cipher.digest() + + for method_name in "decrypt", "update": + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.verify(mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.verify(mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.decrypt_and_verify(ct, mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + +def compact(x): + return unhexlify(x.replace(" ", "").replace(":", "")) + + +class TestVectorsRFC(unittest.TestCase): + """Test cases from RFC7539""" + + # AAD, PT, CT, MAC, KEY, NONCE + test_vectors_hex = [ + ( '50 51 52 53 c0 c1 c2 c3 c4 c5 c6 c7', + '4c 61 64 69 65 73 20 61 6e 64 20 47 65 6e 74 6c' + '65 6d 65 6e 20 6f 66 20 74 68 65 20 63 6c 61 73' + '73 
20 6f 66 20 27 39 39 3a 20 49 66 20 49 20 63' + '6f 75 6c 64 20 6f 66 66 65 72 20 79 6f 75 20 6f' + '6e 6c 79 20 6f 6e 65 20 74 69 70 20 66 6f 72 20' + '74 68 65 20 66 75 74 75 72 65 2c 20 73 75 6e 73' + '63 72 65 65 6e 20 77 6f 75 6c 64 20 62 65 20 69' + '74 2e', + 'd3 1a 8d 34 64 8e 60 db 7b 86 af bc 53 ef 7e c2' + 'a4 ad ed 51 29 6e 08 fe a9 e2 b5 a7 36 ee 62 d6' + '3d be a4 5e 8c a9 67 12 82 fa fb 69 da 92 72 8b' + '1a 71 de 0a 9e 06 0b 29 05 d6 a5 b6 7e cd 3b 36' + '92 dd bd 7f 2d 77 8b 8c 98 03 ae e3 28 09 1b 58' + 'fa b3 24 e4 fa d6 75 94 55 85 80 8b 48 31 d7 bc' + '3f f4 de f0 8e 4b 7a 9d e5 76 d2 65 86 ce c6 4b' + '61 16', + '1a:e1:0b:59:4f:09:e2:6a:7e:90:2e:cb:d0:60:06:91', + '80 81 82 83 84 85 86 87 88 89 8a 8b 8c 8d 8e 8f' + '90 91 92 93 94 95 96 97 98 99 9a 9b 9c 9d 9e 9f', + '07 00 00 00' + '40 41 42 43 44 45 46 47', + ), + ( 'f3 33 88 86 00 00 00 00 00 00 4e 91', + '49 6e 74 65 72 6e 65 74 2d 44 72 61 66 74 73 20' + '61 72 65 20 64 72 61 66 74 20 64 6f 63 75 6d 65' + '6e 74 73 20 76 61 6c 69 64 20 66 6f 72 20 61 20' + '6d 61 78 69 6d 75 6d 20 6f 66 20 73 69 78 20 6d' + '6f 6e 74 68 73 20 61 6e 64 20 6d 61 79 20 62 65' + '20 75 70 64 61 74 65 64 2c 20 72 65 70 6c 61 63' + '65 64 2c 20 6f 72 20 6f 62 73 6f 6c 65 74 65 64' + '20 62 79 20 6f 74 68 65 72 20 64 6f 63 75 6d 65' + '6e 74 73 20 61 74 20 61 6e 79 20 74 69 6d 65 2e' + '20 49 74 20 69 73 20 69 6e 61 70 70 72 6f 70 72' + '69 61 74 65 20 74 6f 20 75 73 65 20 49 6e 74 65' + '72 6e 65 74 2d 44 72 61 66 74 73 20 61 73 20 72' + '65 66 65 72 65 6e 63 65 20 6d 61 74 65 72 69 61' + '6c 20 6f 72 20 74 6f 20 63 69 74 65 20 74 68 65' + '6d 20 6f 74 68 65 72 20 74 68 61 6e 20 61 73 20' + '2f e2 80 9c 77 6f 72 6b 20 69 6e 20 70 72 6f 67' + '72 65 73 73 2e 2f e2 80 9d', + '64 a0 86 15 75 86 1a f4 60 f0 62 c7 9b e6 43 bd' + '5e 80 5c fd 34 5c f3 89 f1 08 67 0a c7 6c 8c b2' + '4c 6c fc 18 75 5d 43 ee a0 9e e9 4e 38 2d 26 b0' + 'bd b7 b7 3c 32 1b 01 00 d4 f0 3b 7f 35 58 94 cf' + '33 2f 83 0e 71 0b 97 ce 98 c8 a8 
4a bd 0b 94 81' + '14 ad 17 6e 00 8d 33 bd 60 f9 82 b1 ff 37 c8 55' + '97 97 a0 6e f4 f0 ef 61 c1 86 32 4e 2b 35 06 38' + '36 06 90 7b 6a 7c 02 b0 f9 f6 15 7b 53 c8 67 e4' + 'b9 16 6c 76 7b 80 4d 46 a5 9b 52 16 cd e7 a4 e9' + '90 40 c5 a4 04 33 22 5e e2 82 a1 b0 a0 6c 52 3e' + 'af 45 34 d7 f8 3f a1 15 5b 00 47 71 8c bc 54 6a' + '0d 07 2b 04 b3 56 4e ea 1b 42 22 73 f5 48 27 1a' + '0b b2 31 60 53 fa 76 99 19 55 eb d6 31 59 43 4e' + 'ce bb 4e 46 6d ae 5a 10 73 a6 72 76 27 09 7a 10' + '49 e6 17 d9 1d 36 10 94 fa 68 f0 ff 77 98 71 30' + '30 5b ea ba 2e da 04 df 99 7b 71 4d 6c 6f 2c 29' + 'a6 ad 5c b4 02 2b 02 70 9b', + 'ee ad 9d 67 89 0c bb 22 39 23 36 fe a1 85 1f 38', + '1c 92 40 a5 eb 55 d3 8a f3 33 88 86 04 f6 b5 f0' + '47 39 17 c1 40 2b 80 09 9d ca 5c bc 20 70 75 c0', + '00 00 00 00 01 02 03 04 05 06 07 08', + ) + ] + + test_vectors = [[unhexlify(x.replace(" ","").replace(":","")) for x in tv] for tv in test_vectors_hex] + + def runTest(self): + for assoc_data, pt, ct, mac, key, nonce in self.test_vectors: + # Encrypt + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + cipher.update(assoc_data) + ct2, mac2 = cipher.encrypt_and_digest(pt) + self.assertEqual(ct, ct2) + self.assertEqual(mac, mac2) + + # Decrypt + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + cipher.update(assoc_data) + pt2 = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(pt, pt2) + + +class TestVectorsWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._id = "None" + + def load_tests(self, filename): + + def filter_tag(group): + return group['tagSize'] // 8 + + def filter_algo(root): + return root['algorithm'] + + result = load_test_vectors_wycheproof(("Cipher", "wycheproof"), + filename, + "Wycheproof ChaCha20-Poly1305", + root_tag={'algo': filter_algo}, + group_tag={'tag_size': filter_tag}) + return result + + def setUp(self): + self.tv = [] + 
self.tv.extend(self.load_tests("chacha20_poly1305_test.json")) + self.tv.extend(self.load_tests("xchacha20_poly1305_test.json")) + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_encrypt(self, tv): + self._id = "Wycheproof Encrypt %s Test #%s" % (tv.algo, tv.id) + + try: + cipher = ChaCha20_Poly1305.new(key=tv.key, nonce=tv.iv) + except ValueError as e: + assert len(tv.iv) not in (8, 12) and "Nonce must be" in str(e) + return + + cipher.update(tv.aad) + ct, tag = cipher.encrypt_and_digest(tv.msg) + if tv.valid: + self.assertEqual(ct, tv.ct) + self.assertEqual(tag, tv.tag) + self.warn(tv) + + def test_decrypt(self, tv): + self._id = "Wycheproof Decrypt %s Test #%s" % (tv.algo, tv.id) + + try: + cipher = ChaCha20_Poly1305.new(key=tv.key, nonce=tv.iv) + except ValueError as e: + assert len(tv.iv) not in (8, 12) and "Nonce must be" in str(e) + return + + cipher.update(tv.aad) + try: + pt = cipher.decrypt_and_verify(tv.ct, tv.tag) + except ValueError: + assert not tv.valid + else: + assert tv.valid + self.assertEqual(pt, tv.msg) + self.warn(tv) + + def test_corrupt_decrypt(self, tv): + self._id = "Wycheproof Corrupt Decrypt ChaCha20-Poly1305 Test #" + str(tv.id) + if len(tv.iv) == 0 or len(tv.ct) < 1: + return + cipher = ChaCha20_Poly1305.new(key=tv.key, nonce=tv.iv) + cipher.update(tv.aad) + ct_corrupt = strxor(tv.ct, b"\x00" * (len(tv.ct) - 1) + b"\x01") + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct_corrupt, tv.tag) + + def runTest(self): + + for tv in self.tv: + self.test_encrypt(tv) + self.test_decrypt(tv) + self.test_corrupt_decrypt(tv) + + +class TestOutput(unittest.TestCase): + + def runTest(self): + # Encrypt/Decrypt data and test output parameter + + key = b'4' * 32 + nonce = b'5' * 12 + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + + pt = b'5' * 16 + ct = 
cipher.encrypt(pt) + + output = bytearray(16) + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + output = memoryview(bytearray(16)) + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) + + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(7) + + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +def get_tests(config={}): + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(ChaCha20Poly1305Tests) + tests += list_test_cases(XChaCha20Poly1305Tests) + tests += list_test_cases(ChaCha20Poly1305FSMTests) + tests += [TestVectorsRFC()] + tests += [TestVectorsWycheproof(wycheproof_warnings)] + tests += [TestOutput()] + return tests + + +if __name__ == '__main__': + def suite(): + unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_DES.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_DES.py new file mode 100644 index 0000000..ee261bc --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_DES.py @@ -0,0 +1,374 @@ +# -*- coding: utf-8 -*- +# +# 
SelfTest/Cipher/DES.py: Self-test for the (Single) DES cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.DES""" + +import unittest + +from Crypto.Cipher import DES + +# This is a list of (plaintext, ciphertext, key, description) tuples. 
+SP800_17_B1_KEY = '01' * 8 +SP800_17_B2_PT = '00' * 8 +test_data = [ + # Test vectors from Appendix A of NIST SP 800-17 + # "Modes of Operation Validation System (MOVS): Requirements and Procedures" + # http://csrc.nist.gov/publications/nistpubs/800-17/800-17.pdf + + # Appendix A - "Sample Round Outputs for the DES" + ('0000000000000000', '82dcbafbdeab6602', '10316e028c8f3b4a', + "NIST SP800-17 A"), + + # Table B.1 - Variable Plaintext Known Answer Test + ('8000000000000000', '95f8a5e5dd31d900', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #0'), + ('4000000000000000', 'dd7f121ca5015619', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #1'), + ('2000000000000000', '2e8653104f3834ea', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #2'), + ('1000000000000000', '4bd388ff6cd81d4f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #3'), + ('0800000000000000', '20b9e767b2fb1456', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #4'), + ('0400000000000000', '55579380d77138ef', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #5'), + ('0200000000000000', '6cc5defaaf04512f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #6'), + ('0100000000000000', '0d9f279ba5d87260', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #7'), + ('0080000000000000', 'd9031b0271bd5a0a', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #8'), + ('0040000000000000', '424250b37c3dd951', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #9'), + ('0020000000000000', 'b8061b7ecd9a21e5', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #10'), + ('0010000000000000', 'f15d0f286b65bd28', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #11'), + ('0008000000000000', 'add0cc8d6e5deba1', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #12'), + ('0004000000000000', 'e6d5f82752ad63d1', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #13'), + ('0002000000000000', 'ecbfe3bd3f591a5e', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #14'), + ('0001000000000000', 'f356834379d165cd', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #15'), + ('0000800000000000', '2b9f982f20037fa9', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #16'), + ('0000400000000000', '889de068a16f0be6', 
SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #17'), + ('0000200000000000', 'e19e275d846a1298', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #18'), + ('0000100000000000', '329a8ed523d71aec', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #19'), + ('0000080000000000', 'e7fce22557d23c97', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #20'), + ('0000040000000000', '12a9f5817ff2d65d', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #21'), + ('0000020000000000', 'a484c3ad38dc9c19', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #22'), + ('0000010000000000', 'fbe00a8a1ef8ad72', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #23'), + ('0000008000000000', '750d079407521363', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #24'), + ('0000004000000000', '64feed9c724c2faf', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #25'), + ('0000002000000000', 'f02b263b328e2b60', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #26'), + ('0000001000000000', '9d64555a9a10b852', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #27'), + ('0000000800000000', 'd106ff0bed5255d7', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #28'), + ('0000000400000000', 'e1652c6b138c64a5', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #29'), + ('0000000200000000', 'e428581186ec8f46', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #30'), + ('0000000100000000', 'aeb5f5ede22d1a36', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #31'), + ('0000000080000000', 'e943d7568aec0c5c', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #32'), + ('0000000040000000', 'df98c8276f54b04b', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #33'), + ('0000000020000000', 'b160e4680f6c696f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #34'), + ('0000000010000000', 'fa0752b07d9c4ab8', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #35'), + ('0000000008000000', 'ca3a2b036dbc8502', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #36'), + ('0000000004000000', '5e0905517bb59bcf', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #37'), + ('0000000002000000', '814eeb3b91d90726', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #38'), + ('0000000001000000', '4d49db1532919c9f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #39'), + 
('0000000000800000', '25eb5fc3f8cf0621', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #40'), + ('0000000000400000', 'ab6a20c0620d1c6f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #41'), + ('0000000000200000', '79e90dbc98f92cca', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #42'), + ('0000000000100000', '866ecedd8072bb0e', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #43'), + ('0000000000080000', '8b54536f2f3e64a8', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #44'), + ('0000000000040000', 'ea51d3975595b86b', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #45'), + ('0000000000020000', 'caffc6ac4542de31', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #46'), + ('0000000000010000', '8dd45a2ddf90796c', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #47'), + ('0000000000008000', '1029d55e880ec2d0', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #48'), + ('0000000000004000', '5d86cb23639dbea9', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #49'), + ('0000000000002000', '1d1ca853ae7c0c5f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #50'), + ('0000000000001000', 'ce332329248f3228', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #51'), + ('0000000000000800', '8405d1abe24fb942', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #52'), + ('0000000000000400', 'e643d78090ca4207', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #53'), + ('0000000000000200', '48221b9937748a23', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #54'), + ('0000000000000100', 'dd7c0bbd61fafd54', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #55'), + ('0000000000000080', '2fbc291a570db5c4', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #56'), + ('0000000000000040', 'e07c30d7e4e26e12', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #57'), + ('0000000000000020', '0953e2258e8e90a1', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #58'), + ('0000000000000010', '5b711bc4ceebf2ee', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #59'), + ('0000000000000008', 'cc083f1e6d9e85f6', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #60'), + ('0000000000000004', 'd2fd8867d50d2dfe', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #61'), + ('0000000000000002', '06e7ea22ce92708f', SP800_17_B1_KEY, + 
'NIST SP800-17 B.1 #62'), + ('0000000000000001', '166b40b44aba4bd6', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #63'), + + # Table B.2 - Variable Key Known Answer Test + (SP800_17_B2_PT, '95a8d72813daa94d', '8001010101010101', + 'NIST SP800-17 B.2 #0'), + (SP800_17_B2_PT, '0eec1487dd8c26d5', '4001010101010101', + 'NIST SP800-17 B.2 #1'), + (SP800_17_B2_PT, '7ad16ffb79c45926', '2001010101010101', + 'NIST SP800-17 B.2 #2'), + (SP800_17_B2_PT, 'd3746294ca6a6cf3', '1001010101010101', + 'NIST SP800-17 B.2 #3'), + (SP800_17_B2_PT, '809f5f873c1fd761', '0801010101010101', + 'NIST SP800-17 B.2 #4'), + (SP800_17_B2_PT, 'c02faffec989d1fc', '0401010101010101', + 'NIST SP800-17 B.2 #5'), + (SP800_17_B2_PT, '4615aa1d33e72f10', '0201010101010101', + 'NIST SP800-17 B.2 #6'), + (SP800_17_B2_PT, '2055123350c00858', '0180010101010101', + 'NIST SP800-17 B.2 #7'), + (SP800_17_B2_PT, 'df3b99d6577397c8', '0140010101010101', + 'NIST SP800-17 B.2 #8'), + (SP800_17_B2_PT, '31fe17369b5288c9', '0120010101010101', + 'NIST SP800-17 B.2 #9'), + (SP800_17_B2_PT, 'dfdd3cc64dae1642', '0110010101010101', + 'NIST SP800-17 B.2 #10'), + (SP800_17_B2_PT, '178c83ce2b399d94', '0108010101010101', + 'NIST SP800-17 B.2 #11'), + (SP800_17_B2_PT, '50f636324a9b7f80', '0104010101010101', + 'NIST SP800-17 B.2 #12'), + (SP800_17_B2_PT, 'a8468ee3bc18f06d', '0102010101010101', + 'NIST SP800-17 B.2 #13'), + (SP800_17_B2_PT, 'a2dc9e92fd3cde92', '0101800101010101', + 'NIST SP800-17 B.2 #14'), + (SP800_17_B2_PT, 'cac09f797d031287', '0101400101010101', + 'NIST SP800-17 B.2 #15'), + (SP800_17_B2_PT, '90ba680b22aeb525', '0101200101010101', + 'NIST SP800-17 B.2 #16'), + (SP800_17_B2_PT, 'ce7a24f350e280b6', '0101100101010101', + 'NIST SP800-17 B.2 #17'), + (SP800_17_B2_PT, '882bff0aa01a0b87', '0101080101010101', + 'NIST SP800-17 B.2 #18'), + (SP800_17_B2_PT, '25610288924511c2', '0101040101010101', + 'NIST SP800-17 B.2 #19'), + (SP800_17_B2_PT, 'c71516c29c75d170', '0101020101010101', + 'NIST SP800-17 B.2 #20'), + (SP800_17_B2_PT, 
'5199c29a52c9f059', '0101018001010101', + 'NIST SP800-17 B.2 #21'), + (SP800_17_B2_PT, 'c22f0a294a71f29f', '0101014001010101', + 'NIST SP800-17 B.2 #22'), + (SP800_17_B2_PT, 'ee371483714c02ea', '0101012001010101', + 'NIST SP800-17 B.2 #23'), + (SP800_17_B2_PT, 'a81fbd448f9e522f', '0101011001010101', + 'NIST SP800-17 B.2 #24'), + (SP800_17_B2_PT, '4f644c92e192dfed', '0101010801010101', + 'NIST SP800-17 B.2 #25'), + (SP800_17_B2_PT, '1afa9a66a6df92ae', '0101010401010101', + 'NIST SP800-17 B.2 #26'), + (SP800_17_B2_PT, 'b3c1cc715cb879d8', '0101010201010101', + 'NIST SP800-17 B.2 #27'), + (SP800_17_B2_PT, '19d032e64ab0bd8b', '0101010180010101', + 'NIST SP800-17 B.2 #28'), + (SP800_17_B2_PT, '3cfaa7a7dc8720dc', '0101010140010101', + 'NIST SP800-17 B.2 #29'), + (SP800_17_B2_PT, 'b7265f7f447ac6f3', '0101010120010101', + 'NIST SP800-17 B.2 #30'), + (SP800_17_B2_PT, '9db73b3c0d163f54', '0101010110010101', + 'NIST SP800-17 B.2 #31'), + (SP800_17_B2_PT, '8181b65babf4a975', '0101010108010101', + 'NIST SP800-17 B.2 #32'), + (SP800_17_B2_PT, '93c9b64042eaa240', '0101010104010101', + 'NIST SP800-17 B.2 #33'), + (SP800_17_B2_PT, '5570530829705592', '0101010102010101', + 'NIST SP800-17 B.2 #34'), + (SP800_17_B2_PT, '8638809e878787a0', '0101010101800101', + 'NIST SP800-17 B.2 #35'), + (SP800_17_B2_PT, '41b9a79af79ac208', '0101010101400101', + 'NIST SP800-17 B.2 #36'), + (SP800_17_B2_PT, '7a9be42f2009a892', '0101010101200101', + 'NIST SP800-17 B.2 #37'), + (SP800_17_B2_PT, '29038d56ba6d2745', '0101010101100101', + 'NIST SP800-17 B.2 #38'), + (SP800_17_B2_PT, '5495c6abf1e5df51', '0101010101080101', + 'NIST SP800-17 B.2 #39'), + (SP800_17_B2_PT, 'ae13dbd561488933', '0101010101040101', + 'NIST SP800-17 B.2 #40'), + (SP800_17_B2_PT, '024d1ffa8904e389', '0101010101020101', + 'NIST SP800-17 B.2 #41'), + (SP800_17_B2_PT, 'd1399712f99bf02e', '0101010101018001', + 'NIST SP800-17 B.2 #42'), + (SP800_17_B2_PT, '14c1d7c1cffec79e', '0101010101014001', + 'NIST SP800-17 B.2 #43'), + 
(SP800_17_B2_PT, '1de5279dae3bed6f', '0101010101012001', + 'NIST SP800-17 B.2 #44'), + (SP800_17_B2_PT, 'e941a33f85501303', '0101010101011001', + 'NIST SP800-17 B.2 #45'), + (SP800_17_B2_PT, 'da99dbbc9a03f379', '0101010101010801', + 'NIST SP800-17 B.2 #46'), + (SP800_17_B2_PT, 'b7fc92f91d8e92e9', '0101010101010401', + 'NIST SP800-17 B.2 #47'), + (SP800_17_B2_PT, 'ae8e5caa3ca04e85', '0101010101010201', + 'NIST SP800-17 B.2 #48'), + (SP800_17_B2_PT, '9cc62df43b6eed74', '0101010101010180', + 'NIST SP800-17 B.2 #49'), + (SP800_17_B2_PT, 'd863dbb5c59a91a0', '0101010101010140', + 'NIST SP800-17 B.2 #50'), + (SP800_17_B2_PT, 'a1ab2190545b91d7', '0101010101010120', + 'NIST SP800-17 B.2 #51'), + (SP800_17_B2_PT, '0875041e64c570f7', '0101010101010110', + 'NIST SP800-17 B.2 #52'), + (SP800_17_B2_PT, '5a594528bebef1cc', '0101010101010108', + 'NIST SP800-17 B.2 #53'), + (SP800_17_B2_PT, 'fcdb3291de21f0c0', '0101010101010104', + 'NIST SP800-17 B.2 #54'), + (SP800_17_B2_PT, '869efd7f9f265a09', '0101010101010102', + 'NIST SP800-17 B.2 #55'), +] + +class RonRivestTest(unittest.TestCase): + """ Ronald L. Rivest's DES test, see + http://people.csail.mit.edu/rivest/Destest.txt + ABSTRACT + -------- + + We present a simple way to test the correctness of a DES implementation: + Use the recurrence relation: + + X0 = 9474B8E8C73BCA7D (hexadecimal) + + X(i+1) = IF (i is even) THEN E(Xi,Xi) ELSE D(Xi,Xi) + + to compute a sequence of 64-bit values: X0, X1, X2, ..., X16. Here + E(X,K) denotes the DES encryption of X using key K, and D(X,K) denotes + the DES decryption of X using key K. If you obtain + + X16 = 1B1A2DDB4C642438 + + your implementation does not have any of the 36,568 possible single-fault + errors described herein. 
+ """ + def runTest(self): + from binascii import b2a_hex + + X = [] + X[0:] = [b'\x94\x74\xB8\xE8\xC7\x3B\xCA\x7D'] + + for i in range(16): + c = DES.new(X[i],DES.MODE_ECB) + if not (i&1): # (num&1) returns 1 for odd numbers + X[i+1:] = [c.encrypt(X[i])] # even + else: + X[i+1:] = [c.decrypt(X[i])] # odd + + self.assertEqual(b2a_hex(X[16]), + b2a_hex(b'\x1B\x1A\x2D\xDB\x4C\x64\x24\x38')) + + +class TestOutput(unittest.TestCase): + + def runTest(self): + # Encrypt/Decrypt data and test output parameter + + cipher = DES.new(b'4'*8, DES.MODE_ECB) + + pt = b'5' * 8 + ct = cipher.encrypt(pt) + + output = bytearray(8) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + output = memoryview(bytearray(8)) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*8) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*8) + + shorter_output = bytearray(7) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +def get_tests(config={}): + from .common import make_block_tests + tests = make_block_tests(DES, "DES", test_data) + tests += [RonRivestTest()] + tests += [TestOutput()] + return tests + + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_DES3.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_DES3.py new file mode 100644 index 0000000..8d6a648 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_DES3.py @@ -0,0 +1,195 @@ +# -*- coding: utf-8 -*- +# +# 
SelfTest/Cipher/DES3.py: Self-test for the Triple-DES cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.DES3""" + +import unittest +from binascii import hexlify, unhexlify + +from Crypto.Cipher import DES3 + +from Crypto.Util.strxor import strxor_c +from Crypto.Util.py3compat import bchr, tostr +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases + +# This is a list of (plaintext, ciphertext, key, description) tuples. +test_data = [ + # Test vector from Appendix B of NIST SP 800-67 + # "Recommendation for the Triple Data Encryption Algorithm (TDEA) Block + # Cipher" + # http://csrc.nist.gov/publications/nistpubs/800-67/SP800-67.pdf + ('54686520717566636b2062726f776e20666f78206a756d70', + 'a826fd8ce53b855fcce21c8112256fe668d5c05dd9b6b900', + '0123456789abcdef23456789abcdef01456789abcdef0123', + 'NIST SP800-67 B.1'), + + # This test is designed to test the DES3 API, not the correctness of the + # output. 
+ ('21e81b7ade88a259', '5c577d4d9b20c0f8', + '9b397ebf81b1181e282f4bb8adbadc6b', 'Two-key 3DES'), +] + +# NIST CAVP test vectors + +nist_tdes_mmt_files = ("TECBMMT2.rsp", "TECBMMT3.rsp") + +for tdes_file in nist_tdes_mmt_files: + + test_vectors = load_test_vectors( + ("Cipher", "TDES"), + tdes_file, + "TDES ECB (%s)" % tdes_file, + {"count": lambda x: int(x)}) or [] + + for index, tv in enumerate(test_vectors): + + # The test vector file contains some directive lines + if isinstance(tv, str): + continue + + key = tv.key1 + tv.key2 + tv.key3 + test_data_item = (tostr(hexlify(tv.plaintext)), + tostr(hexlify(tv.ciphertext)), + tostr(hexlify(key)), + "%s (%s)" % (tdes_file, index)) + test_data.append(test_data_item) + + +class CheckParity(unittest.TestCase): + + def test_parity_option2(self): + before_2k = unhexlify("CABF326FA56734324FFCCABCDEFACABF") + after_2k = DES3.adjust_key_parity(before_2k) + self.assertEqual(after_2k, + unhexlify("CBBF326EA46734324FFDCBBCDFFBCBBF")) + + def test_parity_option3(self): + before_3k = unhexlify("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCC") + after_3k = DES3.adjust_key_parity(before_3k) + self.assertEqual(after_3k, + unhexlify("ABABABABABABABABBABABABABABABABACDCDCDCDCDCDCDCD")) + + def test_degradation(self): + sub_key1 = bchr(1) * 8 + sub_key2 = bchr(255) * 8 + + # K1 == K2 + self.assertRaises(ValueError, DES3.adjust_key_parity, + sub_key1 * 2 + sub_key2) + + # K2 == K3 + self.assertRaises(ValueError, DES3.adjust_key_parity, + sub_key1 + sub_key2 * 2) + + # K1 == K2 == K3 + self.assertRaises(ValueError, DES3.adjust_key_parity, + sub_key1 * 3) + + # K1 == K2 (with different parity) + self.assertRaises(ValueError, DES3.adjust_key_parity, + sub_key1 + strxor_c(sub_key1, 1) + sub_key2) + + +class DegenerateToDESTest(unittest.TestCase): + + def runTest(self): + sub_key1 = bchr(1) * 8 + sub_key2 = bchr(255) * 8 + + # K1 == K2 + self.assertRaises(ValueError, DES3.new, + sub_key1 * 2 + sub_key2, + DES3.MODE_ECB) + + # K2 == K3 + 
self.assertRaises(ValueError, DES3.new, + sub_key1 + sub_key2 * 2, + DES3.MODE_ECB) + + # K1 == K2 == K3 + self.assertRaises(ValueError, DES3.new, + sub_key1 * 3, + DES3.MODE_ECB) + + # K2 == K3 (parity is ignored) + self.assertRaises(ValueError, DES3.new, + sub_key1 + sub_key2 + strxor_c(sub_key2, 0x1), + DES3.MODE_ECB) + + +class TestOutput(unittest.TestCase): + + def runTest(self): + # Encrypt/Decrypt data and test output parameter + + cipher = DES3.new(b'4'*8 + b'G'*8 + b'T'*8, DES3.MODE_ECB) + + pt = b'5' * 16 + ct = cipher.encrypt(pt) + + output = bytearray(16) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + output = memoryview(bytearray(16)) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(7) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +def get_tests(config={}): + from .common import make_block_tests + + tests = [] + tests = make_block_tests(DES3, "DES3", test_data) + tests.append(DegenerateToDESTest()) + tests += list_test_cases(CheckParity) + tests += [TestOutput()] + return tests + + +if __name__ == '__main__': + import unittest + + def suite(): + unittest.TestSuite(get_tests()) + + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_EAX.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_EAX.py new file mode 100644 index 0000000..ad88262 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_EAX.py @@ -0,0 +1,772 @@ +# 
=================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors_wycheproof +from Crypto.Util.py3compat import tobytes, bchr +from Crypto.Cipher import AES, DES3 +from Crypto.Hash import SHAKE128 + +from Crypto.Util.strxor import strxor + + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + + +class EaxTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + key_192 = get_tag_random("key_192", 16) + nonce_96 = get_tag_random("nonce_128", 12) + data_128 = get_tag_random("data_128", 16) + + def test_loopback_128(self): + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + pt = get_tag_random("plaintext", 16 * 100) + ct = cipher.encrypt(pt) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_loopback_64(self): + cipher = DES3.new(self.key_192, DES3.MODE_EAX, nonce=self.nonce_96) + pt = get_tag_random("plaintext", 8 * 100) + ct = cipher.encrypt(pt) + + cipher = DES3.new(self.key_192, DES3.MODE_EAX, nonce=self.nonce_96) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_nonce(self): + # If not passed, the nonce is created randomly + cipher = AES.new(self.key_128, AES.MODE_EAX) + nonce1 = cipher.nonce + cipher = AES.new(self.key_128, AES.MODE_EAX) + nonce2 = cipher.nonce + self.assertEqual(len(nonce1), 16) + self.assertNotEqual(nonce1, nonce2) + + cipher = AES.new(self.key_128, AES.MODE_EAX, self.nonce_96) + ct = cipher.encrypt(self.data_128) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertEquals(ct, cipher.encrypt(self.data_128)) + + def test_nonce_must_be_bytes(self): + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_EAX, + nonce=u'test12345678') + + def test_nonce_length(self): + # 
nonce can be of any length (but not empty) + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_EAX, + nonce=b"") + + for x in range(1, 128): + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=bchr(1) * x) + cipher.encrypt(bchr(1)) + + def test_block_size_128(self): + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertEqual(cipher.block_size, AES.block_size) + + def test_block_size_64(self): + cipher = DES3.new(self.key_192, AES.MODE_EAX, nonce=self.nonce_96) + self.assertEqual(cipher.block_size, DES3.block_size) + + def test_nonce_attribute(self): + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertEqual(cipher.nonce, self.nonce_96) + + # By default, a 16 bytes long nonce is randomly generated + nonce1 = AES.new(self.key_128, AES.MODE_EAX).nonce + nonce2 = AES.new(self.key_128, AES.MODE_EAX).nonce + self.assertEqual(len(nonce1), 16) + self.assertNotEqual(nonce1, nonce2) + + def test_unknown_parameters(self): + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_EAX, + self.nonce_96, 7) + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_EAX, + nonce=self.nonce_96, unknown=7) + + # But some are only known by the base cipher + # (e.g. 
use_aesni consumed by the AES module) + AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96, + use_aesni=False) + + def test_null_encryption_decryption(self): + for func in "encrypt", "decrypt": + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + result = getattr(cipher, func)(b"") + self.assertEqual(result, b"") + + def test_either_encrypt_or_decrypt(self): + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.encrypt(b"") + self.assertRaises(TypeError, cipher.decrypt, b"") + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.decrypt(b"") + self.assertRaises(TypeError, cipher.encrypt, b"") + + def test_data_must_be_bytes(self): + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt, u'test1234567890-*') + + def test_mac_len(self): + # Invalid MAC length + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_EAX, + nonce=self.nonce_96, mac_len=3) + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_EAX, + nonce=self.nonce_96, mac_len=16+1) + + # Valid MAC length + for mac_len in range(5, 16 + 1): + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96, + mac_len=mac_len) + _, mac = cipher.encrypt_and_digest(self.data_128) + self.assertEqual(len(mac), mac_len) + + # Default MAC length + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + _, mac = cipher.encrypt_and_digest(self.data_128) + self.assertEqual(len(mac), 16) + + def test_invalid_mac(self): + from Crypto.Util.strxor import strxor_c + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + ct, mac = cipher.encrypt_and_digest(self.data_128) + + invalid_mac = strxor_c(mac, 0x01) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertRaises(ValueError, 
cipher.decrypt_and_verify, ct, + invalid_mac) + + def test_hex_mac(self): + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + mac_hex = cipher.hexdigest() + self.assertEqual(cipher.digest(), unhexlify(mac_hex)) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.hexverify(mac_hex) + + def test_message_chunks(self): + # Validate that both associated data and plaintext/ciphertext + # can be broken up in chunks of arbitrary length + + auth_data = get_tag_random("authenticated data", 127) + plaintext = get_tag_random("plaintext", 127) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.update(auth_data) + ciphertext, ref_mac = cipher.encrypt_and_digest(plaintext) + + def break_up(data, chunk_length): + return [data[i:i+chunk_length] for i in range(0, len(data), + chunk_length)] + + # Encryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + + for chunk in break_up(auth_data, chunk_length): + cipher.update(chunk) + pt2 = b"" + for chunk in break_up(ciphertext, chunk_length): + pt2 += cipher.decrypt(chunk) + self.assertEqual(plaintext, pt2) + cipher.verify(ref_mac) + + # Decryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + + for chunk in break_up(auth_data, chunk_length): + cipher.update(chunk) + ct2 = b"" + for chunk in break_up(plaintext, chunk_length): + ct2 += cipher.encrypt(chunk) + self.assertEqual(ciphertext, ct2) + self.assertEquals(cipher.digest(), ref_mac) + + def test_bytearray(self): + + # Encrypt + key_ba = bytearray(self.key_128) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data_128) + data_ba = bytearray(self.data_128) + + cipher1 = AES.new(self.key_128, + AES.MODE_EAX, + nonce=self.nonce_96) + cipher1.update(self.data_128) + ct = cipher1.encrypt(self.data_128) + tag = cipher1.digest() + + 
cipher2 = AES.new(key_ba, + AES.MODE_EAX, + nonce=nonce_ba) + key_ba[:3] = b'\xFF\xFF\xFF' + nonce_ba[:3] = b'\xFF\xFF\xFF' + cipher2.update(header_ba) + header_ba[:3] = b'\xFF\xFF\xFF' + ct_test = cipher2.encrypt(data_ba) + data_ba[:3] = b'\x99\x99\x99' + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_ba = bytearray(self.key_128) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data_128) + ct_ba = bytearray(ct) + tag_ba = bytearray(tag) + del data_ba + + cipher3 = AES.new(key_ba, + AES.MODE_EAX, + nonce=nonce_ba) + key_ba[:3] = b'\xFF\xFF\xFF' + nonce_ba[:3] = b'\xFF\xFF\xFF' + cipher3.update(header_ba) + header_ba[:3] = b'\xFF\xFF\xFF' + pt_test = cipher3.decrypt(ct_ba) + ct_ba[:3] = b'\xFF\xFF\xFF' + cipher3.verify(tag_ba) + + self.assertEqual(pt_test, self.data_128) + + def test_memoryview(self): + + # Encrypt + key_mv = memoryview(bytearray(self.key_128)) + nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data_128)) + data_mv = memoryview(bytearray(self.data_128)) + + cipher1 = AES.new(self.key_128, + AES.MODE_EAX, + nonce=self.nonce_96) + cipher1.update(self.data_128) + ct = cipher1.encrypt(self.data_128) + tag = cipher1.digest() + + cipher2 = AES.new(key_mv, + AES.MODE_EAX, + nonce=nonce_mv) + key_mv[:3] = b'\xFF\xFF\xFF' + nonce_mv[:3] = b'\xFF\xFF\xFF' + cipher2.update(header_mv) + header_mv[:3] = b'\xFF\xFF\xFF' + ct_test = cipher2.encrypt(data_mv) + data_mv[:3] = b'\x99\x99\x99' + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_mv = memoryview(bytearray(self.key_128)) + nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data_128)) + ct_mv = memoryview(bytearray(ct)) + tag_mv = memoryview(bytearray(tag)) + del data_mv + + cipher3 = 
AES.new(key_mv, + AES.MODE_EAX, + nonce=nonce_mv) + key_mv[:3] = b'\xFF\xFF\xFF' + nonce_mv[:3] = b'\xFF\xFF\xFF' + cipher3.update(header_mv) + header_mv[:3] = b'\xFF\xFF\xFF' + pt_test = cipher3.decrypt(ct_mv) + ct_mv[:3] = b'\x99\x99\x99' + cipher3.verify(tag_mv) + + self.assertEqual(pt_test, self.data_128) + + def test_output_param(self): + + pt = b'5' * 16 + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + ct = cipher.encrypt(pt) + tag = cipher.digest() + + output = bytearray(16) + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + res, tag_out = cipher.encrypt_and_digest(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + self.assertEqual(tag, tag_out) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + res = cipher.decrypt_and_verify(ct, tag, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + def test_output_param_memoryview(self): + + pt = b'5' * 16 + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + ct = cipher.encrypt(pt) + + output = memoryview(bytearray(16)) + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + def test_output_param_neg(self): + + pt = b'5' * 16 + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + ct = cipher.encrypt(pt) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertRaises(TypeError, 
cipher.encrypt, pt, output=b'0'*16) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(15) + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +class EaxFSMTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + nonce_96 = get_tag_random("nonce_128", 12) + data_128 = get_tag_random("data_128", 16) + + def test_valid_init_encrypt_decrypt_digest_verify(self): + # No authenticated data, fixed plaintext + # Verify path INIT->ENCRYPT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_EAX, + nonce=self.nonce_96) + ct = cipher.encrypt(self.data_128) + mac = cipher.digest() + + # Verify path INIT->DECRYPT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_EAX, + nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.verify(mac) + + def test_valid_init_update_digest_verify(self): + # No plaintext, fixed authenticated data + # Verify path INIT->UPDATE->DIGEST + cipher = AES.new(self.key_128, AES.MODE_EAX, + nonce=self.nonce_96) + cipher.update(self.data_128) + mac = cipher.digest() + + # Verify path INIT->UPDATE->VERIFY + cipher = AES.new(self.key_128, AES.MODE_EAX, + nonce=self.nonce_96) + cipher.update(self.data_128) + cipher.verify(mac) + + def test_valid_full_path(self): + # Fixed authenticated data, fixed plaintext + # Verify path INIT->UPDATE->ENCRYPT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_EAX, + nonce=self.nonce_96) + cipher.update(self.data_128) + ct = cipher.encrypt(self.data_128) + mac = cipher.digest() + + # Verify path INIT->UPDATE->DECRYPT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_EAX, + nonce=self.nonce_96) + cipher.update(self.data_128) + cipher.decrypt(ct) + cipher.verify(mac) + + 
def test_valid_init_digest(self): + # Verify path INIT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.digest() + + def test_valid_init_verify(self): + # Verify path INIT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + mac = cipher.digest() + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.verify(mac) + + def test_valid_multiple_encrypt_or_decrypt(self): + for method_name in "encrypt", "decrypt": + for auth_data in (None, b"333", self.data_128, + self.data_128 + b"3"): + if auth_data is None: + assoc_len = None + else: + assoc_len = len(auth_data) + cipher = AES.new(self.key_128, AES.MODE_EAX, + nonce=self.nonce_96) + if auth_data is not None: + cipher.update(auth_data) + method = getattr(cipher, method_name) + method(self.data_128) + method(self.data_128) + method(self.data_128) + method(self.data_128) + + def test_valid_multiple_digest_or_verify(self): + # Multiple calls to digest + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.update(self.data_128) + first_mac = cipher.digest() + for x in range(4): + self.assertEqual(first_mac, cipher.digest()) + + # Multiple calls to verify + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.update(self.data_128) + for x in range(5): + cipher.verify(first_mac) + + def test_valid_encrypt_and_digest_decrypt_and_verify(self): + # encrypt_and_digest + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.update(self.data_128) + ct, mac = cipher.encrypt_and_digest(self.data_128) + + # decrypt_and_verify + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.update(self.data_128) + pt = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(self.data_128, pt) + + def test_invalid_mixing_encrypt_decrypt(self): + # Once per method, with or without assoc. 
data + for method1_name, method2_name in (("encrypt", "decrypt"), + ("decrypt", "encrypt")): + for assoc_data_present in (True, False): + cipher = AES.new(self.key_128, AES.MODE_EAX, + nonce=self.nonce_96) + if assoc_data_present: + cipher.update(self.data_128) + getattr(cipher, method1_name)(self.data_128) + self.assertRaises(TypeError, getattr(cipher, method2_name), + self.data_128) + + def test_invalid_encrypt_or_update_after_digest(self): + for method_name in "encrypt", "update": + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.encrypt(self.data_128) + cipher.digest() + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.encrypt_and_digest(self.data_128) + + def test_invalid_decrypt_or_update_after_verify(self): + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + ct = cipher.encrypt(self.data_128) + mac = cipher.digest() + + for method_name in "decrypt", "update": + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.verify(mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.decrypt_and_verify(ct, mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + +class TestVectorsPaper(unittest.TestCase): + """Class exercising the EAX test vectors found in + http://www.cs.ucdavis.edu/~rogaway/papers/eax.pdf""" + + test_vectors_hex = [ + ( '6bfb914fd07eae6b', + '', + '', + 'e037830e8389f27b025a2d6527e79d01', + '233952dee4d5ed5f9b9c6d6ff80ff478', + '62EC67F9C3A4A407FCB2A8C49031A8B3' + ), + ( + 'fa3bfd4806eb53fa', + 'f7fb', + '19dd', + '5c4c9331049d0bdab0277408f67967e5', + '91945d3f4dcbee0bf45ef52255f095a4', + 'BECAF043B0A23D843194BA972C66DEBD' + ), + ( '234a3463c1264ac6', + '1a47cb4933', + 'd851d5bae0', + '3a59f238a23e39199dc9266626c40f80', + 
'01f74ad64077f2e704c0f60ada3dd523', + '70C3DB4F0D26368400A10ED05D2BFF5E' + ), + ( + '33cce2eabff5a79d', + '481c9e39b1', + '632a9d131a', + 'd4c168a4225d8e1ff755939974a7bede', + 'd07cf6cbb7f313bdde66b727afd3c5e8', + '8408DFFF3C1A2B1292DC199E46B7D617' + ), + ( + 'aeb96eaebe2970e9', + '40d0c07da5e4', + '071dfe16c675', + 'cb0677e536f73afe6a14b74ee49844dd', + '35b6d0580005bbc12b0587124557d2c2', + 'FDB6B06676EEDC5C61D74276E1F8E816' + ), + ( + 'd4482d1ca78dce0f', + '4de3b35c3fc039245bd1fb7d', + '835bb4f15d743e350e728414', + 'abb8644fd6ccb86947c5e10590210a4f', + 'bd8e6e11475e60b268784c38c62feb22', + '6EAC5C93072D8E8513F750935E46DA1B' + ), + ( + '65d2017990d62528', + '8b0a79306c9ce7ed99dae4f87f8dd61636', + '02083e3979da014812f59f11d52630da30', + '137327d10649b0aa6e1c181db617d7f2', + '7c77d6e813bed5ac98baa417477a2e7d', + '1A8C98DCD73D38393B2BF1569DEEFC19' + ), + ( + '54b9f04e6a09189a', + '1bda122bce8a8dbaf1877d962b8592dd2d56', + '2ec47b2c4954a489afc7ba4897edcdae8cc3', + '3b60450599bd02c96382902aef7f832a', + '5fff20cafab119ca2fc73549e20f5b0d', + 'DDE59B97D722156D4D9AFF2BC7559826' + ), + ( + '899a175897561d7e', + '6cf36720872b8513f6eab1a8a44438d5ef11', + '0de18fd0fdd91e7af19f1d8ee8733938b1e8', + 'e7f6d2231618102fdb7fe55ff1991700', + 'a4a4782bcffd3ec5e7ef6d8c34a56123', + 'B781FCF2F75FA5A8DE97A9CA48E522EC' + ), + ( + '126735fcc320d25a', + 'ca40d7446e545ffaed3bd12a740a659ffbbb3ceab7', + 'cb8920f87a6c75cff39627b56e3ed197c552d295a7', + 'cfc46afc253b4652b1af3795b124ab6e', + '8395fcf1e95bebd697bd010bc766aac3', + '22E7ADD93CFC6393C57EC0B3C17D6B44' + ), + ] + + test_vectors = [[unhexlify(x) for x in tv] for tv in test_vectors_hex] + + def runTest(self): + for assoc_data, pt, ct, mac, key, nonce in self.test_vectors: + # Encrypt + cipher = AES.new(key, AES.MODE_EAX, nonce, mac_len=len(mac)) + cipher.update(assoc_data) + ct2, mac2 = cipher.encrypt_and_digest(pt) + self.assertEqual(ct, ct2) + self.assertEqual(mac, mac2) + + # Decrypt + cipher = AES.new(key, AES.MODE_EAX, nonce, 
mac_len=len(mac)) + cipher.update(assoc_data) + pt2 = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(pt, pt2) + + +class TestVectorsWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._id = "None" + + def setUp(self): + + def filter_tag(group): + return group['tagSize'] // 8 + + self.tv = load_test_vectors_wycheproof(("Cipher", "wycheproof"), + "aes_eax_test.json", + "Wycheproof EAX", + group_tag={'tag_size': filter_tag}) + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_encrypt(self, tv): + self._id = "Wycheproof Encrypt EAX Test #" + str(tv.id) + + try: + cipher = AES.new(tv.key, AES.MODE_EAX, tv.iv, mac_len=tv.tag_size) + except ValueError as e: + assert len(tv.iv) == 0 and "Nonce cannot be empty" in str(e) + return + + cipher.update(tv.aad) + ct, tag = cipher.encrypt_and_digest(tv.msg) + if tv.valid: + self.assertEqual(ct, tv.ct) + self.assertEqual(tag, tv.tag) + self.warn(tv) + + def test_decrypt(self, tv): + self._id = "Wycheproof Decrypt EAX Test #" + str(tv.id) + + try: + cipher = AES.new(tv.key, AES.MODE_EAX, tv.iv, mac_len=tv.tag_size) + except ValueError as e: + assert len(tv.iv) == 0 and "Nonce cannot be empty" in str(e) + return + + cipher.update(tv.aad) + try: + pt = cipher.decrypt_and_verify(tv.ct, tv.tag) + except ValueError: + assert not tv.valid + else: + assert tv.valid + self.assertEqual(pt, tv.msg) + self.warn(tv) + + def test_corrupt_decrypt(self, tv): + self._id = "Wycheproof Corrupt Decrypt EAX Test #" + str(tv.id) + if len(tv.iv) == 0 or len(tv.ct) < 1: + return + cipher = AES.new(tv.key, AES.MODE_EAX, tv.iv, mac_len=tv.tag_size) + cipher.update(tv.aad) + ct_corrupt = strxor(tv.ct, b"\x00" * (len(tv.ct) - 1) + b"\x01") + 
self.assertRaises(ValueError, cipher.decrypt_and_verify, ct_corrupt, tv.tag) + + def runTest(self): + + for tv in self.tv: + self.test_encrypt(tv) + self.test_decrypt(tv) + self.test_corrupt_decrypt(tv) + + +class TestOtherCiphers(unittest.TestCase): + + @classmethod + def create_test(cls, name, factory, key_size): + + def test_template(self, factory=factory, key_size=key_size): + cipher = factory.new(get_tag_random("cipher", key_size), + factory.MODE_EAX, + nonce=b"nonce") + ct, mac = cipher.encrypt_and_digest(b"plaintext") + + cipher = factory.new(get_tag_random("cipher", key_size), + factory.MODE_EAX, + nonce=b"nonce") + pt2 = cipher.decrypt_and_verify(ct, mac) + + self.assertEqual(b"plaintext", pt2) + + setattr(cls, "test_" + name, test_template) + + +from Crypto.Cipher import DES, DES3, ARC2, CAST, Blowfish + +TestOtherCiphers.create_test("DES_" + str(DES.key_size), DES, DES.key_size) +for ks in DES3.key_size: + TestOtherCiphers.create_test("DES3_" + str(ks), DES3, ks) +for ks in ARC2.key_size: + TestOtherCiphers.create_test("ARC2_" + str(ks), ARC2, ks) +for ks in CAST.key_size: + TestOtherCiphers.create_test("CAST_" + str(ks), CAST, ks) +for ks in Blowfish.key_size: + TestOtherCiphers.create_test("Blowfish_" + str(ks), Blowfish, ks) + + +def get_tests(config={}): + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(EaxTests) + tests += list_test_cases(EaxFSMTests) + tests += [ TestVectorsPaper() ] + tests += [ TestVectorsWycheproof(wycheproof_warnings) ] + tests += list_test_cases(TestOtherCiphers) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_GCM.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_GCM.py new file mode 100644 index 0000000..5e7694f --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_GCM.py @@ -0,0 +1,950 @@ +# 
=================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +from __future__ import print_function + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors, load_test_vectors_wycheproof + +from Crypto.Util.py3compat import tobytes, bchr +from Crypto.Cipher import AES +from Crypto.Hash import SHAKE128, SHA256 + +from Crypto.Util.strxor import strxor + + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + + +class GcmTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + nonce_96 = get_tag_random("nonce_128", 12) + data_128 = get_tag_random("data_128", 16) + + def test_loopback_128(self): + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + pt = get_tag_random("plaintext", 16 * 100) + ct = cipher.encrypt(pt) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_nonce(self): + # Nonce is optional (a random one will be created) + AES.new(self.key_128, AES.MODE_GCM) + + cipher = AES.new(self.key_128, AES.MODE_GCM, self.nonce_96) + ct = cipher.encrypt(self.data_128) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertEquals(ct, cipher.encrypt(self.data_128)) + + def test_nonce_must_be_bytes(self): + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_GCM, + nonce=u'test12345678') + + def test_nonce_length(self): + # nonce can be of any length (but not empty) + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_GCM, + nonce=b"") + + for x in range(1, 128): + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=bchr(1) * x) + cipher.encrypt(bchr(1)) + + def test_block_size_128(self): + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertEqual(cipher.block_size, AES.block_size) + + def test_nonce_attribute(self): + cipher = 
AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertEqual(cipher.nonce, self.nonce_96) + + # By default, a 15 bytes long nonce is randomly generated + nonce1 = AES.new(self.key_128, AES.MODE_GCM).nonce + nonce2 = AES.new(self.key_128, AES.MODE_GCM).nonce + self.assertEqual(len(nonce1), 16) + self.assertNotEqual(nonce1, nonce2) + + def test_unknown_parameters(self): + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_GCM, + self.nonce_96, 7) + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_GCM, + nonce=self.nonce_96, unknown=7) + + # But some are only known by the base cipher + # (e.g. use_aesni consumed by the AES module) + AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96, + use_aesni=False) + + def test_null_encryption_decryption(self): + for func in "encrypt", "decrypt": + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + result = getattr(cipher, func)(b"") + self.assertEqual(result, b"") + + def test_either_encrypt_or_decrypt(self): + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.encrypt(b"") + self.assertRaises(TypeError, cipher.decrypt, b"") + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.decrypt(b"") + self.assertRaises(TypeError, cipher.encrypt, b"") + + def test_data_must_be_bytes(self): + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt, u'test1234567890-*') + + def test_mac_len(self): + # Invalid MAC length + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_GCM, + nonce=self.nonce_96, mac_len=3) + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_GCM, + nonce=self.nonce_96, mac_len=16+1) + + # Valid MAC length + for mac_len in range(5, 16 + 1): + cipher = AES.new(self.key_128, AES.MODE_GCM, 
nonce=self.nonce_96, + mac_len=mac_len) + _, mac = cipher.encrypt_and_digest(self.data_128) + self.assertEqual(len(mac), mac_len) + + # Default MAC length + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + _, mac = cipher.encrypt_and_digest(self.data_128) + self.assertEqual(len(mac), 16) + + def test_invalid_mac(self): + from Crypto.Util.strxor import strxor_c + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + ct, mac = cipher.encrypt_and_digest(self.data_128) + + invalid_mac = strxor_c(mac, 0x01) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, + invalid_mac) + + def test_hex_mac(self): + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + mac_hex = cipher.hexdigest() + self.assertEqual(cipher.digest(), unhexlify(mac_hex)) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.hexverify(mac_hex) + + def test_message_chunks(self): + # Validate that both associated data and plaintext/ciphertext + # can be broken up in chunks of arbitrary length + + auth_data = get_tag_random("authenticated data", 127) + plaintext = get_tag_random("plaintext", 127) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.update(auth_data) + ciphertext, ref_mac = cipher.encrypt_and_digest(plaintext) + + def break_up(data, chunk_length): + return [data[i:i+chunk_length] for i in range(0, len(data), + chunk_length)] + + # Encryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + + for chunk in break_up(auth_data, chunk_length): + cipher.update(chunk) + pt2 = b"" + for chunk in break_up(ciphertext, chunk_length): + pt2 += cipher.decrypt(chunk) + self.assertEqual(plaintext, pt2) + cipher.verify(ref_mac) + + # Decryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = AES.new(self.key_128, AES.MODE_GCM, 
nonce=self.nonce_96) + + for chunk in break_up(auth_data, chunk_length): + cipher.update(chunk) + ct2 = b"" + for chunk in break_up(plaintext, chunk_length): + ct2 += cipher.encrypt(chunk) + self.assertEqual(ciphertext, ct2) + self.assertEquals(cipher.digest(), ref_mac) + + def test_bytearray(self): + + # Encrypt + key_ba = bytearray(self.key_128) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data_128) + data_ba = bytearray(self.data_128) + + cipher1 = AES.new(self.key_128, + AES.MODE_GCM, + nonce=self.nonce_96) + cipher1.update(self.data_128) + ct = cipher1.encrypt(self.data_128) + tag = cipher1.digest() + + cipher2 = AES.new(key_ba, + AES.MODE_GCM, + nonce=nonce_ba) + key_ba[:3] = b"\xFF\xFF\xFF" + nonce_ba[:3] = b"\xFF\xFF\xFF" + cipher2.update(header_ba) + header_ba[:3] = b"\xFF\xFF\xFF" + ct_test = cipher2.encrypt(data_ba) + data_ba[:3] = b"\xFF\xFF\xFF" + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_ba = bytearray(self.key_128) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data_128) + del data_ba + + cipher4 = AES.new(key_ba, + AES.MODE_GCM, + nonce=nonce_ba) + key_ba[:3] = b"\xFF\xFF\xFF" + nonce_ba[:3] = b"\xFF\xFF\xFF" + cipher4.update(header_ba) + header_ba[:3] = b"\xFF\xFF\xFF" + pt_test = cipher4.decrypt_and_verify(bytearray(ct_test), bytearray(tag_test)) + + self.assertEqual(self.data_128, pt_test) + + def test_memoryview(self): + + # Encrypt + key_mv = memoryview(bytearray(self.key_128)) + nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data_128)) + data_mv = memoryview(bytearray(self.data_128)) + + cipher1 = AES.new(self.key_128, + AES.MODE_GCM, + nonce=self.nonce_96) + cipher1.update(self.data_128) + ct = cipher1.encrypt(self.data_128) + tag = cipher1.digest() + + cipher2 = AES.new(key_mv, + AES.MODE_GCM, + nonce=nonce_mv) + key_mv[:3] = 
b"\xFF\xFF\xFF" + nonce_mv[:3] = b"\xFF\xFF\xFF" + cipher2.update(header_mv) + header_mv[:3] = b"\xFF\xFF\xFF" + ct_test = cipher2.encrypt(data_mv) + data_mv[:3] = b"\xFF\xFF\xFF" + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_mv = memoryview(bytearray(self.key_128)) + nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data_128)) + del data_mv + + cipher4 = AES.new(key_mv, + AES.MODE_GCM, + nonce=nonce_mv) + key_mv[:3] = b"\xFF\xFF\xFF" + nonce_mv[:3] = b"\xFF\xFF\xFF" + cipher4.update(header_mv) + header_mv[:3] = b"\xFF\xFF\xFF" + pt_test = cipher4.decrypt_and_verify(memoryview(ct_test), memoryview(tag_test)) + + self.assertEqual(self.data_128, pt_test) + + def test_output_param(self): + + pt = b'5' * 16 + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + ct = cipher.encrypt(pt) + tag = cipher.digest() + + output = bytearray(16) + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + res, tag_out = cipher.encrypt_and_digest(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + self.assertEqual(tag, tag_out) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + res = cipher.decrypt_and_verify(ct, tag, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + def test_output_param_memoryview(self): + + pt = b'5' * 16 + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + ct = cipher.encrypt(pt) + + output = memoryview(bytearray(16)) + cipher = 
AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + def test_output_param_neg(self): + + pt = b'5' * 16 + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + ct = cipher.encrypt(pt) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(15) + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +class GcmFSMTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + nonce_96 = get_tag_random("nonce_128", 12) + data_128 = get_tag_random("data_128", 16) + + def test_valid_init_encrypt_decrypt_digest_verify(self): + # No authenticated data, fixed plaintext + # Verify path INIT->ENCRYPT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_GCM, + nonce=self.nonce_96) + ct = cipher.encrypt(self.data_128) + mac = cipher.digest() + + # Verify path INIT->DECRYPT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_GCM, + nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.verify(mac) + + def test_valid_init_update_digest_verify(self): + # No plaintext, fixed authenticated data + # Verify path INIT->UPDATE->DIGEST + cipher = AES.new(self.key_128, AES.MODE_GCM, + nonce=self.nonce_96) + cipher.update(self.data_128) + mac = cipher.digest() + + # Verify path INIT->UPDATE->VERIFY + cipher = AES.new(self.key_128, AES.MODE_GCM, + nonce=self.nonce_96) + 
cipher.update(self.data_128) + cipher.verify(mac) + + def test_valid_full_path(self): + # Fixed authenticated data, fixed plaintext + # Verify path INIT->UPDATE->ENCRYPT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_GCM, + nonce=self.nonce_96) + cipher.update(self.data_128) + ct = cipher.encrypt(self.data_128) + mac = cipher.digest() + + # Verify path INIT->UPDATE->DECRYPT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_GCM, + nonce=self.nonce_96) + cipher.update(self.data_128) + cipher.decrypt(ct) + cipher.verify(mac) + + def test_valid_init_digest(self): + # Verify path INIT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.digest() + + def test_valid_init_verify(self): + # Verify path INIT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + mac = cipher.digest() + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.verify(mac) + + def test_valid_multiple_encrypt_or_decrypt(self): + for method_name in "encrypt", "decrypt": + for auth_data in (None, b"333", self.data_128, + self.data_128 + b"3"): + if auth_data is None: + assoc_len = None + else: + assoc_len = len(auth_data) + cipher = AES.new(self.key_128, AES.MODE_GCM, + nonce=self.nonce_96) + if auth_data is not None: + cipher.update(auth_data) + method = getattr(cipher, method_name) + method(self.data_128) + method(self.data_128) + method(self.data_128) + method(self.data_128) + + def test_valid_multiple_digest_or_verify(self): + # Multiple calls to digest + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.update(self.data_128) + first_mac = cipher.digest() + for x in range(4): + self.assertEqual(first_mac, cipher.digest()) + + # Multiple calls to verify + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.update(self.data_128) + for x in range(5): + cipher.verify(first_mac) + + def test_valid_encrypt_and_digest_decrypt_and_verify(self): + # encrypt_and_digest + cipher = 
AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.update(self.data_128) + ct, mac = cipher.encrypt_and_digest(self.data_128) + + # decrypt_and_verify + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.update(self.data_128) + pt = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(self.data_128, pt) + + def test_invalid_mixing_encrypt_decrypt(self): + # Once per method, with or without assoc. data + for method1_name, method2_name in (("encrypt", "decrypt"), + ("decrypt", "encrypt")): + for assoc_data_present in (True, False): + cipher = AES.new(self.key_128, AES.MODE_GCM, + nonce=self.nonce_96) + if assoc_data_present: + cipher.update(self.data_128) + getattr(cipher, method1_name)(self.data_128) + self.assertRaises(TypeError, getattr(cipher, method2_name), + self.data_128) + + def test_invalid_encrypt_or_update_after_digest(self): + for method_name in "encrypt", "update": + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.encrypt(self.data_128) + cipher.digest() + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.encrypt_and_digest(self.data_128) + + def test_invalid_decrypt_or_update_after_verify(self): + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + ct = cipher.encrypt(self.data_128) + mac = cipher.digest() + + for method_name in "decrypt", "update": + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.verify(mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.decrypt_and_verify(ct, mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + +class TestVectors(unittest.TestCase): + """Class exercising the GCM test vectors found in + 
http://csrc.nist.gov/groups/ST/toolkit/BCM/documents/proposedmodes/gcm/gcm-revised-spec.pdf""" + + # List of test vectors, each made up of: + # - authenticated data + # - plaintext + # - ciphertext + # - MAC + # - AES key + # - nonce + test_vectors_hex = [ + ( + '', + '', + '', + '58e2fccefa7e3061367f1d57a4e7455a', + '00000000000000000000000000000000', + '000000000000000000000000' + ), + ( + '', + '00000000000000000000000000000000', + '0388dace60b6a392f328c2b971b2fe78', + 'ab6e47d42cec13bdf53a67b21257bddf', + '00000000000000000000000000000000', + '000000000000000000000000' + ), + ( + '', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b391aafd255', + '42831ec2217774244b7221b784d0d49ce3aa212f2c02a4e035c17e2329aca12e' + + '21d514b25466931c7d8f6a5aac84aa051ba30b396a0aac973d58e091473f5985', + '4d5c2af327cd64a62cf35abd2ba6fab4', + 'feffe9928665731c6d6a8f9467308308', + 'cafebabefacedbaddecaf888' + ), + ( + 'feedfacedeadbeeffeedfacedeadbeefabaddad2', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', + '42831ec2217774244b7221b784d0d49ce3aa212f2c02a4e035c17e2329aca12e' + + '21d514b25466931c7d8f6a5aac84aa051ba30b396a0aac973d58e091', + '5bc94fbc3221a5db94fae95ae7121a47', + 'feffe9928665731c6d6a8f9467308308', + 'cafebabefacedbaddecaf888' + ), + ( + 'feedfacedeadbeeffeedfacedeadbeefabaddad2', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', + '61353b4c2806934a777ff51fa22a4755699b2a714fcdc6f83766e5f97b6c7423' + + '73806900e49f24b22b097544d4896b424989b5e1ebac0f07c23f4598', + '3612d2e79e3b0785561be14aaca2fccb', + 'feffe9928665731c6d6a8f9467308308', + 'cafebabefacedbad' + ), + ( + 'feedfacedeadbeeffeedfacedeadbeefabaddad2', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + 
'1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', + '8ce24998625615b603a033aca13fb894be9112a5c3a211a8ba262a3cca7e2ca7' + + '01e4a9a4fba43c90ccdcb281d48c7c6fd62875d2aca417034c34aee5', + '619cc5aefffe0bfa462af43c1699d050', + 'feffe9928665731c6d6a8f9467308308', + '9313225df88406e555909c5aff5269aa' + + '6a7a9538534f7da1e4c303d2a318a728c3c0c95156809539fcf0e2429a6b5254' + + '16aedbf5a0de6a57a637b39b' + ), + ( + '', + '', + '', + 'cd33b28ac773f74ba00ed1f312572435', + '000000000000000000000000000000000000000000000000', + '000000000000000000000000' + ), + ( + '', + '00000000000000000000000000000000', + '98e7247c07f0fe411c267e4384b0f600', + '2ff58d80033927ab8ef4d4587514f0fb', + '000000000000000000000000000000000000000000000000', + '000000000000000000000000' + ), + ( + '', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b391aafd255', + '3980ca0b3c00e841eb06fac4872a2757859e1ceaa6efd984628593b40ca1e19c' + + '7d773d00c144c525ac619d18c84a3f4718e2448b2fe324d9ccda2710acade256', + '9924a7c8587336bfb118024db8674a14', + 'feffe9928665731c6d6a8f9467308308feffe9928665731c', + 'cafebabefacedbaddecaf888' + ), + ( + 'feedfacedeadbeeffeedfacedeadbeefabaddad2', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', + '3980ca0b3c00e841eb06fac4872a2757859e1ceaa6efd984628593b40ca1e19c' + + '7d773d00c144c525ac619d18c84a3f4718e2448b2fe324d9ccda2710', + '2519498e80f1478f37ba55bd6d27618c', + 'feffe9928665731c6d6a8f9467308308feffe9928665731c', + 'cafebabefacedbaddecaf888' + ), + ( + 'feedfacedeadbeeffeedfacedeadbeefabaddad2', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', + '0f10f599ae14a154ed24b36e25324db8c566632ef2bbb34f8347280fc4507057' + + 'fddc29df9a471f75c66541d4d4dad1c9e93a19a58e8b473fa0f062f7', + '65dcc57fcf623a24094fcca40d3533f8', + 
'feffe9928665731c6d6a8f9467308308feffe9928665731c', + 'cafebabefacedbad' + ), + ( + 'feedfacedeadbeeffeedfacedeadbeefabaddad2', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', + 'd27e88681ce3243c4830165a8fdcf9ff1de9a1d8e6b447ef6ef7b79828666e45' + + '81e79012af34ddd9e2f037589b292db3e67c036745fa22e7e9b7373b', + 'dcf566ff291c25bbb8568fc3d376a6d9', + 'feffe9928665731c6d6a8f9467308308feffe9928665731c', + '9313225df88406e555909c5aff5269aa' + + '6a7a9538534f7da1e4c303d2a318a728c3c0c95156809539fcf0e2429a6b5254' + + '16aedbf5a0de6a57a637b39b' + ), + ( + '', + '', + '', + '530f8afbc74536b9a963b4f1c4cb738b', + '0000000000000000000000000000000000000000000000000000000000000000', + '000000000000000000000000' + ), + ( + '', + '00000000000000000000000000000000', + 'cea7403d4d606b6e074ec5d3baf39d18', + 'd0d1c8a799996bf0265b98b5d48ab919', + '0000000000000000000000000000000000000000000000000000000000000000', + '000000000000000000000000' + ), + ( '', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b391aafd255', + '522dc1f099567d07f47f37a32a84427d643a8cdcbfe5c0c97598a2bd2555d1aa' + + '8cb08e48590dbb3da7b08b1056828838c5f61e6393ba7a0abcc9f662898015ad', + 'b094dac5d93471bdec1a502270e3cc6c', + 'feffe9928665731c6d6a8f9467308308feffe9928665731c6d6a8f9467308308', + 'cafebabefacedbaddecaf888' + ), + ( + 'feedfacedeadbeeffeedfacedeadbeefabaddad2', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', + '522dc1f099567d07f47f37a32a84427d643a8cdcbfe5c0c97598a2bd2555d1aa' + + '8cb08e48590dbb3da7b08b1056828838c5f61e6393ba7a0abcc9f662', + '76fc6ece0f4e1768cddf8853bb2d551b', + 'feffe9928665731c6d6a8f9467308308feffe9928665731c6d6a8f9467308308', + 'cafebabefacedbaddecaf888' + ), + ( + 'feedfacedeadbeeffeedfacedeadbeefabaddad2', + 
'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', + 'c3762df1ca787d32ae47c13bf19844cbaf1ae14d0b976afac52ff7d79bba9de0' + + 'feb582d33934a4f0954cc2363bc73f7862ac430e64abe499f47c9b1f', + '3a337dbf46a792c45e454913fe2ea8f2', + 'feffe9928665731c6d6a8f9467308308feffe9928665731c6d6a8f9467308308', + 'cafebabefacedbad' + ), + ( + 'feedfacedeadbeeffeedfacedeadbeefabaddad2', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', + '5a8def2f0c9e53f1f75d7853659e2a20eeb2b22aafde6419a058ab4f6f746bf4' + + '0fc0c3b780f244452da3ebf1c5d82cdea2418997200ef82e44ae7e3f', + 'a44a8266ee1c8eb0c8b5d4cf5ae9f19a', + 'feffe9928665731c6d6a8f9467308308feffe9928665731c6d6a8f9467308308', + '9313225df88406e555909c5aff5269aa' + + '6a7a9538534f7da1e4c303d2a318a728c3c0c95156809539fcf0e2429a6b5254' + + '16aedbf5a0de6a57a637b39b' + ) + ] + + test_vectors = [[unhexlify(x) for x in tv] for tv in test_vectors_hex] + + def runTest(self): + for assoc_data, pt, ct, mac, key, nonce in self.test_vectors: + + # Encrypt + cipher = AES.new(key, AES.MODE_GCM, nonce, mac_len=len(mac)) + cipher.update(assoc_data) + ct2, mac2 = cipher.encrypt_and_digest(pt) + self.assertEqual(ct, ct2) + self.assertEqual(mac, mac2) + + # Decrypt + cipher = AES.new(key, AES.MODE_GCM, nonce, mac_len=len(mac)) + cipher.update(assoc_data) + pt2 = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(pt, pt2) + + +class TestVectorsGueronKrasnov(unittest.TestCase): + """Class exercising the GCM test vectors found in + 'The fragility of AES-GCM authentication algorithm', Gueron, Krasnov + https://eprint.iacr.org/2013/157.pdf""" + + def test_1(self): + key = unhexlify("3da6c536d6295579c0959a7043efb503") + iv = unhexlify("2b926197d34e091ef722db94") + aad = unhexlify("00000000000000000000000000000000" + + "000102030405060708090a0b0c0d0e0f" + + "101112131415161718191a1b1c1d1e1f" + + 
"202122232425262728292a2b2c2d2e2f" + + "303132333435363738393a3b3c3d3e3f") + digest = unhexlify("69dd586555ce3fcc89663801a71d957b") + + cipher = AES.new(key, AES.MODE_GCM, iv).update(aad) + self.assertEqual(digest, cipher.digest()) + + def test_2(self): + key = unhexlify("843ffcf5d2b72694d19ed01d01249412") + iv = unhexlify("dbcca32ebf9b804617c3aa9e") + aad = unhexlify("00000000000000000000000000000000" + + "101112131415161718191a1b1c1d1e1f") + pt = unhexlify("000102030405060708090a0b0c0d0e0f" + + "101112131415161718191a1b1c1d1e1f" + + "202122232425262728292a2b2c2d2e2f" + + "303132333435363738393a3b3c3d3e3f" + + "404142434445464748494a4b4c4d4e4f") + ct = unhexlify("6268c6fa2a80b2d137467f092f657ac0" + + "4d89be2beaa623d61b5a868c8f03ff95" + + "d3dcee23ad2f1ab3a6c80eaf4b140eb0" + + "5de3457f0fbc111a6b43d0763aa422a3" + + "013cf1dc37fe417d1fbfc449b75d4cc5") + digest = unhexlify("3b629ccfbc1119b7319e1dce2cd6fd6d") + + cipher = AES.new(key, AES.MODE_GCM, iv).update(aad) + ct2, digest2 = cipher.encrypt_and_digest(pt) + + self.assertEqual(ct, ct2) + self.assertEqual(digest, digest2) + + +class NISTTestVectorsGCM(unittest.TestCase): + + def __init__(self, a): + self.use_clmul = True + unittest.TestCase.__init__(self, a) + + +class NISTTestVectorsGCM_no_clmul(unittest.TestCase): + + def __init__(self, a): + self.use_clmul = False + unittest.TestCase.__init__(self, a) + + +test_vectors_nist = load_test_vectors( + ("Cipher", "AES"), + "gcmDecrypt128.rsp", + "GCM decrypt", + {"count": lambda x: int(x)}) or [] + +test_vectors_nist += load_test_vectors( + ("Cipher", "AES"), + "gcmEncryptExtIV128.rsp", + "GCM encrypt", + {"count": lambda x: int(x)}) or [] + +for idx, tv in enumerate(test_vectors_nist): + + # The test vector file contains some directive lines + if isinstance(tv, str): + continue + + def single_test(self, tv=tv): + + self.description = tv.desc + cipher = AES.new(tv.key, AES.MODE_GCM, nonce=tv.iv, + mac_len=len(tv.tag), use_clmul=self.use_clmul) + cipher.update(tv.aad) 
+ if "FAIL" in tv.others: + self.assertRaises(ValueError, cipher.decrypt_and_verify, + tv.ct, tv.tag) + else: + pt = cipher.decrypt_and_verify(tv.ct, tv.tag) + self.assertEqual(pt, tv.pt) + + setattr(NISTTestVectorsGCM, "test_%d" % idx, single_test) + setattr(NISTTestVectorsGCM_no_clmul, "test_%d" % idx, single_test) + + +class TestVectorsWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings, **extra_params): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._extra_params = extra_params + self._id = "None" + + def setUp(self): + + def filter_tag(group): + return group['tagSize'] // 8 + + self.tv = load_test_vectors_wycheproof(("Cipher", "wycheproof"), + "aes_gcm_test.json", + "Wycheproof GCM", + group_tag={'tag_size': filter_tag}) + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_encrypt(self, tv): + self._id = "Wycheproof Encrypt GCM Test #" + str(tv.id) + + try: + cipher = AES.new(tv.key, AES.MODE_GCM, tv.iv, mac_len=tv.tag_size, + **self._extra_params) + except ValueError as e: + if len(tv.iv) == 0 and "Nonce cannot be empty" in str(e): + return + raise e + + cipher.update(tv.aad) + ct, tag = cipher.encrypt_and_digest(tv.msg) + if tv.valid: + self.assertEqual(ct, tv.ct) + self.assertEqual(tag, tv.tag) + self.warn(tv) + + def test_decrypt(self, tv): + self._id = "Wycheproof Decrypt GCM Test #" + str(tv.id) + + try: + cipher = AES.new(tv.key, AES.MODE_GCM, tv.iv, mac_len=tv.tag_size, + **self._extra_params) + except ValueError as e: + if len(tv.iv) == 0 and "Nonce cannot be empty" in str(e): + return + raise e + + cipher.update(tv.aad) + try: + pt = cipher.decrypt_and_verify(tv.ct, tv.tag) + except ValueError: + assert not tv.valid + else: + assert tv.valid + self.assertEqual(pt, tv.msg) + self.warn(tv) + + def 
test_corrupt_decrypt(self, tv): + self._id = "Wycheproof Corrupt Decrypt GCM Test #" + str(tv.id) + if len(tv.iv) == 0 or len(tv.ct) < 1: + return + cipher = AES.new(tv.key, AES.MODE_GCM, tv.iv, mac_len=tv.tag_size, + **self._extra_params) + cipher.update(tv.aad) + ct_corrupt = strxor(tv.ct, b"\x00" * (len(tv.ct) - 1) + b"\x01") + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct_corrupt, tv.tag) + + def runTest(self): + + for tv in self.tv: + self.test_encrypt(tv) + self.test_decrypt(tv) + self.test_corrupt_decrypt(tv) + + +class TestVariableLength(unittest.TestCase): + + def __init__(self, **extra_params): + unittest.TestCase.__init__(self) + self._extra_params = extra_params + + def runTest(self): + key = b'0' * 16 + h = SHA256.new() + + for length in range(160): + nonce = '{0:04d}'.format(length).encode('utf-8') + data = bchr(length) * length + cipher = AES.new(key, AES.MODE_GCM, nonce=nonce, **self._extra_params) + ct, tag = cipher.encrypt_and_digest(data) + h.update(ct) + h.update(tag) + + self.assertEqual(h.hexdigest(), "7b7eb1ffbe67a2e53a912067c0ec8e62ebc7ce4d83490ea7426941349811bdf4") + + +def get_tests(config={}): + from Crypto.Util import _cpu_features + + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(GcmTests) + tests += list_test_cases(GcmFSMTests) + tests += [TestVectors()] + tests += [TestVectorsWycheproof(wycheproof_warnings)] + tests += list_test_cases(TestVectorsGueronKrasnov) + tests += [TestVariableLength()] + if config.get('slow_tests'): + tests += list_test_cases(NISTTestVectorsGCM) + + if _cpu_features.have_clmul(): + tests += [TestVectorsWycheproof(wycheproof_warnings, use_clmul=False)] + tests += [TestVariableLength(use_clmul=False)] + if config.get('slow_tests'): + tests += list_test_cases(NISTTestVectorsGCM_no_clmul) + else: + print("Skipping test of PCLMULDQD in AES GCM") + + return tests + + +if __name__ == '__main__': + def suite(): + unittest.TestSuite(get_tests()) + 
unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_OCB.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_OCB.py new file mode 100644 index 0000000..c1baab3 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_OCB.py @@ -0,0 +1,742 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import os +import re +import unittest +from binascii import hexlify, unhexlify + +from Crypto.Util.py3compat import b, tobytes, bchr +from Crypto.Util.strxor import strxor_c +from Crypto.Util.number import long_to_bytes +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Cipher import AES +from Crypto.Hash import SHAKE128 + + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + + +class OcbTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + nonce_96 = get_tag_random("nonce_128", 12) + data_128 = get_tag_random("data_128", 16) + + def test_loopback_128(self): + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + pt = get_tag_random("plaintext", 16 * 100) + ct, mac = cipher.encrypt_and_digest(pt) + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + pt2 = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(pt, pt2) + + def test_nonce(self): + # Nonce is optional + AES.new(self.key_128, AES.MODE_OCB) + + cipher = AES.new(self.key_128, AES.MODE_OCB, self.nonce_96) + ct = cipher.encrypt(self.data_128) + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + self.assertEquals(ct, cipher.encrypt(self.data_128)) + + def test_nonce_must_be_bytes(self): + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_OCB, + nonce=u'test12345678') + + def test_nonce_length(self): + # nonce cannot be empty + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB, + nonce=b("")) + + # nonce can be up to 15 bytes long + for length in range(1, 16): + AES.new(self.key_128, AES.MODE_OCB, nonce=self.data_128[:length]) + + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB, + nonce=self.data_128) + + def test_block_size_128(self): + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + self.assertEqual(cipher.block_size, AES.block_size) + + # By 
default, a 15 bytes long nonce is randomly generated + nonce1 = AES.new(self.key_128, AES.MODE_OCB).nonce + nonce2 = AES.new(self.key_128, AES.MODE_OCB).nonce + self.assertEqual(len(nonce1), 15) + self.assertNotEqual(nonce1, nonce2) + + def test_nonce_attribute(self): + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + self.assertEqual(cipher.nonce, self.nonce_96) + + # By default, a 15 bytes long nonce is randomly generated + nonce1 = AES.new(self.key_128, AES.MODE_OCB).nonce + nonce2 = AES.new(self.key_128, AES.MODE_OCB).nonce + self.assertEqual(len(nonce1), 15) + self.assertNotEqual(nonce1, nonce2) + + def test_unknown_parameters(self): + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_OCB, + self.nonce_96, 7) + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_OCB, + nonce=self.nonce_96, unknown=7) + + # But some are only known by the base cipher + # (e.g. use_aesni consumed by the AES module) + AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96, + use_aesni=False) + + def test_null_encryption_decryption(self): + for func in "encrypt", "decrypt": + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + result = getattr(cipher, func)(b("")) + self.assertEqual(result, b("")) + + def test_either_encrypt_or_decrypt(self): + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.encrypt(b("xyz")) + self.assertRaises(TypeError, cipher.decrypt, b("xyz")) + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.decrypt(b("xyz")) + self.assertRaises(TypeError, cipher.encrypt, b("xyz")) + + def test_data_must_be_bytes(self): + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt, u'test1234567890-*') + + def test_mac_len(self): + # Invalid MAC length + 
self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB, + nonce=self.nonce_96, mac_len=7) + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB, + nonce=self.nonce_96, mac_len=16+1) + + # Valid MAC length + for mac_len in range(8, 16 + 1): + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96, + mac_len=mac_len) + _, mac = cipher.encrypt_and_digest(self.data_128) + self.assertEqual(len(mac), mac_len) + + # Default MAC length + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + _, mac = cipher.encrypt_and_digest(self.data_128) + self.assertEqual(len(mac), 16) + + def test_invalid_mac(self): + from Crypto.Util.strxor import strxor_c + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + ct, mac = cipher.encrypt_and_digest(self.data_128) + + invalid_mac = strxor_c(mac, 0x01) + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, + invalid_mac) + + def test_hex_mac(self): + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + mac_hex = cipher.hexdigest() + self.assertEqual(cipher.digest(), unhexlify(mac_hex)) + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.hexverify(mac_hex) + + def test_message_chunks(self): + # Validate that both associated data and plaintext/ciphertext + # can be broken up in chunks of arbitrary length + + auth_data = get_tag_random("authenticated data", 127) + plaintext = get_tag_random("plaintext", 127) + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.update(auth_data) + ciphertext, ref_mac = cipher.encrypt_and_digest(plaintext) + + def break_up(data, chunk_length): + return [data[i:i+chunk_length] for i in range(0, len(data), + chunk_length)] + + # Encryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + + for chunk in break_up(auth_data, 
chunk_length): + cipher.update(chunk) + pt2 = b("") + for chunk in break_up(ciphertext, chunk_length): + pt2 += cipher.decrypt(chunk) + pt2 += cipher.decrypt() + self.assertEqual(plaintext, pt2) + cipher.verify(ref_mac) + + # Decryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + + for chunk in break_up(auth_data, chunk_length): + cipher.update(chunk) + ct2 = b("") + for chunk in break_up(plaintext, chunk_length): + ct2 += cipher.encrypt(chunk) + ct2 += cipher.encrypt() + self.assertEqual(ciphertext, ct2) + self.assertEquals(cipher.digest(), ref_mac) + + def test_bytearray(self): + + # Encrypt + key_ba = bytearray(self.key_128) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data_128) + data_ba = bytearray(self.data_128) + + cipher1 = AES.new(self.key_128, + AES.MODE_OCB, + nonce=self.nonce_96) + cipher1.update(self.data_128) + ct = cipher1.encrypt(self.data_128) + cipher1.encrypt() + tag = cipher1.digest() + + cipher2 = AES.new(key_ba, + AES.MODE_OCB, + nonce=nonce_ba) + key_ba[:3] = b"\xFF\xFF\xFF" + nonce_ba[:3] = b"\xFF\xFF\xFF" + cipher2.update(header_ba) + header_ba[:3] = b"\xFF\xFF\xFF" + ct_test = cipher2.encrypt(data_ba) + cipher2.encrypt() + data_ba[:3] = b"\xFF\xFF\xFF" + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_ba = bytearray(self.key_128) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data_128) + del data_ba + + cipher4 = AES.new(key_ba, + AES.MODE_OCB, + nonce=nonce_ba) + key_ba[:3] = b"\xFF\xFF\xFF" + nonce_ba[:3] = b"\xFF\xFF\xFF" + cipher4.update(header_ba) + header_ba[:3] = b"\xFF\xFF\xFF" + pt_test = cipher4.decrypt_and_verify(bytearray(ct_test), bytearray(tag_test)) + + self.assertEqual(self.data_128, pt_test) + + def test_memoryview(self): + + # Encrypt + key_mv = memoryview(bytearray(self.key_128)) 
+ nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data_128)) + data_mv = memoryview(bytearray(self.data_128)) + + cipher1 = AES.new(self.key_128, + AES.MODE_OCB, + nonce=self.nonce_96) + cipher1.update(self.data_128) + ct = cipher1.encrypt(self.data_128) + cipher1.encrypt() + tag = cipher1.digest() + + cipher2 = AES.new(key_mv, + AES.MODE_OCB, + nonce=nonce_mv) + key_mv[:3] = b"\xFF\xFF\xFF" + nonce_mv[:3] = b"\xFF\xFF\xFF" + cipher2.update(header_mv) + header_mv[:3] = b"\xFF\xFF\xFF" + ct_test = cipher2.encrypt(data_mv) + cipher2.encrypt() + data_mv[:3] = b"\xFF\xFF\xFF" + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_mv = memoryview(bytearray(self.key_128)) + nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data_128)) + del data_mv + + cipher4 = AES.new(key_mv, + AES.MODE_OCB, + nonce=nonce_mv) + key_mv[:3] = b"\xFF\xFF\xFF" + nonce_mv[:3] = b"\xFF\xFF\xFF" + cipher4.update(header_mv) + header_mv[:3] = b"\xFF\xFF\xFF" + pt_test = cipher4.decrypt_and_verify(memoryview(ct_test), memoryview(tag_test)) + + self.assertEqual(self.data_128, pt_test) + + +class OcbFSMTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + nonce_96 = get_tag_random("nonce_128", 12) + data_128 = get_tag_random("data_128", 16) + + def test_valid_init_encrypt_decrypt_digest_verify(self): + # No authenticated data, fixed plaintext + # Verify path INIT->ENCRYPT->ENCRYPT(NONE)->DIGEST + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + ct = cipher.encrypt(self.data_128) + ct += cipher.encrypt() + mac = cipher.digest() + + # Verify path INIT->DECRYPT->DECRYPT(NONCE)->VERIFY + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.decrypt() + cipher.verify(mac) + + def test_invalid_init_encrypt_decrypt_digest_verify(self): + 
# No authenticated data, fixed plaintext + # Verify path INIT->ENCRYPT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + ct = cipher.encrypt(self.data_128) + self.assertRaises(TypeError, cipher.digest) + + # Verify path INIT->DECRYPT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + cipher.decrypt(ct) + self.assertRaises(TypeError, cipher.verify) + + def test_valid_init_update_digest_verify(self): + # No plaintext, fixed authenticated data + # Verify path INIT->UPDATE->DIGEST + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + cipher.update(self.data_128) + mac = cipher.digest() + + # Verify path INIT->UPDATE->VERIFY + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + cipher.update(self.data_128) + cipher.verify(mac) + + def test_valid_full_path(self): + # Fixed authenticated data, fixed plaintext + # Verify path INIT->UPDATE->ENCRYPT->ENCRYPT(NONE)->DIGEST + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + cipher.update(self.data_128) + ct = cipher.encrypt(self.data_128) + ct += cipher.encrypt() + mac = cipher.digest() + + # Verify path INIT->UPDATE->DECRYPT->DECRYPT(NONE)->VERIFY + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + cipher.update(self.data_128) + cipher.decrypt(ct) + cipher.decrypt() + cipher.verify(mac) + + def test_invalid_encrypt_after_final(self): + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + cipher.update(self.data_128) + cipher.encrypt(self.data_128) + cipher.encrypt() + self.assertRaises(TypeError, cipher.encrypt, self.data_128) + + def test_invalid_decrypt_after_final(self): + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + cipher.update(self.data_128) + cipher.decrypt(self.data_128) + cipher.decrypt() + self.assertRaises(TypeError, cipher.decrypt, self.data_128) + + def test_valid_init_digest(self): + # Verify path INIT->DIGEST + cipher = 
AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.digest() + + def test_valid_init_verify(self): + # Verify path INIT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + mac = cipher.digest() + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.verify(mac) + + def test_valid_multiple_encrypt_or_decrypt(self): + for method_name in "encrypt", "decrypt": + for auth_data in (None, b("333"), self.data_128, + self.data_128 + b("3")): + if auth_data is None: + assoc_len = None + else: + assoc_len = len(auth_data) + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + if auth_data is not None: + cipher.update(auth_data) + method = getattr(cipher, method_name) + method(self.data_128) + method(self.data_128) + method(self.data_128) + method(self.data_128) + method() + + def test_valid_multiple_digest_or_verify(self): + # Multiple calls to digest + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.update(self.data_128) + first_mac = cipher.digest() + for x in range(4): + self.assertEqual(first_mac, cipher.digest()) + + # Multiple calls to verify + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.update(self.data_128) + for x in range(5): + cipher.verify(first_mac) + + def test_valid_encrypt_and_digest_decrypt_and_verify(self): + # encrypt_and_digest + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.update(self.data_128) + ct, mac = cipher.encrypt_and_digest(self.data_128) + + # decrypt_and_verify + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.update(self.data_128) + pt = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(self.data_128, pt) + + def test_invalid_mixing_encrypt_decrypt(self): + # Once per method, with or without assoc. 
data + for method1_name, method2_name in (("encrypt", "decrypt"), + ("decrypt", "encrypt")): + for assoc_data_present in (True, False): + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + if assoc_data_present: + cipher.update(self.data_128) + getattr(cipher, method1_name)(self.data_128) + self.assertRaises(TypeError, getattr(cipher, method2_name), + self.data_128) + + def test_invalid_encrypt_or_update_after_digest(self): + for method_name in "encrypt", "update": + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.encrypt(self.data_128) + cipher.encrypt() + cipher.digest() + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.encrypt_and_digest(self.data_128) + + def test_invalid_decrypt_or_update_after_verify(self): + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + ct = cipher.encrypt(self.data_128) + ct += cipher.encrypt() + mac = cipher.digest() + + for method_name in "decrypt", "update": + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.decrypt() + cipher.verify(mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.decrypt_and_verify(ct, mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + +class OcbRfc7253Test(unittest.TestCase): + + # Tuple with + # - nonce + # - authenticated data + # - plaintext + # - ciphertext and 16 byte MAC tag + tv1_key = "000102030405060708090A0B0C0D0E0F" + tv1 = ( + ( + "BBAA99887766554433221100", + "", + "", + "785407BFFFC8AD9EDCC5520AC9111EE6" + ), + ( + "BBAA99887766554433221101", + "0001020304050607", + "0001020304050607", + "6820B3657B6F615A5725BDA0D3B4EB3A257C9AF1F8F03009" + ), + ( + "BBAA99887766554433221102", + "0001020304050607", + "", + "81017F8203F081277152FADE694A0A00" 
+ ), + ( + "BBAA99887766554433221103", + "", + "0001020304050607", + "45DD69F8F5AAE72414054CD1F35D82760B2CD00D2F99BFA9" + ), + ( + "BBAA99887766554433221104", + "000102030405060708090A0B0C0D0E0F", + "000102030405060708090A0B0C0D0E0F", + "571D535B60B277188BE5147170A9A22C3AD7A4FF3835B8C5" + "701C1CCEC8FC3358" + ), + ( + "BBAA99887766554433221105", + "000102030405060708090A0B0C0D0E0F", + "", + "8CF761B6902EF764462AD86498CA6B97" + ), + ( + "BBAA99887766554433221106", + "", + "000102030405060708090A0B0C0D0E0F", + "5CE88EC2E0692706A915C00AEB8B2396F40E1C743F52436B" + "DF06D8FA1ECA343D" + ), + ( + "BBAA99887766554433221107", + "000102030405060708090A0B0C0D0E0F1011121314151617", + "000102030405060708090A0B0C0D0E0F1011121314151617", + "1CA2207308C87C010756104D8840CE1952F09673A448A122" + "C92C62241051F57356D7F3C90BB0E07F" + ), + ( + "BBAA99887766554433221108", + "000102030405060708090A0B0C0D0E0F1011121314151617", + "", + "6DC225A071FC1B9F7C69F93B0F1E10DE" + ), + ( + "BBAA99887766554433221109", + "", + "000102030405060708090A0B0C0D0E0F1011121314151617", + "221BD0DE7FA6FE993ECCD769460A0AF2D6CDED0C395B1C3C" + "E725F32494B9F914D85C0B1EB38357FF" + ), + ( + "BBAA9988776655443322110A", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F", + "BD6F6C496201C69296C11EFD138A467ABD3C707924B964DE" + "AFFC40319AF5A48540FBBA186C5553C68AD9F592A79A4240" + ), + ( + "BBAA9988776655443322110B", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F", + "", + "FE80690BEE8A485D11F32965BC9D2A32" + ), + ( + "BBAA9988776655443322110C", + "", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F", + "2942BFC773BDA23CABC6ACFD9BFD5835BD300F0973792EF4" + "6040C53F1432BCDFB5E1DDE3BC18A5F840B52E653444D5DF" + ), + ( + "BBAA9988776655443322110D", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F2021222324252627", + 
"000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F2021222324252627", + "D5CA91748410C1751FF8A2F618255B68A0A12E093FF45460" + "6E59F9C1D0DDC54B65E8628E568BAD7AED07BA06A4A69483" + "A7035490C5769E60" + ), + ( + "BBAA9988776655443322110E", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F2021222324252627", + "", + "C5CD9D1850C141E358649994EE701B68" + ), + ( + "BBAA9988776655443322110F", + "", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F2021222324252627", + "4412923493C57D5DE0D700F753CCE0D1D2D95060122E9F15" + "A5DDBFC5787E50B5CC55EE507BCB084E479AD363AC366B95" + "A98CA5F3000B1479" + ) + ) + + # Tuple with + # - key + # - nonce + # - authenticated data + # - plaintext + # - ciphertext and 12 byte MAC tag + tv2 = ( + "0F0E0D0C0B0A09080706050403020100", + "BBAA9988776655443322110D", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F2021222324252627", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F2021222324252627", + "1792A4E31E0755FB03E31B22116E6C2DDF9EFD6E33D536F1" + "A0124B0A55BAE884ED93481529C76B6AD0C515F4D1CDD4FD" + "AC4F02AA" + ) + + # Tuple with + # - key length + # - MAC tag length + # - Expected output + tv3 = ( + (128, 128, "67E944D23256C5E0B6C61FA22FDF1EA2"), + (192, 128, "F673F2C3E7174AAE7BAE986CA9F29E17"), + (256, 128, "D90EB8E9C977C88B79DD793D7FFA161C"), + (128, 96, "77A3D8E73589158D25D01209"), + (192, 96, "05D56EAD2752C86BE6932C5E"), + (256, 96, "5458359AC23B0CBA9E6330DD"), + (128, 64, "192C9B7BD90BA06A"), + (192, 64, "0066BC6E0EF34E24"), + (256, 64, "7D4EA5D445501CBE"), + ) + + def test1(self): + key = unhexlify(b(self.tv1_key)) + for tv in self.tv1: + nonce, aad, pt, ct = [ unhexlify(b(x)) for x in tv ] + ct, mac_tag = ct[:-16], ct[-16:] + + cipher = AES.new(key, AES.MODE_OCB, nonce=nonce) + cipher.update(aad) + ct2 = cipher.encrypt(pt) + cipher.encrypt() + self.assertEquals(ct, ct2) + self.assertEquals(mac_tag, cipher.digest()) + + cipher = 
AES.new(key, AES.MODE_OCB, nonce=nonce) + cipher.update(aad) + pt2 = cipher.decrypt(ct) + cipher.decrypt() + self.assertEquals(pt, pt2) + cipher.verify(mac_tag) + + def test2(self): + + key, nonce, aad, pt, ct = [ unhexlify(b(x)) for x in self.tv2 ] + ct, mac_tag = ct[:-12], ct[-12:] + + cipher = AES.new(key, AES.MODE_OCB, nonce=nonce, mac_len=12) + cipher.update(aad) + ct2 = cipher.encrypt(pt) + cipher.encrypt() + self.assertEquals(ct, ct2) + self.assertEquals(mac_tag, cipher.digest()) + + cipher = AES.new(key, AES.MODE_OCB, nonce=nonce, mac_len=12) + cipher.update(aad) + pt2 = cipher.decrypt(ct) + cipher.decrypt() + self.assertEquals(pt, pt2) + cipher.verify(mac_tag) + + def test3(self): + + for keylen, taglen, result in self.tv3: + + key = bchr(0) * (keylen // 8 - 1) + bchr(taglen) + C = b("") + + for i in range(128): + S = bchr(0) * i + + N = long_to_bytes(3 * i + 1, 12) + cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8) + cipher.update(S) + C += cipher.encrypt(S) + cipher.encrypt() + cipher.digest() + + N = long_to_bytes(3 * i + 2, 12) + cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8) + C += cipher.encrypt(S) + cipher.encrypt() + cipher.digest() + + N = long_to_bytes(3 * i + 3, 12) + cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8) + cipher.update(S) + C += cipher.encrypt() + cipher.digest() + + N = long_to_bytes(385, 12) + cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8) + cipher.update(C) + result2 = cipher.encrypt() + cipher.digest() + self.assertEquals(unhexlify(b(result)), result2) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(OcbTests) + tests += list_test_cases(OcbFSMTests) + tests += list_test_cases(OcbRfc7253Test) + return tests + + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_OFB.py 
b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_OFB.py new file mode 100644 index 0000000..ec145ad --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_OFB.py @@ -0,0 +1,238 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Util.py3compat import tobytes +from Crypto.Cipher import AES, DES3, DES +from Crypto.Hash import SHAKE128 +from Crypto.SelfTest.loader import load_test_vectors_wycheproof + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + +from Crypto.SelfTest.Cipher.test_CBC import BlockChainingTests + +class OfbTests(BlockChainingTests): + + aes_mode = AES.MODE_OFB + des3_mode = DES3.MODE_OFB + + # Redefine test_unaligned_data_128/64 + + def test_unaligned_data_128(self): + plaintexts = [ b"7777777" ] * 100 + + cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=8) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=8) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=128) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=128) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + def test_unaligned_data_64(self): + plaintexts = [ b"7777777" ] * 100 + cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=8) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=8) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=64) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=64) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + +from 
Crypto.SelfTest.Cipher.test_CBC import NistBlockChainingVectors + +class NistOfbVectors(NistBlockChainingVectors): + aes_mode = AES.MODE_OFB + des_mode = DES.MODE_OFB + des3_mode = DES3.MODE_OFB + + +# Create one test method per file +nist_aes_kat_mmt_files = ( + # KAT + "OFBGFSbox128.rsp", + "OFBGFSbox192.rsp", + "OFBGFSbox256.rsp", + "OFBKeySbox128.rsp", + "OFBKeySbox192.rsp", + "OFBKeySbox256.rsp", + "OFBVarKey128.rsp", + "OFBVarKey192.rsp", + "OFBVarKey256.rsp", + "OFBVarTxt128.rsp", + "OFBVarTxt192.rsp", + "OFBVarTxt256.rsp", + # MMT + "OFBMMT128.rsp", + "OFBMMT192.rsp", + "OFBMMT256.rsp", + ) +nist_aes_mct_files = ( + "OFBMCT128.rsp", + "OFBMCT192.rsp", + "OFBMCT256.rsp", + ) + +for file_name in nist_aes_kat_mmt_files: + def new_func(self, file_name=file_name): + self._do_kat_aes_test(file_name) + setattr(NistOfbVectors, "test_AES_" + file_name, new_func) + +for file_name in nist_aes_mct_files: + def new_func(self, file_name=file_name): + self._do_mct_aes_test(file_name) + setattr(NistOfbVectors, "test_AES_" + file_name, new_func) +del file_name, new_func + +nist_tdes_files = ( + "TOFBMMT2.rsp", # 2TDES + "TOFBMMT3.rsp", # 3TDES + "TOFBinvperm.rsp", # Single DES + "TOFBpermop.rsp", + "TOFBsubtab.rsp", + "TOFBvarkey.rsp", + "TOFBvartext.rsp", + ) + +for file_name in nist_tdes_files: + def new_func(self, file_name=file_name): + self._do_tdes_test(file_name) + setattr(NistOfbVectors, "test_TDES_" + file_name, new_func) + +# END OF NIST OFB TEST VECTORS + + +class SP800TestVectors(unittest.TestCase): + """Class exercising the OFB test vectors found in Section F.4 + of NIST SP 800-38A""" + + def test_aes_128(self): + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = '3b3fd92eb72dad20333449f8e83cfb4a' +\ + '7789508d16918f03f53c52dac54ed825' +\ + '9740051e9c5fecf64344f7a82260edcc' +\ + '304c6528f659c77866a510d9c1d6ae5e' + key = 
'2b7e151628aed2a6abf7158809cf4f3c' + iv = '000102030405060708090a0b0c0d0e0f' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.encrypt(plaintext[:-8]), ciphertext[:-8]) + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.decrypt(ciphertext[:-8]), plaintext[:-8]) + + def test_aes_192(self): + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = 'cdc80d6fddf18cab34c25909c99a4174' +\ + 'fcc28b8d4c63837c09e81700c1100401' +\ + '8d9a9aeac0f6596f559c6d4daf59a5f2' +\ + '6d9f200857ca6c3e9cac524bd9acc92a' + key = '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b' + iv = '000102030405060708090a0b0c0d0e0f' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.encrypt(plaintext[:-8]), ciphertext[:-8]) + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.decrypt(ciphertext[:-8]), plaintext[:-8]) + + def test_aes_256(self): + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = 'dc7e84bfda79164b7ecd8486985d3860' +\ + '4febdc6740d20b3ac88f6ad82a4fb08d' +\ + '71ab47a086e86eedf39d1c5bba97c408' +\ + '0126141d67f37be8538f5a8be740e484' + 
key = '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4' + iv = '000102030405060708090a0b0c0d0e0f' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.encrypt(plaintext[:-8]), ciphertext[:-8]) + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.decrypt(ciphertext[:-8]), plaintext[:-8]) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(OfbTests) + if config.get('slow_tests'): + tests += list_test_cases(NistOfbVectors) + tests += list_test_cases(SP800TestVectors) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_OpenPGP.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_OpenPGP.py new file mode 100644 index 0000000..e6cae67 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_OpenPGP.py @@ -0,0 +1,218 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Util.py3compat import tobytes +from Crypto.Cipher import AES, DES3, DES +from Crypto.Hash import SHAKE128 + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + + +from Crypto.SelfTest.Cipher.test_CBC import BlockChainingTests + +class OpenPGPTests(BlockChainingTests): + + aes_mode = AES.MODE_OPENPGP + des3_mode = DES3.MODE_OPENPGP + + # Redefine test_unaligned_data_128/64 + + key_128 = get_tag_random("key_128", 16) + key_192 = get_tag_random("key_192", 24) + iv_128 = get_tag_random("iv_128", 16) + iv_64 = get_tag_random("iv_64", 8) + data_128 = get_tag_random("data_128", 16) + + def test_loopback_128(self): + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128) + pt = get_tag_random("plaintext", 16 * 100) + ct = cipher.encrypt(pt) + + eiv, ct = ct[:18], ct[18:] + + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_loopback_64(self): + cipher = DES3.new(self.key_192, 
DES3.MODE_OPENPGP, self.iv_64) + pt = get_tag_random("plaintext", 8 * 100) + ct = cipher.encrypt(pt) + + eiv, ct = ct[:10], ct[10:] + + cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, eiv) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_IV_iv_attributes(self): + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128) + eiv = cipher.encrypt(b"") + self.assertEqual(cipher.iv, self.iv_128) + + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv) + self.assertEqual(cipher.iv, self.iv_128) + + def test_null_encryption_decryption(self): + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128) + eiv = cipher.encrypt(b"") + + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv) + self.assertEqual(cipher.decrypt(b""), b"") + + def test_either_encrypt_or_decrypt(self): + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128) + eiv = cipher.encrypt(b"") + self.assertRaises(TypeError, cipher.decrypt, b"") + + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv) + cipher.decrypt(b"") + self.assertRaises(TypeError, cipher.encrypt, b"") + + def test_unaligned_data_128(self): + plaintexts = [ b"7777777" ] * 100 + + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + def test_unaligned_data_64(self): + plaintexts = [ b"7777777" ] * 100 + + cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, self.iv_64) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, self.iv_64) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + def test_output_param(self): + pass + + def test_output_param_same_buffer(self): + pass + + def test_output_param_memoryview(self): + pass + + def test_output_param_neg(self): + pass + + +class 
TestVectors(unittest.TestCase): + + def test_aes(self): + # The following test vectors have been generated with gpg v1.4.0. + # The command line used was: + # + # gpg -c -z 0 --cipher-algo AES --passphrase secret_passphrase \ + # --disable-mdc --s2k-mode 0 --output ct pt + # + # As result, the content of the file 'pt' is encrypted with a key derived + # from 'secret_passphrase' and written to file 'ct'. + # Test vectors must be extracted from 'ct', which is a collection of + # TLVs (see RFC4880 for all details): + # - the encrypted data (with the encrypted IV as prefix) is the payload + # of the TLV with tag 9 (Symmetrical Encrypted Data Packet). + # This is the ciphertext in the test vector. + # - inside the encrypted part, there is a further layer of TLVs. One must + # look for tag 11 (Literal Data Packet); in its payload, after a short + # but time dependent header, there is the content of file 'pt'. + # In the test vector, the plaintext is the complete set of TLVs that gets + # encrypted. It is not just the content of 'pt'. + # - the key is the leftmost 16 bytes of the SHA1 digest of the password. + # The test vector contains such shortened digest. 
+ # + # Note that encryption uses a clear IV, and decryption an encrypted IV + + plaintext = 'ac18620270744fb4f647426c61636b4361745768697465436174' + ciphertext = 'dc6b9e1f095de609765c59983db5956ae4f63aea7405389d2ebb' + key = '5baa61e4c9b93f3f0682250b6cf8331b' + iv = '3d7d3e62282add7eb203eeba5c800733' + encrypted_iv='fd934601ef49cb58b6d9aebca6056bdb96ef' + + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + key = unhexlify(key) + iv = unhexlify(iv) + encrypted_iv = unhexlify(encrypted_iv) + + cipher = AES.new(key, AES.MODE_OPENPGP, iv) + ct = cipher.encrypt(plaintext) + self.assertEqual(ct[:18], encrypted_iv) + self.assertEqual(ct[18:], ciphertext) + + cipher = AES.new(key, AES.MODE_OPENPGP, encrypted_iv) + pt = cipher.decrypt(ciphertext) + self.assertEqual(pt, plaintext) + + def test_des3(self): + # The following test vectors have been generated with gpg v1.4.0. + # The command line used was: + # gpg -c -z 0 --cipher-algo 3DES --passphrase secret_passphrase \ + # --disable-mdc --s2k-mode 0 --output ct pt + # For an explanation, see test_AES.py . 
+ + plaintext = 'ac1762037074324fb53ba3596f73656d69746556616c6c6579' + ciphertext = '9979238528357b90e2e0be549cb0b2d5999b9a4a447e5c5c7d' + key = '7ade65b460f5ea9be35f9e14aa883a2048e3824aa616c0b2' + iv='cd47e2afb8b7e4b0' + encrypted_iv='6a7eef0b58050e8b904a' + + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + key = unhexlify(key) + iv = unhexlify(iv) + encrypted_iv = unhexlify(encrypted_iv) + + cipher = DES3.new(key, DES3.MODE_OPENPGP, iv) + ct = cipher.encrypt(plaintext) + self.assertEqual(ct[:10], encrypted_iv) + self.assertEqual(ct[10:], ciphertext) + + cipher = DES3.new(key, DES3.MODE_OPENPGP, encrypted_iv) + pt = cipher.decrypt(ciphertext) + self.assertEqual(pt, plaintext) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(OpenPGPTests) + tests += list_test_cases(TestVectors) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_SIV.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_SIV.py new file mode 100644 index 0000000..421c9eb --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_SIV.py @@ -0,0 +1,551 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import json +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors_wycheproof + +from Crypto.Util.py3compat import tobytes, bchr +from Crypto.Cipher import AES +from Crypto.Hash import SHAKE128 + +from Crypto.Util.strxor import strxor + + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + + +class SivTests(unittest.TestCase): + + key_256 = get_tag_random("key_256", 32) + key_384 = get_tag_random("key_384", 48) + key_512 = get_tag_random("key_512", 64) + nonce_96 = get_tag_random("nonce_128", 12) + data_128 = get_tag_random("data_128", 16) + + def test_loopback_128(self): + for key in self.key_256, self.key_384, self.key_512: + cipher = AES.new(key, AES.MODE_SIV, nonce=self.nonce_96) + pt = get_tag_random("plaintext", 16 * 100) + ct, mac = cipher.encrypt_and_digest(pt) + + cipher = AES.new(key, AES.MODE_SIV, nonce=self.nonce_96) + pt2 = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(pt, pt2) + + def test_nonce(self): + # Deterministic encryption + 
AES.new(self.key_256, AES.MODE_SIV) + + cipher = AES.new(self.key_256, AES.MODE_SIV, self.nonce_96) + ct1, tag1 = cipher.encrypt_and_digest(self.data_128) + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + ct2, tag2 = cipher.encrypt_and_digest(self.data_128) + self.assertEquals(ct1 + tag1, ct2 + tag2) + + def test_nonce_must_be_bytes(self): + self.assertRaises(TypeError, AES.new, self.key_256, AES.MODE_SIV, + nonce=u'test12345678') + + def test_nonce_length(self): + # nonce can be of any length (but not empty) + self.assertRaises(ValueError, AES.new, self.key_256, AES.MODE_SIV, + nonce=b"") + + for x in range(1, 128): + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=bchr(1) * x) + cipher.encrypt_and_digest(b'\x01') + + def test_block_size_128(self): + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + self.assertEqual(cipher.block_size, AES.block_size) + + def test_nonce_attribute(self): + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + self.assertEqual(cipher.nonce, self.nonce_96) + + # By default, no nonce is randomly generated + self.failIf(hasattr(AES.new(self.key_256, AES.MODE_SIV), "nonce")) + + def test_unknown_parameters(self): + self.assertRaises(TypeError, AES.new, self.key_256, AES.MODE_SIV, + self.nonce_96, 7) + self.assertRaises(TypeError, AES.new, self.key_256, AES.MODE_SIV, + nonce=self.nonce_96, unknown=7) + + # But some are only known by the base cipher + # (e.g. 
use_aesni consumed by the AES module) + AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96, + use_aesni=False) + + def test_encrypt_excludes_decrypt(self): + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.encrypt_and_digest(self.data_128) + self.assertRaises(TypeError, cipher.decrypt, self.data_128) + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.encrypt_and_digest(self.data_128) + self.assertRaises(TypeError, cipher.decrypt_and_verify, + self.data_128, self.data_128) + + def test_data_must_be_bytes(self): + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt_and_verify, + u'test1234567890-*', b"xxxx") + + def test_mac_len(self): + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + _, mac = cipher.encrypt_and_digest(self.data_128) + self.assertEqual(len(mac), 16) + + def test_invalid_mac(self): + from Crypto.Util.strxor import strxor_c + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + ct, mac = cipher.encrypt_and_digest(self.data_128) + + invalid_mac = strxor_c(mac, 0x01) + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, + invalid_mac) + + def test_hex_mac(self): + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + mac_hex = cipher.hexdigest() + self.assertEqual(cipher.digest(), unhexlify(mac_hex)) + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.hexverify(mac_hex) + + def test_bytearray(self): + + # Encrypt + key = bytearray(self.key_256) + nonce = bytearray(self.nonce_96) + data = bytearray(self.data_128) + header = bytearray(self.data_128) + + cipher1 = AES.new(self.key_256, + AES.MODE_SIV, + nonce=self.nonce_96) + 
cipher1.update(self.data_128) + ct, tag = cipher1.encrypt_and_digest(self.data_128) + + cipher2 = AES.new(key, + AES.MODE_SIV, + nonce=nonce) + key[:3] = b'\xFF\xFF\xFF' + nonce[:3] = b'\xFF\xFF\xFF' + cipher2.update(header) + header[:3] = b'\xFF\xFF\xFF' + ct_test, tag_test = cipher2.encrypt_and_digest(data) + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key = bytearray(self.key_256) + nonce = bytearray(self.nonce_96) + header = bytearray(self.data_128) + ct_ba = bytearray(ct) + tag_ba = bytearray(tag) + + cipher3 = AES.new(key, + AES.MODE_SIV, + nonce=nonce) + key[:3] = b'\xFF\xFF\xFF' + nonce[:3] = b'\xFF\xFF\xFF' + cipher3.update(header) + header[:3] = b'\xFF\xFF\xFF' + pt_test = cipher3.decrypt_and_verify(ct_ba, tag_ba) + + self.assertEqual(self.data_128, pt_test) + + def test_memoryview(self): + + # Encrypt + key = memoryview(bytearray(self.key_256)) + nonce = memoryview(bytearray(self.nonce_96)) + data = memoryview(bytearray(self.data_128)) + header = memoryview(bytearray(self.data_128)) + + cipher1 = AES.new(self.key_256, + AES.MODE_SIV, + nonce=self.nonce_96) + cipher1.update(self.data_128) + ct, tag = cipher1.encrypt_and_digest(self.data_128) + + cipher2 = AES.new(key, + AES.MODE_SIV, + nonce=nonce) + key[:3] = b'\xFF\xFF\xFF' + nonce[:3] = b'\xFF\xFF\xFF' + cipher2.update(header) + header[:3] = b'\xFF\xFF\xFF' + ct_test, tag_test= cipher2.encrypt_and_digest(data) + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key = memoryview(bytearray(self.key_256)) + nonce = memoryview(bytearray(self.nonce_96)) + header = memoryview(bytearray(self.data_128)) + ct_ba = memoryview(bytearray(ct)) + tag_ba = memoryview(bytearray(tag)) + + cipher3 = AES.new(key, + AES.MODE_SIV, + nonce=nonce) + key[:3] = b'\xFF\xFF\xFF' + nonce[:3] = b'\xFF\xFF\xFF' + cipher3.update(header) + header[:3] = b'\xFF\xFF\xFF' 
+ pt_test = cipher3.decrypt_and_verify(ct_ba, tag_ba) + + self.assertEqual(self.data_128, pt_test) + + def test_output_param(self): + + pt = b'5' * 16 + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + ct, tag = cipher.encrypt_and_digest(pt) + + output = bytearray(16) + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + res, tag_out = cipher.encrypt_and_digest(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + self.assertEqual(tag, tag_out) + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + res = cipher.decrypt_and_verify(ct, tag, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + def test_output_param_memoryview(self): + + pt = b'5' * 16 + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + ct, tag = cipher.encrypt_and_digest(pt) + + output = memoryview(bytearray(16)) + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.encrypt_and_digest(pt, output=output) + self.assertEqual(ct, output) + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.decrypt_and_verify(ct, tag, output=output) + self.assertEqual(pt, output) + + def test_output_param_neg(self): + + pt = b'5' * 16 + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + ct, tag = cipher.encrypt_and_digest(pt) + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt_and_digest, pt, output=b'0'*16) + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt_and_verify, ct, tag, output=b'0'*16) + + shorter_output = bytearray(15) + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.encrypt_and_digest, pt, output=shorter_output) + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, 
tag, output=shorter_output) + + +class SivFSMTests(unittest.TestCase): + + key_256 = get_tag_random("key_256", 32) + nonce_96 = get_tag_random("nonce_96", 12) + data_128 = get_tag_random("data_128", 16) + + def test_invalid_init_encrypt(self): + # Path INIT->ENCRYPT fails + cipher = AES.new(self.key_256, AES.MODE_SIV, + nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, b"xxx") + + def test_invalid_init_decrypt(self): + # Path INIT->DECRYPT fails + cipher = AES.new(self.key_256, AES.MODE_SIV, + nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt, b"xxx") + + def test_valid_init_update_digest_verify(self): + # No plaintext, fixed authenticated data + # Verify path INIT->UPDATE->DIGEST + cipher = AES.new(self.key_256, AES.MODE_SIV, + nonce=self.nonce_96) + cipher.update(self.data_128) + mac = cipher.digest() + + # Verify path INIT->UPDATE->VERIFY + cipher = AES.new(self.key_256, AES.MODE_SIV, + nonce=self.nonce_96) + cipher.update(self.data_128) + cipher.verify(mac) + + def test_valid_init_digest(self): + # Verify path INIT->DIGEST + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.digest() + + def test_valid_init_verify(self): + # Verify path INIT->VERIFY + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + mac = cipher.digest() + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.verify(mac) + + def test_valid_multiple_digest_or_verify(self): + # Multiple calls to digest + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.update(self.data_128) + first_mac = cipher.digest() + for x in range(4): + self.assertEqual(first_mac, cipher.digest()) + + # Multiple calls to verify + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.update(self.data_128) + for x in range(5): + cipher.verify(first_mac) + + def test_valid_encrypt_and_digest_decrypt_and_verify(self): + # encrypt_and_digest + cipher = AES.new(self.key_256, 
AES.MODE_SIV, nonce=self.nonce_96) + cipher.update(self.data_128) + ct, mac = cipher.encrypt_and_digest(self.data_128) + + # decrypt_and_verify + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.update(self.data_128) + pt = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(self.data_128, pt) + + def test_invalid_multiple_encrypt_and_digest(self): + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + ct, tag = cipher.encrypt_and_digest(self.data_128) + self.assertRaises(TypeError, cipher.encrypt_and_digest, b'') + + def test_invalid_multiple_decrypt_and_verify(self): + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + ct, tag = cipher.encrypt_and_digest(self.data_128) + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.decrypt_and_verify(ct, tag) + self.assertRaises(TypeError, cipher.decrypt_and_verify, ct, tag) + + +def transform(tv): + new_tv = [[unhexlify(x) for x in tv[0].split("-")]] + new_tv += [ unhexlify(x) for x in tv[1:5]] + if tv[5]: + nonce = unhexlify(tv[5]) + else: + nonce = None + new_tv += [ nonce ] + return new_tv + + +class TestVectors(unittest.TestCase): + """Class exercising the SIV test vectors found in RFC5297""" + + # This is a list of tuples with 5 items: + # + # 1. Header + '|' + plaintext + # 2. Header + '|' + ciphertext + '|' + MAC + # 3. AES-128 key + # 4. Description + # 5. Dictionary of parameters to be passed to AES.new(). + # It must include the nonce. + # + # A "Header" is a dash ('-') separated sequece of components. 
+ # + test_vectors_hex = [ + ( + '101112131415161718191a1b1c1d1e1f2021222324252627', + '112233445566778899aabbccddee', + '40c02b9690c4dc04daef7f6afe5c', + '85632d07c6e8f37f950acd320a2ecc93', + 'fffefdfcfbfaf9f8f7f6f5f4f3f2f1f0f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff', + None + ), + ( + '00112233445566778899aabbccddeeffdeaddadadeaddadaffeeddccbbaa9988' + + '7766554433221100-102030405060708090a0', + '7468697320697320736f6d6520706c61696e7465787420746f20656e63727970' + + '74207573696e67205349562d414553', + 'cb900f2fddbe404326601965c889bf17dba77ceb094fa663b7a3f748ba8af829' + + 'ea64ad544a272e9c485b62a3fd5c0d', + '7bdb6e3b432667eb06f4d14bff2fbd0f', + '7f7e7d7c7b7a79787776757473727170404142434445464748494a4b4c4d4e4f', + '09f911029d74e35bd84156c5635688c0' + ), + ] + + test_vectors = [ transform(tv) for tv in test_vectors_hex ] + + def runTest(self): + for assoc_data, pt, ct, mac, key, nonce in self.test_vectors: + + # Encrypt + cipher = AES.new(key, AES.MODE_SIV, nonce=nonce) + for x in assoc_data: + cipher.update(x) + ct2, mac2 = cipher.encrypt_and_digest(pt) + self.assertEqual(ct, ct2) + self.assertEqual(mac, mac2) + + # Decrypt + cipher = AES.new(key, AES.MODE_SIV, nonce=nonce) + for x in assoc_data: + cipher.update(x) + pt2 = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(pt, pt2) + + +class TestVectorsWycheproof(unittest.TestCase): + + def __init__(self): + unittest.TestCase.__init__(self) + self._id = "None" + + def setUp(self): + self.tv = load_test_vectors_wycheproof(("Cipher", "wycheproof"), + "aes_siv_cmac_test.json", + "Wycheproof AES SIV") + + def shortDescription(self): + return self._id + + def test_encrypt(self, tv): + self._id = "Wycheproof Encrypt AES-SIV Test #" + str(tv.id) + + cipher = AES.new(tv.key, AES.MODE_SIV) + cipher.update(tv.aad) + ct, tag = cipher.encrypt_and_digest(tv.msg) + if tv.valid: + self.assertEqual(tag + ct, tv.ct) + + def test_decrypt(self, tv): + self._id = "Wycheproof Decrypt AES_SIV Test #" + str(tv.id) + + cipher = AES.new(tv.key, 
AES.MODE_SIV) + cipher.update(tv.aad) + try: + pt = cipher.decrypt_and_verify(tv.ct[16:], tv.ct[:16]) + except ValueError: + assert not tv.valid + else: + assert tv.valid + self.assertEqual(pt, tv.msg) + + def runTest(self): + + for tv in self.tv: + self.test_encrypt(tv) + self.test_decrypt(tv) + + +class TestVectorsWycheproof2(unittest.TestCase): + + def __init__(self): + unittest.TestCase.__init__(self) + self._id = "None" + + def setUp(self): + self.tv = load_test_vectors_wycheproof(("Cipher", "wycheproof"), + "aead_aes_siv_cmac_test.json", + "Wycheproof AEAD SIV") + + def shortDescription(self): + return self._id + + def test_encrypt(self, tv): + self._id = "Wycheproof Encrypt AEAD-AES-SIV Test #" + str(tv.id) + + cipher = AES.new(tv.key, AES.MODE_SIV, nonce=tv.iv) + cipher.update(tv.aad) + ct, tag = cipher.encrypt_and_digest(tv.msg) + if tv.valid: + self.assertEqual(ct, tv.ct) + self.assertEqual(tag, tv.tag) + + def test_decrypt(self, tv): + self._id = "Wycheproof Decrypt AEAD-AES-SIV Test #" + str(tv.id) + + cipher = AES.new(tv.key, AES.MODE_SIV, nonce=tv.iv) + cipher.update(tv.aad) + try: + pt = cipher.decrypt_and_verify(tv.ct, tv.tag) + except ValueError: + assert not tv.valid + else: + assert tv.valid + self.assertEqual(pt, tv.msg) + + def runTest(self): + + for tv in self.tv: + self.test_encrypt(tv) + self.test_decrypt(tv) + + +def get_tests(config={}): + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(SivTests) + tests += list_test_cases(SivFSMTests) + tests += [ TestVectors() ] + tests += [ TestVectorsWycheproof() ] + tests += [ TestVectorsWycheproof2() ] + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_Salsa20.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_Salsa20.py new file mode 100644 index 0000000..fdcb072 --- /dev/null +++ 
b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_Salsa20.py @@ -0,0 +1,367 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/Salsa20.py: Self-test for the Salsa20 stream cipher +# +# Written in 2013 by Fabrizio Tarizzo +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.Salsa20""" + +import unittest + +from Crypto.Util.py3compat import bchr + +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Cipher import Salsa20 + +from .common import make_stream_tests + +# This is a list of (plaintext, ciphertext, key[, description[, params]]) +# tuples. 
+test_data = [ + # Test vectors are taken from + # http://www.ecrypt.eu.org/stream/svn/viewcvs.cgi/ecrypt/trunk/submissions/salsa20/full/verified.test-vectors + ( '00' * 512, + '4dfa5e481da23ea09a31022050859936da52fcee218005164f267cb65f5cfd7f' + + '2b4f97e0ff16924a52df269515110a07f9e460bc65ef95da58f740b7d1dbb0aa' + + 'd64cec189c7eb8c6bbf3d7376c80a481d43e628701f6a27afb9fe23919f24114' + + '8db44f70d7063efcc3dd55a0893a613c3c6fe1c127bd6f59910589293bb6ef9e' + + 'e24819066dee1a64f49b0bbad5988635272b169af861f85df881939f29ada6fd' + + '0241410e8d332ae4798d929434a2630de451ec4e0169694cbaa7ebb121ea6a2b' + + 'da9c1581f429e0a00f7d67e23b730676783b262e8eb43a25f55fb90b3e753aef' + + '8c6713ec66c51881111593ccb3e8cb8f8de124080501eeeb389c4bcb6977cf95' + + '7d5789631eb4554400e1e025935dfa7b3e9039d61bdc58a8697d36815bf1985c' + + 'efdf7ae112e5bb81e37ecf0616ce7147fc08a93a367e08631f23c03b00a8da2f' + + 'aa5024e5c8d30aca43fc2d5082067b21b234bc741d68fb292c6012c3764ccee3' + + '1e364a5403e00cfee338a21a01e7d3cefd5a770ca0ab48c435ea6116435f7ad8' + + '30b217b49f978a68e207ed9f462af7fb195b2115fe8f24f152e4ddc32202d6f2' + + 'b52fafbcfbc202d8a259a611e901d3f62d065eb13f09bbc45cd45119b843efaa' + + 'b375703739daced4dd4059fd71c3c47fc2f9939670fad4a46066adcc6a564578' + + '3308b90ffb72be04a6b147cbe38cc0c3b9267c296a92a7c69873f9f263be9703', + '80000000000000000000000000000000', + '128 bits key, set 1, vector 0', + dict (iv='00'*8)), + + ( '00' * 512, + 'e3be8fdd8beca2e3ea8ef9475b29a6e7003951e1097a5c38d23b7a5fad9f6844' + + 'b22c97559e2723c7cbbd3fe4fc8d9a0744652a83e72a9c461876af4d7ef1a117' + + '8da2b74eef1b6283e7e20166abcae538e9716e4669e2816b6b20c5c356802001' + + 'cc1403a9a117d12a2669f456366d6ebb0f1246f1265150f793cdb4b253e348ae' + + '203d89bc025e802a7e0e00621d70aa36b7e07cb1e7d5b38d5e222b8b0e4b8407' + + '0142b1e29504767d76824850320b5368129fdd74e861b498e3be8d16f2d7d169' + + '57be81f47b17d9ae7c4ff15429a73e10acf250ed3a90a93c711308a74c6216a9' + + 'ed84cd126da7f28e8abf8bb63517e1ca98e712f4fb2e1a6aed9fdc73291faa17' + + 
'958211c4ba2ebd5838c635edb81f513a91a294e194f1c039aeec657dce40aa7e' + + '7c0af57cacefa40c9f14b71a4b3456a63e162ec7d8d10b8ffb1810d71001b618' + + '2f9f73da53b85405c11f7b2d890fa8ae0c7f2e926d8a98c7ec4e91b65120e988' + + '349631a700c6facec3471cb0413656e75e309456584084d7e12c5b43a41c43ed' + + '9a048abd9b880da65f6a665a20fe7b77cd292fe62cae644b7f7df69f32bdb331' + + '903e6505ce44fdc293920c6a9ec7057e23df7dad298f82ddf4efb7fdc7bfc622' + + '696afcfd0cddcc83c7e77f11a649d79acdc3354e9635ff137e929933a0bd6f53' + + '77efa105a3a4266b7c0d089d08f1e855cc32b15b93784a36e56a76cc64bc8477', + '8000000000000000000000000000000000000000000000000000000000000000', + '256 bits key, set 1, vector 0', + dict (iv='00'*8)), + + ( '00' * 512, + '169060ccb42bea7bee4d8012a02f3635eb7bca12859fa159cd559094b3507db8' + + '01735d1a1300102a9c9415546829cbd2021ba217b39b81d89c55b13d0c603359' + + '3f84159a3c84f4b4f4a0edcd9d38ff261a737909e0b66d68b5cac496f3a5be99' + + 'cb12c321ab711afaab36cc0947955e1a9bb952ed54425e7711279fbc81bb83f5' + + '6e55cea44e6daddb05858a153ea6213b3350c12aa1a83ef2726f09485fa71790' + + 'f9b9f922c7dda1113b1f9d56658ed3402803f511bc1f122601d5e7f0ff036e23' + + '23ef24bb24195b9fd574823cd8a40c29d86bd35c191e2038779ff696c712b6d8' + + '2e7014dbe1ac5d527af076c088c4a8d44317958189f6ef54933a7e0816b5b916' + + 'd8f12ed8afe9422b85e5cc9b8adec9d6cfabe8dbc1082bccc02f5a7266aa074c' + + 'a284e583a35837798cc0e69d4ce937653b8cdd65ce414b89138615ccb165ad19' + + '3c6b9c3d05eef4be921a10ea811fe61d11c6867600188e065daff90b509ec56b' + + 'd41e7e8968c478c78d590c2d2ee24ea009c8f49bc3d81672cfc47895a9e21c9a' + + '471ebf8e294bee5d2de436ac8d052bf31111b345f1da23c3a4d13b9fc5f0900a' + + 'a298f98f538973b8fad40d4d159777de2cfe2a3dead1645ddb49794827dba040' + + 'f70a0ff4ecd155e0f033604693a51e2363880e2ecf98699e7174af7c2c6b0fc6' + + '59ae329599a3949272a37b9b2183a0910922a3f325ae124dcbdd735364055ceb', + '09090909090909090909090909090909', + '128 bits key, set 2, vector 9', + dict (iv='00'*8)), + + ( '00' * 512, + 
'7041e747ceb22ed7812985465f50333124f971da1c5d6efe5ca201b886f31046' + + 'e757e5c3ec914f60ed1f6bce2819b6810953f12b8ba1199bf82d746a8b8a88f1' + + '142002978ec4c35b95dc2c82990f9e847a0ab45f2ca72625f5190c820f29f3aa' + + 'f5f0b5572b06b70a144f2a240c3b3098d4831fa1ce1459f8d1df226a6a79b0ab' + + '41e91799ef31b5ff3d756c19126b19025858ee70fbd69f2be955cb011c005e31' + + '32b271b378f39b0cb594e95c99ce6ff17735a541891845bbf0450afcb4a850b9' + + '4ee90afb713ae7e01295c74381180a3816d7020d5a396c0d97aaa783eaabb6ec' + + '44d5111157f2212d1b1b8fca7893e8b520cd482418c272ab119b569a2b9598eb' + + '355624d12e79adab81153b58cd22eaf1b2a32395dedc4a1c66f4d274070b9800' + + 'ea95766f0245a8295f8aadb36ddbbdfa936417c8dbc6235d19494036964d3e70' + + 'b125b0f800c3d53881d9d11e7970f827c2f9556935cd29e927b0aceb8cae5fd4' + + '0fd88a8854010a33db94c96c98735858f1c5df6844f864feaca8f41539313e7f' + + '3c0610214912cd5e6362197646207e2d64cd5b26c9dfe0822629dcbeb16662e8' + + '9ff5bf5cf2e499138a5e27bd5027329d0e68ddf53103e9e409523662e27f61f6' + + '5cf38c1232023e6a6ef66c315bcb2a4328642faabb7ca1e889e039e7c444b34b' + + 'b3443f596ac730f3df3dfcdb343c307c80f76e43e8898c5e8f43dc3bb280add0', + '0909090909090909090909090909090909090909090909090909090909090909', + '256 bits key, set 2, vector 9', + dict (iv='00'*8)), + + ( '00' * 1024, + '71daee5142d0728b41b6597933ebf467e43279e30978677078941602629cbf68' + + 'b73d6bd2c95f118d2b3e6ec955dabb6dc61c4143bc9a9b32b99dbe6866166dc0' + + '8631b7d6553050303d7252c264d3a90d26c853634813e09ad7545a6ce7e84a5d' + + 'fc75ec43431207d5319970b0faadb0e1510625bb54372c8515e28e2accf0a993' + + '0ad15f431874923d2a59e20d9f2a5367dba6051564f150287debb1db536ff9b0' + + '9ad981f25e5010d85d76ee0c305f755b25e6f09341e0812f95c94f42eead346e' + + '81f39c58c5faa2c88953dc0cac90469db2063cb5cdb22c9eae22afbf0506fca4' + + '1dc710b846fbdfe3c46883dd118f3a5e8b11b6afd9e71680d8666557301a2daa' + + 'fb9496c559784d35a035360885f9b17bd7191977deea932b981ebdb29057ae3c' + + '92cfeff5e6c5d0cb62f209ce342d4e35c69646ccd14e53350e488bb310a32f8b' + + 
'0248e70acc5b473df537ced3f81a014d4083932bedd62ed0e447b6766cd2604b' + + '706e9b346c4468beb46a34ecf1610ebd38331d52bf33346afec15eefb2a7699e' + + '8759db5a1f636a48a039688e39de34d995df9f27ed9edc8dd795e39e53d9d925' + + 'b278010565ff665269042f05096d94da3433d957ec13d2fd82a0066283d0d1ee' + + 'b81bf0ef133b7fd90248b8ffb499b2414cd4fa003093ff0864575a43749bf596' + + '02f26c717fa96b1d057697db08ebc3fa664a016a67dcef8807577cc3a09385d3' + + 'f4dc79b34364bb3b166ce65fe1dd28e3950fe6fa81063f7b16ce1c0e6daac1f8' + + '188455b77752045e863c9b256ad92bc6e2d08314c5bba191c274f42dfbb3d652' + + 'bb771956555e880f84cd8b827a4c5a52f3a099fa0259bd4aac3efd541f191170' + + '4412d6e85fbcc628b335875b9fef24807f6e1bc66c3186159e1e7f5a13913e02' + + 'd241ce2efdbcaa275039fb14eac5923d17ffbc7f1abd3b45e92127575bfbabf9' + + '3a257ebef0aa1437b326e41b585af572f7239c33b32981a1577a4f629b027e1e' + + 'b49d58cc497e944d79cef44357c2bf25442ab779651e991147bf79d6fd3a8868' + + '0cd3b1748e07fd10d78aceef6db8a5e563570d40127f754146c34a440f2a991a' + + '23fa39d365141f255041f2135c5cba4373452c114da1801bacca38610e3a6524' + + '2b822d32de4ab5a7d3cf9b61b37493c863bd12e2cae10530cddcda2cb7a5436b' + + 'ef8988d4d24e8cdc31b2d2a3586340bc5141f8f6632d0dd543bfed81eb471ba1' + + 'f3dc2225a15ffddcc03eb48f44e27e2aa390598adf83f15c6608a5f18d4dfcf0' + + 'f547d467a4d70b281c83a595d7660d0b62de78b9cca023cca89d7b1f83484638' + + '0e228c25f049184a612ef5bb3d37454e6cfa5b10dceda619d898a699b3c8981a' + + '173407844bb89b4287bf57dd6600c79e352c681d74b03fa7ea0d7bf6ad69f8a6' + + '8ecb001963bd2dd8a2baa0083ec09751cd9742402ad716be16d5c052304cfca1', + '0F62B5085BAE0154A7FA4DA0F34699EC', + '128 bits key, Set 6, vector# 3', + dict (iv='288FF65DC42B92F9')), + + ( '00' * 1024, + '5e5e71f90199340304abb22a37b6625bf883fb89ce3b21f54a10b81066ef87da' + + '30b77699aa7379da595c77dd59542da208e5954f89e40eb7aa80a84a6176663f' + + 'd910cde567cf1ff60f7040548d8f376bfd1f44c4774aac37410ede7d5c3463fc' + + '4508a603201d8495ad257894e5eb1914b53e8da5e4bf2bc83ac87ce55cc67df7' + + 
'093d9853d2a83a9c8be969175df7c807a17156df768445dd0874a9271c6537f5' + + 'ce0466473582375f067fa4fcdaf65dbc0139cd75e8c21a482f28c0fb8c3d9f94' + + '22606cc8e88fe28fe73ec3cb10ff0e8cc5f2a49e540f007265c65b7130bfdb98' + + '795b1df9522da46e48b30e55d9f0d787955ece720205b29c85f3ad9be33b4459' + + '7d21b54d06c9a60b04b8e640c64e566e51566730e86cf128ab14174f91bd8981' + + 'a6fb00fe587bbd6c38b5a1dfdb04ea7e61536fd229f957aa9b070ca931358e85' + + '11b92c53c523cb54828fb1513c5636fa9a0645b4a3c922c0db94986d92f314ff' + + '7852c03b231e4dceea5dd8cced621869cff818daf3c270ff3c8be2e5c74be767' + + 'a4e1fdf3327a934fe31e46df5a74ae2021cee021d958c4f615263d99a5ddae7f' + + 'eab45e6eccbafefe4761c57750847b7e75ee2e2f14333c0779ce4678f47b1e1b' + + '760a03a5f17d6e91d4b42313b3f1077ee270e432fe04917ed1fc8babebf7c941' + + '42b80dfb44a28a2a3e59093027606f6860bfb8c2e5897078cfccda7314c70035' + + 'f137de6f05daa035891d5f6f76e1df0fce1112a2ff0ac2bd3534b5d1bf4c7165' + + 'fb40a1b6eacb7f295711c4907ae457514a7010f3a342b4427593d61ba993bc59' + + '8bd09c56b9ee53aac5dd861fa4b4bb53888952a4aa9d8ca8671582de716270e1' + + '97375b3ee49e51fa2bf4ef32015dd9a764d966aa2ae541592d0aa650849e99ca' + + '5c6c39beebf516457cc32fe4c105bff314a12f1ec94bdf4d626f5d9b1cbbde42' + + 'e5733f0885765ba29e2e82c829d312f5fc7e180679ac84826c08d0a644b326d0' + + '44da0fdcc75fa53cfe4ced0437fa4df5a7ecbca8b4cb7c4a9ecf9a60d00a56eb' + + '81da52adc21f508dbb60a9503a3cc94a896616d86020d5b0e5c637329b6d396a' + + '41a21ba2c4a9493cf33fa2d4f10f77d5b12fdad7e478ccfe79b74851fc96a7ca' + + '6320c5efd561a222c0ab0fb44bbda0e42149611d2262bb7d1719150fa798718a' + + '0eec63ee297cad459869c8b0f06c4e2b56cbac03cd2605b2a924efedf85ec8f1' + + '9b0b6c90e7cbd933223ffeb1b3a3f9677657905829294c4c70acdb8b0891b47d' + + '0875d0cd6c0f4efe2917fc44b581ef0d1e4280197065d07da34ab33283364552' + + 'efad0bd9257b059acdd0a6f246812feb69e7e76065f27dbc2eee94da9cc41835' + + 'bf826e36e5cebe5d4d6a37a6a666246290ce51a0c082718ab0ec855668db1add' + + 'a658e5f257e0db39384d02e6145c4c00eaa079098f6d820d872de711b6ed08cf', + 
'0F62B5085BAE0154A7FA4DA0F34699EC3F92E5388BDE3184D72A7DD02376C91C', + '256 bits key, Set 6, vector# 3', + dict (iv='288FF65DC42B92F9')), + +] + + +class KeyLength(unittest.TestCase): + + def runTest(self): + + nonce = bchr(0) * 8 + for key_length in (15, 30, 33): + key = bchr(1) * key_length + self.assertRaises(ValueError, Salsa20.new, key, nonce) + + +class NonceTests(unittest.TestCase): + + def test_invalid_nonce_length(self): + key = bchr(1) * 16 + self.assertRaises(ValueError, Salsa20.new, key, bchr(0) * 7) + self.assertRaises(ValueError, Salsa20.new, key, bchr(0) * 9) + + def test_default_nonce(self): + + cipher1 = Salsa20.new(bchr(1) * 16) + cipher2 = Salsa20.new(bchr(1) * 16) + self.assertEqual(len(cipher1.nonce), 8) + self.assertNotEqual(cipher1.nonce, cipher2.nonce) + + +class ByteArrayTest(unittest.TestCase): + """Verify we can encrypt or decrypt bytearrays""" + + def runTest(self): + + data = b"0123" + key = b"9" * 32 + nonce = b"t" * 8 + + # Encryption + data_ba = bytearray(data) + key_ba = bytearray(key) + nonce_ba = bytearray(nonce) + + cipher1 = Salsa20.new(key=key, nonce=nonce) + ct = cipher1.encrypt(data) + + cipher2 = Salsa20.new(key=key_ba, nonce=nonce_ba) + key_ba[:1] = b'\xFF' + nonce_ba[:1] = b'\xFF' + ct_test = cipher2.encrypt(data_ba) + + self.assertEqual(ct, ct_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decryption + key_ba = bytearray(key) + nonce_ba = bytearray(nonce) + ct_ba = bytearray(ct) + + cipher3 = Salsa20.new(key=key_ba, nonce=nonce_ba) + key_ba[:1] = b'\xFF' + nonce_ba[:1] = b'\xFF' + pt_test = cipher3.decrypt(ct_ba) + + self.assertEqual(data, pt_test) + + +class MemoryviewTest(unittest.TestCase): + """Verify we can encrypt or decrypt bytearrays""" + + def runTest(self): + + data = b"0123" + key = b"9" * 32 + nonce = b"t" * 8 + + # Encryption + data_mv = memoryview(bytearray(data)) + key_mv = memoryview(bytearray(key)) + nonce_mv = memoryview(bytearray(nonce)) + + cipher1 = Salsa20.new(key=key, nonce=nonce) + ct 
= cipher1.encrypt(data) + + cipher2 = Salsa20.new(key=key_mv, nonce=nonce_mv) + key_mv[:1] = b'\xFF' + nonce_mv[:1] = b'\xFF' + ct_test = cipher2.encrypt(data_mv) + + self.assertEqual(ct, ct_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decryption + key_mv = memoryview(bytearray(key)) + nonce_mv = memoryview(bytearray(nonce)) + ct_mv = memoryview(bytearray(ct)) + + cipher3 = Salsa20.new(key=key_mv, nonce=nonce_mv) + key_mv[:1] = b'\xFF' + nonce_mv[:1] = b'\xFF' + pt_test = cipher3.decrypt(ct_mv) + + self.assertEqual(data, pt_test) + + +class TestOutput(unittest.TestCase): + + def runTest(self): + # Encrypt/Decrypt data and test output parameter + + key = b'4' * 32 + nonce = b'5' * 8 + cipher = Salsa20.new(key=key, nonce=nonce) + + pt = b'5' * 16 + ct = cipher.encrypt(pt) + + output = bytearray(16) + cipher = Salsa20.new(key=key, nonce=nonce) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + cipher = Salsa20.new(key=key, nonce=nonce) + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + output = memoryview(bytearray(16)) + cipher = Salsa20.new(key=key, nonce=nonce) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher = Salsa20.new(key=key, nonce=nonce) + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + cipher = Salsa20.new(key=key, nonce=nonce) + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) + + cipher = Salsa20.new(key=key, nonce=nonce) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(7) + + cipher = Salsa20.new(key=key, nonce=nonce) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + + cipher = Salsa20.new(key=key, nonce=nonce) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +def get_tests(config={}): + tests = make_stream_tests(Salsa20, "Salsa20", test_data) + 
tests.append(KeyLength()) + tests += list_test_cases(NonceTests) + tests.append(ByteArrayTest()) + tests.append(MemoryviewTest()) + tests.append(TestOutput()) + + return tests + + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_15.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_15.py new file mode 100644 index 0000000..e122971 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_15.py @@ -0,0 +1,252 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/test_pkcs1_15.py: Self-test for PKCS#1 v1.5 encryption +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +from __future__ import print_function + +import json +import unittest +from binascii import unhexlify + +from Crypto.PublicKey import RSA +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex +from Crypto import Random +from Crypto.Cipher import PKCS1_v1_5 as PKCS +from Crypto.Util.py3compat import b +from Crypto.Util.number import bytes_to_long, long_to_bytes +from Crypto.SelfTest.loader import load_test_vectors_wycheproof + + +def rws(t): + """Remove white spaces, tabs, and new lines from a string""" + for c in ['\n', '\t', ' ']: + t = t.replace(c,'') + return t + +def t2b(t): + """Convert a text string with bytes in hex form to a byte string""" + clean = b(rws(t)) + if len(clean)%2 == 1: + raise ValueError("Even number of characters expected") + return a2b_hex(clean) + +class PKCS1_15_Tests(unittest.TestCase): + + def setUp(self): + self.rng = Random.new().read + self.key1024 = RSA.generate(1024, self.rng) + + # List of tuples with test data for PKCS#1 v1.5. 
+ # Each tuple is made up by: + # Item #0: dictionary with RSA key component, or key to import + # Item #1: plaintext + # Item #2: ciphertext + # Item #3: random data + + _testData = ( + + # + # Generated with openssl 0.9.8o + # + ( + # Private key + '''-----BEGIN RSA PRIVATE KEY----- +MIICXAIBAAKBgQDAiAnvIAOvqVwJTaYzsKnefZftgtXGE2hPJppGsWl78yz9jeXY +W/FxX/gTPURArNhdnhP6n3p2ZaDIBrO2zizbgIXs0IsljTTcr4vnI8fMXzyNUOjA +zP3nzMqZDZK6757XQAobOssMkBFqRWwilT/3DsBhRpl3iMUhF+wvpTSHewIDAQAB +AoGAC4HV/inOrpgTvSab8Wj0riyZgQOZ3U3ZpSlsfR8ra9Ib9Uee3jCYnKscu6Gk +y6zI/cdt8EPJ4PuwAWSNJzbpbVaDvUq25OD+CX8/uRT08yBS4J8TzBitZJTD4lS7 +atdTnKT0Wmwk+u8tDbhvMKwnUHdJLcuIsycts9rwJVapUtkCQQDvDpx2JMun0YKG +uUttjmL8oJ3U0m3ZvMdVwBecA0eebZb1l2J5PvI3EJD97eKe91Nsw8T3lwpoN40k +IocSVDklAkEAzi1HLHE6EzVPOe5+Y0kGvrIYRRhncOb72vCvBZvD6wLZpQgqo6c4 +d3XHFBBQWA6xcvQb5w+VVEJZzw64y25sHwJBAMYReRl6SzL0qA0wIYrYWrOt8JeQ +8mthulcWHXmqTgC6FEXP9Es5GD7/fuKl4wqLKZgIbH4nqvvGay7xXLCXD/ECQH9a +1JYNMtRen5unSAbIOxRcKkWz92F0LKpm9ZW/S9vFHO+mBcClMGoKJHiuQxLBsLbT +NtEZfSJZAeS2sUtn3/0CQDb2M2zNBTF8LlM0nxmh0k9VGm5TVIyBEMcipmvOgqIs +HKukWBcq9f/UOmS0oEhai/6g+Uf7VHJdWaeO5LzuvwU= +-----END RSA PRIVATE KEY-----''', + # Plaintext + '''THIS IS PLAINTEXT\x0A''', + # Ciphertext + '''3f dc fd 3c cd 5c 9b 12 af 65 32 e3 f7 d0 da 36 + 8f 8f d9 e3 13 1c 7f c8 b3 f9 c1 08 e4 eb 79 9c + 91 89 1f 96 3b 94 77 61 99 a4 b1 ee 5d e6 17 c9 + 5d 0a b5 63 52 0a eb 00 45 38 2a fb b0 71 3d 11 + f7 a1 9e a7 69 b3 af 61 c0 bb 04 5b 5d 4b 27 44 + 1f 5b 97 89 ba 6a 08 95 ee 4f a2 eb 56 64 e5 0f + da 7c f9 9a 61 61 06 62 ed a0 bc 5f aa 6c 31 78 + 70 28 1a bb 98 3c e3 6a 60 3c d1 0b 0f 5a f4 75''', + # Random data + '''eb d7 7d 86 a4 35 23 a3 54 7e 02 0b 42 1d + 61 6c af 67 b8 4e 17 56 80 66 36 04 64 34 26 8a + 47 dd 44 b3 1a b2 17 60 f4 91 2e e2 b5 95 64 cc + f9 da c8 70 94 54 86 4c ef 5b 08 7d 18 c4 ab 8d + 04 06 33 8f ca 15 5f 52 60 8a a1 0c f5 08 b5 4c + bb 99 b8 94 25 04 9c e6 01 75 e6 f9 63 7a 65 61 + 13 8a a7 47 77 81 ae 0d b8 2c 4d 50 a5''' + ), + ) + + def 
testEncrypt1(self): + for test in self._testData: + # Build the key + key = RSA.importKey(test[0]) + # RNG that takes its random numbers from a pool given + # at initialization + class randGen: + def __init__(self, data): + self.data = data + self.idx = 0 + def __call__(self, N): + r = self.data[self.idx:self.idx+N] + self.idx += N + return r + # The real test + cipher = PKCS.new(key, randfunc=randGen(t2b(test[3]))) + ct = cipher.encrypt(b(test[1])) + self.assertEqual(ct, t2b(test[2])) + + def testEncrypt2(self): + # Verify that encryption fail if plaintext is too long + pt = '\x00'*(128-11+1) + cipher = PKCS.new(self.key1024) + self.assertRaises(ValueError, cipher.encrypt, pt) + + def testVerify1(self): + for test in self._testData: + # Build the key + key = RSA.importKey(test[0]) + # The real test + cipher = PKCS.new(key) + pt = cipher.decrypt(t2b(test[2]), "---") + self.assertEqual(pt, b(test[1])) + + def testVerify2(self): + # Verify that decryption fails if ciphertext is not as long as + # RSA modulus + cipher = PKCS.new(self.key1024) + self.assertRaises(ValueError, cipher.decrypt, '\x00'*127, "---") + self.assertRaises(ValueError, cipher.decrypt, '\x00'*129, "---") + + # Verify that decryption fails if there are less then 8 non-zero padding + # bytes + pt = b('\x00\x02' + '\xFF'*7 + '\x00' + '\x45'*118) + pt_int = bytes_to_long(pt) + ct_int = self.key1024._encrypt(pt_int) + ct = long_to_bytes(ct_int, 128) + self.assertEqual("---", cipher.decrypt(ct, "---")) + + def testEncryptVerify1(self): + # Encrypt/Verify messages of length [0..RSAlen-11] + # and therefore padding [8..117] + for pt_len in range(0,128-11+1): + pt = self.rng(pt_len) + cipher = PKCS.new(self.key1024) + ct = cipher.encrypt(pt) + pt2 = cipher.decrypt(ct, "---") + self.assertEqual(pt,pt2) + + def testByteArray(self): + pt = b"XER" + cipher = PKCS.new(self.key1024) + ct = cipher.encrypt(bytearray(pt)) + pt2 = cipher.decrypt(bytearray(ct), "---") + self.assertEqual(pt, pt2) + + def 
testMemoryview(self): + pt = b"XER" + cipher = PKCS.new(self.key1024) + ct = cipher.encrypt(memoryview(bytearray(pt))) + pt2 = cipher.decrypt(memoryview(bytearray(ct)), "---") + self.assertEqual(pt, pt2) + + +class TestVectorsWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings, skip_slow_tests): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._skip_slow_tests = skip_slow_tests + self._id = "None" + + def load_tests(self, filename): + + def filter_rsa(group): + return RSA.import_key(group['privateKeyPem']) + + result = load_test_vectors_wycheproof(("Cipher", "wycheproof"), + filename, + "Wycheproof PKCS#1v1.5 (%s)" % filename, + group_tag={'rsa_key': filter_rsa} + ) + return result + + def setUp(self): + self.tv = [] + self.tv.extend(self.load_tests("rsa_pkcs1_2048_test.json")) + if not self._skip_slow_tests: + self.tv.extend(self.load_tests("rsa_pkcs1_3072_test.json")) + self.tv.extend(self.load_tests("rsa_pkcs1_4096_test.json")) + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_decrypt(self, tv): + self._id = "Wycheproof Decrypt PKCS#1v1.5 Test #%s" % tv.id + + cipher = PKCS.new(tv.rsa_key) + try: + pt = cipher.decrypt(tv.ct, sentinel=b'---') + except ValueError: + assert not tv.valid + else: + if pt == b'---': + assert not tv.valid + else: + assert tv.valid + self.assertEqual(pt, tv.msg) + self.warn(tv) + + def runTest(self): + + for tv in self.tv: + self.test_decrypt(tv) + + +def get_tests(config={}): + skip_slow_tests = not config.get('slow_tests') + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(PKCS1_15_Tests) + tests += [TestVectorsWycheproof(wycheproof_warnings, skip_slow_tests)] + return tests + +if __name__ == '__main__': + suite = lambda: 
unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py new file mode 100644 index 0000000..1711581 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py @@ -0,0 +1,506 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/test_pkcs1_oaep.py: Self-test for PKCS#1 OAEP encryption +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +import unittest + +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex +from Crypto.SelfTest.loader import load_test_vectors_wycheproof + +from Crypto.PublicKey import RSA +from Crypto.Cipher import PKCS1_OAEP as PKCS +from Crypto.Hash import MD2, MD5, SHA1, SHA256, RIPEMD160, SHA224, SHA384, SHA512 +from Crypto import Random +from Crypto.Signature.pss import MGF1 + +from Crypto.Util.py3compat import b, bchr + + +def rws(t): + """Remove white spaces, tabs, and new lines from a string""" + for c in ['\n', '\t', ' ']: + t = t.replace(c, '') + return t + + +def t2b(t): + """Convert a text string with bytes in hex form to a byte string""" + clean = rws(t) + if len(clean) % 2 == 1: + raise ValueError("Even number of characters expected") + return a2b_hex(clean) + + +class PKCS1_OAEP_Tests(unittest.TestCase): + + def setUp(self): + self.rng = Random.new().read + self.key1024 = RSA.generate(1024, self.rng) + + # List of tuples with test data for PKCS#1 OAEP + # Each tuple is made up by: + # Item #0: dictionary with RSA key component + # Item #1: plaintext + # Item #2: ciphertext + # Item #3: random data (=seed) + # Item #4: hash object + + _testData = ( + + # + # From in oaep-int.txt to be found in + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''bb f8 2f 09 06 82 ce 9c 23 38 ac 2b 9d a8 71 f7 + 36 8d 07 ee d4 10 43 a4 40 d6 b6 f0 74 54 f5 1f + b8 df ba af 03 5c 02 ab 61 ea 48 ce eb 6f cd 48 + 76 ed 52 0d 60 e1 ec 46 19 71 9d 8a 5b 8b 80 7f + af b8 e0 a3 df c7 37 72 3e e6 b4 b7 d9 3a 25 84 + ee 6a 64 9d 06 09 53 74 88 34 b2 45 45 98 39 4e + e0 aa b1 2d 7b 61 a5 1f 52 7a 9a 41 f6 c1 68 7f + e2 53 72 98 ca 2a 8f 59 46 f8 e5 fd 09 1d bd cb''', + # Public key + 'e':'11', + # In the test vector, only p and q were given... 
+ # d is computed offline as e^{-1} mod (p-1)(q-1) + 'd':'''a5dafc5341faf289c4b988db30c1cdf83f31251e0 + 668b42784813801579641b29410b3c7998d6bc465745e5c3 + 92669d6870da2c082a939e37fdcb82ec93edac97ff3ad595 + 0accfbc111c76f1a9529444e56aaf68c56c092cd38dc3bef + 5d20a939926ed4f74a13eddfbe1a1cecc4894af9428c2b7b + 8883fe4463a4bc85b1cb3c1''' + } + , + # Plaintext + '''d4 36 e9 95 69 fd 32 a7 c8 a0 5b bc 90 d3 2c 49''', + # Ciphertext + '''12 53 e0 4d c0 a5 39 7b b4 4a 7a b8 7e 9b f2 a0 + 39 a3 3d 1e 99 6f c8 2a 94 cc d3 00 74 c9 5d f7 + 63 72 20 17 06 9e 52 68 da 5d 1c 0b 4f 87 2c f6 + 53 c1 1d f8 23 14 a6 79 68 df ea e2 8d ef 04 bb + 6d 84 b1 c3 1d 65 4a 19 70 e5 78 3b d6 eb 96 a0 + 24 c2 ca 2f 4a 90 fe 9f 2e f5 c9 c1 40 e5 bb 48 + da 95 36 ad 87 00 c8 4f c9 13 0a de a7 4e 55 8d + 51 a7 4d df 85 d8 b5 0d e9 68 38 d6 06 3e 09 55''', + # Random + '''aa fd 12 f6 59 ca e6 34 89 b4 79 e5 07 6d de c2 + f0 6c b5 8f''', + # Hash + SHA1, + ), + + # + # From in oaep-vect.txt to be found in Example 1.1 + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''a8 b3 b2 84 af 8e b5 0b 38 70 34 a8 60 f1 46 c4 + 91 9f 31 87 63 cd 6c 55 98 c8 ae 48 11 a1 e0 ab + c4 c7 e0 b0 82 d6 93 a5 e7 fc ed 67 5c f4 66 85 + 12 77 2c 0c bc 64 a7 42 c6 c6 30 f5 33 c8 cc 72 + f6 2a e8 33 c4 0b f2 58 42 e9 84 bb 78 bd bf 97 + c0 10 7d 55 bd b6 62 f5 c4 e0 fa b9 84 5c b5 14 + 8e f7 39 2d d3 aa ff 93 ae 1e 6b 66 7b b3 d4 24 + 76 16 d4 f5 ba 10 d4 cf d2 26 de 88 d3 9f 16 fb''', + 'e':'''01 00 01''', + 'd':'''53 33 9c fd b7 9f c8 46 6a 65 5c 73 16 ac a8 5c + 55 fd 8f 6d d8 98 fd af 11 95 17 ef 4f 52 e8 fd + 8e 25 8d f9 3f ee 18 0f a0 e4 ab 29 69 3c d8 3b + 15 2a 55 3d 4a c4 d1 81 2b 8b 9f a5 af 0e 7f 55 + fe 73 04 df 41 57 09 26 f3 31 1f 15 c4 d6 5a 73 + 2c 48 31 16 ee 3d 3d 2d 0a f3 54 9a d9 bf 7c bf + b7 8a d8 84 f8 4d 5b eb 04 72 4d c7 36 9b 31 de + f3 7d 0c f5 39 e9 cf cd d3 de 65 37 29 ea d5 d1 ''' + } + , + # Plaintext + '''66 28 19 4e 12 07 3d b0 3b a9 4c da 
9e f9 53 23 + 97 d5 0d ba 79 b9 87 00 4a fe fe 34''', + # Ciphertext + '''35 4f e6 7b 4a 12 6d 5d 35 fe 36 c7 77 79 1a 3f + 7b a1 3d ef 48 4e 2d 39 08 af f7 22 fa d4 68 fb + 21 69 6d e9 5d 0b e9 11 c2 d3 17 4f 8a fc c2 01 + 03 5f 7b 6d 8e 69 40 2d e5 45 16 18 c2 1a 53 5f + a9 d7 bf c5 b8 dd 9f c2 43 f8 cf 92 7d b3 13 22 + d6 e8 81 ea a9 1a 99 61 70 e6 57 a0 5a 26 64 26 + d9 8c 88 00 3f 84 77 c1 22 70 94 a0 d9 fa 1e 8c + 40 24 30 9c e1 ec cc b5 21 00 35 d4 7a c7 2e 8a''', + # Random + '''18 b7 76 ea 21 06 9d 69 77 6a 33 e9 6b ad 48 e1 + dd a0 a5 ef''', + SHA1 + ), + + # + # From in oaep-vect.txt to be found in Example 2.1 + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''01 94 7c 7f ce 90 42 5f 47 27 9e 70 85 1f 25 d5 + e6 23 16 fe 8a 1d f1 93 71 e3 e6 28 e2 60 54 3e + 49 01 ef 60 81 f6 8c 0b 81 41 19 0d 2a e8 da ba + 7d 12 50 ec 6d b6 36 e9 44 ec 37 22 87 7c 7c 1d + 0a 67 f1 4b 16 94 c5 f0 37 94 51 a4 3e 49 a3 2d + de 83 67 0b 73 da 91 a1 c9 9b c2 3b 43 6a 60 05 + 5c 61 0f 0b af 99 c1 a0 79 56 5b 95 a3 f1 52 66 + 32 d1 d4 da 60 f2 0e da 25 e6 53 c4 f0 02 76 6f + 45''', + 'e':'''01 00 01''', + 'd':'''08 23 f2 0f ad b5 da 89 08 8a 9d 00 89 3e 21 fa + 4a 1b 11 fb c9 3c 64 a3 be 0b aa ea 97 fb 3b 93 + c3 ff 71 37 04 c1 9c 96 3c 1d 10 7a ae 99 05 47 + 39 f7 9e 02 e1 86 de 86 f8 7a 6d de fe a6 d8 cc + d1 d3 c8 1a 47 bf a7 25 5b e2 06 01 a4 a4 b2 f0 + 8a 16 7b 5e 27 9d 71 5b 1b 45 5b dd 7e ab 24 59 + 41 d9 76 8b 9a ce fb 3c cd a5 95 2d a3 ce e7 25 + 25 b4 50 16 63 a8 ee 15 c9 e9 92 d9 24 62 fe 39''' + }, + # Plaintext + '''8f f0 0c aa 60 5c 70 28 30 63 4d 9a 6c 3d 42 c6 + 52 b5 8c f1 d9 2f ec 57 0b ee e7''', + # Ciphertext + '''01 81 af 89 22 b9 fc b4 d7 9d 92 eb e1 98 15 99 + 2f c0 c1 43 9d 8b cd 49 13 98 a0 f4 ad 3a 32 9a + 5b d9 38 55 60 db 53 26 83 c8 b7 da 04 e4 b1 2a + ed 6a ac df 47 1c 34 c9 cd a8 91 ad dc c2 df 34 + 56 65 3a a6 38 2e 9a e5 9b 54 45 52 57 eb 09 9d + 56 2b be 10 45 3f 2b 6d 13 c5 9c 02 e1 0f 1f 8a 
+ bb 5d a0 d0 57 09 32 da cf 2d 09 01 db 72 9d 0f + ef cc 05 4e 70 96 8e a5 40 c8 1b 04 bc ae fe 72 + 0e''', + # Random + '''8c 40 7b 5e c2 89 9e 50 99 c5 3e 8c e7 93 bf 94 + e7 1b 17 82''', + SHA1 + ), + + # + # From in oaep-vect.txt to be found in Example 10.1 + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''ae 45 ed 56 01 ce c6 b8 cc 05 f8 03 93 5c 67 4d + db e0 d7 5c 4c 09 fd 79 51 fc 6b 0c ae c3 13 a8 + df 39 97 0c 51 8b ff ba 5e d6 8f 3f 0d 7f 22 a4 + 02 9d 41 3f 1a e0 7e 4e be 9e 41 77 ce 23 e7 f5 + 40 4b 56 9e 4e e1 bd cf 3c 1f b0 3e f1 13 80 2d + 4f 85 5e b9 b5 13 4b 5a 7c 80 85 ad ca e6 fa 2f + a1 41 7e c3 76 3b e1 71 b0 c6 2b 76 0e de 23 c1 + 2a d9 2b 98 08 84 c6 41 f5 a8 fa c2 6b da d4 a0 + 33 81 a2 2f e1 b7 54 88 50 94 c8 25 06 d4 01 9a + 53 5a 28 6a fe b2 71 bb 9b a5 92 de 18 dc f6 00 + c2 ae ea e5 6e 02 f7 cf 79 fc 14 cf 3b dc 7c d8 + 4f eb bb f9 50 ca 90 30 4b 22 19 a7 aa 06 3a ef + a2 c3 c1 98 0e 56 0c d6 4a fe 77 95 85 b6 10 76 + 57 b9 57 85 7e fd e6 01 09 88 ab 7d e4 17 fc 88 + d8 f3 84 c4 e6 e7 2c 3f 94 3e 0c 31 c0 c4 a5 cc + 36 f8 79 d8 a3 ac 9d 7d 59 86 0e aa da 6b 83 bb''', + 'e':'''01 00 01''', + 'd':'''05 6b 04 21 6f e5 f3 54 ac 77 25 0a 4b 6b 0c 85 + 25 a8 5c 59 b0 bd 80 c5 64 50 a2 2d 5f 43 8e 59 + 6a 33 3a a8 75 e2 91 dd 43 f4 8c b8 8b 9d 5f c0 + d4 99 f9 fc d1 c3 97 f9 af c0 70 cd 9e 39 8c 8d + 19 e6 1d b7 c7 41 0a 6b 26 75 df bf 5d 34 5b 80 + 4d 20 1a dd 50 2d 5c e2 df cb 09 1c e9 99 7b be + be 57 30 6f 38 3e 4d 58 81 03 f0 36 f7 e8 5d 19 + 34 d1 52 a3 23 e4 a8 db 45 1d 6f 4a 5b 1b 0f 10 + 2c c1 50 e0 2f ee e2 b8 8d ea 4a d4 c1 ba cc b2 + 4d 84 07 2d 14 e1 d2 4a 67 71 f7 40 8e e3 05 64 + fb 86 d4 39 3a 34 bc f0 b7 88 50 1d 19 33 03 f1 + 3a 22 84 b0 01 f0 f6 49 ea f7 93 28 d4 ac 5c 43 + 0a b4 41 49 20 a9 46 0e d1 b7 bc 40 ec 65 3e 87 + 6d 09 ab c5 09 ae 45 b5 25 19 01 16 a0 c2 61 01 + 84 82 98 50 9c 1c 3b f3 a4 83 e7 27 40 54 e1 5e + 97 07 50 36 e9 89 f6 09 32 80 7b 52 57 75 1e 79''' 
+ }, + # Plaintext + '''8b ba 6b f8 2a 6c 0f 86 d5 f1 75 6e 97 95 68 70 + b0 89 53 b0 6b 4e b2 05 bc 16 94 ee''', + # Ciphertext + '''53 ea 5d c0 8c d2 60 fb 3b 85 85 67 28 7f a9 15 + 52 c3 0b 2f eb fb a2 13 f0 ae 87 70 2d 06 8d 19 + ba b0 7f e5 74 52 3d fb 42 13 9d 68 c3 c5 af ee + e0 bf e4 cb 79 69 cb f3 82 b8 04 d6 e6 13 96 14 + 4e 2d 0e 60 74 1f 89 93 c3 01 4b 58 b9 b1 95 7a + 8b ab cd 23 af 85 4f 4c 35 6f b1 66 2a a7 2b fc + c7 e5 86 55 9d c4 28 0d 16 0c 12 67 85 a7 23 eb + ee be ff 71 f1 15 94 44 0a ae f8 7d 10 79 3a 87 + 74 a2 39 d4 a0 4c 87 fe 14 67 b9 da f8 52 08 ec + 6c 72 55 79 4a 96 cc 29 14 2f 9a 8b d4 18 e3 c1 + fd 67 34 4b 0c d0 82 9d f3 b2 be c6 02 53 19 62 + 93 c6 b3 4d 3f 75 d3 2f 21 3d d4 5c 62 73 d5 05 + ad f4 cc ed 10 57 cb 75 8f c2 6a ee fa 44 12 55 + ed 4e 64 c1 99 ee 07 5e 7f 16 64 61 82 fd b4 64 + 73 9b 68 ab 5d af f0 e6 3e 95 52 01 68 24 f0 54 + bf 4d 3c 8c 90 a9 7b b6 b6 55 32 84 eb 42 9f cc''', + # Random + '''47 e1 ab 71 19 fe e5 6c 95 ee 5e aa d8 6f 40 d0 + aa 63 bd 33''', + SHA1 + ), + ) + + def testEncrypt1(self): + # Verify encryption using all test vectors + for test in self._testData: + # Build the key + comps = [int(rws(test[0][x]), 16) for x in ('n', 'e')] + key = RSA.construct(comps) + + # RNG that takes its random numbers from a pool given + # at initialization + class randGen: + + def __init__(self, data): + self.data = data + self.idx = 0 + + def __call__(self, N): + r = self.data[self.idx:N] + self.idx += N + return r + + # The real test + cipher = PKCS.new(key, test[4], randfunc=randGen(t2b(test[3]))) + ct = cipher.encrypt(t2b(test[1])) + self.assertEqual(ct, t2b(test[2])) + + def testEncrypt2(self): + # Verify that encryption fails if plaintext is too long + pt = '\x00'*(128-2*20-2+1) + cipher = PKCS.new(self.key1024) + self.assertRaises(ValueError, cipher.encrypt, pt) + + def testDecrypt1(self): + # Verify decryption using all test vectors + for test in self._testData: + # Build the key + comps = [int(rws(test[0][x]),16) 
for x in ('n', 'e', 'd')] + key = RSA.construct(comps) + # The real test + cipher = PKCS.new(key, test[4]) + pt = cipher.decrypt(t2b(test[2])) + self.assertEqual(pt, t2b(test[1])) + + def testDecrypt2(self): + # Simplest possible negative tests + for ct_size in (127, 128, 129): + cipher = PKCS.new(self.key1024) + self.assertRaises(ValueError, cipher.decrypt, bchr(0x00)*ct_size) + + def testEncryptDecrypt1(self): + # Encrypt/Decrypt messages of length [0..128-2*20-2] + for pt_len in range(0, 128-2*20-2): + pt = self.rng(pt_len) + cipher = PKCS.new(self.key1024) + ct = cipher.encrypt(pt) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def testEncryptDecrypt2(self): + # Helper function to monitor what's requested from RNG + global asked + + def localRng(N): + global asked + asked += N + return self.rng(N) + + # Verify that OAEP is friendly to all hashes + for hashmod in (MD2, MD5, SHA1, SHA256, RIPEMD160): + # Verify that encrypt() asks for as many random bytes + # as the hash output size + asked = 0 + pt = self.rng(40) + cipher = PKCS.new(self.key1024, hashmod, randfunc=localRng) + ct = cipher.encrypt(pt) + self.assertEqual(cipher.decrypt(ct), pt) + self.assertEqual(asked, hashmod.digest_size) + + def testEncryptDecrypt3(self): + # Verify that OAEP supports labels + pt = self.rng(35) + xlabel = self.rng(22) + cipher = PKCS.new(self.key1024, label=xlabel) + ct = cipher.encrypt(pt) + self.assertEqual(cipher.decrypt(ct), pt) + + def testEncryptDecrypt4(self): + # Verify that encrypt() uses the custom MGF + global mgfcalls + # Helper function to monitor what's requested from MGF + + def newMGF(seed, maskLen): + global mgfcalls + mgfcalls += 1 + return b'\x00' * maskLen + + mgfcalls = 0 + pt = self.rng(32) + cipher = PKCS.new(self.key1024, mgfunc=newMGF) + ct = cipher.encrypt(pt) + self.assertEqual(mgfcalls, 2) + self.assertEqual(cipher.decrypt(ct), pt) + + def testByteArray(self): + pt = b("XER") + cipher = PKCS.new(self.key1024) + ct = 
cipher.encrypt(bytearray(pt)) + pt2 = cipher.decrypt(bytearray(ct)) + self.assertEqual(pt, pt2) + + def testMemoryview(self): + pt = b("XER") + cipher = PKCS.new(self.key1024) + ct = cipher.encrypt(memoryview(bytearray(pt))) + pt2 = cipher.decrypt(memoryview(bytearray(ct))) + self.assertEqual(pt, pt2) + + +class TestVectorsWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings, skip_slow_tests): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._skip_slow_tests = skip_slow_tests + self._id = "None" + + def load_tests(self, filename): + + def filter_rsa(group): + return RSA.import_key(group['privateKeyPem']) + + def filter_sha(group): + if group['sha'] == "SHA-1": + return SHA1 + elif group['sha'] == "SHA-224": + return SHA224 + elif group['sha'] == "SHA-256": + return SHA256 + elif group['sha'] == "SHA-384": + return SHA384 + elif group['sha'] == "SHA-512": + return SHA512 + else: + raise ValueError("Unknown sha " + group['sha']) + + def filter_mgf(group): + if group['mgfSha'] == "SHA-1": + return lambda x, y: MGF1(x, y, SHA1) + elif group['mgfSha'] == "SHA-224": + return lambda x, y: MGF1(x, y, SHA224) + elif group['mgfSha'] == "SHA-256": + return lambda x, y: MGF1(x, y, SHA256) + elif group['mgfSha'] == "SHA-384": + return lambda x, y: MGF1(x, y, SHA384) + elif group['mgfSha'] == "SHA-512": + return lambda x, y: MGF1(x, y, SHA512) + else: + raise ValueError("Unknown mgf/sha " + group['mgfSha']) + + def filter_algo(group): + return "%s with MGF1/%s" % (group['sha'], group['mgfSha']) + + result = load_test_vectors_wycheproof(("Cipher", "wycheproof"), + filename, + "Wycheproof PKCS#1 OAEP (%s)" % filename, + group_tag={'rsa_key': filter_rsa, + 'hash_mod': filter_sha, + 'mgf': filter_mgf, + 'algo': filter_algo} + ) + return result + + def setUp(self): + self.tv = [] + self.tv.extend(self.load_tests("rsa_oaep_2048_sha1_mgf1sha1_test.json")) + 
self.tv.extend(self.load_tests("rsa_oaep_2048_sha224_mgf1sha1_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_2048_sha224_mgf1sha224_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_2048_sha256_mgf1sha1_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_2048_sha256_mgf1sha256_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_2048_sha384_mgf1sha1_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_2048_sha384_mgf1sha384_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_2048_sha512_mgf1sha1_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_2048_sha512_mgf1sha512_test.json")) + if not self._skip_slow_tests: + self.tv.extend(self.load_tests("rsa_oaep_3072_sha256_mgf1sha1_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_3072_sha256_mgf1sha256_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_3072_sha512_mgf1sha1_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_3072_sha512_mgf1sha512_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_4096_sha256_mgf1sha1_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_4096_sha256_mgf1sha256_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_4096_sha512_mgf1sha1_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_4096_sha512_mgf1sha512_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_4096_sha512_mgf1sha512_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_misc_test.json")) + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_decrypt(self, tv): + self._id = "Wycheproof Decrypt %s Test #%s" % (tv.algo, tv.id) + + cipher = PKCS.new(tv.rsa_key, hashAlgo=tv.hash_mod, mgfunc=tv.mgf, label=tv.label) + try: + pt = cipher.decrypt(tv.ct) + except ValueError: + assert not tv.valid + else: + assert tv.valid + self.assertEqual(pt, tv.msg) + self.warn(tv) + + def runTest(self): + + for tv 
in self.tv: + self.test_decrypt(tv) + + +def get_tests(config={}): + skip_slow_tests = not config.get('slow_tests') + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(PKCS1_OAEP_Tests) + tests += [TestVectorsWycheproof(wycheproof_warnings, skip_slow_tests)] + return tests + + +if __name__ == '__main__': + def suite(): + unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/__init__.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/__init__.py new file mode 100644 index 0000000..1933f2a --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/__init__.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/__init__.py: Self-test for hash modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test for hash modules""" + +__revision__ = "$Id$" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Hash import test_HMAC; tests += test_HMAC.get_tests(config=config) + from Crypto.SelfTest.Hash import test_CMAC; tests += test_CMAC.get_tests(config=config) + from Crypto.SelfTest.Hash import test_MD2; tests += test_MD2.get_tests(config=config) + from Crypto.SelfTest.Hash import test_MD4; tests += test_MD4.get_tests(config=config) + from Crypto.SelfTest.Hash import test_MD5; tests += test_MD5.get_tests(config=config) + from Crypto.SelfTest.Hash import test_RIPEMD160; tests += test_RIPEMD160.get_tests(config=config) + from Crypto.SelfTest.Hash import test_SHA1; tests += test_SHA1.get_tests(config=config) + from Crypto.SelfTest.Hash import test_SHA256; tests += test_SHA256.get_tests(config=config) + from Crypto.SelfTest.Hash import test_SHA3_224; tests += test_SHA3_224.get_tests(config=config) + from Crypto.SelfTest.Hash import test_SHA3_256; tests += test_SHA3_256.get_tests(config=config) + from Crypto.SelfTest.Hash import test_SHA3_384; tests += test_SHA3_384.get_tests(config=config) + from Crypto.SelfTest.Hash import test_SHA3_512; tests += test_SHA3_512.get_tests(config=config) + from Crypto.SelfTest.Hash import test_keccak; tests += test_keccak.get_tests(config=config) + from Crypto.SelfTest.Hash import test_SHAKE; tests += test_SHAKE.get_tests(config=config) + try: + from Crypto.SelfTest.Hash import test_SHA224; tests += test_SHA224.get_tests(config=config) + from Crypto.SelfTest.Hash import test_SHA384; tests += test_SHA384.get_tests(config=config) + from Crypto.SelfTest.Hash import test_SHA512; tests += test_SHA512.get_tests(config=config) + except ImportError: + import sys + sys.stderr.write("SelfTest: warning: not testing SHA224/SHA384/SHA512 modules (not available)\n") + from Crypto.SelfTest.Hash import test_BLAKE2; tests += test_BLAKE2.get_tests(config=config) + 
from Crypto.SelfTest.Hash import test_Poly1305; tests += test_Poly1305.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/common.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/common.py new file mode 100644 index 0000000..b89db84 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/common.py @@ -0,0 +1,290 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/common.py: Common code for Crypto.SelfTest.Hash +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-testing for PyCrypto hash modules""" + +import re +import sys +import unittest +import binascii +import Crypto.Hash +from binascii import hexlify, unhexlify +from Crypto.Util.py3compat import b, tobytes +from Crypto.Util.strxor import strxor_c + +def t2b(hex_string): + shorter = re.sub(br'\s+', b'', tobytes(hex_string)) + return unhexlify(shorter) + + +class HashDigestSizeSelfTest(unittest.TestCase): + + def __init__(self, hashmod, description, expected, extra_params): + unittest.TestCase.__init__(self) + self.hashmod = hashmod + self.expected = expected + self.description = description + self.extra_params = extra_params + + def shortDescription(self): + return self.description + + def runTest(self): + if "truncate" not in self.extra_params: + self.failUnless(hasattr(self.hashmod, "digest_size")) + self.assertEquals(self.hashmod.digest_size, self.expected) + h = self.hashmod.new(**self.extra_params) + self.failUnless(hasattr(h, "digest_size")) + self.assertEquals(h.digest_size, self.expected) + + +class HashSelfTest(unittest.TestCase): + + def __init__(self, hashmod, description, expected, input, extra_params): + unittest.TestCase.__init__(self) + self.hashmod = hashmod + self.expected = expected.lower() + self.input = input + self.description = description + self.extra_params = extra_params + + def shortDescription(self): + return self.description + + def runTest(self): + h = self.hashmod.new(**self.extra_params) + h.update(self.input) + + out1 = binascii.b2a_hex(h.digest()) + out2 = h.hexdigest() + + h = self.hashmod.new(self.input, **self.extra_params) + + out3 = h.hexdigest() + out4 = binascii.b2a_hex(h.digest()) + + # PY3K: hexdigest() should return str(), and digest() bytes + self.assertEqual(self.expected, out1) # h = .new(); h.update(data); h.digest() + if sys.version_info[0] == 2: + self.assertEqual(self.expected, out2) # h = .new(); h.update(data); h.hexdigest() + 
self.assertEqual(self.expected, out3) # h = .new(data); h.hexdigest() + else: + self.assertEqual(self.expected.decode(), out2) # h = .new(); h.update(data); h.hexdigest() + self.assertEqual(self.expected.decode(), out3) # h = .new(data); h.hexdigest() + self.assertEqual(self.expected, out4) # h = .new(data); h.digest() + + # Verify that the .new() method produces a fresh hash object, except + # for MD5 and SHA1, which are hashlib objects. (But test any .new() + # method that does exist.) + if self.hashmod.__name__ not in ('Crypto.Hash.MD5', 'Crypto.Hash.SHA1') or hasattr(h, 'new'): + h2 = h.new() + h2.update(self.input) + out5 = binascii.b2a_hex(h2.digest()) + self.assertEqual(self.expected, out5) + + +class HashTestOID(unittest.TestCase): + def __init__(self, hashmod, oid, extra_params): + unittest.TestCase.__init__(self) + self.hashmod = hashmod + self.oid = oid + self.extra_params = extra_params + + def runTest(self): + h = self.hashmod.new(**self.extra_params) + self.assertEqual(h.oid, self.oid) + + +class ByteArrayTest(unittest.TestCase): + + def __init__(self, module, extra_params): + unittest.TestCase.__init__(self) + self.module = module + self.extra_params = extra_params + + def runTest(self): + data = b("\x00\x01\x02") + + # Data can be a bytearray (during initialization) + ba = bytearray(data) + + h1 = self.module.new(data, **self.extra_params) + h2 = self.module.new(ba, **self.extra_params) + ba[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + # Data can be a bytearray (during operation) + ba = bytearray(data) + + h1 = self.module.new(**self.extra_params) + h2 = self.module.new(**self.extra_params) + + h1.update(data) + h2.update(ba) + + ba[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + +class MemoryViewTest(unittest.TestCase): + + def __init__(self, module, extra_params): + unittest.TestCase.__init__(self) + self.module = module + self.extra_params = extra_params + + def runTest(self): + + data = b"\x00\x01\x02" + + def 
get_mv_ro(data): + return memoryview(data) + + def get_mv_rw(data): + return memoryview(bytearray(data)) + + for get_mv in get_mv_ro, get_mv_rw: + + # Data can be a memoryview (during initialization) + mv = get_mv(data) + + h1 = self.module.new(data, **self.extra_params) + h2 = self.module.new(mv, **self.extra_params) + if not mv.readonly: + mv[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + # Data can be a memoryview (during operation) + mv = get_mv(data) + + h1 = self.module.new(**self.extra_params) + h2 = self.module.new(**self.extra_params) + h1.update(data) + h2.update(mv) + if not mv.readonly: + mv[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + +class MACSelfTest(unittest.TestCase): + + def __init__(self, module, description, result, data, key, params): + unittest.TestCase.__init__(self) + self.module = module + self.result = t2b(result) + self.data = t2b(data) + self.key = t2b(key) + self.params = params + self.description = description + + def shortDescription(self): + return self.description + + def runTest(self): + + result_hex = hexlify(self.result) + + # Verify result + h = self.module.new(self.key, **self.params) + h.update(self.data) + self.assertEqual(self.result, h.digest()) + self.assertEqual(hexlify(self.result).decode('ascii'), h.hexdigest()) + + # Verify that correct MAC does not raise any exception + h.verify(self.result) + h.hexverify(result_hex) + + # Verify that incorrect MAC does raise ValueError exception + wrong_mac = strxor_c(self.result, 255) + self.assertRaises(ValueError, h.verify, wrong_mac) + self.assertRaises(ValueError, h.hexverify, "4556") + + # Verify again, with data passed to new() + h = self.module.new(self.key, self.data, **self.params) + self.assertEqual(self.result, h.digest()) + self.assertEqual(hexlify(self.result).decode('ascii'), h.hexdigest()) + + # Test .copy() + try: + h = self.module.new(self.key, self.data, **self.params) + h2 = h.copy() + h3 = h.copy() + + # Verify that changing the 
copy does not change the original + h2.update(b"bla") + self.assertEqual(h3.digest(), self.result) + + # Verify that both can reach the same state + h.update(b"bla") + self.assertEqual(h.digest(), h2.digest()) + except NotImplementedError: + pass + + # PY3K: Check that hexdigest() returns str and digest() returns bytes + self.assertTrue(isinstance(h.digest(), type(b""))) + self.assertTrue(isinstance(h.hexdigest(), type(""))) + + # PY3K: Check that .hexverify() accepts bytes or str + h.hexverify(h.hexdigest()) + h.hexverify(h.hexdigest().encode('ascii')) + + +def make_hash_tests(module, module_name, test_data, digest_size, oid=None, + extra_params={}): + tests = [] + for i in range(len(test_data)): + row = test_data[i] + (expected, input) = map(tobytes,row[0:2]) + if len(row) < 3: + description = repr(input) + else: + description = row[2] + name = "%s #%d: %s" % (module_name, i+1, description) + tests.append(HashSelfTest(module, name, expected, input, extra_params)) + + name = "%s #%d: digest_size" % (module_name, len(test_data) + 1) + tests.append(HashDigestSizeSelfTest(module, name, digest_size, extra_params)) + + if oid is not None: + tests.append(HashTestOID(module, oid, extra_params)) + + tests.append(ByteArrayTest(module, extra_params)) + + tests.append(MemoryViewTest(module, extra_params)) + + return tests + + +def make_mac_tests(module, module_name, test_data): + tests = [] + for i, row in enumerate(test_data): + if len(row) == 4: + (key, data, results, description, params) = list(row) + [ {} ] + else: + (key, data, results, description, params) = row + name = "%s #%d: %s" % (module_name, i+1, description) + tests.append(MACSelfTest(module, name, results, data, key, params)) + return tests + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_BLAKE2.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_BLAKE2.py new file mode 100644 index 0000000..f32163d --- /dev/null +++ 
b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_BLAKE2.py @@ -0,0 +1,482 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import os +import re +import unittest +import warnings +from binascii import unhexlify, hexlify + +from Crypto.Util.py3compat import tobytes +from Crypto.Util.strxor import strxor_c +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Hash import BLAKE2b, BLAKE2s + + +class Blake2Test(unittest.TestCase): + + def test_new_positive(self): + + h = self.BLAKE2.new(digest_bits=self.max_bits) + for new_func in self.BLAKE2.new, h.new: + + for dbits in range(8, self.max_bits + 1, 8): + hobj = new_func(digest_bits=dbits) + self.assertEqual(hobj.digest_size, dbits // 8) + + for dbytes in range(1, self.max_bytes + 1): + hobj = new_func(digest_bytes=dbytes) + self.assertEqual(hobj.digest_size, dbytes) + + digest1 = new_func(data=b"\x90", digest_bytes=self.max_bytes).digest() + digest2 = new_func(digest_bytes=self.max_bytes).update(b"\x90").digest() + self.assertEqual(digest1, digest2) + + new_func(data=b"A", key=b"5", digest_bytes=self.max_bytes) + + hobj = h.new() + self.assertEqual(hobj.digest_size, self.max_bytes) + + def test_new_negative(self): + + h = self.BLAKE2.new(digest_bits=self.max_bits) + for new_func in self.BLAKE2.new, h.new: + self.assertRaises(TypeError, new_func, + digest_bytes=self.max_bytes, + digest_bits=self.max_bits) + self.assertRaises(ValueError, new_func, digest_bytes=0) + self.assertRaises(ValueError, new_func, + digest_bytes=self.max_bytes + 1) + self.assertRaises(ValueError, new_func, digest_bits=7) + self.assertRaises(ValueError, new_func, digest_bits=15) + self.assertRaises(ValueError, new_func, + digest_bits=self.max_bits + 1) + self.assertRaises(TypeError, new_func, + digest_bytes=self.max_bytes, + key=u"string") + self.assertRaises(TypeError, new_func, + digest_bytes=self.max_bytes, + data=u"string") + + def test_default_digest_size(self): + digest = self.BLAKE2.new(data=b'abc').digest() + self.assertEquals(len(digest), self.max_bytes) + + def 
test_update(self): + pieces = [b"\x0A" * 200, b"\x14" * 300] + h = self.BLAKE2.new(digest_bytes=self.max_bytes) + h.update(pieces[0]).update(pieces[1]) + digest = h.digest() + h = self.BLAKE2.new(digest_bytes=self.max_bytes) + h.update(pieces[0] + pieces[1]) + self.assertEqual(h.digest(), digest) + + def test_update_negative(self): + h = self.BLAKE2.new(digest_bytes=self.max_bytes) + self.assertRaises(TypeError, h.update, u"string") + + def test_digest(self): + h = self.BLAKE2.new(digest_bytes=self.max_bytes) + digest = h.digest() + + # hexdigest does not change the state + self.assertEqual(h.digest(), digest) + # digest returns a byte string + self.failUnless(isinstance(digest, type(b"digest"))) + + def test_update_after_digest(self): + msg = b"rrrrttt" + + # Normally, update() cannot be done after digest() + h = self.BLAKE2.new(digest_bits=256, data=msg[:4]) + dig1 = h.digest() + self.assertRaises(TypeError, h.update, msg[4:]) + dig2 = self.BLAKE2.new(digest_bits=256, data=msg).digest() + + # With the proper flag, it is allowed + h = self.BLAKE2.new(digest_bits=256, data=msg[:4], update_after_digest=True) + self.assertEquals(h.digest(), dig1) + # ... 
and the subsequent digest applies to the entire message + # up to that point + h.update(msg[4:]) + self.assertEquals(h.digest(), dig2) + + def test_hex_digest(self): + mac = self.BLAKE2.new(digest_bits=self.max_bits) + digest = mac.digest() + hexdigest = mac.hexdigest() + + # hexdigest is equivalent to digest + self.assertEqual(hexlify(digest), tobytes(hexdigest)) + # hexdigest does not change the state + self.assertEqual(mac.hexdigest(), hexdigest) + # hexdigest returns a string + self.failUnless(isinstance(hexdigest, type("digest"))) + + def test_verify(self): + h = self.BLAKE2.new(digest_bytes=self.max_bytes, key=b"4") + mac = h.digest() + h.verify(mac) + wrong_mac = strxor_c(mac, 255) + self.assertRaises(ValueError, h.verify, wrong_mac) + + def test_hexverify(self): + h = self.BLAKE2.new(digest_bytes=self.max_bytes, key=b"4") + mac = h.hexdigest() + h.hexverify(mac) + self.assertRaises(ValueError, h.hexverify, "4556") + + def test_oid(self): + + prefix = "1.3.6.1.4.1.1722.12.2." + self.oid_variant + "." 
+ + for digest_bits in self.digest_bits_oid: + h = self.BLAKE2.new(digest_bits=digest_bits) + self.assertEqual(h.oid, prefix + str(digest_bits // 8)) + + h = self.BLAKE2.new(digest_bits=digest_bits, key=b"secret") + self.assertRaises(AttributeError, lambda: h.oid) + + for digest_bits in (8, self.max_bits): + if digest_bits in self.digest_bits_oid: + continue + self.assertRaises(AttributeError, lambda: h.oid) + + def test_bytearray(self): + + key = b'0' * 16 + data = b"\x00\x01\x02" + + # Data and key can be a bytearray (during initialization) + key_ba = bytearray(key) + data_ba = bytearray(data) + + h1 = self.BLAKE2.new(data=data, key=key) + h2 = self.BLAKE2.new(data=data_ba, key=key_ba) + key_ba[:1] = b'\xFF' + data_ba[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + # Data can be a bytearray (during operation) + data_ba = bytearray(data) + + h1 = self.BLAKE2.new() + h2 = self.BLAKE2.new() + h1.update(data) + h2.update(data_ba) + data_ba[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + def test_memoryview(self): + + key = b'0' * 16 + data = b"\x00\x01\x02" + + def get_mv_ro(data): + return memoryview(data) + + def get_mv_rw(data): + return memoryview(bytearray(data)) + + for get_mv in (get_mv_ro, get_mv_rw): + + # Data and key can be a memoryview (during initialization) + key_mv = get_mv(key) + data_mv = get_mv(data) + + h1 = self.BLAKE2.new(data=data, key=key) + h2 = self.BLAKE2.new(data=data_mv, key=key_mv) + if not data_mv.readonly: + data_mv[:1] = b'\xFF' + key_mv[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + # Data can be a memoryview (during operation) + data_mv = get_mv(data) + + h1 = self.BLAKE2.new() + h2 = self.BLAKE2.new() + h1.update(data) + h2.update(data_mv) + if not data_mv.readonly: + data_mv[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + +class Blake2bTest(Blake2Test): + #: Module + BLAKE2 = BLAKE2b + #: Max output size (in bits) + max_bits = 512 + #: Max output size (in bytes) + 
max_bytes = 64 + #: Bit size of the digests for which an ASN OID exists + digest_bits_oid = (160, 256, 384, 512) + # http://tools.ietf.org/html/draft-saarinen-blake2-02 + oid_variant = "1" + + +class Blake2sTest(Blake2Test): + #: Module + BLAKE2 = BLAKE2s + #: Max output size (in bits) + max_bits = 256 + #: Max output size (in bytes) + max_bytes = 32 + #: Bit size of the digests for which an ASN OID exists + digest_bits_oid = (128, 160, 224, 256) + # http://tools.ietf.org/html/draft-saarinen-blake2-02 + oid_variant = "2" + + +class Blake2OfficialTestVector(unittest.TestCase): + + def _load_tests(self, test_vector_file): + expected = "in" + test_vectors = [] + with open(test_vector_file, "rt") as test_vector_fd: + for line_number, line in enumerate(test_vector_fd): + + if line.strip() == "" or line.startswith("#"): + continue + + res = re.match("%s:\t([0-9A-Fa-f]*)" % expected, line) + if not res: + raise ValueError("Incorrect test vector format (line %d)" + % line_number) + + if res.group(1): + bin_value = unhexlify(tobytes(res.group(1))) + else: + bin_value = b"" + if expected == "in": + input_data = bin_value + expected = "key" + elif expected == "key": + key = bin_value + expected = "hash" + else: + result = bin_value + expected = "in" + test_vectors.append((input_data, key, result)) + return test_vectors + + def setUp(self): + + dir_comps = ("Hash", self.name) + file_name = self.name.lower() + "-test.txt" + self.description = "%s tests" % self.name + + try: + import pycryptodome_test_vectors # type: ignore + except ImportError: + warnings.warn("Warning: skipping extended tests for %s" % self.name, + UserWarning) + self.test_vectors = [] + return + + init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) + full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) + self.test_vectors = self._load_tests(full_file_name) + + def runTest(self): + for (input_data, key, result) in self.test_vectors: + mac = self.BLAKE2.new(key=key, 
digest_bytes=self.max_bytes) + mac.update(input_data) + self.assertEqual(mac.digest(), result) + + +class Blake2bOfficialTestVector(Blake2OfficialTestVector): + #: Module + BLAKE2 = BLAKE2b + #: Hash name + name = "BLAKE2b" + #: Max digest size + max_bytes = 64 + + +class Blake2sOfficialTestVector(Blake2OfficialTestVector): + #: Module + BLAKE2 = BLAKE2s + #: Hash name + name = "BLAKE2s" + #: Max digest size + max_bytes = 32 + + +class Blake2TestVector1(unittest.TestCase): + + def _load_tests(self, test_vector_file): + test_vectors = [] + with open(test_vector_file, "rt") as test_vector_fd: + for line_number, line in enumerate(test_vector_fd): + if line.strip() == "" or line.startswith("#"): + continue + res = re.match("digest: ([0-9A-Fa-f]*)", line) + if not res: + raise ValueError("Incorrect test vector format (line %d)" + % line_number) + + test_vectors.append(unhexlify(tobytes(res.group(1)))) + return test_vectors + + def setUp(self): + dir_comps = ("Hash", self.name) + file_name = "tv1.txt" + self.description = "%s tests" % self.name + + try: + import pycryptodome_test_vectors + except ImportError: + warnings.warn("Warning: skipping extended tests for %s" % self.name, + UserWarning) + self.test_vectors = [] + return + + init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) + full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) + self.test_vectors = self._load_tests(full_file_name) + + def runTest(self): + + for tv in self.test_vectors: + digest_bytes = len(tv) + next_data = b"" + for _ in range(100): + h = self.BLAKE2.new(digest_bytes=digest_bytes) + h.update(next_data) + next_data = h.digest() + next_data + self.assertEqual(h.digest(), tv) + + +class Blake2bTestVector1(Blake2TestVector1): + #: Module + BLAKE2 = BLAKE2b + #: Hash name + name = "BLAKE2b" + + +class Blake2sTestVector1(Blake2TestVector1): + #: Module + BLAKE2 = BLAKE2s + #: Hash name + name = "BLAKE2s" + + +class Blake2TestVector2(unittest.TestCase): + + def 
_load_tests(self, test_vector_file): + test_vectors = [] + with open(test_vector_file, "rt") as test_vector_fd: + for line_number, line in enumerate(test_vector_fd): + if line.strip() == "" or line.startswith("#"): + continue + res = re.match(r"digest\(([0-9]+)\): ([0-9A-Fa-f]*)", line) + if not res: + raise ValueError("Incorrect test vector format (line %d)" + % line_number) + key_size = int(res.group(1)) + result = unhexlify(tobytes(res.group(2))) + test_vectors.append((key_size, result)) + return test_vectors + + def setUp(self): + dir_comps = ("Hash", self.name) + file_name = "tv2.txt" + self.description = "%s tests" % self.name + + try: + import pycryptodome_test_vectors # type: ignore + except ImportError: + warnings.warn("Warning: skipping extended tests for %s" % self.name, + UserWarning) + self.test_vectors = [] + return + + init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) + full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) + self.test_vectors = self._load_tests(full_file_name) + + def runTest(self): + + for key_size, result in self.test_vectors: + next_data = b"" + for _ in range(100): + h = self.BLAKE2.new(digest_bytes=self.max_bytes, + key=b"A" * key_size) + h.update(next_data) + next_data = h.digest() + next_data + self.assertEqual(h.digest(), result) + + +class Blake2bTestVector2(Blake2TestVector1): + #: Module + BLAKE2 = BLAKE2b + #: Hash name + name = "BLAKE2b" + #: Max digest size in bytes + max_bytes = 64 + + +class Blake2sTestVector2(Blake2TestVector1): + #: Module + BLAKE2 = BLAKE2s + #: Hash name + name = "BLAKE2s" + #: Max digest size in bytes + max_bytes = 32 + + +def get_tests(config={}): + tests = [] + + tests += list_test_cases(Blake2bTest) + tests.append(Blake2bOfficialTestVector()) + tests.append(Blake2bTestVector1()) + tests.append(Blake2bTestVector2()) + + tests += list_test_cases(Blake2sTest) + tests.append(Blake2sOfficialTestVector()) + tests.append(Blake2sTestVector1()) + 
tests.append(Blake2sTestVector2()) + + return tests + + +if __name__ == '__main__': + import unittest + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_CMAC.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_CMAC.py new file mode 100644 index 0000000..8cdbf09 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_CMAC.py @@ -0,0 +1,448 @@ +# +# SelfTest/Hash/CMAC.py: Self-test for the CMAC module +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +"""Self-test suite for Crypto.Hash.CMAC""" + +import json +import unittest +from binascii import unhexlify + +from Crypto.Util.py3compat import tobytes + +from Crypto.Hash import CMAC +from Crypto.Cipher import AES, DES3 +from Crypto.Hash import SHAKE128 + +from Crypto.Util.strxor import strxor + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors_wycheproof + +# This is a list of (key, data, result, description, module) tuples. +test_data = [ + + ## Test vectors from RFC 4493 ## + ## The are also in NIST SP 800 38B D.2 ## + ( '2b7e151628aed2a6abf7158809cf4f3c', + '', + 'bb1d6929e95937287fa37d129b756746', + 'RFC 4493 #1', + AES + ), + + ( '2b7e151628aed2a6abf7158809cf4f3c', + '6bc1bee22e409f96e93d7e117393172a', + '070a16b46b4d4144f79bdd9dd04a287c', + 'RFC 4493 #2', + AES + ), + + ( '2b7e151628aed2a6abf7158809cf4f3c', + '6bc1bee22e409f96e93d7e117393172a'+ + 'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411', + 'dfa66747de9ae63030ca32611497c827', + 'RFC 4493 #3', + AES + ), + + ( '2b7e151628aed2a6abf7158809cf4f3c', + '6bc1bee22e409f96e93d7e117393172a'+ + 'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+ + 'f69f2445df4f9b17ad2b417be66c3710', + '51f0bebf7e3b9d92fc49741779363cfe', + 'RFC 4493 #4', + AES + ), + + ## The rest of Appendix D of NIST SP 800 38B + ## was not totally correct. + ## Values in Examples 14, 15, 18, and 19 were wrong. 
+ ## The updated test values are published in: + ## http://csrc.nist.gov/publications/nistpubs/800-38B/Updated_CMAC_Examples.pdf + + ( '8e73b0f7da0e6452c810f32b809079e5'+ + '62f8ead2522c6b7b', + '', + 'd17ddf46adaacde531cac483de7a9367', + 'NIST SP 800 38B D.2 Example 5', + AES + ), + + ( '8e73b0f7da0e6452c810f32b809079e5'+ + '62f8ead2522c6b7b', + '6bc1bee22e409f96e93d7e117393172a', + '9e99a7bf31e710900662f65e617c5184', + 'NIST SP 800 38B D.2 Example 6', + AES + ), + + ( '8e73b0f7da0e6452c810f32b809079e5'+ + '62f8ead2522c6b7b', + '6bc1bee22e409f96e93d7e117393172a'+ + 'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411', + '8a1de5be2eb31aad089a82e6ee908b0e', + 'NIST SP 800 38B D.2 Example 7', + AES + ), + + ( '8e73b0f7da0e6452c810f32b809079e5'+ + '62f8ead2522c6b7b', + '6bc1bee22e409f96e93d7e117393172a'+ + 'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+ + 'f69f2445df4f9b17ad2b417be66c3710', + 'a1d5df0eed790f794d77589659f39a11', + 'NIST SP 800 38B D.2 Example 8', + AES + ), + + ( '603deb1015ca71be2b73aef0857d7781'+ + '1f352c073b6108d72d9810a30914dff4', + '', + '028962f61b7bf89efc6b551f4667d983', + 'NIST SP 800 38B D.3 Example 9', + AES + ), + + ( '603deb1015ca71be2b73aef0857d7781'+ + '1f352c073b6108d72d9810a30914dff4', + '6bc1bee22e409f96e93d7e117393172a', + '28a7023f452e8f82bd4bf28d8c37c35c', + 'NIST SP 800 38B D.3 Example 10', + AES + ), + + ( '603deb1015ca71be2b73aef0857d7781'+ + '1f352c073b6108d72d9810a30914dff4', + '6bc1bee22e409f96e93d7e117393172a'+ + 'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411', + 'aaf3d8f1de5640c232f5b169b9c911e6', + 'NIST SP 800 38B D.3 Example 11', + AES + ), + + ( '603deb1015ca71be2b73aef0857d7781'+ + '1f352c073b6108d72d9810a30914dff4', + '6bc1bee22e409f96e93d7e117393172a'+ + 'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+ + 'f69f2445df4f9b17ad2b417be66c3710', + 'e1992190549f6ed5696a2c056c315410', + 'NIST SP 800 38B D.3 Example 12', + AES + ), + + ( '8aa83bf8cbda1062'+ + 
'0bc1bf19fbb6cd58'+ + 'bc313d4a371ca8b5', + '', + 'b7a688e122ffaf95', + 'NIST SP 800 38B D.4 Example 13', + DES3 + ), + + ( '8aa83bf8cbda1062'+ + '0bc1bf19fbb6cd58'+ + 'bc313d4a371ca8b5', + '6bc1bee22e409f96', + '8e8f293136283797', + 'NIST SP 800 38B D.4 Example 14', + DES3 + ), + + ( '8aa83bf8cbda1062'+ + '0bc1bf19fbb6cd58'+ + 'bc313d4a371ca8b5', + '6bc1bee22e409f96'+ + 'e93d7e117393172a'+ + 'ae2d8a57', + '743ddbe0ce2dc2ed', + 'NIST SP 800 38B D.4 Example 15', + DES3 + ), + + ( '8aa83bf8cbda1062'+ + '0bc1bf19fbb6cd58'+ + 'bc313d4a371ca8b5', + '6bc1bee22e409f96'+ + 'e93d7e117393172a'+ + 'ae2d8a571e03ac9c'+ + '9eb76fac45af8e51', + '33e6b1092400eae5', + 'NIST SP 800 38B D.4 Example 16', + DES3 + ), + + ( '4cf15134a2850dd5'+ + '8a3d10ba80570d38', + '', + 'bd2ebf9a3ba00361', + 'NIST SP 800 38B D.7 Example 17', + DES3 + ), + + ( '4cf15134a2850dd5'+ + '8a3d10ba80570d38', + '6bc1bee22e409f96', + '4ff2ab813c53ce83', + 'NIST SP 800 38B D.7 Example 18', + DES3 + ), + + ( '4cf15134a2850dd5'+ + '8a3d10ba80570d38', + '6bc1bee22e409f96'+ + 'e93d7e117393172a'+ + 'ae2d8a57', + '62dd1b471902bd4e', + 'NIST SP 800 38B D.7 Example 19', + DES3 + ), + + ( '4cf15134a2850dd5'+ + '8a3d10ba80570d38', + '6bc1bee22e409f96'+ + 'e93d7e117393172a'+ + 'ae2d8a571e03ac9c'+ + '9eb76fac45af8e51', + '31b1e431dabc4eb8', + 'NIST SP 800 38B D.7 Example 20', + DES3 + ), + +] + + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + + +class TestCMAC(unittest.TestCase): + + def test_internal_caching(self): + """Verify that internal caching is implemented correctly""" + + data_to_mac = get_tag_random("data_to_mac", 128) + key = get_tag_random("key", 16) + ref_mac = CMAC.new(key, msg=data_to_mac, ciphermod=AES).digest() + + # Break up in chunks of different length + # The result must always be the same + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + chunks = [data_to_mac[i:i+chunk_length] for i in + range(0, len(data_to_mac), chunk_length)] + + mac = 
CMAC.new(key, ciphermod=AES) + for chunk in chunks: + mac.update(chunk) + self.assertEqual(ref_mac, mac.digest()) + + def test_update_after_digest(self): + msg = b"rrrrttt" + key = b"4" * 16 + + # Normally, update() cannot be done after digest() + h = CMAC.new(key, msg[:4], ciphermod=AES) + dig1 = h.digest() + self.assertRaises(TypeError, h.update, msg[4:]) + dig2 = CMAC.new(key, msg, ciphermod=AES).digest() + + # With the proper flag, it is allowed + h2 = CMAC.new(key, msg[:4], ciphermod=AES, update_after_digest=True) + self.assertEquals(h2.digest(), dig1) + # ... and the subsequent digest applies to the entire message + # up to that point + h2.update(msg[4:]) + self.assertEquals(h2.digest(), dig2) + + +class ByteArrayTests(unittest.TestCase): + + def runTest(self): + + key = b"0" * 16 + data = b"\x00\x01\x02" + + # Data and key can be a bytearray (during initialization) + key_ba = bytearray(key) + data_ba = bytearray(data) + + h1 = CMAC.new(key, data, ciphermod=AES) + h2 = CMAC.new(key_ba, data_ba, ciphermod=AES) + key_ba[:1] = b'\xFF' + data_ba[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + # Data can be a bytearray (during operation) + key_ba = bytearray(key) + data_ba = bytearray(data) + + h1 = CMAC.new(key, ciphermod=AES) + h2 = CMAC.new(key, ciphermod=AES) + h1.update(data) + h2.update(data_ba) + data_ba[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + +class MemoryViewTests(unittest.TestCase): + + def runTest(self): + + key = b"0" * 16 + data = b"\x00\x01\x02" + + def get_mv_ro(data): + return memoryview(data) + + def get_mv_rw(data): + return memoryview(bytearray(data)) + + for get_mv in (get_mv_ro, get_mv_rw): + + # Data and key can be a memoryview (during initialization) + key_mv = get_mv(key) + data_mv = get_mv(data) + + h1 = CMAC.new(key, data, ciphermod=AES) + h2 = CMAC.new(key_mv, data_mv, ciphermod=AES) + if not data_mv.readonly: + key_mv[:1] = b'\xFF' + data_mv[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) 
+ + # Data can be a memoryview (during operation) + data_mv = get_mv(data) + + h1 = CMAC.new(key, ciphermod=AES) + h2 = CMAC.new(key, ciphermod=AES) + h1.update(data) + h2.update(data_mv) + if not data_mv.readonly: + data_mv[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + +class TestVectorsWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._id = "None" + + def setUp(self): + + def filter_tag(group): + return group['tagSize'] // 8 + + self.tv = load_test_vectors_wycheproof(("Hash", "wycheproof"), + "aes_cmac_test.json", + "Wycheproof CMAC", + group_tag={'tag_size': filter_tag}) + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_create_mac(self, tv): + self._id = "Wycheproof MAC creation Test #" + str(tv.id) + + try: + tag = CMAC.new(tv.key, tv.msg, ciphermod=AES, mac_len=tv.tag_size).digest() + except ValueError as e: + if len(tv.key) not in (16, 24, 32) and "key length" in str(e): + return + raise e + if tv.valid: + self.assertEqual(tag, tv.tag) + self.warn(tv) + + def test_verify_mac(self, tv): + self._id = "Wycheproof MAC verification Test #" + str(tv.id) + + try: + mac = CMAC.new(tv.key, tv.msg, ciphermod=AES, mac_len=tv.tag_size) + except ValueError as e: + if len(tv.key) not in (16, 24, 32) and "key length" in str(e): + return + raise e + try: + mac.verify(tv.tag) + except ValueError: + assert not tv.valid + else: + assert tv.valid + self.warn(tv) + + def runTest(self): + + for tv in self.tv: + self.test_create_mac(tv) + self.test_verify_mac(tv) + + +def get_tests(config={}): + global test_data + import types + from .common import make_mac_tests + + wycheproof_warnings = config.get('wycheproof_warnings') + + # Add new() parameters to the back of each test 
vector + params_test_data = [] + for row in test_data: + t = list(row) + t[4] = dict(ciphermod=t[4]) + params_test_data.append(t) + + tests = make_mac_tests(CMAC, "CMAC", params_test_data) + tests.append(ByteArrayTests()) + tests.append(list_test_cases(TestCMAC)) + tests.append(MemoryViewTests()) + tests += [ TestVectorsWycheproof(wycheproof_warnings) ] + return tests + + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_HMAC.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_HMAC.py new file mode 100644 index 0000000..7be15be --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_HMAC.py @@ -0,0 +1,402 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/HMAC.py: Self-test for the HMAC module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.Hash.HMAC""" + +import sys +import unittest +from binascii import hexlify +from Crypto.Util.py3compat import tostr, tobytes + +from Crypto.Hash import HMAC, MD5, SHA1, SHA256 +hash_modules = dict(MD5=MD5, SHA1=SHA1, SHA256=SHA256) + +try: + from Crypto.Hash import SHA224, SHA384, SHA512, RIPEMD160 + hash_modules.update(dict(SHA224=SHA224, SHA384=SHA384, SHA512=SHA512, + RIPEMD160=RIPEMD160)) +except ImportError: + sys.stderr.write("SelfTest: warning: not testing HMAC-SHA224/384/512" + " (not available)\n") + +default_hash = None + +def xl(text): + return tostr(hexlify(tobytes(text))) + +# This is a list of (key, data, results, description) tuples. +test_data = [ + ## Test vectors from RFC 2202 ## + # Test that the default hashmod is MD5 + ('0b' * 16, + '4869205468657265', + dict(default_hash='9294727a3638bb1c13f48ef8158bfc9d'), + 'default-is-MD5'), + + # Test case 1 (MD5) + ('0b' * 16, + '4869205468657265', + dict(MD5='9294727a3638bb1c13f48ef8158bfc9d'), + 'RFC 2202 #1-MD5 (HMAC-MD5)'), + + # Test case 1 (SHA1) + ('0b' * 20, + '4869205468657265', + dict(SHA1='b617318655057264e28bc0b6fb378c8ef146be00'), + 'RFC 2202 #1-SHA1 (HMAC-SHA1)'), + + # Test case 2 + ('4a656665', + '7768617420646f2079612077616e7420666f72206e6f7468696e673f', + dict(MD5='750c783e6ab0b503eaa86e310a5db738', + SHA1='effcdf6ae5eb2fa2d27416d5f184df9c259a7c79'), + 'RFC 2202 #2 (HMAC-MD5/SHA1)'), + + # Test case 3 (MD5) + ('aa' * 16, + 'dd' * 50, + dict(MD5='56be34521d144c88dbb8c733f0e8b3f6'), + 'RFC 2202 #3-MD5 (HMAC-MD5)'), + + # Test case 3 (SHA1) + ('aa' * 20, + 'dd' * 50, + dict(SHA1='125d7342b9ac11cd91a39af48aa17b4f63f175d3'), + 'RFC 2202 #3-SHA1 (HMAC-SHA1)'), + + # Test case 4 + ('0102030405060708090a0b0c0d0e0f10111213141516171819', + 'cd' * 50, + dict(MD5='697eaf0aca3a3aea3a75164746ffaa79', + SHA1='4c9007f4026250c6bc8414f9bf50c86c2d7235da'), + 'RFC 2202 #4 (HMAC-MD5/SHA1)'), + + # Test case 
5 (MD5) + ('0c' * 16, + '546573742057697468205472756e636174696f6e', + dict(MD5='56461ef2342edc00f9bab995690efd4c'), + 'RFC 2202 #5-MD5 (HMAC-MD5)'), + + # Test case 5 (SHA1) + # NB: We do not implement hash truncation, so we only test the full hash here. + ('0c' * 20, + '546573742057697468205472756e636174696f6e', + dict(SHA1='4c1a03424b55e07fe7f27be1d58bb9324a9a5a04'), + 'RFC 2202 #5-SHA1 (HMAC-SHA1)'), + + # Test case 6 + ('aa' * 80, + '54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a' + + '65204b6579202d2048617368204b6579204669727374', + dict(MD5='6b1ab7fe4bd7bf8f0b62e6ce61b9d0cd', + SHA1='aa4ae5e15272d00e95705637ce8a3b55ed402112'), + 'RFC 2202 #6 (HMAC-MD5/SHA1)'), + + # Test case 7 + ('aa' * 80, + '54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a' + + '65204b657920616e64204c6172676572205468616e204f6e6520426c6f636b2d' + + '53697a652044617461', + dict(MD5='6f630fad67cda0ee1fb1f562db3aa53e', + SHA1='e8e99d0f45237d786d6bbaa7965c7808bbff1a91'), + 'RFC 2202 #7 (HMAC-MD5/SHA1)'), + + ## Test vectors from RFC 4231 ## + # 4.2. Test Case 1 + ('0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b', + '4869205468657265', + dict(SHA256=''' + b0344c61d8db38535ca8afceaf0bf12b + 881dc200c9833da726e9376c2e32cff7 + '''), + 'RFC 4231 #1 (HMAC-SHA256)'), + + # 4.3. Test Case 2 - Test with a key shorter than the length of the HMAC + # output. + ('4a656665', + '7768617420646f2079612077616e7420666f72206e6f7468696e673f', + dict(SHA256=''' + 5bdcc146bf60754e6a042426089575c7 + 5a003f089d2739839dec58b964ec3843 + '''), + 'RFC 4231 #2 (HMAC-SHA256)'), + + # 4.4. Test Case 3 - Test with a combined length of key and data that is + # larger than 64 bytes (= block-size of SHA-224 and SHA-256). + ('aa' * 20, + 'dd' * 50, + dict(SHA256=''' + 773ea91e36800e46854db8ebd09181a7 + 2959098b3ef8c122d9635514ced565fe + '''), + 'RFC 4231 #3 (HMAC-SHA256)'), + + # 4.5. 
Test Case 4 - Test with a combined length of key and data that is + # larger than 64 bytes (= block-size of SHA-224 and SHA-256). + ('0102030405060708090a0b0c0d0e0f10111213141516171819', + 'cd' * 50, + dict(SHA256=''' + 82558a389a443c0ea4cc819899f2083a + 85f0faa3e578f8077a2e3ff46729665b + '''), + 'RFC 4231 #4 (HMAC-SHA256)'), + + # 4.6. Test Case 5 - Test with a truncation of output to 128 bits. + # + # Not included because we do not implement hash truncation. + # + + # 4.7. Test Case 6 - Test with a key larger than 128 bytes (= block-size of + # SHA-384 and SHA-512). + ('aa' * 131, + '54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a' + + '65204b6579202d2048617368204b6579204669727374', + dict(SHA256=''' + 60e431591ee0b67f0d8a26aacbf5b77f + 8e0bc6213728c5140546040f0ee37f54 + '''), + 'RFC 4231 #6 (HMAC-SHA256)'), + + # 4.8. Test Case 7 - Test with a key and data that is larger than 128 bytes + # (= block-size of SHA-384 and SHA-512). + ('aa' * 131, + '5468697320697320612074657374207573696e672061206c6172676572207468' + + '616e20626c6f636b2d73697a65206b657920616e642061206c61726765722074' + + '68616e20626c6f636b2d73697a6520646174612e20546865206b6579206e6565' + + '647320746f20626520686173686564206265666f7265206265696e6720757365' + + '642062792074686520484d414320616c676f726974686d2e', + dict(SHA256=''' + 9b09ffa71b942fcb27635fbcd5b0e944 + bfdc63644f0713938a7f51535c3a35e2 + '''), + 'RFC 4231 #7 (HMAC-SHA256)'), + + # Test case 8 (SHA224) + ('4a656665', + '7768617420646f2079612077616e74' + + '20666f72206e6f7468696e673f', + dict(SHA224='a30e01098bc6dbbf45690f3a7e9e6d0f8bbea2a39e6148008fd05e44'), + 'RFC 4634 8.4 SHA224 (HMAC-SHA224)'), + + # Test case 9 (SHA384) + ('4a656665', + '7768617420646f2079612077616e74' + + '20666f72206e6f7468696e673f', + dict(SHA384='af45d2e376484031617f78d2b58a6b1b9c7ef464f5a01b47e42ec3736322445e8e2240ca5e69e2c78b3239ecfab21649'), + 'RFC 4634 8.4 SHA384 (HMAC-SHA384)'), + + # Test case 10 (SHA512) + ('4a656665', + 
'7768617420646f2079612077616e74' + + '20666f72206e6f7468696e673f', + dict(SHA512='164b7a7bfcf819e2e395fbe73b56e0a387bd64222e831fd610270cd7ea2505549758bf75c05a994a6d034f65f8f0e6fdcaeab1a34d4a6b4b636e070a38bce737'), + 'RFC 4634 8.4 SHA512 (HMAC-SHA512)'), + + # Test case 11 (RIPEMD) + ('0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b', + xl("Hi There"), + dict(RIPEMD160='24cb4bd67d20fc1a5d2ed7732dcc39377f0a5668'), + 'RFC 2286 #1 (HMAC-RIPEMD)'), + + # Test case 12 (RIPEMD) + (xl("Jefe"), + xl("what do ya want for nothing?"), + dict(RIPEMD160='dda6c0213a485a9e24f4742064a7f033b43c4069'), + 'RFC 2286 #2 (HMAC-RIPEMD)'), + + # Test case 13 (RIPEMD) + ('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', + 'dd' * 50, + dict(RIPEMD160='b0b105360de759960ab4f35298e116e295d8e7c1'), + 'RFC 2286 #3 (HMAC-RIPEMD)'), + + # Test case 14 (RIPEMD) + ('0102030405060708090a0b0c0d0e0f10111213141516171819', + 'cd' * 50, + dict(RIPEMD160='d5ca862f4d21d5e610e18b4cf1beb97a4365ecf4'), + 'RFC 2286 #4 (HMAC-RIPEMD)'), + + # Test case 15 (RIPEMD) + ('0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c', + xl("Test With Truncation"), + dict(RIPEMD160='7619693978f91d90539ae786500ff3d8e0518e39'), + 'RFC 2286 #5 (HMAC-RIPEMD)'), + + # Test case 16 (RIPEMD) + ('aa' * 80, + xl("Test Using Larger Than Block-Size Key - Hash Key First"), + dict(RIPEMD160='6466ca07ac5eac29e1bd523e5ada7605b791fd8b'), + 'RFC 2286 #6 (HMAC-RIPEMD)'), + + # Test case 17 (RIPEMD) + ('aa' * 80, + xl("Test Using Larger Than Block-Size Key and Larger Than One Block-Size Data"), + dict(RIPEMD160='69ea60798d71616cce5fd0871e23754cd75d5a0a'), + 'RFC 2286 #7 (HMAC-RIPEMD)'), + +] + + +class HMAC_Module_and_Instance_Test(unittest.TestCase): + """Test the HMAC construction and verify that it does not + matter if you initialize it with a hash module or + with an hash instance. 
+ + See https://bugs.launchpad.net/pycrypto/+bug/1209399 + """ + + def __init__(self, hashmods): + """Initialize the test with a dictionary of hash modules + indexed by their names""" + + unittest.TestCase.__init__(self) + self.hashmods = hashmods + self.description = "" + + def shortDescription(self): + return self.description + + def runTest(self): + key = b"\x90\x91\x92\x93" * 4 + payload = b"\x00" * 100 + + for hashname, hashmod in self.hashmods.items(): + if hashmod is None: + continue + self.description = "Test HMAC in combination with " + hashname + one = HMAC.new(key, payload, hashmod).digest() + two = HMAC.new(key, payload, hashmod.new()).digest() + self.assertEqual(one, two) + + +class HMAC_None(unittest.TestCase): + + def runTest(self): + + key = b"\x04" * 20 + one = HMAC.new(key, b"", SHA1).digest() + two = HMAC.new(key, None, SHA1).digest() + self.assertEqual(one, two) + + +class ByteArrayTests(unittest.TestCase): + + def runTest(self): + + key = b"0" * 16 + data = b"\x00\x01\x02" + + # Data and key can be a bytearray (during initialization) + key_ba = bytearray(key) + data_ba = bytearray(data) + + h1 = HMAC.new(key, data) + h2 = HMAC.new(key_ba, data_ba) + key_ba[:1] = b'\xFF' + data_ba[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + # Data can be a bytearray (during operation) + key_ba = bytearray(key) + data_ba = bytearray(data) + + h1 = HMAC.new(key) + h2 = HMAC.new(key) + h1.update(data) + h2.update(data_ba) + data_ba[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + +class MemoryViewTests(unittest.TestCase): + + def runTest(self): + + key = b"0" * 16 + data = b"\x00\x01\x02" + + def get_mv_ro(data): + return memoryview(data) + + def get_mv_rw(data): + return memoryview(bytearray(data)) + + for get_mv in (get_mv_ro, get_mv_rw): + + # Data and key can be a memoryview (during initialization) + key_mv = get_mv(key) + data_mv = get_mv(data) + + h1 = HMAC.new(key, data) + h2 = HMAC.new(key_mv, data_mv) + if not 
data_mv.readonly: + key_mv[:1] = b'\xFF' + data_mv[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + # Data can be a memoryview (during operation) + data_mv = get_mv(data) + + h1 = HMAC.new(key) + h2 = HMAC.new(key) + h1.update(data) + h2.update(data_mv) + if not data_mv.readonly: + data_mv[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + +def get_tests(config={}): + global test_data + import types + from .common import make_mac_tests + + # A test vector contains multiple results, each one for a + # different hash algorithm. + # Here we expand each test vector into multiple ones, + # and add the relevant parameters that will be passed to new() + exp_test_data = [] + for row in test_data: + for modname in row[2].keys(): + t = list(row) + t[2] = row[2][modname] + try: + t.append(dict(digestmod=globals()[modname])) + exp_test_data.append(t) + except AttributeError: + sys.stderr.write("SelfTest: warning: not testing HMAC-%s" + " (not available)\n" % modname) + tests = make_mac_tests(HMAC, "HMAC", exp_test_data) + tests.append(HMAC_Module_and_Instance_Test(hash_modules)) + tests.append(HMAC_None()) + + tests.append(ByteArrayTests()) + tests.append(MemoryViewTests()) + + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_MD2.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_MD2.py new file mode 100644 index 0000000..9375168 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_MD2.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/MD2.py: Self-test for the MD2 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.MD2""" + +from Crypto.Util.py3compat import * + +# This is a list of (expected_result, input[, description]) tuples. +test_data = [ + # Test vectors from RFC 1319 + ('8350e5a3e24c153df2275c9f80692773', '', "'' (empty string)"), + ('32ec01ec4a6dac72c0ab96fb34c0b5d1', 'a'), + ('da853b0d3f88d99b30283a69e6ded6bb', 'abc'), + ('ab4f496bfb2a530b219ff33031fe06b0', 'message digest'), + + ('4e8ddff3650292ab5a4108c3aa47940b', 'abcdefghijklmnopqrstuvwxyz', + 'a-z'), + + ('da33def2a42df13975352846c30338cd', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'A-Z, a-z, 0-9'), + + ('d5976f79d83d3a0dc9806c3c66f3efd8', + '1234567890123456789012345678901234567890123456' + + '7890123456789012345678901234567890', + "'1234567890' * 8"), +] + +def get_tests(config={}): + from Crypto.Hash import MD2 + from .common import make_hash_tests + return make_hash_tests(MD2, "MD2", test_data, + digest_size=16, + oid="1.2.840.113549.2.2") + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff 
--git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_MD4.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_MD4.py new file mode 100644 index 0000000..17b48a7 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_MD4.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/MD4.py: Self-test for the MD4 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.MD4""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# This is a list of (expected_result, input[, description]) tuples. 
+test_data = [ + # Test vectors from RFC 1320 + ('31d6cfe0d16ae931b73c59d7e0c089c0', '', "'' (empty string)"), + ('bde52cb31de33e46245e05fbdbd6fb24', 'a'), + ('a448017aaf21d8525fc10ae87aa6729d', 'abc'), + ('d9130a8164549fe818874806e1c7014b', 'message digest'), + + ('d79e1c308aa5bbcdeea8ed63df412da9', 'abcdefghijklmnopqrstuvwxyz', + 'a-z'), + + ('043f8582f241db351ce627e153e7f0e4', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'A-Z, a-z, 0-9'), + + ('e33b4ddc9c38f2199c3e7b164fcc0536', + '1234567890123456789012345678901234567890123456' + + '7890123456789012345678901234567890', + "'1234567890' * 8"), +] + +def get_tests(config={}): + from Crypto.Hash import MD4 + from .common import make_hash_tests + return make_hash_tests(MD4, "MD4", test_data, + digest_size=16, + oid="1.2.840.113549.2.4") + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_MD5.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_MD5.py new file mode 100644 index 0000000..830ace7 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_MD5.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/MD5.py: Self-test for the MD5 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.MD5""" + +from Crypto.Util.py3compat import * +from Crypto.Hash import MD5 +from binascii import unhexlify +import unittest +from Crypto.SelfTest.st_common import list_test_cases + + +# This is a list of (expected_result, input[, description]) tuples. +test_data = [ + # Test vectors from RFC 1321 + ('d41d8cd98f00b204e9800998ecf8427e', '', "'' (empty string)"), + ('0cc175b9c0f1b6a831c399e269772661', 'a'), + ('900150983cd24fb0d6963f7d28e17f72', 'abc'), + ('f96b697d7cb7938d525a2f31aaf161d0', 'message digest'), + + ('c3fcd3d76192e4007dfb496cca67e13b', 'abcdefghijklmnopqrstuvwxyz', + 'a-z'), + + ('d174ab98d277d9f5a5611c2c9f419d9f', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'A-Z, a-z, 0-9'), + + ('57edf4a22be3c955ac49da2e2107b67a', + '1234567890123456789012345678901234567890123456' + + '7890123456789012345678901234567890', + "'1234567890' * 8"), + + # https://www.cosic.esat.kuleuven.be/nessie/testvectors/hash/md5/Md5-128.unverified.test-vectors + ('57EDF4A22BE3C955AC49DA2E2107B67A', '1234567890' * 8, 'Set 1, vector #7'), + ('7707D6AE4E027C70EEA2A935C2296F21', 'a'*1000000, 'Set 1, vector #8'), +] + + +class Md5IterTest(unittest.TestCase): + + def runTest(self): + message = b("\x00") * 16 + result1 = "4AE71336E44BF9BF79D2752E234818A5".lower() + result2 = "1A83F51285E4D89403D00C46EF8508FE".lower() + + h = MD5.new(message) + message = h.digest() + 
self.assertEqual(h.hexdigest(), result1) + + for _ in range(99999): + h = MD5.new(message) + message = h.digest() + + self.assertEqual(h.hexdigest(), result2) + + +def get_tests(config={}): + from .common import make_hash_tests + + tests = make_hash_tests(MD5, "MD5", test_data, + digest_size=16, + oid="1.2.840.113549.2.5") + if config.get('slow_tests'): + tests += [ Md5IterTest() ] + return tests + + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_Poly1305.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_Poly1305.py new file mode 100644 index 0000000..0bbb6bd --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_Poly1305.py @@ -0,0 +1,542 @@ +# +# SelfTest/Hash/test_Poly1305.py: Self-test for the Poly1305 module +# +# =================================================================== +# +# Copyright (c) 2018, Helder Eijs +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +"""Self-test suite for Crypto.Hash._Poly1305""" + +import json +import unittest +from binascii import unhexlify, hexlify + +from .common import make_mac_tests +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Hash import Poly1305 +from Crypto.Cipher import AES, ChaCha20 + +from Crypto.Util.py3compat import tobytes +from Crypto.Util.strxor import strxor_c + +# This is a list of (r+s keypair, data, result, description, keywords) tuples. 
+test_data_basic = [ + ( + "85d6be7857556d337f4452fe42d506a80103808afb0db2fd4abff6af4149f51b", + hexlify(b"Cryptographic Forum Research Group").decode(), + "a8061dc1305136c6c22b8baf0c0127a9", + "RFC7539" + ), + ( + "746869732069732033322d62797465206b657920666f7220506f6c7931333035", + "0000000000000000000000000000000000000000000000000000000000000000", + "49ec78090e481ec6c26b33b91ccc0307", + "https://tools.ietf.org/html/draft-agl-tls-chacha20poly1305-00#section-7 A", + ), + ( + "746869732069732033322d62797465206b657920666f7220506f6c7931333035", + "48656c6c6f20776f726c6421", + "a6f745008f81c916a20dcc74eef2b2f0", + "https://tools.ietf.org/html/draft-agl-tls-chacha20poly1305-00#section-7 B", + ), + ( + "746869732069732033322d62797465206b657920666f7220506f6c7931333035", + "", + "6b657920666f7220506f6c7931333035", + "Generated with pure Python", + ), + ( + "746869732069732033322d62797465206b657920666f7220506f6c7931333035", + "FF", + "f7e4e0ef4c46d106219da3d1bdaeb3ff", + "Generated with pure Python", + ), + ( + "746869732069732033322d62797465206b657920666f7220506f6c7931333035", + "FF00", + "7471eceeb22988fc936da1d6e838b70e", + "Generated with pure Python", + ), + ( + "746869732069732033322d62797465206b657920666f7220506f6c7931333035", + "AA" * 17, + "32590bc07cb2afaccca3f67f122975fe", + "Generated with pure Python", + ), + ( + "00" * 32, + "00" * 64, + "00" * 16, + "RFC7539 A.3 #1", + ), + ( + "0000000000000000000000000000000036e5f6b5c5e06070f0efca96227a863e", + hexlify( + b"Any submission t" + b"o the IETF inten" + b"ded by the Contr" + b"ibutor for publi" + b"cation as all or" + b" part of an IETF" + b" Internet-Draft " + b"or RFC and any s" + b"tatement made wi" + b"thin the context" + b" of an IETF acti" + b"vity is consider" + b"ed an \"IETF Cont" + b"ribution\". 
Such " + b"statements inclu" + b"de oral statemen" + b"ts in IETF sessi" + b"ons, as well as " + b"written and elec" + b"tronic communica" + b"tions made at an" + b"y time or place," + b" which are addre" + b"ssed to").decode(), + "36e5f6b5c5e06070f0efca96227a863e", + "RFC7539 A.3 #2", + ), + ( + "36e5f6b5c5e06070f0efca96227a863e00000000000000000000000000000000", + hexlify( + b"Any submission t" + b"o the IETF inten" + b"ded by the Contr" + b"ibutor for publi" + b"cation as all or" + b" part of an IETF" + b" Internet-Draft " + b"or RFC and any s" + b"tatement made wi" + b"thin the context" + b" of an IETF acti" + b"vity is consider" + b"ed an \"IETF Cont" + b"ribution\". Such " + b"statements inclu" + b"de oral statemen" + b"ts in IETF sessi" + b"ons, as well as " + b"written and elec" + b"tronic communica" + b"tions made at an" + b"y time or place," + b" which are addre" + b"ssed to").decode(), + "f3477e7cd95417af89a6b8794c310cf0", + "RFC7539 A.3 #3", + ), + ( + "1c9240a5eb55d38af333888604f6b5f0473917c1402b80099dca5cbc207075c0", + "2754776173206272696c6c69672c2061" + "6e642074686520736c6974687920746f" + "7665730a446964206779726520616e64" + "2067696d626c6520696e207468652077" + "6162653a0a416c6c206d696d73792077" + "6572652074686520626f726f676f7665" + "732c0a416e6420746865206d6f6d6520" + "7261746873206f757467726162652e", + "4541669a7eaaee61e708dc7cbcc5eb62", + "RFC7539 A.3 #4", + ), + ( + "02" + "00" * 31, + "FF" * 16, + "03" + "00" * 15, + "RFC7539 A.3 #5", + ), + ( + "02" + "00" * 15 + "FF" * 16, + "02" + "00" * 15, + "03" + "00" * 15, + "RFC7539 A.3 #6", + ), + ( + "01" + "00" * 31, + "FF" * 16 + "F0" + "FF" * 15 + "11" + "00" * 15, + "05" + "00" * 15, + "RFC7539 A.3 #7", + ), + ( + "01" + "00" * 31, + "FF" * 16 + "FB" + "FE" * 15 + "01" * 16, + "00" * 16, + "RFC7539 A.3 #8", + ), + ( + "02" + "00" * 31, + "FD" + "FF" * 15, + "FA" + "FF" * 15, + "RFC7539 A.3 #9", + ), + ( + "01 00 00 00 00 00 00 00 04 00 00 00 00 00 00 00" + "00 00 00 00 00 00 00 00 00 00 00 00 00 
00 00 00", + "E3 35 94 D7 50 5E 43 B9 00 00 00 00 00 00 00 00" + "33 94 D7 50 5E 43 79 CD 01 00 00 00 00 00 00 00" + "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00" + "01 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00", + "14 00 00 00 00 00 00 00 55 00 00 00 00 00 00 00", + "RFC7539 A.3 #10", + ), + ( + "01 00 00 00 00 00 00 00 04 00 00 00 00 00 00 00" + "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00", + "E3 35 94 D7 50 5E 43 B9 00 00 00 00 00 00 00 00" + "33 94 D7 50 5E 43 79 CD 01 00 00 00 00 00 00 00" + "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00", + "13" + "00" * 15, + "RFC7539 A.3 #11", + ), +] + +# This is a list of (key(k+r), data, result, description, keywords) tuples. +test_data_aes = [ + ( + "ec074c835580741701425b623235add6851fc40c3467ac0be05cc20404f3f700", + "f3f6", + "f4c633c3044fc145f84f335cb81953de", + "http://cr.yp.to/mac/poly1305-20050329.pdf", + { 'cipher':AES, 'nonce':unhexlify("fb447350c4e868c52ac3275cf9d4327e") } + ), + ( + "75deaa25c09f208e1dc4ce6b5cad3fbfa0f3080000f46400d0c7e9076c834403", + "", + "dd3fab2251f11ac759f0887129cc2ee7", + "http://cr.yp.to/mac/poly1305-20050329.pdf", + { 'cipher':AES, 'nonce':unhexlify("61ee09218d29b0aaed7e154a2c5509cc") } + ), + ( + "6acb5f61a7176dd320c5c1eb2edcdc7448443d0bb0d21109c89a100b5ce2c208", + "663cea190ffb83d89593f3f476b6bc24" + "d7e679107ea26adb8caf6652d0656136", + "0ee1c16bb73f0f4fd19881753c01cdbe", + "http://cr.yp.to/mac/poly1305-20050329.pdf", + { 'cipher':AES, 'nonce':unhexlify("ae212a55399729595dea458bc621ff0e") } + ), + ( + "e1a5668a4d5b66a5f68cc5424ed5982d12976a08c4426d0ce8a82407c4f48207", + "ab0812724a7f1e342742cbed374d94d1" + "36c6b8795d45b3819830f2c04491faf0" + "990c62e48b8018b2c3e4a0fa3134cb67" + "fa83e158c994d961c4cb21095c1bf9", + "5154ad0d2cb26e01274fc51148491f1b", + "http://cr.yp.to/mac/poly1305-20050329.pdf", + { 'cipher':AES, 'nonce':unhexlify("9ae831e743978d3a23527c7128149e3a") } + ), +] + +test_data_chacha20 = [ + ( + "00" * 32, + "FF" * 15, + "13cc5bbadc36b03a5163928f0bcb65aa", 
+ "RFC7539 A.4 #1", + { 'cipher':ChaCha20, 'nonce':unhexlify("00" * 12) } + ), + ( + "00" * 31 + "01", + "FF" * 15, + "0baf33c1d6df211bdd50a6767e98e00a", + "RFC7539 A.4 #2", + { 'cipher':ChaCha20, 'nonce':unhexlify("00" * 11 + "02") } + ), + ( + "1c 92 40 a5 eb 55 d3 8a f3 33 88 86 04 f6 b5 f0" + "47 39 17 c1 40 2b 80 09 9d ca 5c bc 20 70 75 c0", + "FF" * 15, + "e8b4c6db226cd8939e65e02eebf834ce", + "RFC7539 A.4 #3", + { 'cipher':ChaCha20, 'nonce':unhexlify("00" * 11 + "02") } + ), + ( + "1c 92 40 a5 eb 55 d3 8a f3 33 88 86 04 f6 b5 f0" + "47 39 17 c1 40 2b 80 09 9d ca 5c bc 20 70 75 c0", + "f3 33 88 86 00 00 00 00 00 00 4e 91 00 00 00 00" + "64 a0 86 15 75 86 1a f4 60 f0 62 c7 9b e6 43 bd" + "5e 80 5c fd 34 5c f3 89 f1 08 67 0a c7 6c 8c b2" + "4c 6c fc 18 75 5d 43 ee a0 9e e9 4e 38 2d 26 b0" + "bd b7 b7 3c 32 1b 01 00 d4 f0 3b 7f 35 58 94 cf" + "33 2f 83 0e 71 0b 97 ce 98 c8 a8 4a bd 0b 94 81" + "14 ad 17 6e 00 8d 33 bd 60 f9 82 b1 ff 37 c8 55" + "97 97 a0 6e f4 f0 ef 61 c1 86 32 4e 2b 35 06 38" + "36 06 90 7b 6a 7c 02 b0 f9 f6 15 7b 53 c8 67 e4" + "b9 16 6c 76 7b 80 4d 46 a5 9b 52 16 cd e7 a4 e9" + "90 40 c5 a4 04 33 22 5e e2 82 a1 b0 a0 6c 52 3e" + "af 45 34 d7 f8 3f a1 15 5b 00 47 71 8c bc 54 6a" + "0d 07 2b 04 b3 56 4e ea 1b 42 22 73 f5 48 27 1a" + "0b b2 31 60 53 fa 76 99 19 55 eb d6 31 59 43 4e" + "ce bb 4e 46 6d ae 5a 10 73 a6 72 76 27 09 7a 10" + "49 e6 17 d9 1d 36 10 94 fa 68 f0 ff 77 98 71 30" + "30 5b ea ba 2e da 04 df 99 7b 71 4d 6c 6f 2c 29" + "a6 ad 5c b4 02 2b 02 70 9b 00 00 00 00 00 00 00" + "0c 00 00 00 00 00 00 00 09 01 00 00 00 00 00 00", + "ee ad 9d 67 89 0c bb 22 39 23 36 fe a1 85 1f 38", + "RFC7539 A.5", + { 'cipher':ChaCha20, 'nonce':unhexlify("000000000102030405060708") } + ), +] + + +class Poly1305Test_AES(unittest.TestCase): + + key = b'\x11' * 32 + + def test_new_positive(self): + + data = b'r' * 100 + + h1 = Poly1305.new(key=self.key, cipher=AES) + self.assertEqual(h1.digest_size, 16) + self.assertEqual(len(h1.nonce), 16) + d1 = 
h1.update(data).digest() + self.assertEqual(len(d1), 16) + + h2 = Poly1305.new(key=self.key, nonce=h1.nonce, data=data, cipher=AES) + d2 = h2.digest() + self.assertEqual(h1.nonce, h2.nonce) + self.assertEqual(d1, d2) + + def test_new_negative(self): + from Crypto.Cipher import DES3 + + self.assertRaises(ValueError, Poly1305.new, key=self.key[:31], cipher=AES) + self.assertRaises(ValueError, Poly1305.new, key=self.key, cipher=DES3) + self.assertRaises(ValueError, Poly1305.new, key=self.key, nonce=b'1' * 15, cipher=AES) + self.assertRaises(TypeError, Poly1305.new, key=u"2" * 32, cipher=AES) + self.assertRaises(TypeError, Poly1305.new, key=self.key, data=u"2" * 100, cipher=AES) + + def test_update(self): + pieces = [b"\x0A" * 200, b"\x14" * 300] + h1 = Poly1305.new(key=self.key, cipher=AES) + h1.update(pieces[0]).update(pieces[1]) + d1 = h1.digest() + + h2 = Poly1305.new(key=self.key, cipher=AES, nonce=h1.nonce) + h2.update(pieces[0] + pieces[1]) + d2 = h2.digest() + self.assertEqual(d1, d2) + + def test_update_negative(self): + h = Poly1305.new(key=self.key, cipher=AES) + self.assertRaises(TypeError, h.update, u"string") + + def test_digest(self): + h = Poly1305.new(key=self.key, cipher=AES) + digest = h.digest() + + # hexdigest does not change the state + self.assertEqual(h.digest(), digest) + # digest returns a byte string + self.failUnless(isinstance(digest, type(b"digest"))) + + def test_update_after_digest(self): + msg=b"rrrrttt" + + # Normally, update() cannot be done after digest() + h = Poly1305.new(key=self.key, data=msg[:4], cipher=AES) + h.digest() + self.assertRaises(TypeError, h.update, msg[4:]) + + def test_hex_digest(self): + mac = Poly1305.new(key=self.key, cipher=AES) + digest = mac.digest() + hexdigest = mac.hexdigest() + + # hexdigest is equivalent to digest + self.assertEqual(hexlify(digest), tobytes(hexdigest)) + # hexdigest does not change the state + self.assertEqual(mac.hexdigest(), hexdigest) + # hexdigest returns a string + 
self.failUnless(isinstance(hexdigest, type("digest"))) + + def test_verify(self): + h = Poly1305.new(key=self.key, cipher=AES) + mac = h.digest() + h.verify(mac) + wrong_mac = strxor_c(mac, 255) + self.assertRaises(ValueError, h.verify, wrong_mac) + + def test_hexverify(self): + h = Poly1305.new(key=self.key, cipher=AES) + mac = h.hexdigest() + h.hexverify(mac) + self.assertRaises(ValueError, h.hexverify, "4556") + + def test_bytearray(self): + + data = b"\x00\x01\x02" + h0 = Poly1305.new(key=self.key, data=data, cipher=AES) + d_ref = h0.digest() + + # Data and key can be a bytearray (during initialization) + key_ba = bytearray(self.key) + data_ba = bytearray(data) + + h1 = Poly1305.new(key=self.key, data=data, cipher=AES, nonce=h0.nonce) + h2 = Poly1305.new(key=key_ba, data=data_ba, cipher=AES, nonce=h0.nonce) + key_ba[:1] = b'\xFF' + data_ba[:1] = b'\xEE' + + self.assertEqual(h1.digest(), d_ref) + self.assertEqual(h2.digest(), d_ref) + + # Data can be a bytearray (during operation) + data_ba = bytearray(data) + + h1 = Poly1305.new(key=self.key, cipher=AES) + h2 = Poly1305.new(key=self.key, cipher=AES, nonce=h1.nonce) + h1.update(data) + h2.update(data_ba) + data_ba[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + def test_memoryview(self): + + data = b"\x00\x01\x02" + + def get_mv_ro(data): + return memoryview(data) + + def get_mv_rw(data): + return memoryview(bytearray(data)) + + for get_mv in (get_mv_ro, get_mv_rw): + + # Data and key can be a memoryview (during initialization) + key_mv = get_mv(self.key) + data_mv = get_mv(data) + + h1 = Poly1305.new(key=self.key, data=data, cipher=AES) + h2 = Poly1305.new(key=key_mv, data=data_mv, cipher=AES, + nonce=h1.nonce) + if not data_mv.readonly: + data_mv[:1] = b'\xFF' + key_mv[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + # Data can be a memoryview (during operation) + data_mv = get_mv(data) + + h1 = Poly1305.new(key=self.key, cipher=AES) + h2 = Poly1305.new(key=self.key, 
cipher=AES, nonce=h1.nonce) + h1.update(data) + h2.update(data_mv) + if not data_mv.readonly: + data_mv[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + +class Poly1305Test_ChaCha20(unittest.TestCase): + + key = b'\x11' * 32 + + def test_new_positive(self): + data = b'r' * 100 + + h1 = Poly1305.new(key=self.key, cipher=ChaCha20) + self.assertEqual(h1.digest_size, 16) + self.assertEqual(len(h1.nonce), 12) + + h2 = Poly1305.new(key=self.key, cipher=ChaCha20, nonce = b'8' * 8) + self.assertEqual(len(h2.nonce), 8) + self.assertEqual(h2.nonce, b'8' * 8) + + def test_new_negative(self): + + self.assertRaises(ValueError, Poly1305.new, key=self.key, nonce=b'1' * 7, cipher=ChaCha20) + + +# +# make_mac_tests() expect a new() function with signature new(key, data, +# **kwargs), and we need to adapt Poly1305's, as it only uses keywords +# +class Poly1305_New(object): + + @staticmethod + def new(key, *data, **kwds): + _kwds = dict(kwds) + if len(data) == 1: + _kwds['data'] = data[0] + _kwds['key'] = key + return Poly1305.new(**_kwds) + + +class Poly1305_Basic(object): + + @staticmethod + def new(key, *data, **kwds): + from Crypto.Hash.Poly1305 import Poly1305_MAC + + if len(data) == 1: + msg = data[0] + else: + msg = None + + return Poly1305_MAC(key[:16], key[16:], msg) + + +class Poly1305AES_MC(unittest.TestCase): + + def runTest(self): + tag = unhexlify(b"fb447350c4e868c52ac3275cf9d4327e") + + msg = b'' + for msg_len in range(5000 + 1): + key = tag + strxor_c(tag, 0xFF) + nonce = tag[::-1] + if msg_len > 0: + msg = msg + tobytes(tag[0]) + auth = Poly1305.new(key=key, nonce=nonce, cipher=AES, data=msg) + tag = auth.digest() + + # Compare against output of original DJB's poly1305aes-20050218 + self.assertEqual("CDFA436DDD629C7DC20E1128530BAED2", auth.hexdigest().upper()) + + +def get_tests(config={}): + tests = make_mac_tests(Poly1305_Basic, "Poly1305", test_data_basic) + tests += make_mac_tests(Poly1305_New, "Poly1305", test_data_aes) + tests += 
make_mac_tests(Poly1305_New, "Poly1305", test_data_chacha20) + tests += [ Poly1305AES_MC() ] + tests += list_test_cases(Poly1305Test_AES) + tests += list_test_cases(Poly1305Test_ChaCha20) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_RIPEMD160.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_RIPEMD160.py new file mode 100644 index 0000000..153c570 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_RIPEMD160.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_RIPEMD160.py: Self-test for the RIPEMD-160 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +#"""Self-test suite for Crypto.Hash.RIPEMD160""" + +from Crypto.Util.py3compat import * + +# This is a list of (expected_result, input[, description]) tuples. 
+test_data = [ + # Test vectors downloaded 2008-09-12 from + # http://homes.esat.kuleuven.be/~bosselae/ripemd160.html + ('9c1185a5c5e9fc54612808977ee8f548b2258d31', '', "'' (empty string)"), + ('0bdc9d2d256b3ee9daae347be6f4dc835a467ffe', 'a'), + ('8eb208f7e05d987a9b044a8e98c6b087f15a0bfc', 'abc'), + ('5d0689ef49d2fae572b881b123a85ffa21595f36', 'message digest'), + + ('f71c27109c692c1b56bbdceb5b9d2865b3708dbc', + 'abcdefghijklmnopqrstuvwxyz', + 'a-z'), + + ('12a053384a9c0c88e405a06c27dcf49ada62eb2b', + 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq', + 'abcdbcd...pnopq'), + + ('b0e20b6e3116640286ed3a87a5713079b21f5189', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'A-Z, a-z, 0-9'), + + ('9b752e45573d4b39f4dbd3323cab82bf63326bfb', + '1234567890' * 8, + "'1234567890' * 8"), + + ('52783243c1697bdbe16d37f97f68f08325dc1528', + 'a' * 10**6, + '"a" * 10**6'), +] + +def get_tests(config={}): + from Crypto.Hash import RIPEMD160 + from .common import make_hash_tests + return make_hash_tests(RIPEMD160, "RIPEMD160", test_data, + digest_size=20, + oid="1.3.36.3.2.1") + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA1.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA1.py new file mode 100644 index 0000000..a883a44 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA1.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/SHA1.py: Self-test for the SHA-1 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA1""" + +from binascii import hexlify + +from Crypto.SelfTest.loader import load_test_vectors + +# Test vectors from various sources +# This is a list of (expected_result, input[, description]) tuples. 
+test_data_various = [ + # FIPS PUB 180-2, A.1 - "One-Block Message" + ('a9993e364706816aba3e25717850c26c9cd0d89d', 'abc'), + + # FIPS PUB 180-2, A.2 - "Multi-Block Message" + ('84983e441c3bd26ebaae4aa1f95129e5e54670f1', + 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'), + + # FIPS PUB 180-2, A.3 - "Long Message" +# ('34aa973cd4c4daa4f61eeb2bdbad27316534016f', +# 'a' * 10**6, +# '"a" * 10**6'), + + # RFC 3174: Section 7.3, "TEST4" (multiple of 512 bits) + ('dea356a2cddd90c7a7ecedc5ebb563934f460452', + '01234567' * 80, + '"01234567" * 80'), +] + +def get_tests(config={}): + from Crypto.Hash import SHA1 + from .common import make_hash_tests + + tests = [] + + test_vectors = load_test_vectors(("Hash", "SHA1"), + "SHA1ShortMsg.rsp", + "KAT SHA-1", + { "len" : lambda x: int(x) } ) or [] + + test_data = test_data_various[:] + for tv in test_vectors: + try: + if tv.startswith('['): + continue + except AttributeError: + pass + if tv.len == 0: + tv.msg = b"" + test_data.append((hexlify(tv.md), tv.msg, tv.desc)) + + tests = make_hash_tests(SHA1, "SHA1", test_data, + digest_size=20, + oid="1.3.14.3.2.26") + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA224.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA224.py new file mode 100644 index 0000000..cf81ad9 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA224.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA224.py: Self-test for the SHA-224 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA224""" + +# Test vectors from various sources +# This is a list of (expected_result, input[, description]) tuples. +test_data = [ + + # RFC 3874: Section 3.1, "Test Vector #1" + ('23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7', 'abc'), + + # RFC 3874: Section 3.2, "Test Vector #2" + ('75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525', 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'), + + # RFC 3874: Section 3.3, "Test Vector #3" + ('20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67', 'a' * 10**6, "'a' * 10**6"), + + # Examples from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f', ''), + + ('49b08defa65e644cbf8a2dd9270bdededabc741997d1dadd42026d7b', + 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), + + ('58911e7fccf2971a7d07f93162d8bd13568e71aa8fc86fc1fe9043d1', + 'Frank jagt im komplett verwahrlosten Taxi quer durch Bayern'), + +] + +def get_tests(config={}): + from Crypto.Hash import SHA224 + from .common import make_hash_tests + return make_hash_tests(SHA224, "SHA224", test_data, + 
digest_size=28, + oid='2.16.840.1.101.3.4.2.4') + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA256.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA256.py new file mode 100644 index 0000000..bb99326 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA256.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA256.py: Self-test for the SHA-256 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA256""" + +import unittest +from Crypto.Util.py3compat import * + +class LargeSHA256Test(unittest.TestCase): + def runTest(self): + """SHA256: 512/520 MiB test""" + from Crypto.Hash import SHA256 + zeros = bchr(0x00) * (1024*1024) + + h = SHA256.new(zeros) + for i in range(511): + h.update(zeros) + + # This test vector is from PyCrypto's old testdata.py file. + self.assertEqual('9acca8e8c22201155389f65abbf6bc9723edc7384ead80503839f49dcc56d767', h.hexdigest()) # 512 MiB + + for i in range(8): + h.update(zeros) + + # This test vector is from PyCrypto's old testdata.py file. + self.assertEqual('abf51ad954b246009dfe5a50ecd582fd5b8f1b8b27f30393853c3ef721e7fa6e', h.hexdigest()) # 520 MiB + +def get_tests(config={}): + # Test vectors from FIPS PUB 180-2 + # This is a list of (expected_result, input[, description]) tuples. + test_data = [ + # FIPS PUB 180-2, B.1 - "One-Block Message" + ('ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad', + 'abc'), + + # FIPS PUB 180-2, B.2 - "Multi-Block Message" + ('248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1', + 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'), + + # FIPS PUB 180-2, B.3 - "Long Message" + ('cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0', + 'a' * 10**6, + '"a" * 10**6'), + + # Test for an old PyCrypto bug. 
+ ('f7fd017a3c721ce7ff03f3552c0813adcc48b7f33f07e5e2ba71e23ea393d103', + 'This message is precisely 55 bytes long, to test a bug.', + 'Length = 55 (mod 64)'), + + # Example from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', ''), + + ('d32b568cd1b96d459e7291ebf4b25d007f275c9f13149beeb782fac0716613f8', + 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), + ] + + from Crypto.Hash import SHA256 + from .common import make_hash_tests + tests = make_hash_tests(SHA256, "SHA256", test_data, + digest_size=32, + oid="2.16.840.1.101.3.4.2.1") + + if config.get('slow_tests'): + tests += [LargeSHA256Test()] + + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA384.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA384.py new file mode 100644 index 0000000..c682eb4 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA384.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA.py: Self-test for the SHA-384 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA384""" + +# Test vectors from various sources +# This is a list of (expected_result, input[, description]) tuples. +test_data = [ + + # RFC 4634: Section Page 8.4, "Test 1" + ('cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed8086072ba1e7cc2358baeca134c825a7', 'abc'), + + # RFC 4634: Section Page 8.4, "Test 2.2" + ('09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712fcc7c71a557e2db966c3e9fa91746039', 'abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu'), + + # RFC 4634: Section Page 8.4, "Test 3" + ('9d0e1809716474cb086e834e310a4a1ced149e9c00f248527972cec5704c2a5b07b8b3dc38ecc4ebae97ddd87f3d8985', 'a' * 10**6, "'a' * 10**6"), + + # Taken from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b', ''), + + # Example from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('71e8383a4cea32d6fd6877495db2ee353542f46fa44bc23100bca48f3366b84e809f0708e81041f427c6d5219a286677', + 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), + +] + +def get_tests(config={}): + from Crypto.Hash import SHA384 + from .common import make_hash_tests + return make_hash_tests(SHA384, "SHA384", test_data, + digest_size=48, + oid='2.16.840.1.101.3.4.2.2') + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA3_224.py 
b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA3_224.py new file mode 100644 index 0000000..6dbf661 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA3_224.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA3_224.py: Self-test for the SHA-3/224 hash function +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA3_224""" + +import unittest +from binascii import hexlify + +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Hash import SHA3_224 as SHA3 +from Crypto.Util.py3compat import b + + +class APITest(unittest.TestCase): + + def test_update_after_digest(self): + msg=b("rrrrttt") + + # Normally, update() cannot be done after digest() + h = SHA3.new(data=msg[:4]) + dig1 = h.digest() + self.assertRaises(TypeError, h.update, msg[4:]) + dig2 = SHA3.new(data=msg).digest() + + # With the proper flag, it is allowed + h = SHA3.new(data=msg[:4], update_after_digest=True) + self.assertEquals(h.digest(), dig1) + # ... and the subsequent digest applies to the entire message + # up to that point + h.update(msg[4:]) + self.assertEquals(h.digest(), dig2) + + +def get_tests(config={}): + from .common import make_hash_tests + + tests = [] + + test_vectors = load_test_vectors(("Hash", "SHA3"), + "ShortMsgKAT_SHA3-224.txt", + "KAT SHA-3 224", + { "len" : lambda x: int(x) } ) or [] + + test_data = [] + for tv in test_vectors: + if tv.len == 0: + tv.msg = b("") + test_data.append((hexlify(tv.md), tv.msg, tv.desc)) + + tests += make_hash_tests(SHA3, "SHA3_224", test_data, + digest_size=SHA3.digest_size, + oid="2.16.840.1.101.3.4.2.7") + tests += list_test_cases(APITest) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA3_256.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA3_256.py new file mode 100644 index 0000000..b017852 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA3_256.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA3_256.py: Self-test for the SHA-3/256 hash function +# +# 
=================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA3_256""" + +import unittest +from binascii import hexlify + +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Hash import SHA3_256 as SHA3 +from Crypto.Util.py3compat import b + + +class APITest(unittest.TestCase): + + def test_update_after_digest(self): + msg=b("rrrrttt") + + # Normally, update() cannot be done after digest() + h = SHA3.new(data=msg[:4]) + dig1 = h.digest() + self.assertRaises(TypeError, h.update, msg[4:]) + dig2 = SHA3.new(data=msg).digest() + + # With the proper flag, it is allowed + h = SHA3.new(data=msg[:4], update_after_digest=True) + self.assertEquals(h.digest(), dig1) + # ... 
and the subsequent digest applies to the entire message + # up to that point + h.update(msg[4:]) + self.assertEquals(h.digest(), dig2) + + +def get_tests(config={}): + from .common import make_hash_tests + + tests = [] + + test_vectors = load_test_vectors(("Hash", "SHA3"), + "ShortMsgKAT_SHA3-256.txt", + "KAT SHA-3 256", + { "len" : lambda x: int(x) } ) or [] + + test_data = [] + for tv in test_vectors: + if tv.len == 0: + tv.msg = b("") + test_data.append((hexlify(tv.md), tv.msg, tv.desc)) + + + tests += make_hash_tests(SHA3, "SHA3_256", test_data, + digest_size=SHA3.digest_size, + oid="2.16.840.1.101.3.4.2.8") + tests += list_test_cases(APITest) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA3_384.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA3_384.py new file mode 100644 index 0000000..88d8c50 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA3_384.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA3_384.py: Self-test for the SHA-3/384 hash function +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA3_384""" + +import unittest +from binascii import hexlify + +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Hash import SHA3_384 as SHA3 +from Crypto.Util.py3compat import b + + +class APITest(unittest.TestCase): + + def test_update_after_digest(self): + msg=b("rrrrttt") + + # Normally, update() cannot be done after digest() + h = SHA3.new(data=msg[:4]) + dig1 = h.digest() + self.assertRaises(TypeError, h.update, msg[4:]) + dig2 = SHA3.new(data=msg).digest() + + # With the proper flag, it is allowed + h = SHA3.new(data=msg[:4], update_after_digest=True) + self.assertEquals(h.digest(), dig1) + # ... 
and the subsequent digest applies to the entire message + # up to that point + h.update(msg[4:]) + self.assertEquals(h.digest(), dig2) + + +def get_tests(config={}): + from .common import make_hash_tests + + tests = [] + + test_vectors = load_test_vectors(("Hash", "SHA3"), + "ShortMsgKAT_SHA3-384.txt", + "KAT SHA-3 384", + { "len" : lambda x: int(x) } ) or [] + + test_data = [] + for tv in test_vectors: + if tv.len == 0: + tv.msg = b("") + test_data.append((hexlify(tv.md), tv.msg, tv.desc)) + + tests += make_hash_tests(SHA3, "SHA3_384", test_data, + digest_size=SHA3.digest_size, + oid="2.16.840.1.101.3.4.2.9") + tests += list_test_cases(APITest) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA3_512.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA3_512.py new file mode 100644 index 0000000..d012003 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA3_512.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA3_512.py: Self-test for the SHA-3/512 hash function +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA3_512""" + +import unittest +from binascii import hexlify + +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Hash import SHA3_512 as SHA3 +from Crypto.Util.py3compat import b + + +class APITest(unittest.TestCase): + + def test_update_after_digest(self): + msg=b("rrrrttt") + + # Normally, update() cannot be done after digest() + h = SHA3.new(data=msg[:4]) + dig1 = h.digest() + self.assertRaises(TypeError, h.update, msg[4:]) + dig2 = SHA3.new(data=msg).digest() + + # With the proper flag, it is allowed + h = SHA3.new(data=msg[:4], update_after_digest=True) + self.assertEquals(h.digest(), dig1) + # ... 
and the subsequent digest applies to the entire message + # up to that point + h.update(msg[4:]) + self.assertEquals(h.digest(), dig2) + + +def get_tests(config={}): + from .common import make_hash_tests + + tests = [] + + test_vectors = load_test_vectors(("Hash", "SHA3"), + "ShortMsgKAT_SHA3-512.txt", + "KAT SHA-3 512", + { "len" : lambda x: int(x) } ) or [] + + test_data = [] + for tv in test_vectors: + if tv.len == 0: + tv.msg = b("") + test_data.append((hexlify(tv.md), tv.msg, tv.desc)) + + tests += make_hash_tests(SHA3, "SHA3_512", test_data, + digest_size=SHA3.digest_size, + oid="2.16.840.1.101.3.4.2.10") + tests += list_test_cases(APITest) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA512.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA512.py new file mode 100644 index 0000000..20961ac --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA512.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA512.py: Self-test for the SHA-512 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA512""" + +from binascii import hexlify + +from Crypto.Hash import SHA512 +from .common import make_hash_tests +from Crypto.SelfTest.loader import load_test_vectors + +# Test vectors from various sources +# This is a list of (expected_result, input[, description]) tuples. +test_data_512_other = [ + + # RFC 4634: Section Page 8.4, "Test 1" + ('ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f', 'abc'), + + # RFC 4634: Section Page 8.4, "Test 2.1" + ('8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909', 'abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu'), + + # RFC 4634: Section Page 8.4, "Test 3" + ('e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973ebde0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b', 'a' * 10**6, "'a' * 10**6"), + + # Taken from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e', ''), + + ('af9ed2de700433b803240a552b41b5a472a6ef3fe1431a722b2063c75e9f07451f67a28e37d09cde769424c96aea6f8971389db9e1993d6c565c3c71b855723c', 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), +] + + +def get_tests_SHA512(): + + test_vectors = load_test_vectors(("Hash", "SHA2"), + "SHA512ShortMsg.rsp", + "KAT SHA-512", + {"len": lambda x: int(x)}) or [] + + test_data = test_data_512_other[:] + for tv in 
test_vectors: + try: + if tv.startswith('['): + continue + except AttributeError: + pass + if tv.len == 0: + tv.msg = b"" + test_data.append((hexlify(tv.md), tv.msg, tv.desc)) + + tests = make_hash_tests(SHA512, "SHA512", test_data, + digest_size=64, + oid="2.16.840.1.101.3.4.2.3") + return tests + + +def get_tests_SHA512_224(): + + test_vectors = load_test_vectors(("Hash", "SHA2"), + "SHA512_224ShortMsg.rsp", + "KAT SHA-512/224", + {"len": lambda x: int(x)}) or [] + + test_data = [] + for tv in test_vectors: + try: + if tv.startswith('['): + continue + except AttributeError: + pass + if tv.len == 0: + tv.msg = b"" + test_data.append((hexlify(tv.md), tv.msg, tv.desc)) + + tests = make_hash_tests(SHA512, "SHA512/224", test_data, + digest_size=28, + oid="2.16.840.1.101.3.4.2.5", + extra_params={ "truncate" : "224" }) + return tests + + +def get_tests_SHA512_256(): + + test_vectors = load_test_vectors(("Hash", "SHA2"), + "SHA512_256ShortMsg.rsp", + "KAT SHA-512/256", + {"len": lambda x: int(x)}) or [] + + test_data = [] + for tv in test_vectors: + try: + if tv.startswith('['): + continue + except AttributeError: + pass + if tv.len == 0: + tv.msg = b"" + test_data.append((hexlify(tv.md), tv.msg, tv.desc)) + + tests = make_hash_tests(SHA512, "SHA512/256", test_data, + digest_size=32, + oid="2.16.840.1.101.3.4.2.6", + extra_params={ "truncate" : "256" }) + return tests + + +def get_tests(config={}): + + tests = [] + tests += get_tests_SHA512() + tests += get_tests_SHA512_224() + tests += get_tests_SHA512_256() + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHAKE.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHAKE.py new file mode 100644 index 0000000..77a07e8 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_SHAKE.py @@ -0,0 +1,143 @@ +# 
=================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +"""Self-test suite for Crypto.Hash.SHAKE128 and SHAKE256""" + +import unittest +from binascii import hexlify, unhexlify + +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Hash import SHAKE128, SHAKE256 +from Crypto.Util.py3compat import b, bchr, bord, tobytes + +class SHAKETest(unittest.TestCase): + + def test_new_positive(self): + + xof1 = self.shake.new() + xof2 = self.shake.new(data=b("90")) + xof3 = self.shake.new().update(b("90")) + + self.assertNotEqual(xof1.read(10), xof2.read(10)) + xof3.read(10) + self.assertEqual(xof2.read(10), xof3.read(10)) + + def test_update(self): + pieces = [bchr(10) * 200, bchr(20) * 300] + h = self.shake.new() + h.update(pieces[0]).update(pieces[1]) + digest = h.read(10) + h = self.shake.new() + h.update(pieces[0] + pieces[1]) + self.assertEqual(h.read(10), digest) + + def test_update_negative(self): + h = self.shake.new() + self.assertRaises(TypeError, h.update, u"string") + + def test_digest(self): + h = self.shake.new() + digest = h.read(90) + + # read returns a byte string of the right length + self.failUnless(isinstance(digest, type(b("digest")))) + self.assertEqual(len(digest), 90) + + def test_update_after_read(self): + mac = self.shake.new() + mac.update(b("rrrr")) + mac.read(90) + self.assertRaises(TypeError, mac.update, b("ttt")) + + +class SHAKE128Test(SHAKETest): + shake = SHAKE128 + + +class SHAKE256Test(SHAKETest): + shake = SHAKE256 + + +class SHAKEVectors(unittest.TestCase): + pass + + +test_vectors_128 = load_test_vectors(("Hash", "SHA3"), + "ShortMsgKAT_SHAKE128.txt", + "Short Messages KAT SHAKE128", + { "len" : lambda x: int(x) } ) or [] + +for idx, tv in enumerate(test_vectors_128): + if tv.len == 0: + data = b("") + else: + data = tobytes(tv.msg) + + def new_test(self, data=data, result=tv.md): + hobj = SHAKE128.new(data=data) + digest = hobj.read(len(result)) + 
self.assertEqual(digest, result) + + setattr(SHAKEVectors, "test_128_%d" % idx, new_test) + + +test_vectors_256 = load_test_vectors(("Hash", "SHA3"), + "ShortMsgKAT_SHAKE256.txt", + "Short Messages KAT SHAKE256", + { "len" : lambda x: int(x) } ) or [] + +for idx, tv in enumerate(test_vectors_256): + if tv.len == 0: + data = b("") + else: + data = tobytes(tv.msg) + + def new_test(self, data=data, result=tv.md): + hobj = SHAKE256.new(data=data) + digest = hobj.read(len(result)) + self.assertEqual(digest, result) + + setattr(SHAKEVectors, "test_256_%d" % idx, new_test) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(SHAKE128Test) + tests += list_test_cases(SHAKE256Test) + tests += list_test_cases(SHAKEVectors) + return tests + + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Hash/test_keccak.py b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_keccak.py new file mode 100644 index 0000000..92e2453 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Hash/test_keccak.py @@ -0,0 +1,250 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.keccak""" + +import unittest +from binascii import hexlify, unhexlify + +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Hash import keccak +from Crypto.Util.py3compat import b, tobytes, bchr + +class KeccakTest(unittest.TestCase): + + def test_new_positive(self): + + for digest_bits in (224, 256, 384, 512): + hobj = keccak.new(digest_bits=digest_bits) + self.assertEqual(hobj.digest_size, digest_bits // 8) + + hobj2 = hobj.new() + self.assertEqual(hobj2.digest_size, digest_bits // 8) + + for digest_bytes in (28, 32, 48, 64): + hobj = keccak.new(digest_bytes=digest_bytes) + self.assertEqual(hobj.digest_size, digest_bytes) + + hobj2 = hobj.new() + self.assertEqual(hobj2.digest_size, digest_bytes) + + def test_new_positive2(self): + + digest1 = keccak.new(data=b("\x90"), digest_bytes=64).digest() + digest2 = keccak.new(digest_bytes=64).update(b("\x90")).digest() + self.assertEqual(digest1, digest2) + + def test_new_negative(self): + + # keccak.new needs digest size + self.assertRaises(TypeError, keccak.new) 
+ + h = keccak.new(digest_bits=512) + + # Either bits or bytes can be specified + self.assertRaises(TypeError, keccak.new, + digest_bytes=64, + digest_bits=512) + + # Range + self.assertRaises(ValueError, keccak.new, digest_bytes=0) + self.assertRaises(ValueError, keccak.new, digest_bytes=1) + self.assertRaises(ValueError, keccak.new, digest_bytes=65) + self.assertRaises(ValueError, keccak.new, digest_bits=0) + self.assertRaises(ValueError, keccak.new, digest_bits=1) + self.assertRaises(ValueError, keccak.new, digest_bits=513) + + def test_update(self): + pieces = [bchr(10) * 200, bchr(20) * 300] + h = keccak.new(digest_bytes=64) + h.update(pieces[0]).update(pieces[1]) + digest = h.digest() + h = keccak.new(digest_bytes=64) + h.update(pieces[0] + pieces[1]) + self.assertEqual(h.digest(), digest) + + def test_update_negative(self): + h = keccak.new(digest_bytes=64) + self.assertRaises(TypeError, h.update, u"string") + + def test_digest(self): + h = keccak.new(digest_bytes=64) + digest = h.digest() + + # hexdigest does not change the state + self.assertEqual(h.digest(), digest) + # digest returns a byte string + self.failUnless(isinstance(digest, type(b("digest")))) + + def test_hex_digest(self): + mac = keccak.new(digest_bits=512) + digest = mac.digest() + hexdigest = mac.hexdigest() + + # hexdigest is equivalent to digest + self.assertEqual(hexlify(digest), tobytes(hexdigest)) + # hexdigest does not change the state + self.assertEqual(mac.hexdigest(), hexdigest) + # hexdigest returns a string + self.failUnless(isinstance(hexdigest, type("digest"))) + + def test_update_after_digest(self): + msg=b("rrrrttt") + + # Normally, update() cannot be done after digest() + h = keccak.new(digest_bits=512, data=msg[:4]) + dig1 = h.digest() + self.assertRaises(TypeError, h.update, msg[4:]) + dig2 = keccak.new(digest_bits=512, data=msg).digest() + + # With the proper flag, it is allowed + h = keccak.new(digest_bits=512, data=msg[:4], update_after_digest=True) + 
self.assertEquals(h.digest(), dig1) + # ... and the subsequent digest applies to the entire message + # up to that point + h.update(msg[4:]) + self.assertEquals(h.digest(), dig2) + + +class KeccakVectors(unittest.TestCase): + pass + + # TODO: add ExtremelyLong tests + + +test_vectors_224 = load_test_vectors(("Hash", "keccak"), + "ShortMsgKAT_224.txt", + "Short Messages KAT 224", + {"len": lambda x: int(x)}) or [] + +test_vectors_224 += load_test_vectors(("Hash", "keccak"), + "LongMsgKAT_224.txt", + "Long Messages KAT 224", + {"len": lambda x: int(x)}) or [] + +for idx, tv in enumerate(test_vectors_224): + if tv.len == 0: + data = b("") + else: + data = tobytes(tv.msg) + + def new_test(self, data=data, result=tv.md): + hobj = keccak.new(digest_bits=224, data=data) + self.assertEqual(hobj.digest(), result) + + setattr(KeccakVectors, "test_224_%d" % idx, new_test) + +# --- + +test_vectors_256 = load_test_vectors(("Hash", "keccak"), + "ShortMsgKAT_256.txt", + "Short Messages KAT 256", + { "len" : lambda x: int(x) } ) or [] + +test_vectors_256 += load_test_vectors(("Hash", "keccak"), + "LongMsgKAT_256.txt", + "Long Messages KAT 256", + { "len" : lambda x: int(x) } ) or [] + +for idx, tv in enumerate(test_vectors_256): + if tv.len == 0: + data = b("") + else: + data = tobytes(tv.msg) + + def new_test(self, data=data, result=tv.md): + hobj = keccak.new(digest_bits=256, data=data) + self.assertEqual(hobj.digest(), result) + + setattr(KeccakVectors, "test_256_%d" % idx, new_test) + + +# --- + +test_vectors_384 = load_test_vectors(("Hash", "keccak"), + "ShortMsgKAT_384.txt", + "Short Messages KAT 384", + {"len": lambda x: int(x)}) or [] + +test_vectors_384 += load_test_vectors(("Hash", "keccak"), + "LongMsgKAT_384.txt", + "Long Messages KAT 384", + {"len": lambda x: int(x)}) or [] + +for idx, tv in enumerate(test_vectors_384): + if tv.len == 0: + data = b("") + else: + data = tobytes(tv.msg) + + def new_test(self, data=data, result=tv.md): + hobj = 
keccak.new(digest_bits=384, data=data) + self.assertEqual(hobj.digest(), result) + + setattr(KeccakVectors, "test_384_%d" % idx, new_test) + +# --- + +test_vectors_512 = load_test_vectors(("Hash", "keccak"), + "ShortMsgKAT_512.txt", + "Short Messages KAT 512", + {"len": lambda x: int(x)}) or [] + +test_vectors_512 += load_test_vectors(("Hash", "keccak"), + "LongMsgKAT_512.txt", + "Long Messages KAT 512", + {"len": lambda x: int(x)}) or [] + +for idx, tv in enumerate(test_vectors_512): + if tv.len == 0: + data = b("") + else: + data = tobytes(tv.msg) + + def new_test(self, data=data, result=tv.md): + hobj = keccak.new(digest_bits=512, data=data) + self.assertEqual(hobj.digest(), result) + + setattr(KeccakVectors, "test_512_%d" % idx, new_test) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(KeccakTest) + tests += list_test_cases(KeccakVectors) + return tests + + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/IO/__init__.py b/env/Lib/site-packages/Crypto/SelfTest/IO/__init__.py new file mode 100644 index 0000000..c04a2a7 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/IO/__init__.py @@ -0,0 +1,47 @@ +# +# SelfTest/IO/__init__.py: Self-test for input/output module +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +"""Self-test for I/O""" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.IO import test_PKCS8; tests += test_PKCS8.get_tests(config=config) + from Crypto.SelfTest.IO import test_PBES; tests += test_PBES.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + + diff --git a/env/Lib/site-packages/Crypto/SelfTest/IO/test_PBES.py b/env/Lib/site-packages/Crypto/SelfTest/IO/test_PBES.py new file mode 100644 index 0000000..b2a4f94 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/IO/test_PBES.py @@ -0,0 +1,93 @@ +# +# SelfTest/IO/test_PBES.py: Self-test for the _PBES module +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. 
Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +"""Self-tests for Crypto.IO._PBES module""" + +import unittest +from Crypto.Util.py3compat import * + +from Crypto.IO._PBES import PBES2 + + +class TestPBES2(unittest.TestCase): + + def setUp(self): + self.ref = b("Test data") + self.passphrase = b("Passphrase") + + def test1(self): + ct = PBES2.encrypt(self.ref, self.passphrase, + 'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC') + pt = PBES2.decrypt(ct, self.passphrase) + self.assertEqual(self.ref, pt) + + def test2(self): + ct = PBES2.encrypt(self.ref, self.passphrase, + 'PBKDF2WithHMAC-SHA1AndAES128-CBC') + pt = PBES2.decrypt(ct, self.passphrase) + self.assertEqual(self.ref, pt) + + def test3(self): + ct = PBES2.encrypt(self.ref, self.passphrase, + 'PBKDF2WithHMAC-SHA1AndAES192-CBC') + pt = PBES2.decrypt(ct, self.passphrase) + self.assertEqual(self.ref, pt) + + def test4(self): + ct = PBES2.encrypt(self.ref, self.passphrase, + 'scryptAndAES128-CBC') + pt = PBES2.decrypt(ct, self.passphrase) + self.assertEqual(self.ref, pt) + + def test5(self): + ct = PBES2.encrypt(self.ref, self.passphrase, + 'scryptAndAES192-CBC') + pt = PBES2.decrypt(ct, self.passphrase) + self.assertEqual(self.ref, pt) + + def test6(self): + ct = PBES2.encrypt(self.ref, self.passphrase, + 'scryptAndAES256-CBC') + pt = PBES2.decrypt(ct, self.passphrase) + self.assertEqual(self.ref, pt) + + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + listTests = [] + listTests += list_test_cases(TestPBES2) + return listTests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/IO/test_PKCS8.py b/env/Lib/site-packages/Crypto/SelfTest/IO/test_PKCS8.py new file mode 100644 index 0000000..49f4b8a --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/IO/test_PKCS8.py @@ -0,0 +1,423 @@ +# +# SelfTest/IO/test_PKCS8.py: Self-test for the PKCS8 module 
+# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +"""Self-tests for Crypto.IO.PKCS8 module""" + +import unittest +from binascii import unhexlify + +from Crypto.Util.py3compat import * +from Crypto.IO import PKCS8 + +oid_key = '1.2.840.113549.1.1.1' + +# Original RSA key (in DER format) +# hexdump -v -e '32/1 "%02x" "\n"' key.der +clear_key=""" +308201ab020100025a00b94a7f7075ab9e79e8196f47be707781e80dd965cf16 +0c951a870b71783b6aaabbd550c0e65e5a3dfe15b8620009f6d7e5efec42a3f0 +6fe20faeebb0c356e79cdec6db4dd427e82d8ae4a5b90996227b8ba54ccfc4d2 +5c08050203010001025a00afa09c70d528299b7552fe766b5d20f9a221d66938 +c3b68371d48515359863ff96f0978d700e08cd6fd3d8a3f97066fc2e0d5f78eb +3a50b8e17ba297b24d1b8e9cdfd18d608668198d724ad15863ef0329195dee89 +3f039395022d0ebe0518df702a8b25954301ec60a97efdcec8eaa4f2e76ca7e8 +8dfbc3f7e0bb83f9a0e8dc47c0f8c746e9df6b022d0c9195de13f09b7be1fdd7 +1f56ae7d973e08bd9fd2c3dfd8936bb05be9cc67bd32d663c7f00d70932a0be3 +c24f022d0ac334eb6cabf1933633db007b763227b0d9971a9ea36aca8b669ec9 +4fcf16352f6b3dcae28e4bd6137db4ddd3022d0400a09f15ee7b351a2481cb03 +09920905c236d09c87afd3022f3afc2a19e3b746672b635238956ee7e6dd62d5 +022d0cd88ed14fcfbda5bbf0257f700147137bbab9c797af7df866704b889aa3 +7e2e93df3ff1a0fd3490111dcdbc4c +""" + +# Same key as above, wrapped in PKCS#8 but w/o password +# +# openssl pkcs8 -topk8 -inform DER -nocrypt -in key.der -outform DER -out keyp8.der +# hexdump -v -e '32/1 "%02x" "\n"' keyp8.der +wrapped_clear_key=""" +308201c5020100300d06092a864886f70d0101010500048201af308201ab0201 +00025a00b94a7f7075ab9e79e8196f47be707781e80dd965cf160c951a870b71 +783b6aaabbd550c0e65e5a3dfe15b8620009f6d7e5efec42a3f06fe20faeebb0 +c356e79cdec6db4dd427e82d8ae4a5b90996227b8ba54ccfc4d25c0805020301 +0001025a00afa09c70d528299b7552fe766b5d20f9a221d66938c3b68371d485 +15359863ff96f0978d700e08cd6fd3d8a3f97066fc2e0d5f78eb3a50b8e17ba2 +97b24d1b8e9cdfd18d608668198d724ad15863ef0329195dee893f039395022d 
+0ebe0518df702a8b25954301ec60a97efdcec8eaa4f2e76ca7e88dfbc3f7e0bb +83f9a0e8dc47c0f8c746e9df6b022d0c9195de13f09b7be1fdd71f56ae7d973e +08bd9fd2c3dfd8936bb05be9cc67bd32d663c7f00d70932a0be3c24f022d0ac3 +34eb6cabf1933633db007b763227b0d9971a9ea36aca8b669ec94fcf16352f6b +3dcae28e4bd6137db4ddd3022d0400a09f15ee7b351a2481cb0309920905c236 +d09c87afd3022f3afc2a19e3b746672b635238956ee7e6dd62d5022d0cd88ed1 +4fcfbda5bbf0257f700147137bbab9c797af7df866704b889aa37e2e93df3ff1 +a0fd3490111dcdbc4c +""" + +### +# +# The key above will now be encrypted with different algorithms. +# The password is always 'TestTest'. +# +# Each item in the wrapped_enc_keys list contains: +# * wrap algorithm +# * iteration count +# * Salt +# * IV +# * Expected result +### +wrapped_enc_keys = [] + +# +# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der -outform DER -out keyenc.der -v2 des3 +# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der +# +wrapped_enc_keys.append(( +'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC', +2048, +"47EA7227D8B22E2F", # IV +"E3F7A838AB911A4D", # Salt +""" +30820216304006092a864886f70d01050d3033301b06092a864886f70d01050c +300e0408e3f7a838ab911a4d02020800301406082a864886f70d0307040847ea +7227d8b22e2f048201d0ea388b374d2d0e4ceb7a5139f850fdff274884a6e6c0 +64326e09d00dbba9018834edb5a51a6ae3d1806e6e91eebf33788ce71fee0637 +a2ebf58859dd32afc644110c390274a6128b50c39b8d907823810ec471bada86 +6f5b75d8ea04ad310fad2e73621696db8e426cd511ee93ec1714a1a7db45e036 +4bf20d178d1f16bbb250b32c2d200093169d588de65f7d99aad9ddd0104b44f1 +326962e1520dfac3c2a800e8a14f678dff2b3d0bb23f69da635bf2a643ac934e +219a447d2f4460b67149e860e54f365da130763deefa649c72b0dcd48966a2d3 +4a477444782e3e66df5a582b07bbb19778a79bd355074ce331f4a82eb966b0c4 +52a09eab6116f2722064d314ae433b3d6e81d2436e93fdf446112663cde93b87 +9c8be44beb45f18e2c78fee9b016033f01ecda51b9b142091fa69f65ab784d2c +5ad8d34be6f7f1464adfc1e0ef3f7848f40d3bdea4412758f2fcb655c93d8f4d +f6fa48fc5aa4b75dd1c017ab79ac9d737233a6d668f5364ccf47786debd37334 
+9c10c9e6efbe78430a61f71c89948aa32cdc3cc7338cf994147819ce7ab23450 +c8f7d9b94c3bb377d17a3fa204b601526317824b142ff6bc843fa7815ece89c0 +839573f234dac8d80cc571a045353d61db904a4398d8ef3df5ac +""" +)) + +# +# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der -outform DER -out keyenc.der +# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der +# +wrapped_enc_keys.append(( +'skip encryption', # pbeWithMD5AndDES-CBC, only decoding is supported +-1, +"", +"", +""" +308201f1301b06092a864886f70d010503300e0408f9b990c89af1d41b020208 +00048201d0c6267fe8592903891933d559e71a7ca68b2e39150f19daca0f7921 +52f97e249d72f670d5140e9150433310ed7c7ee51927693fd39884cb9551cea5 +a7b746f7edf199f8787d4787a35dad930d7db057b2118851211b645ac8b90fa6 +b0e7d49ac8567cbd5fff226e87aa9129a0f52c45e9307752e8575c3b0ff756b7 +31fda6942d15ecb6b27ea19370ccc79773f47891e80d22b440d81259c4c28eac +e0ca839524116bcf52d8c566e49a95ddb0e5493437279a770a39fd333f3fca91 +55884fad0ba5aaf273121f893059d37dd417da7dcfd0d6fa7494968f13b2cc95 +65633f2c891340193e5ec00e4ee0b0e90b3b93da362a4906360845771ade1754 +9df79140be5993f3424c012598eadd3e7c7c0b4db2c72cf103d7943a5cf61420 +93370b9702386c3dd4eb0a47f34b579624a46a108b2d13921fa1b367495fe345 +6aa128aa70f8ca80ae13eb301e96c380724ce67c54380bbea2316c1faf4d058e +b4ca2e23442047606b9bc4b3bf65b432cb271bea4eb35dd3eb360d3be8612a87 +a50e96a2264490aeabdc07c6e78e5dbf4fe3388726d0e2a228346bf3c2907d68 +2a6276b22ae883fb30fa611f4e4193e7a08480fcd7db48308bacbd72bf4807aa +11fd394859f97d22982f7fe890b2e2a0f7e7ffb693 +""" +)) + +# +# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der +# -outform DER -out keyenc.der -v1 PBE-SHA1-RC2-64 +# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der +# +wrapped_enc_keys.append(( +'skip encryption', # pbeWithSHA1AndRC2-CBC, only decoding is supported +-1, +"", +"", +""" +308201f1301b06092a864886f70d01050b300e04083ee943bdae185008020208 +00048201d0e4614d9371d3ff10ceabc2f6a7a13a0f449f9a714144e46518ea55 
+e3e6f0cde24031d01ef1f37ec40081449ef01914faf45983dde0d2bc496712de +8dd15a5527dff4721d9016c13f34fb93e3ce68577e30146266d71b539f854e56 +753a192cf126ed4812734d86f81884374f1100772f78d0646e9946407637c565 +d070acab413c55952f7237437f2e48cae7fa0ff8d370de2bf446dd08049a3663 +d9c813ac197468c02e2b687e7ca994cf7f03f01b6eca87dbfed94502c2094157 +ea39f73fe4e591df1a68b04d19d9adab90bb9898467c1464ad20bf2b8fb9a5ff +d3ec91847d1c67fd768a4b9cfb46572eccc83806601372b6fad0243f58f623b7 +1c5809dea0feb8278fe27e5560eed8448dc93f5612f546e5dd7c5f6404365eb2 +5bf3396814367ae8b15c5c432b57eaed1f882c05c7f6517ee9e42b87b7b8d071 +9d6125d1b52f7b2cca1f6bd5f584334bf90bce1a7d938274cafe27b68e629698 +b16e27ae528db28593af9adcfccbebb3b9e1f2af5cd5531b51968389caa6c091 +e7de1f1b96f0d258e54e540d961a7c0ef51fda45d6da5fddd33e9bbfd3a5f8d7 +d7ab2e971de495cddbc86d38444fee9f0ac097b00adaf7802dabe0cff5b43b45 +4f26b7b547016f89be52676866189911c53e2f2477""" +)) + +# +# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der +# -outform DER -out keyenc.der -v1 PBE-MD5-RC2-64 +# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der +# +wrapped_enc_keys.append(( +'skip encryption', # pbeWithMD5AndRC2-CBC, only decoding is supported +-1, +"", +"", +""" +308201f1301b06092a864886f70d010506300e0408f5cd2fee56d9b4b8020208 +00048201d086454942d6166a19d6b108465bd111e7080911f573d54b1369c676 +df28600e84936bfec04f91023ff16499e2e07178c340904f12ffa6886ab66228 +32bf43c2bff5a0ed14e765918cf5fc543ad49566246f7eb3fc044fa5a9c25f40 +8fc8c8296b91658d3bb1067c0aba008c4fefd9e2bcdbbbd63fdc8085482bccf4 +f150cec9a084259ad441a017e5d81a1034ef2484696a7a50863836d0eeda45cd +8cee8ecabfed703f8d9d4bbdf3a767d32a0ccdc38550ee2928d7fe3fa27eda5b +5c7899e75ad55d076d2c2d3c37d6da3d95236081f9671dab9a99afdb1cbc890e +332d1a91105d9a8ce08b6027aa07367bd1daec3059cb51f5d896124da16971e4 +0ca4bcadb06c854bdf39f42dd24174011414e51626d198775eff3449a982df7b +ace874e77e045eb6d7c3faef0750792b29a068a6291f7275df1123fac5789c51 +27ace42836d81633faf9daf38f6787fff0394ea484bbcd465b57d4dbee3cf8df 
+b77d1db287b3a6264c466805be5a4fe85cfbca180699859280f2dd8e2c2c10b5 +7a7d2ac670c6039d41952fbb0e4f99b560ebe1d020e1b96d02403283819c00cc +529c51f0b0101555e4c58002ba3c6e3c12e3fde1aec94382792e96d9666a2b33 +3dc397b22ecab67ee38a552fec29a1d4ff8719c748""" +)) + +# +# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der +# -outform DER -out keyenc.der -v1 PBE-SHA1-DES +# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der +# +wrapped_enc_keys.append(( +'skip encryption', # pbeWithSHA1AndDES-CBC, only decoding is supported +-1, +"", +"", +""" +308201f1301b06092a864886f70d01050a300e04089bacc9cf1e8f734e020208 +00048201d03e502f3ceafe8fd19ab2939576bfdded26d719b2441db1459688f5 +9673218b41ec1f739edf1e460bd927bc28470c87b2d4fc8ea02ba17b47a63c49 +c5c1bee40529dadfd3ef8b4472c730bc136678c78abfb34670ec9d7dcd17ee3f +892f93f2629e6e0f4b24ecb9f954069bf722f466dece3913bb6abbd2c471d9a5 +c5eea89b14aaccda43d30b0dd0f6eb6e9850d9747aa8aa8414c383ad01c374ee +26d3552abec9ba22669cc9622ccf2921e3d0c8ecd1a70e861956de0bec6104b5 +b649ac994970c83f8a9e84b14a7dff7843d4ca3dd4af87cea43b5657e15ae0b5 +a940ce5047f006ab3596506600724764f23757205fe374fee04911336d655acc +03e159ec27789191d1517c4f3f9122f5242d44d25eab8f0658cafb928566ca0e +8f6589aa0c0ab13ca7a618008ae3eafd4671ee8fe0b562e70b3623b0e2a16eee +97fd388087d2e03530c9fe7db6e52eccc7c48fd701ede35e08922861a9508d12 +bc8bbf24f0c6bee6e63dbcb489b603d4c4a78ce45bf2eab1d5d10456c42a65a8 +3a606f4e4b9b46eb13b57f2624b651859d3d2d5192b45dbd5a2ead14ff20ca76 +48f321309aa56d8c0c4a192b580821cc6c70c75e6f19d1c5414da898ec4dd39d +b0eb93d6ba387a80702dfd2db610757ba340f63230 +""" +)) + +# +# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der +# -outform DER -out keyenc.der -v2 aes128 +# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der +# +wrapped_enc_keys.append(( +'PBKDF2WithHMAC-SHA1AndAES128-CBC', +2048, +"4F66EE5D3BCD531FE6EBF4B4E73016B8", # IV +"479F25156176C53A", # Salt +""" +3082021f304906092a864886f70d01050d303c301b06092a864886f70d01050c 
+300e0408479f25156176c53a02020800301d060960864801650304010204104f +66ee5d3bcd531fe6ebf4b4e73016b8048201d0e33cfa560423f589d097d21533 +3b880a5ebac5b2ac58b4e73b0d787aee7764f034fe34ca1d1bd845c0a7c3316f +afbfb2129e03dcaf5a5031394206492828dacef1e04639bee5935e0f46114202 +10bc6c37182f4889be11c5d0486c398f4be952e5740f65de9d8edeb275e2b406 +e19bc29ad5ebb97fa536344fc3d84c7e755696f12b810898de4e6f069b8a81c8 +0aab0d45d7d062303aaa4a10c2ce84fdb5a03114039cfe138e38bb15b2ced717 +93549cdad85e730b14d9e2198b663dfdc8d04a4349eb3de59b076ad40b116d4a +25ed917c576bc7c883c95ef0f1180e28fc9981bea069594c309f1aa1b253ceab +a2f0313bb1372bcb51a745056be93d77a1f235a762a45e8856512d436b2ca0f7 +dd60fbed394ba28978d2a2b984b028529d0a58d93aba46c6bbd4ac1e4013cbaa +63b00988bc5f11ccc40141c346762d2b28f64435d4be98ec17c1884985e3807e +e550db606600993efccf6de0dfc2d2d70b5336a3b018fa415d6bdd59f5777118 +16806b7bc17c4c7e20ad7176ebfa5a1aa3f6bc10f04b77afd443944642ac9cca +d740e082b4a3bbb8bafdd34a0b3c5f2f3c2aceccccdccd092b78994b845bfa61 +706c3b9df5165ed1dbcbf1244fe41fc9bf993f52f7658e2f87e1baaeacb0f562 +9d905c +""" +)) + +# +# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der +# -outform DER -out keyenc.der -v2 aes192 +# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der +# +wrapped_enc_keys.append(( +'PBKDF2WithHMAC-SHA1AndAES192-CBC', +2048, +"5CFC2A4FF7B63201A4A8A5B021148186", # IV +"D718541C264944CE", # Salt +""" +3082021f304906092a864886f70d01050d303c301b06092a864886f70d01050c +300e0408d718541c264944ce02020800301d060960864801650304011604105c +fc2a4ff7b63201a4a8a5b021148186048201d08e74aaa21b8bcfb15b9790fe95 +b0e09ddb0f189b6fb1682fdb9f122b804650ddec3c67a1df093a828b3e5fbcc6 +286abbcc5354c482fd796d972e919ca8a5eba1eaa2293af1d648013ddad72106 +75622264dfba55dafdda39e338f058f1bdb9846041ffff803797d3fdf3693135 +8a192729ea8346a7e5e58e925a2e2e4af0818581859e8215d87370eb4194a5ff +bae900857d4c591dbc651a241865a817eaede9987c9f9ae4f95c0bf930eea88c +4d7596e535ffb7ca369988aba75027a96b9d0bc9c8b0b75f359067fd145a378b 
+02aaa15e9db7a23176224da48a83249005460cc6e429168657f2efa8b1af7537 +d7d7042f2d683e8271b21d591090963eeb57aea6172f88da139e1614d6a7d1a2 +1002d5a7a93d6d21156e2b4777f6fc069287a85a1538c46b7722ccde591ab55c +630e1ceeb1ac42d1b41f3f654e9da86b5efced43775ea68b2594e50e4005e052 +0fe753c0898120c2c07265367ff157f6538a1e4080d6f9d1ca9eb51939c9574e +f2e4e1e87c1434affd5808563cddd376776dbbf790c6a40028f311a8b58dafa2 +0970ed34acd6e3e89d063987893b2b9570ddb8cc032b05a723bba9444933ebf3 +c624204be72f4190e0245197d0cb772bec933fd8442445f9a28bd042d5a3a1e9 +9a8a07 +""" +)) + +# +# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der +# -outform DER -out keyenc.der -v2 aes192 +# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der +# +wrapped_enc_keys.append(( +'PBKDF2WithHMAC-SHA1AndAES256-CBC', +2048, +"323351F94462AC563E053A056252C2C4", # IV +"02A6CD0D12E727B5", # Salt +""" +3082021f304906092a864886f70d01050d303c301b06092a864886f70d01050c +300e040802a6cd0d12e727b502020800301d060960864801650304012a041032 +3351f94462ac563e053a056252c2c4048201d07f4ef1c7be21aae738a20c5632 +b8bdbbb9083b6e7f68822267b1f481fd27fdafd61a90660de6e4058790e4c912 +bf3f319a7c37e6eb3d956daaa143865020d554bf6215e8d7492359aaeef45d6e +d85a686ed26c0bf7c18d071d827a86f0b73e1db0c0e7f3d42201544093302a90 +551ad530692468c47ac15c69500b8ca67d4a17b64d15cecc035ae50b768a36cf +07c395afa091e9e6f86f665455fbdc1b21ad79c0908b73da5de75a9b43508d5d +44dc97a870cd3cd9f01ca24452e9b11c1b4982946702cfcbfda5b2fcc0203fb5 +0b52a115760bd635c94d4c95ac2c640ee9a04ffaf6ccff5a8d953dd5d88ca478 +c377811c521f2191639c643d657a9e364af88bb7c14a356c2b0b4870a23c2f54 +d41f8157afff731471dccc6058b15e1151bcf84b39b5e622a3a1d65859c912a5 +591b85e034a1f6af664f030a6bfc8c3d20c70f32b54bcf4da9c2da83cef49cf8 +e9a74f0e5d358fe50b88acdce6a9db9a7ad61536212fc5f877ebfc7957b8bda4 +b1582a0f10d515a20ee06cf768db9c977aa6fbdca7540d611ff953012d009dac +e8abd059f8e8ffea637c9c7721f817aaf0bb23403e26a0ef0ff0e2037da67d41 +af728481f53443551a9bff4cea023164e9622b5441a309e1f4bff98e5bf76677 +8d7cd9 +""" +)) + 
+def txt2bin(inputs): + s = b('').join([b(x) for x in inputs if not (x in '\n\r\t ')]) + return unhexlify(s) + +class Rng: + def __init__(self, output): + self.output=output + self.idx=0 + def __call__(self, n): + output = self.output[self.idx:self.idx+n] + self.idx += n + return output + +class PKCS8_Decrypt(unittest.TestCase): + + def setUp(self): + self.oid_key = oid_key + self.clear_key = txt2bin(clear_key) + self.wrapped_clear_key = txt2bin(wrapped_clear_key) + self.wrapped_enc_keys = [] + for t in wrapped_enc_keys: + self.wrapped_enc_keys.append(( + t[0], + t[1], + txt2bin(t[2]), + txt2bin(t[3]), + txt2bin(t[4]) + )) + + ### NO ENCRYTION + + def test1(self): + """Verify unwrapping w/o encryption""" + res1, res2, res3 = PKCS8.unwrap(self.wrapped_clear_key) + self.assertEqual(res1, self.oid_key) + self.assertEqual(res2, self.clear_key) + + def test2(self): + """Verify wrapping w/o encryption""" + wrapped = PKCS8.wrap(self.clear_key, self.oid_key) + res1, res2, res3 = PKCS8.unwrap(wrapped) + self.assertEqual(res1, self.oid_key) + self.assertEqual(res2, self.clear_key) + + ## ENCRYPTION + + def test3(self): + """Verify unwrapping with encryption""" + + for t in self.wrapped_enc_keys: + res1, res2, res3 = PKCS8.unwrap(t[4], b("TestTest")) + self.assertEqual(res1, self.oid_key) + self.assertEqual(res2, self.clear_key) + + def test4(self): + """Verify wrapping with encryption""" + + for t in self.wrapped_enc_keys: + if t[0] == 'skip encryption': + continue + rng = Rng(t[2]+t[3]) + params = { 'iteration_count':t[1] } + wrapped = PKCS8.wrap( + self.clear_key, + self.oid_key, + b("TestTest"), + protection=t[0], + prot_params=params, + key_params=None, + randfunc=rng) + self.assertEqual(wrapped, t[4]) + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + listTests = [] + listTests += list_test_cases(PKCS8_Decrypt) + return listTests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + 
unittest.main(defaultTest='suite') + diff --git a/env/Lib/site-packages/Crypto/SelfTest/Math/__init__.py b/env/Lib/site-packages/Crypto/SelfTest/Math/__init__.py new file mode 100644 index 0000000..18e83d1 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Math/__init__.py @@ -0,0 +1,49 @@ +# +# SelfTest/Math/__init__.py: Self-test for math module +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +"""Self-test for Math""" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Math import test_Numbers + from Crypto.SelfTest.Math import test_Primality + from Crypto.SelfTest.Math import test_modexp + tests += test_Numbers.get_tests(config=config) + tests += test_Primality.get_tests(config=config) + tests += test_modexp.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Math/test_Numbers.py b/env/Lib/site-packages/Crypto/SelfTest/Math/test_Numbers.py new file mode 100644 index 0000000..058a6c7 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Math/test_Numbers.py @@ -0,0 +1,774 @@ +# +# SelfTest/Math/test_Numbers.py: Self-test for Numbers module +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +"""Self-test for Math.Numbers""" + +import sys +import unittest + +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Util.py3compat import * + +from Crypto.Math._IntegerNative import IntegerNative + + +class TestIntegerBase(unittest.TestCase): + + def setUp(self): + raise NotImplementedError("To be implemented") + + def Integers(self, *arg): + return map(self.Integer, arg) + + def test_init_and_equality(self): + Integer = self.Integer + + v1 = Integer(23) + v2 = Integer(v1) + v3 = Integer(-9) + self.assertRaises(ValueError, Integer, 1.0) + + v4 = Integer(10**10) + v5 = Integer(-10**10) + self.assertEqual(v1, v1) + self.assertEqual(v1, 23) + self.assertEqual(v1, v2) + self.assertEqual(v3, -9) + self.assertEqual(v4, 10 ** 10) + self.assertEqual(v5, -10 ** 10) + + self.failIf(v1 == v4) + + # Init and comparison between Integer's + v6 = Integer(v1) + self.assertEqual(v1, v6) + + self.failIf(Integer(0) == None) + + def test_conversion_to_int(self): + v1, v2 = self.Integers(-23, 2 ** 1000) + self.assertEqual(int(v1), -23) + self.assertEqual(int(v2), 2 ** 1000) + + def test_equality_with_ints(self): + v1, v2, v3 = self.Integers(23, -89, 2 ** 1000) + self.failUnless(v1 == 23) + self.failUnless(v2 == -89) + self.failIf(v1 == 24) + self.failUnless(v3 == 2 ** 1000) + + def test_conversion_to_str(self): + v1, v2, v3, v4 = self.Integers(20, 0, -20, 2 ** 
1000) + self.failUnless(str(v1) == "20") + self.failUnless(str(v2) == "0") + self.failUnless(str(v3) == "-20") + self.failUnless(str(v4) == "10715086071862673209484250490600018105614048117055336074437503883703510511249361224931983788156958581275946729175531468251871452856923140435984577574698574803934567774824230985421074605062371141877954182153046474983581941267398767559165543946077062914571196477686542167660429831652624386837205668069376") + + def test_repr(self): + v1, v2 = self.Integers(-1, 2**80) + self.assertEqual(repr(v1), "Integer(-1)") + self.assertEqual(repr(v2), "Integer(1208925819614629174706176)") + + def test_conversion_to_bytes(self): + Integer = self.Integer + + v1 = Integer(0x17) + self.assertEqual(b("\x17"), v1.to_bytes()) + + v2 = Integer(0xFFFF) + self.assertEqual(b("\xFF\xFF"), v2.to_bytes()) + self.assertEqual(b("\x00\xFF\xFF"), v2.to_bytes(3)) + self.assertRaises(ValueError, v2.to_bytes, 1) + + v3 = Integer(-90) + self.assertRaises(ValueError, v3.to_bytes) + + def test_conversion_from_bytes(self): + Integer = self.Integer + + v1 = Integer.from_bytes(b("\x00")) + self.failUnless(isinstance(v1, Integer)) + self.assertEqual(0, v1) + + v2 = Integer.from_bytes(b("\x00\x00")) + self.assertEqual(0, v2) + + v3 = Integer.from_bytes(b("\xFF\xFF")) + self.assertEqual(0xFFFF, v3) + + def test_inequality(self): + # Test Integer!=Integer and Integer!=int + v1, v2, v3, v4 = self.Integers(89, 89, 90, -8) + self.failUnless(v1 != v3) + self.failUnless(v1 != 90) + self.failIf(v1 != v2) + self.failIf(v1 != 89) + self.failUnless(v1 != v4) + self.failUnless(v4 != v1) + self.failUnless(self.Integer(0) != None) + + def test_less_than(self): + # Test IntegerInteger and Integer>int + v1, v2, v3, v4, v5 = self.Integers(13, 13, 14, -8, 2 ** 10) + self.failUnless(v3 > v1) + self.failUnless(v3 > 13) + self.failIf(v1 > v1) + self.failIf(v1 > v2) + self.failIf(v1 > 13) + self.failUnless(v1 > v4) + self.failIf(v4 > v1) + self.failUnless(v5 > v1) + self.failIf(v1 > v5) + + 
def test_more_than_or_equal(self): + # Test Integer>=Integer and Integer>=int + v1, v2, v3, v4 = self.Integers(13, 13, 14, -4) + self.failUnless(v3 >= v1) + self.failUnless(v3 >= 13) + self.failUnless(v1 >= v2) + self.failUnless(v1 >= v1) + self.failUnless(v1 >= 13) + self.failIf(v4 >= v1) + + def test_bool(self): + v1, v2, v3, v4 = self.Integers(0, 10, -9, 2 ** 10) + self.assertFalse(v1) + self.assertFalse(bool(v1)) + self.failUnless(v2) + self.failUnless(bool(v2)) + self.failUnless(v3) + self.failUnless(v4) + + def test_is_negative(self): + v1, v2, v3, v4, v5 = self.Integers(-3 ** 100, -3, 0, 3, 3**100) + self.failUnless(v1.is_negative()) + self.failUnless(v2.is_negative()) + self.failIf(v4.is_negative()) + self.failIf(v5.is_negative()) + + def test_addition(self): + # Test Integer+Integer and Integer+int + v1, v2, v3 = self.Integers(7, 90, -7) + self.failUnless(isinstance(v1 + v2, self.Integer)) + self.assertEqual(v1 + v2, 97) + self.assertEqual(v1 + 90, 97) + self.assertEqual(v1 + v3, 0) + self.assertEqual(v1 + (-7), 0) + self.assertEqual(v1 + 2 ** 10, 2 ** 10 + 7) + + def test_subtraction(self): + # Test Integer-Integer and Integer-int + v1, v2, v3 = self.Integers(7, 90, -7) + self.failUnless(isinstance(v1 - v2, self.Integer)) + self.assertEqual(v2 - v1, 83) + self.assertEqual(v2 - 7, 83) + self.assertEqual(v2 - v3, 97) + self.assertEqual(v1 - (-7), 14) + self.assertEqual(v1 - 2 ** 10, 7 - 2 ** 10) + + def test_multiplication(self): + # Test Integer-Integer and Integer-int + v1, v2, v3, v4 = self.Integers(4, 5, -2, 2 ** 10) + self.failUnless(isinstance(v1 * v2, self.Integer)) + self.assertEqual(v1 * v2, 20) + self.assertEqual(v1 * 5, 20) + self.assertEqual(v1 * -2, -8) + self.assertEqual(v1 * 2 ** 10, 4 * (2 ** 10)) + + def test_floor_div(self): + v1, v2, v3 = self.Integers(3, 8, 2 ** 80) + self.failUnless(isinstance(v1 // v2, self.Integer)) + self.assertEqual(v2 // v1, 2) + self.assertEqual(v2 // 3, 2) + self.assertEqual(v2 // -3, -3) + self.assertEqual(v3 // 
2 ** 79, 2) + self.assertRaises(ZeroDivisionError, lambda: v1 // 0) + + def test_remainder(self): + # Test Integer%Integer and Integer%int + v1, v2, v3 = self.Integers(23, 5, -4) + self.failUnless(isinstance(v1 % v2, self.Integer)) + self.assertEqual(v1 % v2, 3) + self.assertEqual(v1 % 5, 3) + self.assertEqual(v3 % 5, 1) + self.assertEqual(v1 % 2 ** 10, 23) + self.assertRaises(ZeroDivisionError, lambda: v1 % 0) + self.assertRaises(ValueError, lambda: v1 % -6) + + def test_simple_exponentiation(self): + v1, v2, v3 = self.Integers(4, 3, -2) + self.failUnless(isinstance(v1 ** v2, self.Integer)) + self.assertEqual(v1 ** v2, 64) + self.assertEqual(pow(v1, v2), 64) + self.assertEqual(v1 ** 3, 64) + self.assertEqual(pow(v1, 3), 64) + self.assertEqual(v3 ** 2, 4) + self.assertEqual(v3 ** 3, -8) + + self.assertRaises(ValueError, pow, v1, -3) + + def test_modular_exponentiation(self): + v1, v2, v3 = self.Integers(23, 5, 17) + + self.failUnless(isinstance(pow(v1, v2, v3), self.Integer)) + self.assertEqual(pow(v1, v2, v3), 7) + self.assertEqual(pow(v1, 5, v3), 7) + self.assertEqual(pow(v1, v2, 17), 7) + self.assertEqual(pow(v1, 5, 17), 7) + self.assertEqual(pow(v1, 0, 17), 1) + self.assertEqual(pow(v1, 1, 2 ** 80), 23) + self.assertEqual(pow(v1, 2 ** 80, 89298), 17689) + + self.assertRaises(ZeroDivisionError, pow, v1, 5, 0) + self.assertRaises(ValueError, pow, v1, 5, -4) + self.assertRaises(ValueError, pow, v1, -3, 8) + + def test_inplace_exponentiation(self): + v1 = self.Integer(4) + v1.inplace_pow(2) + self.assertEqual(v1, 16) + + v1 = self.Integer(4) + v1.inplace_pow(2, 15) + self.assertEqual(v1, 1) + + def test_abs(self): + v1, v2, v3, v4, v5 = self.Integers(-2 ** 100, -2, 0, 2, 2 ** 100) + self.assertEqual(abs(v1), 2 ** 100) + self.assertEqual(abs(v2), 2) + self.assertEqual(abs(v3), 0) + self.assertEqual(abs(v4), 2) + self.assertEqual(abs(v5), 2 ** 100) + + def test_sqrt(self): + v1, v2, v3, v4 = self.Integers(-2, 0, 49, 10**100) + + self.assertRaises(ValueError, v1.sqrt) 
+ self.assertEqual(v2.sqrt(), 0) + self.assertEqual(v3.sqrt(), 7) + self.assertEqual(v4.sqrt(), 10**50) + + def test_sqrt_module(self): + + # Invalid modulus (non positive) + self.assertRaises(ValueError, self.Integer(5).sqrt, 0) + self.assertRaises(ValueError, self.Integer(5).sqrt, -1) + + # Simple cases + assert self.Integer(0).sqrt(5) == 0 + assert self.Integer(1).sqrt(5) in (1, 4) + + # Test with all quadratic residues in several fields + for p in (11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53): + for i in range(0, p): + square = i**2 % p + res = self.Integer(square).sqrt(p) + assert res in (i, p - i) + + # 2 is a non-quadratic reside in Z_11 + self.assertRaises(ValueError, self.Integer(2).sqrt, 11) + + # 10 is not a prime + self.assertRaises(ValueError, self.Integer(4).sqrt, 10) + + # 5 is square residue of 4 and 7 + assert self.Integer(5 - 11).sqrt(11) in (4, 7) + assert self.Integer(5 + 11).sqrt(11) in (4, 7) + + def test_in_place_add(self): + v1, v2 = self.Integers(10, 20) + + v1 += v2 + self.assertEqual(v1, 30) + v1 += 10 + self.assertEqual(v1, 40) + v1 += -1 + self.assertEqual(v1, 39) + v1 += 2 ** 1000 + self.assertEqual(v1, 39 + 2 ** 1000) + + def test_in_place_sub(self): + v1, v2 = self.Integers(10, 20) + + v1 -= v2 + self.assertEqual(v1, -10) + v1 -= -100 + self.assertEqual(v1, 90) + v1 -= 90000 + self.assertEqual(v1, -89910) + v1 -= -100000 + self.assertEqual(v1, 10090) + + def test_in_place_mul(self): + v1, v2 = self.Integers(3, 5) + + v1 *= v2 + self.assertEqual(v1, 15) + v1 *= 2 + self.assertEqual(v1, 30) + v1 *= -2 + self.assertEqual(v1, -60) + v1 *= 2 ** 1000 + self.assertEqual(v1, -60 * (2 ** 1000)) + + def test_in_place_modulus(self): + v1, v2 = self.Integers(20, 7) + + v1 %= v2 + self.assertEqual(v1, 6) + v1 %= 2 ** 1000 + self.assertEqual(v1, 6) + v1 %= 2 + self.assertEqual(v1, 0) + def t(): + v3 = self.Integer(9) + v3 %= 0 + self.assertRaises(ZeroDivisionError, t) + + def test_and(self): + v1, v2, v3 = self.Integers(0xF4, 0x31, -0xF) + 
self.failUnless(isinstance(v1 & v2, self.Integer)) + self.assertEqual(v1 & v2, 0x30) + self.assertEqual(v1 & 0x31, 0x30) + self.assertEqual(v1 & v3, 0xF0) + self.assertEqual(v1 & -0xF, 0xF0) + self.assertEqual(v3 & -0xF, -0xF) + self.assertEqual(v2 & (2 ** 1000 + 0x31), 0x31) + + def test_or(self): + v1, v2, v3 = self.Integers(0x40, 0x82, -0xF) + self.failUnless(isinstance(v1 | v2, self.Integer)) + self.assertEqual(v1 | v2, 0xC2) + self.assertEqual(v1 | 0x82, 0xC2) + self.assertEqual(v2 | v3, -0xD) + self.assertEqual(v2 | 2 ** 1000, 2 ** 1000 + 0x82) + + def test_right_shift(self): + v1, v2, v3 = self.Integers(0x10, 1, -0x10) + self.assertEqual(v1 >> 0, v1) + self.failUnless(isinstance(v1 >> v2, self.Integer)) + self.assertEqual(v1 >> v2, 0x08) + self.assertEqual(v1 >> 1, 0x08) + self.assertRaises(ValueError, lambda: v1 >> -1) + self.assertEqual(v1 >> (2 ** 1000), 0) + + self.assertEqual(v3 >> 1, -0x08) + self.assertEqual(v3 >> (2 ** 1000), -1) + + def test_in_place_right_shift(self): + v1, v2, v3 = self.Integers(0x10, 1, -0x10) + v1 >>= 0 + self.assertEqual(v1, 0x10) + v1 >>= 1 + self.assertEqual(v1, 0x08) + v1 >>= v2 + self.assertEqual(v1, 0x04) + v3 >>= 1 + self.assertEqual(v3, -0x08) + def l(): + v4 = self.Integer(0x90) + v4 >>= -1 + self.assertRaises(ValueError, l) + def m1(): + v4 = self.Integer(0x90) + v4 >>= 2 ** 1000 + return v4 + self.assertEqual(0, m1()) + def m2(): + v4 = self.Integer(-1) + v4 >>= 2 ** 1000 + return v4 + self.assertEqual(-1, m2()) + + def _test_left_shift(self): + v1, v2, v3 = self.Integers(0x10, 1, -0x10) + self.assertEqual(v1 << 0, v1) + self.failUnless(isinstance(v1 << v2, self.Integer)) + self.assertEqual(v1 << v2, 0x20) + self.assertEqual(v1 << 1, 0x20) + self.assertEqual(v3 << 1, -0x20) + self.assertRaises(ValueError, lambda: v1 << -1) + self.assertRaises(ValueError, lambda: v1 << (2 ** 1000)) + + def test_in_place_left_shift(self): + v1, v2, v3 = self.Integers(0x10, 1, -0x10) + v1 <<= 0 + self.assertEqual(v1, 0x10) + v1 <<= 1 + 
self.assertEqual(v1, 0x20) + v1 <<= v2 + self.assertEqual(v1, 0x40) + v3 <<= 1 + self.assertEqual(v3, -0x20) + def l(): + v4 = self.Integer(0x90) + v4 <<= -1 + self.assertRaises(ValueError, l) + def m(): + v4 = self.Integer(0x90) + v4 <<= 2 ** 1000 + self.assertRaises(ValueError, m) + + + def test_get_bit(self): + v1, v2, v3 = self.Integers(0x102, -3, 1) + self.assertEqual(v1.get_bit(0), 0) + self.assertEqual(v1.get_bit(1), 1) + self.assertEqual(v1.get_bit(v3), 1) + self.assertEqual(v1.get_bit(8), 1) + self.assertEqual(v1.get_bit(9), 0) + + self.assertRaises(ValueError, v1.get_bit, -1) + self.assertEqual(v1.get_bit(2 ** 1000), 0) + + self.assertRaises(ValueError, v2.get_bit, -1) + self.assertRaises(ValueError, v2.get_bit, 0) + self.assertRaises(ValueError, v2.get_bit, 1) + self.assertRaises(ValueError, v2.get_bit, 2 * 1000) + + def test_odd_even(self): + v1, v2, v3, v4, v5 = self.Integers(0, 4, 17, -4, -17) + + self.failUnless(v1.is_even()) + self.failUnless(v2.is_even()) + self.failIf(v3.is_even()) + self.failUnless(v4.is_even()) + self.failIf(v5.is_even()) + + self.failIf(v1.is_odd()) + self.failIf(v2.is_odd()) + self.failUnless(v3.is_odd()) + self.failIf(v4.is_odd()) + self.failUnless(v5.is_odd()) + + def test_size_in_bits(self): + v1, v2, v3, v4 = self.Integers(0, 1, 0x100, -90) + self.assertEqual(v1.size_in_bits(), 1) + self.assertEqual(v2.size_in_bits(), 1) + self.assertEqual(v3.size_in_bits(), 9) + self.assertRaises(ValueError, v4.size_in_bits) + + def test_size_in_bytes(self): + v1, v2, v3, v4, v5, v6 = self.Integers(0, 1, 0xFF, 0x1FF, 0x10000, -9) + self.assertEqual(v1.size_in_bytes(), 1) + self.assertEqual(v2.size_in_bytes(), 1) + self.assertEqual(v3.size_in_bytes(), 1) + self.assertEqual(v4.size_in_bytes(), 2) + self.assertEqual(v5.size_in_bytes(), 3) + self.assertRaises(ValueError, v6.size_in_bits) + + def test_perfect_square(self): + + self.failIf(self.Integer(-9).is_perfect_square()) + self.failUnless(self.Integer(0).is_perfect_square()) + 
self.failUnless(self.Integer(1).is_perfect_square()) + self.failIf(self.Integer(2).is_perfect_square()) + self.failIf(self.Integer(3).is_perfect_square()) + self.failUnless(self.Integer(4).is_perfect_square()) + self.failUnless(self.Integer(39*39).is_perfect_square()) + self.failIf(self.Integer(39*39+1).is_perfect_square()) + + for x in range(100, 1000): + self.failIf(self.Integer(x**2+1).is_perfect_square()) + self.failUnless(self.Integer(x**2).is_perfect_square()) + + def test_fail_if_divisible_by(self): + v1, v2, v3 = self.Integers(12, -12, 4) + + # No failure expected + v1.fail_if_divisible_by(7) + v2.fail_if_divisible_by(7) + v2.fail_if_divisible_by(2 ** 80) + + # Failure expected + self.assertRaises(ValueError, v1.fail_if_divisible_by, 4) + self.assertRaises(ValueError, v1.fail_if_divisible_by, v3) + + def test_multiply_accumulate(self): + v1, v2, v3 = self.Integers(4, 3, 2) + v1.multiply_accumulate(v2, v3) + self.assertEqual(v1, 10) + v1.multiply_accumulate(v2, 2) + self.assertEqual(v1, 16) + v1.multiply_accumulate(3, v3) + self.assertEqual(v1, 22) + v1.multiply_accumulate(1, -2) + self.assertEqual(v1, 20) + v1.multiply_accumulate(-2, 1) + self.assertEqual(v1, 18) + v1.multiply_accumulate(1, 2 ** 1000) + self.assertEqual(v1, 18 + 2 ** 1000) + v1.multiply_accumulate(2 ** 1000, 1) + self.assertEqual(v1, 18 + 2 ** 1001) + + def test_set(self): + v1, v2 = self.Integers(3, 6) + v1.set(v2) + self.assertEqual(v1, 6) + v1.set(9) + self.assertEqual(v1, 9) + v1.set(-2) + self.assertEqual(v1, -2) + v1.set(2 ** 1000) + self.assertEqual(v1, 2 ** 1000) + + def test_inverse(self): + v1, v2, v3, v4, v5, v6 = self.Integers(2, 5, -3, 0, 723872, 3433) + + self.failUnless(isinstance(v1.inverse(v2), self.Integer)) + self.assertEqual(v1.inverse(v2), 3) + self.assertEqual(v1.inverse(5), 3) + self.assertEqual(v3.inverse(5), 3) + self.assertEqual(v5.inverse(92929921), 58610507) + self.assertEqual(v6.inverse(9912), 5353) + + self.assertRaises(ValueError, v2.inverse, 10) + 
self.assertRaises(ValueError, v1.inverse, -3) + self.assertRaises(ValueError, v4.inverse, 10) + self.assertRaises(ZeroDivisionError, v2.inverse, 0) + + def test_inplace_inverse(self): + v1, v2 = self.Integers(2, 5) + + v1.inplace_inverse(v2) + self.assertEqual(v1, 3) + + def test_gcd(self): + v1, v2, v3, v4 = self.Integers(6, 10, 17, -2) + self.failUnless(isinstance(v1.gcd(v2), self.Integer)) + self.assertEqual(v1.gcd(v2), 2) + self.assertEqual(v1.gcd(10), 2) + self.assertEqual(v1.gcd(v3), 1) + self.assertEqual(v1.gcd(-2), 2) + self.assertEqual(v4.gcd(6), 2) + + def test_lcm(self): + v1, v2, v3, v4, v5 = self.Integers(6, 10, 17, -2, 0) + self.failUnless(isinstance(v1.lcm(v2), self.Integer)) + self.assertEqual(v1.lcm(v2), 30) + self.assertEqual(v1.lcm(10), 30) + self.assertEqual(v1.lcm(v3), 102) + self.assertEqual(v1.lcm(-2), 6) + self.assertEqual(v4.lcm(6), 6) + self.assertEqual(v1.lcm(0), 0) + self.assertEqual(v5.lcm(0), 0) + + def test_jacobi_symbol(self): + + data = ( + (1001, 1, 1), + (19, 45, 1), + (8, 21, -1), + (5, 21, 1), + (610, 987, -1), + (1001, 9907, -1), + (5, 3439601197, -1) + ) + + js = self.Integer.jacobi_symbol + + # Jacobi symbol is always 1 for k==1 or n==1 + for k in range(1, 30): + self.assertEqual(js(k, 1), 1) + for n in range(1, 30, 2): + self.assertEqual(js(1, n), 1) + + # Fail if n is not positive odd + self.assertRaises(ValueError, js, 6, -2) + self.assertRaises(ValueError, js, 6, -1) + self.assertRaises(ValueError, js, 6, 0) + self.assertRaises(ValueError, js, 0, 0) + self.assertRaises(ValueError, js, 6, 2) + self.assertRaises(ValueError, js, 6, 4) + self.assertRaises(ValueError, js, 6, 6) + self.assertRaises(ValueError, js, 6, 8) + + for tv in data: + self.assertEqual(js(tv[0], tv[1]), tv[2]) + self.assertEqual(js(self.Integer(tv[0]), tv[1]), tv[2]) + self.assertEqual(js(tv[0], self.Integer(tv[1])), tv[2]) + + def test_jacobi_symbol_wikipedia(self): + + # Test vectors from https://en.wikipedia.org/wiki/Jacobi_symbol + tv = [ + (3, [(1, 
1), (2, -1), (3, 0), (4, 1), (5, -1), (6, 0), (7, 1), (8, -1), (9, 0), (10, 1), (11, -1), (12, 0), (13, 1), (14, -1), (15, 0), (16, 1), (17, -1), (18, 0), (19, 1), (20, -1), (21, 0), (22, 1), (23, -1), (24, 0), (25, 1), (26, -1), (27, 0), (28, 1), (29, -1), (30, 0)]), + (5, [(1, 1), (2, -1), (3, -1), (4, 1), (5, 0), (6, 1), (7, -1), (8, -1), (9, 1), (10, 0), (11, 1), (12, -1), (13, -1), (14, 1), (15, 0), (16, 1), (17, -1), (18, -1), (19, 1), (20, 0), (21, 1), (22, -1), (23, -1), (24, 1), (25, 0), (26, 1), (27, -1), (28, -1), (29, 1), (30, 0)]), + (7, [(1, 1), (2, 1), (3, -1), (4, 1), (5, -1), (6, -1), (7, 0), (8, 1), (9, 1), (10, -1), (11, 1), (12, -1), (13, -1), (14, 0), (15, 1), (16, 1), (17, -1), (18, 1), (19, -1), (20, -1), (21, 0), (22, 1), (23, 1), (24, -1), (25, 1), (26, -1), (27, -1), (28, 0), (29, 1), (30, 1)]), + (9, [(1, 1), (2, 1), (3, 0), (4, 1), (5, 1), (6, 0), (7, 1), (8, 1), (9, 0), (10, 1), (11, 1), (12, 0), (13, 1), (14, 1), (15, 0), (16, 1), (17, 1), (18, 0), (19, 1), (20, 1), (21, 0), (22, 1), (23, 1), (24, 0), (25, 1), (26, 1), (27, 0), (28, 1), (29, 1), (30, 0)]), + (11, [(1, 1), (2, -1), (3, 1), (4, 1), (5, 1), (6, -1), (7, -1), (8, -1), (9, 1), (10, -1), (11, 0), (12, 1), (13, -1), (14, 1), (15, 1), (16, 1), (17, -1), (18, -1), (19, -1), (20, 1), (21, -1), (22, 0), (23, 1), (24, -1), (25, 1), (26, 1), (27, 1), (28, -1), (29, -1), (30, -1)]), + (13, [(1, 1), (2, -1), (3, 1), (4, 1), (5, -1), (6, -1), (7, -1), (8, -1), (9, 1), (10, 1), (11, -1), (12, 1), (13, 0), (14, 1), (15, -1), (16, 1), (17, 1), (18, -1), (19, -1), (20, -1), (21, -1), (22, 1), (23, 1), (24, -1), (25, 1), (26, 0), (27, 1), (28, -1), (29, 1), (30, 1)]), + (15, [(1, 1), (2, 1), (3, 0), (4, 1), (5, 0), (6, 0), (7, -1), (8, 1), (9, 0), (10, 0), (11, -1), (12, 0), (13, -1), (14, -1), (15, 0), (16, 1), (17, 1), (18, 0), (19, 1), (20, 0), (21, 0), (22, -1), (23, 1), (24, 0), (25, 0), (26, -1), (27, 0), (28, -1), (29, -1), (30, 0)]), + (17, [(1, 1), (2, 1), (3, -1), (4, 1), (5, 
-1), (6, -1), (7, -1), (8, 1), (9, 1), (10, -1), (11, -1), (12, -1), (13, 1), (14, -1), (15, 1), (16, 1), (17, 0), (18, 1), (19, 1), (20, -1), (21, 1), (22, -1), (23, -1), (24, -1), (25, 1), (26, 1), (27, -1), (28, -1), (29, -1), (30, 1)]), + (19, [(1, 1), (2, -1), (3, -1), (4, 1), (5, 1), (6, 1), (7, 1), (8, -1), (9, 1), (10, -1), (11, 1), (12, -1), (13, -1), (14, -1), (15, -1), (16, 1), (17, 1), (18, -1), (19, 0), (20, 1), (21, -1), (22, -1), (23, 1), (24, 1), (25, 1), (26, 1), (27, -1), (28, 1), (29, -1), (30, 1)]), + (21, [(1, 1), (2, -1), (3, 0), (4, 1), (5, 1), (6, 0), (7, 0), (8, -1), (9, 0), (10, -1), (11, -1), (12, 0), (13, -1), (14, 0), (15, 0), (16, 1), (17, 1), (18, 0), (19, -1), (20, 1), (21, 0), (22, 1), (23, -1), (24, 0), (25, 1), (26, 1), (27, 0), (28, 0), (29, -1), (30, 0)]), + (23, [(1, 1), (2, 1), (3, 1), (4, 1), (5, -1), (6, 1), (7, -1), (8, 1), (9, 1), (10, -1), (11, -1), (12, 1), (13, 1), (14, -1), (15, -1), (16, 1), (17, -1), (18, 1), (19, -1), (20, -1), (21, -1), (22, -1), (23, 0), (24, 1), (25, 1), (26, 1), (27, 1), (28, -1), (29, 1), (30, -1)]), + (25, [(1, 1), (2, 1), (3, 1), (4, 1), (5, 0), (6, 1), (7, 1), (8, 1), (9, 1), (10, 0), (11, 1), (12, 1), (13, 1), (14, 1), (15, 0), (16, 1), (17, 1), (18, 1), (19, 1), (20, 0), (21, 1), (22, 1), (23, 1), (24, 1), (25, 0), (26, 1), (27, 1), (28, 1), (29, 1), (30, 0)]), + (27, [(1, 1), (2, -1), (3, 0), (4, 1), (5, -1), (6, 0), (7, 1), (8, -1), (9, 0), (10, 1), (11, -1), (12, 0), (13, 1), (14, -1), (15, 0), (16, 1), (17, -1), (18, 0), (19, 1), (20, -1), (21, 0), (22, 1), (23, -1), (24, 0), (25, 1), (26, -1), (27, 0), (28, 1), (29, -1), (30, 0)]), + (29, [(1, 1), (2, -1), (3, -1), (4, 1), (5, 1), (6, 1), (7, 1), (8, -1), (9, 1), (10, -1), (11, -1), (12, -1), (13, 1), (14, -1), (15, -1), (16, 1), (17, -1), (18, -1), (19, -1), (20, 1), (21, -1), (22, 1), (23, 1), (24, 1), (25, 1), (26, -1), (27, -1), (28, 1), (29, 0), (30, 1)]), + ] + + js = self.Integer.jacobi_symbol + + for n, kj in tv: + for k, j in 
kj: + self.assertEqual(js(k, n), j) + + def test_hex(self): + v1, = self.Integers(0x10) + self.assertEqual(hex(v1), "0x10") + + +class TestIntegerInt(TestIntegerBase): + + def setUp(self): + self.Integer = IntegerNative + + +class testIntegerRandom(unittest.TestCase): + + def test_random_exact_bits(self): + + for _ in range(1000): + a = IntegerNative.random(exact_bits=8) + self.failIf(a < 128) + self.failIf(a >= 256) + + for bits_value in range(1024, 1024 + 8): + a = IntegerNative.random(exact_bits=bits_value) + self.failIf(a < 2**(bits_value - 1)) + self.failIf(a >= 2**bits_value) + + def test_random_max_bits(self): + + flag = False + for _ in range(1000): + a = IntegerNative.random(max_bits=8) + flag = flag or a < 128 + self.failIf(a>=256) + self.failUnless(flag) + + for bits_value in range(1024, 1024 + 8): + a = IntegerNative.random(max_bits=bits_value) + self.failIf(a >= 2**bits_value) + + def test_random_bits_custom_rng(self): + + class CustomRNG(object): + def __init__(self): + self.counter = 0 + + def __call__(self, size): + self.counter += size + return bchr(0) * size + + custom_rng = CustomRNG() + a = IntegerNative.random(exact_bits=32, randfunc=custom_rng) + self.assertEqual(custom_rng.counter, 4) + + def test_random_range(self): + + func = IntegerNative.random_range + + for x in range(200): + a = func(min_inclusive=1, max_inclusive=15) + self.failUnless(1 <= a <= 15) + + for x in range(200): + a = func(min_inclusive=1, max_exclusive=15) + self.failUnless(1 <= a < 15) + + self.assertRaises(ValueError, func, min_inclusive=1, max_inclusive=2, + max_exclusive=3) + self.assertRaises(ValueError, func, max_inclusive=2, max_exclusive=3) + +def get_tests(config={}): + tests = [] + tests += list_test_cases(TestIntegerInt) + + try: + from Crypto.Math._IntegerGMP import IntegerGMP + + class TestIntegerGMP(TestIntegerBase): + def setUp(self): + self.Integer = IntegerGMP + + tests += list_test_cases(TestIntegerGMP) + except (ImportError, OSError) as e: + if 
sys.platform == "win32": + sys.stdout.write("Skipping GMP tests on Windows\n") + else: + sys.stdout.write("Skipping GMP tests (%s)\n" % str(e) ) + + try: + from Crypto.Math._IntegerCustom import IntegerCustom + + class TestIntegerCustomModexp(TestIntegerBase): + def setUp(self): + self.Integer = IntegerCustom + + tests += list_test_cases(TestIntegerCustomModexp) + except (ImportError, OSError) as e: + sys.stdout.write("Skipping custom modexp tests (%s)\n" % str(e) ) + + tests += list_test_cases(testIntegerRandom) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Math/test_Primality.py b/env/Lib/site-packages/Crypto/SelfTest/Math/test_Primality.py new file mode 100644 index 0000000..38344f3 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Math/test_Primality.py @@ -0,0 +1,118 @@ +# +# SelfTest/Math/test_Primality.py: Self-test for Primality module +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +"""Self-test for Math.Numbers""" + +import unittest + +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Util.py3compat import * + +from Crypto.Math.Numbers import Integer +from Crypto.Math.Primality import ( + PROBABLY_PRIME, COMPOSITE, + miller_rabin_test, lucas_test, + test_probable_prime, + generate_probable_prime, + generate_probable_safe_prime, + ) + + +class TestPrimality(unittest.TestCase): + + primes = (1, 2, 3, 5, 7, 11, 13, 17, 19, 23, 2**127-1, 175637383534939453397801320455508570374088202376942372758907369518414308188137781042871856139027160010343454418881888953150175357127346872102307696660678617989191485418582475696230580407111841072614783095326672517315988762029036079794994990250662362650625650262324085116467511357592728695033227611029693067539) + composites = (0, 4, 6, 8, 9, 10, 12, 14, 15, 16, 18, 20, 21, 7*23, (2**19-1)*(2**67-1), 9746347772161,) + + def test_miller_rabin(self): + for prime in self.primes: + self.assertEqual(miller_rabin_test(prime, 3), PROBABLY_PRIME) + for composite in self.composites: + self.assertEqual(miller_rabin_test(composite, 3), COMPOSITE) + self.assertRaises(ValueError, miller_rabin_test, -1, 3) + + def test_lucas(self): + for prime in self.primes: + res = lucas_test(prime) + self.assertEqual(res, PROBABLY_PRIME) + for composite in self.composites: + res = lucas_test(composite) + 
self.assertEqual(res, COMPOSITE) + self.assertRaises(ValueError, lucas_test, -1) + + def test_is_prime(self): + primes = (170141183460469231731687303715884105727, + 19175002942688032928599, + 1363005552434666078217421284621279933627102780881053358473, + 2 ** 521 - 1) + for p in primes: + self.assertEqual(test_probable_prime(p), PROBABLY_PRIME) + + not_primes = ( + 4754868377601046732119933839981363081972014948522510826417784001, + 1334733877147062382486934807105197899496002201113849920496510541601, + 260849323075371835669784094383812120359260783810157225730623388382401, + ) + for np in not_primes: + self.assertEqual(test_probable_prime(np), COMPOSITE) + + from Crypto.Util.number import sieve_base + for p in sieve_base[:100]: + res = test_probable_prime(p) + self.assertEqual(res, PROBABLY_PRIME) + + def test_generate_prime_bit_size(self): + p = generate_probable_prime(exact_bits=512) + self.assertEqual(p.size_in_bits(), 512) + + def test_generate_prime_filter(self): + def ending_with_one(number): + return number % 10 == 1 + + for x in range(20): + q = generate_probable_prime(exact_bits=160, + prime_filter=ending_with_one) + self.assertEqual(q % 10, 1) + + def test_generate_safe_prime(self): + p = generate_probable_safe_prime(exact_bits=161) + self.assertEqual(p.size_in_bits(), 161) + +def get_tests(config={}): + tests = [] + tests += list_test_cases(TestPrimality) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Math/test_modexp.py b/env/Lib/site-packages/Crypto/SelfTest/Math/test_modexp.py new file mode 100644 index 0000000..b9eb869 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Math/test_modexp.py @@ -0,0 +1,201 @@ +# +# SelfTest/Math/test_modexp.py: Self-test for module exponentiation +# +# =================================================================== +# +# Copyright (c) 2017, Helder Eijs +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +"""Self-test for the custom module exponentiation""" + +import unittest + +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Util.number import long_to_bytes, bytes_to_long + +from Crypto.Util.py3compat import * + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + create_string_buffer, + get_raw_buffer, + c_size_t, + c_ulonglong) + +from Crypto.Hash import SHAKE128 +from Crypto.Math.Numbers import Integer +from Crypto.Math._IntegerCustom import _raw_montgomery + +from Crypto.Random.random import StrongRandom + + +def create_rng(tag): + rng = StrongRandom(SHAKE128.new(data=tag)) + return rng + +class ExceptionModulus(ValueError): + pass + +def monty_pow(base, exp, modulus): + max_len = len(long_to_bytes(max(base, exp, modulus))) + + base_b, exp_b, modulus_b = [ long_to_bytes(x, max_len) for x in + (base, exp, modulus) ] + + out = create_string_buffer(max_len) + error = _raw_montgomery.monty_pow( + out, + base_b, + exp_b, + modulus_b, + c_size_t(max_len), + c_ulonglong(32) + ) + + if error == 17: + raise ExceptionModulus() + if error: + raise ValueError("monty_pow failed with error: %d" % error) + + result = bytes_to_long(get_raw_buffer(out)) + return result + +exponent1 = 0x2ce0af628901460a419a08ef950d498b9fd6f271a1a52ac293b86fe5c60efe8e8ba93fa1ebe1eb3d614d2e7b328cb60a2591440e163441a190ecf101ceec245f600fffdcf3f5b3a17a7baeacb96a424db1d7ec985e8ec998bb479fecfffed6a75f9a90fc97062fd973303bce855ad7b8d8272a94025e8532be9aabd54a183f303538d2a7e621b4131d59e823a4625f39bd7d518d7784f7c3a8f19061da74974ff42fa1c063dec2db97d461e291a7d6e721708a5229de166c1246363372854e27f3f08ae274bc16bfd205b028a4d81386494433d516dfbb35f495acba5e4e1d1843cb3c3129b6642a85fc7244ce5845fac071c7f622e4ee12ac43fabeeaa0cd01 +modulus1 = 
0xd66691b20071be4d66d4b71032b37fa007cfabf579fcb91e50bfc2753b3f0ce7be74e216aef7e26d4ae180bc20d7bd3ea88a6cbf6f87380e613c8979b5b043b200a8ff8856a3b12875e36e98a7569f3852d028e967551000b02c19e9fa52e83115b89309aabb1e1cf1e2cb6369d637d46775ce4523ea31f64ad2794cbc365dd8a35e007ed3b57695877fbf102dbeb8b3212491398e494314e93726926e1383f8abb5889bea954eb8c0ca1c62c8e9d83f41888095c5e645ed6d32515fe0c58c1368cad84694e18da43668c6f43e61d7c9bca633ddcda7aef5b79bc396d4a9f48e2a9abe0836cc455e435305357228e93d25aaed46b952defae0f57339bf26f5a9 + + +class TestModExp(unittest.TestCase): + + def test_small(self): + self.assertEqual(1, monty_pow(11,12,19)) + + def test_large_1(self): + base = 0xfffffffffffffffffffffffffffffffffffffffffffffffffff + expected = pow(base, exponent1, modulus1) + result = monty_pow(base, exponent1, modulus1) + self.assertEqual(result, expected) + + def test_zero_exp(self): + base = 0xfffffffffffffffffffffffffffffffffffffffffffffffffff + result = monty_pow(base, 0, modulus1) + self.assertEqual(result, 1) + + def test_zero_base(self): + result = monty_pow(0, exponent1, modulus1) + self.assertEqual(result, 0) + + def test_zero_modulus(self): + base = 0xfffffffffffffffffffffffffffffffffffffffffffffffff + self.assertRaises(ExceptionModulus, monty_pow, base, exponent1, 0) + self.assertRaises(ExceptionModulus, monty_pow, 0, 0, 0) + + def test_larger_exponent(self): + base = modulus1 - 0xFFFFFFF + expected = pow(base, modulus1<<64, modulus1) + result = monty_pow(base, modulus1<<64, modulus1) + self.assertEqual(result, expected) + + def test_even_modulus(self): + base = modulus1 >> 4 + self.assertRaises(ExceptionModulus, monty_pow, base, exponent1, modulus1-1) + + def test_several_lengths(self): + prng = SHAKE128.new().update(b('Test')) + for length in range(1, 100): + modulus2 = Integer.from_bytes(prng.read(length)) | 1 + base = Integer.from_bytes(prng.read(length)) % modulus2 + exponent2 = Integer.from_bytes(prng.read(length)) + + expected = pow(base, exponent2, modulus2) + result = 
monty_pow(base, exponent2, modulus2) + self.assertEqual(result, expected) + + def test_variable_exponent(self): + prng = create_rng(b('Test variable exponent')) + for i in range(20): + for j in range(7): + modulus = prng.getrandbits(8*30) | 1 + base = prng.getrandbits(8*30) % modulus + exponent = prng.getrandbits(i*8+j) + + expected = pow(base, exponent, modulus) + result = monty_pow(base, exponent, modulus) + self.assertEqual(result, expected) + + exponent ^= (1 << (i*8+j)) - 1 + + expected = pow(base, exponent, modulus) + result = monty_pow(base, exponent, modulus) + self.assertEqual(result, expected) + + def test_stress_63(self): + prng = create_rng(b('Test 63')) + length = 63 + for _ in range(2000): + modulus = prng.getrandbits(8*length) | 1 + base = prng.getrandbits(8*length) % modulus + exponent = prng.getrandbits(8*length) + + expected = pow(base, exponent, modulus) + result = monty_pow(base, exponent, modulus) + self.assertEqual(result, expected) + + def test_stress_64(self): + prng = create_rng(b('Test 64')) + length = 64 + for _ in range(2000): + modulus = prng.getrandbits(8*length) | 1 + base = prng.getrandbits(8*length) % modulus + exponent = prng.getrandbits(8*length) + + expected = pow(base, exponent, modulus) + result = monty_pow(base, exponent, modulus) + self.assertEqual(result, expected) + + def test_stress_65(self): + prng = create_rng(b('Test 65')) + length = 65 + for _ in range(2000): + modulus = prng.getrandbits(8*length) | 1 + base = prng.getrandbits(8*length) % modulus + exponent = prng.getrandbits(8*length) + + expected = pow(base, exponent, modulus) + result = monty_pow(base, exponent, modulus) + self.assertEqual(result, expected) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(TestModExp) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Protocol/__init__.py 
b/env/Lib/site-packages/Crypto/SelfTest/Protocol/__init__.py new file mode 100644 index 0000000..1c1c095 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Protocol/__init__.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Protocol/__init__.py: Self-tests for Crypto.Protocol +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test for Crypto.Protocol""" + +__revision__ = "$Id$" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Protocol import test_rfc1751; tests += test_rfc1751.get_tests(config=config) + from Crypto.SelfTest.Protocol import test_KDF; tests += test_KDF.get_tests(config=config) + + from Crypto.SelfTest.Protocol import test_SecretSharing; + tests += test_SecretSharing.get_tests(config=config) + + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Protocol/test_KDF.py b/env/Lib/site-packages/Crypto/SelfTest/Protocol/test_KDF.py new file mode 100644 index 0000000..b2869f8 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Protocol/test_KDF.py @@ -0,0 +1,732 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Protocol/test_KDF.py: Self-test for key derivation functions +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.Util.py3compat import b, bchr + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors_wycheproof +from Crypto.Hash import SHA1, HMAC, SHA256, MD5, SHA224, SHA384, SHA512 +from Crypto.Cipher import AES, DES3 + +from Crypto.Protocol.KDF import (PBKDF1, PBKDF2, _S2V, HKDF, scrypt, + bcrypt, bcrypt_check) + +from Crypto.Protocol.KDF import _bcrypt_decode + + +def t2b(t): + if t is None: + return None + t2 = t.replace(" ", "").replace("\n", "") + return unhexlify(b(t2)) + + +class TestVector(object): + pass + + +class PBKDF1_Tests(unittest.TestCase): + + # List of tuples with test data. + # Each tuple is made up by: + # Item #0: a pass phrase + # Item #1: salt (8 bytes encoded in hex) + # Item #2: output key length + # Item #3: iterations to use + # Item #4: expected result (encoded in hex) + _testData = ( + # From http://www.di-mgt.com.au/cryptoKDFs.html#examplespbkdf + ("password", "78578E5A5D63CB06", 16, 1000, "DC19847E05C64D2FAF10EBFB4A3D2A20"), + ) + + def test1(self): + v = self._testData[0] + res = PBKDF1(v[0], t2b(v[1]), v[2], v[3], SHA1) + self.assertEqual(res, t2b(v[4])) + + +class PBKDF2_Tests(unittest.TestCase): + + # List of tuples with test data. 
+ # Each tuple is made up by: + # Item #0: a pass phrase + # Item #1: salt (encoded in hex) + # Item #2: output key length + # Item #3: iterations to use + # Item #4: hash module + # Item #5: expected result (encoded in hex) + _testData = ( + # From http://www.di-mgt.com.au/cryptoKDFs.html#examplespbkdf + ("password","78578E5A5D63CB06",24,2048, SHA1, "BFDE6BE94DF7E11DD409BCE20A0255EC327CB936FFE93643"), + # From RFC 6050 + ("password","73616c74", 20, 1, SHA1, "0c60c80f961f0e71f3a9b524af6012062fe037a6"), + ("password","73616c74", 20, 2, SHA1, "ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957"), + ("password","73616c74", 20, 4096, SHA1, "4b007901b765489abead49d926f721d065a429c1"), + ("passwordPASSWORDpassword","73616c7453414c5473616c7453414c5473616c7453414c5473616c7453414c5473616c74", + 25, 4096, SHA1, "3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038"), + ( 'pass\x00word',"7361006c74",16,4096, SHA1, "56fa6aa75548099dcc37d7f03425e0c3"), + # From draft-josefsson-scrypt-kdf-01, Chapter 10 + ( 'passwd', '73616c74', 64, 1, SHA256, "55ac046e56e3089fec1691c22544b605f94185216dde0465e68b9d57c20dacbc49ca9cccf179b645991664b39d77ef317c71b845b1e30bd509112041d3a19783"), + ( 'Password', '4e61436c', 64, 80000, SHA256, "4ddcd8f60b98be21830cee5ef22701f9641a4418d04c0414aeff08876b34ab56a1d425a1225833549adb841b51c9b3176a272bdebba1d078478f62b397f33c8d"), + ) + + def test1(self): + # Test only for HMAC-SHA1 as PRF + + def prf_SHA1(p,s): + return HMAC.new(p,s,SHA1).digest() + + def prf_SHA256(p,s): + return HMAC.new(p,s,SHA256).digest() + + for i in range(len(self._testData)): + v = self._testData[i] + password = v[0] + salt = t2b(v[1]) + out_len = v[2] + iters = v[3] + hash_mod = v[4] + expected = t2b(v[5]) + + if hash_mod is SHA1: + res = PBKDF2(password, salt, out_len, iters) + self.assertEqual(res, expected) + + res = PBKDF2(password, salt, out_len, iters, prf_SHA1) + self.assertEqual(res, expected) + else: + res = PBKDF2(password, salt, out_len, iters, prf_SHA256) + self.assertEqual(res, 
expected) + + def test2(self): + # Verify that prf and hmac_hash_module are mutual exclusive + def prf_SHA1(p,s): + return HMAC.new(p,s,SHA1).digest() + + self.assertRaises(ValueError, PBKDF2, b("xxx"), b("yyy"), 16, 100, + prf=prf_SHA1, hmac_hash_module=SHA1) + + def test3(self): + # Verify that hmac_hash_module works like prf + + password = b("xxx") + salt = b("yyy") + + for hashmod in (MD5, SHA1, SHA224, SHA256, SHA384, SHA512): + + pr1 = PBKDF2(password, salt, 16, 100, + prf=lambda p, s: HMAC.new(p,s,hashmod).digest()) + pr2 = PBKDF2(password, salt, 16, 100, hmac_hash_module=hashmod) + + self.assertEqual(pr1, pr2) + + def test4(self): + # Verify that PBKDF2 can take bytes or strings as password or salt + k1 = PBKDF2("xxx", b("yyy"), 16, 10) + k2 = PBKDF2(b("xxx"), b("yyy"), 16, 10) + self.assertEqual(k1, k2) + + k1 = PBKDF2(b("xxx"), "yyy", 16, 10) + k2 = PBKDF2(b("xxx"), b("yyy"), 16, 10) + self.assertEqual(k1, k2) + + +class S2V_Tests(unittest.TestCase): + + # Sequence of test vectors. 
+ # Each test vector is made up by: + # Item #0: a tuple of strings + # Item #1: an AES key + # Item #2: the result + # Item #3: the cipher module S2V is based on + # Everything is hex encoded + _testData = [ + + # RFC5297, A.1 + ( + ( '101112131415161718191a1b1c1d1e1f2021222324252627', + '112233445566778899aabbccddee' ), + 'fffefdfcfbfaf9f8f7f6f5f4f3f2f1f0', + '85632d07c6e8f37f950acd320a2ecc93', + AES + ), + + # RFC5297, A.2 + ( + ( '00112233445566778899aabbccddeeffdeaddadadeaddadaffeeddcc'+ + 'bbaa99887766554433221100', + '102030405060708090a0', + '09f911029d74e35bd84156c5635688c0', + '7468697320697320736f6d6520706c61'+ + '696e7465787420746f20656e63727970'+ + '74207573696e67205349562d414553'), + '7f7e7d7c7b7a79787776757473727170', + '7bdb6e3b432667eb06f4d14bff2fbd0f', + AES + ), + + ] + + def test1(self): + """Verify correctness of test vector""" + for tv in self._testData: + s2v = _S2V.new(t2b(tv[1]), tv[3]) + for s in tv[0]: + s2v.update(t2b(s)) + result = s2v.derive() + self.assertEqual(result, t2b(tv[2])) + + def test2(self): + """Verify that no more than 127(AES) and 63(TDES) + components are accepted.""" + key = bchr(0) * 8 + bchr(255) * 8 + for module in (AES, DES3): + s2v = _S2V.new(key, module) + max_comps = module.block_size*8-1 + for i in range(max_comps): + s2v.update(b("XX")) + self.assertRaises(TypeError, s2v.update, b("YY")) + + +class HKDF_Tests(unittest.TestCase): + + # Test vectors from RFC5869, Appendix A + # Each tuple is made up by: + # Item #0: hash module + # Item #1: secret + # Item #2: salt + # Item #3: context + # Item #4: expected result + _test_vector = ( + ( + SHA256, + "0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b", + "000102030405060708090a0b0c", + "f0f1f2f3f4f5f6f7f8f9", + 42, + "3cb25f25faacd57a90434f64d0362f2a" + + "2d2d0a90cf1a5a4c5db02d56ecc4c5bf" + + "34007208d5b887185865" + ), + ( + SHA256, + "000102030405060708090a0b0c0d0e0f" + + "101112131415161718191a1b1c1d1e1f" + + "202122232425262728292a2b2c2d2e2f" + + 
"303132333435363738393a3b3c3d3e3f" + + "404142434445464748494a4b4c4d4e4f", + "606162636465666768696a6b6c6d6e6f" + + "707172737475767778797a7b7c7d7e7f" + + "808182838485868788898a8b8c8d8e8f" + + "909192939495969798999a9b9c9d9e9f" + + "a0a1a2a3a4a5a6a7a8a9aaabacadaeaf", + "b0b1b2b3b4b5b6b7b8b9babbbcbdbebf" + + "c0c1c2c3c4c5c6c7c8c9cacbcccdcecf" + + "d0d1d2d3d4d5d6d7d8d9dadbdcdddedf" + + "e0e1e2e3e4e5e6e7e8e9eaebecedeeef" + + "f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff", + 82, + "b11e398dc80327a1c8e7f78c596a4934" + + "4f012eda2d4efad8a050cc4c19afa97c" + + "59045a99cac7827271cb41c65e590e09" + + "da3275600c2f09b8367793a9aca3db71" + + "cc30c58179ec3e87c14c01d5c1f3434f" + + "1d87" + ), + ( + SHA256, + "0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b", + None, + None, + 42, + "8da4e775a563c18f715f802a063c5a31" + + "b8a11f5c5ee1879ec3454e5f3c738d2d" + + "9d201395faa4b61a96c8" + ), + ( + SHA1, + "0b0b0b0b0b0b0b0b0b0b0b", + "000102030405060708090a0b0c", + "f0f1f2f3f4f5f6f7f8f9", + 42, + "085a01ea1b10f36933068b56efa5ad81" + + "a4f14b822f5b091568a9cdd4f155fda2" + + "c22e422478d305f3f896" + ), + ( + SHA1, + "000102030405060708090a0b0c0d0e0f" + + "101112131415161718191a1b1c1d1e1f" + + "202122232425262728292a2b2c2d2e2f" + + "303132333435363738393a3b3c3d3e3f" + + "404142434445464748494a4b4c4d4e4f", + "606162636465666768696a6b6c6d6e6f" + + "707172737475767778797a7b7c7d7e7f" + + "808182838485868788898a8b8c8d8e8f" + + "909192939495969798999a9b9c9d9e9f" + + "a0a1a2a3a4a5a6a7a8a9aaabacadaeaf", + "b0b1b2b3b4b5b6b7b8b9babbbcbdbebf" + + "c0c1c2c3c4c5c6c7c8c9cacbcccdcecf" + + "d0d1d2d3d4d5d6d7d8d9dadbdcdddedf" + + "e0e1e2e3e4e5e6e7e8e9eaebecedeeef" + + "f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff", + 82, + "0bd770a74d1160f7c9f12cd5912a06eb" + + "ff6adcae899d92191fe4305673ba2ffe" + + "8fa3f1a4e5ad79f3f334b3b202b2173c" + + "486ea37ce3d397ed034c7f9dfeb15c5e" + + "927336d0441f4c4300e2cff0d0900b52" + + "d3b4" + ), + ( + SHA1, + "0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b", + "", + "", + 42, + 
"0ac1af7002b3d761d1e55298da9d0506" + + "b9ae52057220a306e07b6b87e8df21d0" + + "ea00033de03984d34918" + ), + ( + SHA1, + "0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c", + None, + "", + 42, + "2c91117204d745f3500d636a62f64f0a" + + "b3bae548aa53d423b0d1f27ebba6f5e5" + + "673a081d70cce7acfc48" + ) + ) + + def test1(self): + for tv in self._test_vector: + secret, salt, info, exp = [ t2b(tv[x]) for x in (1,2,3,5) ] + key_len, hashmod = [ tv[x] for x in (4,0) ] + + output = HKDF(secret, key_len, salt, hashmod, 1, info) + self.assertEqual(output, exp) + + def test2(self): + ref = HKDF(b("XXXXXX"), 12, b("YYYY"), SHA1) + + # Same output, but this time split over 2 keys + key1, key2 = HKDF(b("XXXXXX"), 6, b("YYYY"), SHA1, 2) + self.assertEqual((ref[:6], ref[6:]), (key1, key2)) + + # Same output, but this time split over 3 keys + key1, key2, key3 = HKDF(b("XXXXXX"), 4, b("YYYY"), SHA1, 3) + self.assertEqual((ref[:4], ref[4:8], ref[8:]), (key1, key2, key3)) + + +class scrypt_Tests(unittest.TestCase): + + # Test vectors taken from + # https://tools.ietf.org/html/rfc7914 + # - password + # - salt + # - N + # - r + # - p + data = ( + ( + "", + "", + 16, # 2K + 1, + 1, + """ + 77 d6 57 62 38 65 7b 20 3b 19 ca 42 c1 8a 04 97 + f1 6b 48 44 e3 07 4a e8 df df fa 3f ed e2 14 42 + fc d0 06 9d ed 09 48 f8 32 6a 75 3a 0f c8 1f 17 + e8 d3 e0 fb 2e 0d 36 28 cf 35 e2 0c 38 d1 89 06 + """ + ), + ( + "password", + "NaCl", + 1024, # 1M + 8, + 16, + """ + fd ba be 1c 9d 34 72 00 78 56 e7 19 0d 01 e9 fe + 7c 6a d7 cb c8 23 78 30 e7 73 76 63 4b 37 31 62 + 2e af 30 d9 2e 22 a3 88 6f f1 09 27 9d 98 30 da + c7 27 af b9 4a 83 ee 6d 83 60 cb df a2 cc 06 40 + """ + ), + ( + "pleaseletmein", + "SodiumChloride", + 16384, # 16M + 8, + 1, + """ + 70 23 bd cb 3a fd 73 48 46 1c 06 cd 81 fd 38 eb + fd a8 fb ba 90 4f 8e 3e a9 b5 43 f6 54 5d a1 f2 + d5 43 29 55 61 3f 0f cf 62 d4 97 05 24 2a 9a f9 + e6 1e 85 dc 0d 65 1e 40 df cf 01 7b 45 57 58 87 + """ + ), + ( + "pleaseletmein", + "SodiumChloride", + 1048576, # 
1G + 8, + 1, + """ + 21 01 cb 9b 6a 51 1a ae ad db be 09 cf 70 f8 81 + ec 56 8d 57 4a 2f fd 4d ab e5 ee 98 20 ad aa 47 + 8e 56 fd 8f 4b a5 d0 9f fa 1c 6d 92 7c 40 f4 c3 + 37 30 40 49 e8 a9 52 fb cb f4 5c 6f a7 7a 41 a4 + """ + ), + ) + + def setUp(self): + new_test_vectors = [] + for tv in self.data: + new_tv = TestVector() + new_tv.P = b(tv[0]) + new_tv.S = b(tv[1]) + new_tv.N = tv[2] + new_tv.r = tv[3] + new_tv.p = tv[4] + new_tv.output = t2b(tv[5]) + new_tv.dkLen = len(new_tv.output) + new_test_vectors.append(new_tv) + self.data = new_test_vectors + + def test2(self): + + for tv in self.data: + try: + output = scrypt(tv.P, tv.S, tv.dkLen, tv.N, tv.r, tv.p) + except ValueError as e: + if " 2 " in str(e) and tv.N >= 1048576: + import warnings + warnings.warn("Not enough memory to unit test scrypt() with N=1048576", RuntimeWarning) + continue + else: + raise e + self.assertEqual(output, tv.output) + + def test3(self): + ref = scrypt(b("password"), b("salt"), 12, 16, 1, 1) + + # Same output, but this time split over 2 keys + key1, key2 = scrypt(b("password"), b("salt"), 6, 16, 1, 1, 2) + self.assertEqual((ref[:6], ref[6:]), (key1, key2)) + + # Same output, but this time split over 3 keys + key1, key2, key3 = scrypt(b("password"), b("salt"), 4, 16, 1, 1, 3) + self.assertEqual((ref[:4], ref[4:8], ref[8:]), (key1, key2, key3)) + + +class bcrypt_Tests(unittest.TestCase): + + def test_negative_cases(self): + self.assertRaises(ValueError, bcrypt, b"1" * 73, 10) + self.assertRaises(ValueError, bcrypt, b"1" * 10, 3) + self.assertRaises(ValueError, bcrypt, b"1" * 10, 32) + self.assertRaises(ValueError, bcrypt, b"1" * 10, 4, salt=b"") + self.assertRaises(ValueError, bcrypt, b"1" * 10, 4, salt=b"1") + self.assertRaises(ValueError, bcrypt, b"1" * 10, 4, salt=b"1" * 17) + self.assertRaises(ValueError, bcrypt, b"1\x00" * 10, 4) + + def test_bytearray_mismatch(self): + ref = bcrypt("pwd", 4) + bcrypt_check("pwd", ref) + bref = bytearray(ref) + bcrypt_check("pwd", bref) + + wrong = 
ref[:-1] + bchr(bref[-1] ^ 0x01) + self.assertRaises(ValueError, bcrypt_check, "pwd", wrong) + + wrong = b"x" + ref[1:] + self.assertRaises(ValueError, bcrypt_check, "pwd", wrong) + + # https://github.com/patrickfav/bcrypt/wiki/Published-Test-Vectors + + def test_empty_password(self): + # password, cost, salt, bcrypt hash + tvs = [ + (b"", 4, b"zVHmKQtGGQob.b/Nc7l9NO", b"$2a$04$zVHmKQtGGQob.b/Nc7l9NO8UlrYcW05FiuCj/SxsFO/ZtiN9.mNzy"), + (b"", 5, b"zVHmKQtGGQob.b/Nc7l9NO", b"$2a$05$zVHmKQtGGQob.b/Nc7l9NOWES.1hkVBgy5IWImh9DOjKNU8atY4Iy"), + (b"", 6, b"zVHmKQtGGQob.b/Nc7l9NO", b"$2a$06$zVHmKQtGGQob.b/Nc7l9NOjOl7l4oz3WSh5fJ6414Uw8IXRAUoiaO"), + (b"", 7, b"zVHmKQtGGQob.b/Nc7l9NO", b"$2a$07$zVHmKQtGGQob.b/Nc7l9NOBsj1dQpBA1HYNGpIETIByoNX9jc.hOi"), + (b"", 8, b"zVHmKQtGGQob.b/Nc7l9NO", b"$2a$08$zVHmKQtGGQob.b/Nc7l9NOiLTUh/9MDpX86/DLyEzyiFjqjBFePgO"), + ] + + for (idx, (password, cost, salt64, result)) in enumerate(tvs): + x = bcrypt(password, cost, salt=_bcrypt_decode(salt64)) + self.assertEqual(x, result) + bcrypt_check(password, result) + + def test_random_password_and_salt_short_pw(self): + # password, cost, salt, bcrypt hash + tvs = [ + (b"<.S.2K(Zq'", 4, b"VYAclAMpaXY/oqAo9yUpku", b"$2a$04$VYAclAMpaXY/oqAo9yUpkuWmoYywaPzyhu56HxXpVltnBIfmO9tgu"), + (b"5.rApO%5jA", 5, b"kVNDrnYKvbNr5AIcxNzeIu", b"$2a$05$kVNDrnYKvbNr5AIcxNzeIuRcyIF5cZk6UrwHGxENbxP5dVv.WQM/G"), + (b"oW++kSrQW^", 6, b"QLKkRMH9Am6irtPeSKN5sO", b"$2a$06$QLKkRMH9Am6irtPeSKN5sObJGr3j47cO6Pdf5JZ0AsJXuze0IbsNm"), + (b"ggJ\\KbTnDG", 7, b"4H896R09bzjhapgCPS/LYu", b"$2a$07$4H896R09bzjhapgCPS/LYuMzAQluVgR5iu/ALF8L8Aln6lzzYXwbq"), + (b"49b0:;VkH/", 8, b"hfvO2retKrSrx5f2RXikWe", b"$2a$08$hfvO2retKrSrx5f2RXikWeFWdtSesPlbj08t/uXxCeZoHRWDz/xFe"), + (b">9N^5jc##'", 9, b"XZLvl7rMB3EvM0c1.JHivu", b"$2a$09$XZLvl7rMB3EvM0c1.JHivuIDPJWeNJPTVrpjZIEVRYYB/mF6cYgJK"), + (b"\\$ch)s4WXp", 10, b"aIjpMOLK5qiS9zjhcHR5TO", b"$2a$10$aIjpMOLK5qiS9zjhcHR5TOU7v2NFDmcsBmSFDt5EHOgp/jeTF3O/q"), + (b"RYoj\\_>2P7", 12, b"esIAHiQAJNNBrsr5V13l7.", 
b"$2a$12$esIAHiQAJNNBrsr5V13l7.RFWWJI2BZFtQlkFyiWXjou05GyuREZa"), + ] + + for (idx, (password, cost, salt64, result)) in enumerate(tvs): + x = bcrypt(password, cost, salt=_bcrypt_decode(salt64)) + self.assertEqual(x, result) + bcrypt_check(password, result) + + def test_random_password_and_salt_long_pw(self): + # password, cost, salt, bcrypt hash + tvs = [ + (b"^Q&\"]A`%/A(BVGt>QaX0M-#1ghq_+\":Y0CRmY", 5, b"YuQvhokOGVnevctykUYpKu", b"$2a$05$YuQvhokOGVnevctykUYpKutZD2pWeGGYn3auyLOasguMY3/0BbIyq"), + (b"F%uN/j>[GuB7-jB'_Yj!Tnb7Y!u^6)", 6, b"5L3vpQ0tG9O7k5gQ8nAHAe", b"$2a$06$5L3vpQ0tG9O7k5gQ8nAHAe9xxQiOcOLh8LGcI0PLWhIznsDt.S.C6"), + (b"Z>BobP32ub\"Cfe*Q<-q-=tRSjOBh8\\mLNW.", 9, b"nArqOfdCsD9kIbVnAixnwe", b"$2a$09$nArqOfdCsD9kIbVnAixnwe6s8QvyPYWtQBpEXKir2OJF9/oNBsEFe"), + (b"/MH51`!BP&0tj3%YCA;Xk%e3S`o\\EI", 10, b"ePiAc.s.yoBi3B6p1iQUCe", b"$2a$10$ePiAc.s.yoBi3B6p1iQUCezn3mraLwpVJ5XGelVyYFKyp5FZn/y.u"), + (b"ptAP\"mcg6oH.\";c0U2_oll.OKi5?Ui\"^ai#iQH7ZFtNMfs3AROnIncE9\"BNNoEgO[[*Yk8;RQ(#S,;I+aT", + 5, b"wgkOlGNXIVE2fWkT3gyRoO", b"$2a$05$wgkOlGNXIVE2fWkT3gyRoOqWi4gbi1Wv2Q2Jx3xVs3apl1w.Wtj8C"), + (b"M.E1=dt<.L0Q&p;94NfGm_Oo23+Kpl@M5?WIAL.[@/:'S)W96G8N^AWb7_smmC]>7#fGoB", + 6, b"W9zTCl35nEvUukhhFzkKMe", b"$2a$06$W9zTCl35nEvUukhhFzkKMekjT9/pj7M0lihRVEZrX3m8/SBNZRX7i"), + ] + + for (idx, (password, cost, salt64, result)) in enumerate(tvs): + x = bcrypt(password, cost, salt=_bcrypt_decode(salt64)) + self.assertEqual(x, result) + bcrypt_check(password, result) + + def test_increasing_password_length(self): + # password, cost, salt, bcrypt hash + tvs = [ + (b"a", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.l4WvgHIVg17ZawDIrDM2IjlE64GDNQS"), + (b"aa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.AyUxBk.ThHlsLvRTH7IqcG7yVHJ3SXq"), + (b"aaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.BxOVac5xPB6XFdRc/ZrzM9FgZkqmvbW"), + (b"aaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.Qbr209bpCtfl5hN7UQlG/L4xiD3AKau"), + 
(b"aaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.oWszihPjDZI0ypReKsaDOW1jBl7oOii"), + (b"aaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ./k.Xxn9YiqtV/sxh3EHbnOHd0Qsq27K"), + (b"aaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.PYJqRFQbgRbIjMd5VNKmdKS4sBVOyDe"), + (b"aaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ..VMYfzaw1wP/SGxowpLeGf13fxCCt.q"), + (b"aaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.5B0p054nO5WgAD1n04XslDY/bqY9RJi"), + (b"aaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.INBTgqm7sdlBJDg.J5mLMSRK25ri04y"), + (b"aaaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.s3y7CdFD0OR5p6rsZw/eZ.Dla40KLfm"), + (b"aaaaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.Jx742Djra6Q7PqJWnTAS.85c28g.Siq"), + (b"aaaaaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.oKMXW3EZcPHcUV0ib5vDBnh9HojXnLu"), + (b"aaaaaaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.w6nIjWpDPNSH5pZUvLjC1q25ONEQpeS"), + (b"aaaaaaaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.k1b2/r9A/hxdwKEKurg6OCn4MwMdiGq"), + (b"aaaaaaaaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.3prCNHVX1Ws.7Hm2bJxFUnQOX9f7DFa"), + ] + + for (idx, (password, cost, salt64, result)) in enumerate(tvs): + x = bcrypt(password, cost, salt=_bcrypt_decode(salt64)) + self.assertEqual(x, result) + bcrypt_check(password, result) + + def test_non_ascii_characters(self): + # password, cost, salt, bcrypt hash + tvs = [ + ("àèìòùÀÈÌÒÙáéíóúýÁÉÍÓÚÝðÐ", 4, b"D3qS2aoTVyqM7z8v8crLm.", b"$2a$04$D3qS2aoTVyqM7z8v8crLm.3nKt4CzBZJbyFB.ZebmfCvRw7BGs.Xm"), + ("àèìòùÀÈÌÒÙáéíóúýÁÉÍÓÚÝðÐ", 5, b"VA1FujiOCMPkUHQ8kF7IaO", b"$2a$05$VA1FujiOCMPkUHQ8kF7IaOg7NGaNvpxwWzSluQutxEVmbZItRTsAa"), + ("àèìòùÀÈÌÒÙáéíóúýÁÉÍÓÚÝðÐ", 6, b"TXiaNrPeBSz5ugiQlehRt.", 
b"$2a$06$TXiaNrPeBSz5ugiQlehRt.gwpeDQnXWteQL4z2FulouBr6G7D9KUi"), + ("âêîôûÂÊÎÔÛãñõÃÑÕäëïöüÿ", 4, b"YTn1Qlvps8e1odqMn6G5x.", b"$2a$04$YTn1Qlvps8e1odqMn6G5x.85pqKql6w773EZJAExk7/BatYAI4tyO"), + ("âêîôûÂÊÎÔÛãñõÃÑÕäëïöüÿ", 5, b"C.8k5vJKD2NtfrRI9o17DO", b"$2a$05$C.8k5vJKD2NtfrRI9o17DOfIW0XnwItA529vJnh2jzYTb1QdoY0py"), + ("âêîôûÂÊÎÔÛãñõÃÑÕäëïöüÿ", 6, b"xqfRPj3RYAgwurrhcA6uRO", b"$2a$06$xqfRPj3RYAgwurrhcA6uROtGlXDp/U6/gkoDYHwlubtcVcNft5.vW"), + ("ÄËÏÖÜŸåÅæÆœŒßçÇøØ¢¿¡€", 4, b"y8vGgMmr9EdyxP9rmMKjH.", b"$2a$04$y8vGgMmr9EdyxP9rmMKjH.wv2y3r7yRD79gykQtmb3N3zrwjKsyay"), + ("ÄËÏÖÜŸåÅæÆœŒßçÇøØ¢¿¡€", 5, b"iYH4XIKAOOm/xPQs7xKP1u", b"$2a$05$iYH4XIKAOOm/xPQs7xKP1upD0cWyMn3Jf0ZWiizXbEkVpS41K1dcO"), + ("ÄËÏÖÜŸåÅæÆœŒßçÇøØ¢¿¡€", 6, b"wCOob.D0VV8twafNDB2ape", b"$2a$06$wCOob.D0VV8twafNDB2apegiGD5nqF6Y1e6K95q6Y.R8C4QGd265q"), + ("ΔημοσιεύθηκεστηνΕφημερίδατης", 4, b"E5SQtS6P4568MDXW7cyUp.", b"$2a$04$E5SQtS6P4568MDXW7cyUp.18wfDisKZBxifnPZjAI1d/KTYMfHPYO"), + ("АБбВвГгДдЕеЁёЖжЗзИиЙйКкЛлМмН", 4, b"03e26gQFHhQwRNf81/ww9.", b"$2a$04$03e26gQFHhQwRNf81/ww9.p1UbrNwxpzWjLuT.zpTLH4t/w5WhAhC"), + ("нОоПпРрСсТтУуФфХхЦцЧчШшЩщЪъЫыЬьЭэЮю", 4, b"PHNoJwpXCfe32nUtLv2Upu", b"$2a$04$PHNoJwpXCfe32nUtLv2UpuhJXOzd4k7IdFwnEpYwfJVCZ/f/.8Pje"), + ("電电電島岛島兔兔兎龜龟亀國国国區区区", 4, b"wU4/0i1TmNl2u.1jIwBX.u", b"$2a$04$wU4/0i1TmNl2u.1jIwBX.uZUaOL3Rc5ID7nlQRloQh6q5wwhV/zLW"), + ("诶比伊艾弗豆贝尔维吾艾尺开艾丝维贼德", 4, b"P4kreGLhCd26d4WIy7DJXu", b"$2a$04$P4kreGLhCd26d4WIy7DJXusPkhxLvBouzV6OXkL5EB0jux0osjsry"), + ] + + for (idx, (password, cost, salt64, result)) in enumerate(tvs): + x = bcrypt(password, cost, salt=_bcrypt_decode(salt64)) + self.assertEqual(x, result) + bcrypt_check(password, result) + + def test_special_case_salt(self): + # password, cost, salt, bcrypt hash + tvs = [ + ("-O_=*N!2JP", 4, b"......................", b"$2a$04$......................JjuKLOX9OOwo5PceZZXSkaLDvdmgb82"), + ("7B[$Q<4b>U", 5, b"......................", b"$2a$05$......................DRiedDQZRL3xq5A5FL8y7/6NM8a2Y5W"), + (">d5-I_8^.h", 6, 
b"......................", b"$2a$06$......................5Mq1Ng8jgDY.uHNU4h5p/x6BedzNH2W"), + (")V`/UM/]1t", 4, b".OC/.OC/.OC/.OC/.OC/.O", b"$2a$04$.OC/.OC/.OC/.OC/.OC/.OQIvKRDAam.Hm5/IaV/.hc7P8gwwIbmi"), + (":@t2.bWuH]", 5, b".OC/.OC/.OC/.OC/.OC/.O", b"$2a$05$.OC/.OC/.OC/.OC/.OC/.ONDbUvdOchUiKmQORX6BlkPofa/QxW9e"), + ("b(#KljF5s\"", 6, b".OC/.OC/.OC/.OC/.OC/.O", b"$2a$06$.OC/.OC/.OC/.OC/.OC/.OHfTd9e7svOu34vi1PCvOcAEq07ST7.K"), + ("@3YaJ^Xs]*", 4, b"eGA.eGA.eGA.eGA.eGA.e.", b"$2a$04$eGA.eGA.eGA.eGA.eGA.e.stcmvh.R70m.0jbfSFVxlONdj1iws0C"), + ("'\"5\\!k*C(p", 5, b"eGA.eGA.eGA.eGA.eGA.e.", b"$2a$05$eGA.eGA.eGA.eGA.eGA.e.vR37mVSbfdHwu.F0sNMvgn8oruQRghy"), + ("edEu7C?$'W", 6, b"eGA.eGA.eGA.eGA.eGA.e.", b"$2a$06$eGA.eGA.eGA.eGA.eGA.e.tSq0FN8MWHQXJXNFnHTPQKtA.n2a..G"), + ("N7dHmg\\PI^", 4, b"999999999999999999999u", b"$2a$04$999999999999999999999uCZfA/pLrlyngNDMq89r1uUk.bQ9icOu"), + ("\"eJuHh!)7*", 5, b"999999999999999999999u", b"$2a$05$999999999999999999999uj8Pfx.ufrJFAoWFLjapYBS5vVEQQ/hK"), + ("ZeDRJ:_tu:", 6, b"999999999999999999999u", b"$2a$06$999999999999999999999u6RB0P9UmbdbQgjoQFEJsrvrKe.BoU6q"), + ] + + for (idx, (password, cost, salt64, result)) in enumerate(tvs): + x = bcrypt(password, cost, salt=_bcrypt_decode(salt64)) + self.assertEqual(x, result) + bcrypt_check(password, result) + + +class TestVectorsHKDFWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._id = "None" + + def add_tests(self, filename): + + def filter_algo(root): + algo_name = root['algorithm'] + if algo_name == "HKDF-SHA-1": + return SHA1 + elif algo_name == "HKDF-SHA-256": + return SHA256 + elif algo_name == "HKDF-SHA-384": + return SHA384 + elif algo_name == "HKDF-SHA-512": + return SHA512 + else: + raise ValueError("Unknown algorithm " + algo_name) + + def filter_size(unit): + return int(unit['size']) + + result = load_test_vectors_wycheproof(("Protocol", "wycheproof"), + 
filename, + "Wycheproof HMAC (%s)" % filename, + root_tag={'hash_module': filter_algo}, + unit_tag={'size': filter_size}) + return result + + def setUp(self): + self.tv = [] + self.add_tests("hkdf_sha1_test.json") + self.add_tests("hkdf_sha256_test.json") + self.add_tests("hkdf_sha384_test.json") + self.add_tests("hkdf_sha512_test.json") + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_verify(self, tv): + self._id = "Wycheproof HKDF Test #%d (%s, %s)" % (tv.id, tv.comment, tv.filename) + + try: + key = HKDF(tv.ikm, tv.size, tv.salt, tv.hash_module, 1, tv.info) + except ValueError: + assert not tv.valid + else: + if key != tv.okm: + assert not tv.valid + else: + assert tv.valid + self.warn(tv) + + def runTest(self): + for tv in self.tv: + self.test_verify(tv) + + +def get_tests(config={}): + wycheproof_warnings = config.get('wycheproof_warnings') + + if not config.get('slow_tests'): + PBKDF2_Tests._testData = PBKDF2_Tests._testData[:3] + scrypt_Tests.data = scrypt_Tests.data[:3] + + tests = [] + tests += list_test_cases(PBKDF1_Tests) + tests += list_test_cases(PBKDF2_Tests) + tests += list_test_cases(S2V_Tests) + tests += list_test_cases(HKDF_Tests) + tests += [TestVectorsHKDFWycheproof(wycheproof_warnings)] + tests += list_test_cases(scrypt_Tests) + tests += list_test_cases(bcrypt_Tests) + + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Protocol/test_SecretSharing.py b/env/Lib/site-packages/Crypto/SelfTest/Protocol/test_SecretSharing.py new file mode 100644 index 0000000..368d3c0 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Protocol/test_SecretSharing.py @@ -0,0 +1,267 @@ +# +# SelfTest/Protocol/test_secret_sharing.py: Self-test for secret 
sharing protocols +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
class GF2_Tests(TestCase):
    """Unit tests for the GF(2)[x] polynomial helpers used by Shamir
    secret sharing (_mult_gf2, _div_gf2)."""

    def test_mult_gf2(self):
        # Multiplication by zero annihilates
        x = _mult_gf2(0, 0)
        self.assertEqual(x, 0)

        # Multiplication by one is the identity
        x = _mult_gf2(34, 1)
        self.assertEqual(x, 34)

        # Powers of (x + 1) have known closed forms
        z = 3                    # the polynomial x + 1
        y = _mult_gf2(z, z)
        self.assertEqual(y, 5)   # (x+1)^2 = x^2 + 1
        y = _mult_gf2(y, z)
        self.assertEqual(y, 15)  # (x+1)^3 = x^3 + x^2 + x + 1
        y = _mult_gf2(y, z)
        self.assertEqual(y, 17)  # (x+1)^4 = x^4 + 1

        # Multiplication distributes over XOR (linearity over GF(2))
        comps = [1, 4, 128, 2**34]
        sum_comps = 1 + 4 + 128 + 2**34
        y = 908
        z = _mult_gf2(sum_comps, y)
        w = 0
        for x in comps:
            w ^= _mult_gf2(x, y)
        self.assertEqual(w, z)

    def test_div_gf2(self):
        from Crypto.Util.number import size as deg

        x, y = _div_gf2(567, 7)
        # The remainder's degree must be strictly below the divisor's.
        # assertTrue replaces failUnless, a pre-2.7 alias removed in
        # Python 3.12.
        self.assertTrue(deg(y) < deg(7))

        # quotient * divisor + remainder reconstructs the dividend
        w = _mult_gf2(x, 7) ^ y
        self.assertEqual(567, w)

        # Dividing by a higher-degree polynomial: quotient 0, remainder unchanged
        x, y = _div_gf2(7, 567)
        self.assertEqual(x, 0)
        self.assertEqual(y, 7)
6) + + x = _Element(2**127) * two + self.assertEqual(int(x), 1 + 2 + 4 + 128) + + def test4(self): + # Test inversion + one = _Element(1) + + x = one.inverse() + self.assertEqual(int(x), 1) + + x = _Element(82323923) + y = x.inverse() + self.assertEqual(int(x * y), 1) + +class Shamir_Tests(TestCase): + + def test1(self): + # Test splitting + shares = Shamir.split(2, 3, bchr(90)*16) + self.assertEqual(len(shares), 3) + for index in range(3): + self.assertEqual(shares[index][0], index+1) + self.assertEqual(len(shares[index][1]), 16) + + def test2(self): + # Test recombine + from itertools import permutations + + test_vectors = ( + (2, "d9fe73909bae28b3757854c0af7ad405", + "1-594ae8964294174d95c33756d2504170", + "2-d897459d29da574eb40e93ec552ffe6e", + "3-5823de9bf0e068b054b5f07a28056b1b", + "4-db2c1f8bff46d748f795da995bd080cb"), + (2, "bf4f902d9a7efafd1f3ffd9291fd5de9", + "1-557bd3b0748064b533469722d1cc7935", + "2-6b2717164783c66d47cd28f2119f14d0", + "3-8113548ba97d58256bb4424251ae300c", + "4-179e9e5a218483ddaeda57539139cf04"), + (3, "ec96aa5c14c9faa699354cf1da74e904", + "1-64579fbf1908d66f7239bf6e2b4e41e1", + "2-6cd9428df8017b52322561e8c672ae3e", + "3-e418776ef5c0579bd9299277374806dd", + "4-ab3f77a0107398d23b323e581bb43f5d", + "5-23fe42431db2b41bd03ecdc7ea8e97ac"), + (3, "44cf249b68b80fcdc27b47be60c2c145", + "1-d6515a3905cd755119b86e311c801e31", + "2-16693d9ac9f10c254036ced5f8917fa3", + "3-84f74338a48476b99bf5e75a84d3a0d1", + "4-3fe8878dc4a5d35811cf3cbcd33dbe52", + "5-ad76f92fa9d0a9c4ca0c1533af7f6132"), + (5, "5398717c982db935d968eebe53a47f5a", + "1-be7be2dd4c068e7ef576aaa1b1c11b01", + "2-f821f5848441cb98b3eb467e2733ee21", + "3-25ee52f53e203f6e29a0297b5ab486b5", + "4-fc9fb58ef74dab947fbf9acd9d5d83cd", + "5-b1949cce46d81552e65f248d3f74cc5c", + "6-d64797f59977c4d4a7956ad916da7699", + "7-ab608a6546a8b9af8820ff832b1135c7"), + (5, "4a78db90fbf35da5545d2fb728e87596", + "1-08daf9a25d8aa184cfbf02b30a0ed6a0", + "2-dda28261e36f0b14168c2cf153fb734e", + 
"3-e9fdec5505d674a57f9836c417c1ecaa", + "4-4dce5636ae06dee42d2c82e65f06c735", + "5-3963dc118afc2ba798fa1d452b28ef00", + "6-6dfe6ff5b09e94d2f84c382b12f42424", + "7-6faea9d4d4a4e201bf6c90b9000630c3"), + (10, "eccbf6d66d680b49b073c4f1ddf804aa", + "01-7d8ac32fe4ae209ead1f3220fda34466", + "02-f9144e76988aad647d2e61353a6e96d5", + "03-b14c3b80179203363922d60760271c98", + "04-770bb2a8c28f6cee89e00f4d5cc7f861", + "05-6e3d7073ea368334ef67467871c66799", + "06-248792bc74a98ce024477c13c8fb5f8d", + "07-fcea4640d2db820c0604851e293d2487", + "08-2776c36fb714bb1f8525a0be36fc7dba", + "09-6ee7ac8be773e473a4bf75ee5f065762", + "10-33657fc073354cf91d4a68c735aacfc8", + "11-7645c65094a5868bf225c516fdee2d0c", + "12-840485aacb8226631ecd9c70e3018086"), + (10, "377e63bdbb5f7d4dc58a483d035212bb", + "01-32c53260103be431c843b1a633afe3bd", + "02-0107eb16cb8695084d452d2cc50bc7d6", + "03-df1e5c66cd755287fb0446faccd72a06", + "04-361bbcd5d40797f49dfa1898652da197", + "05-160d3ad1512f7dec7fd9344aed318591", + "06-659af6d95df4f25beca4fb9bfee3b7e8", + "07-37f3b208977bad50b3724566b72bfa9d", + "08-6c1de2dfc69c2986142c26a8248eb316", + "09-5e19220837a396bd4bc8cd685ff314c3", + "10-86e7b864fb0f3d628e46d50c1ba92f1c", + "11-065d0082c80b1aea18f4abe0c49df72e", + "12-84a09430c1d20ea9f388f3123c3733a3"), + ) + + def get_share(p): + pos = p.find('-') + return int(p[:pos]), unhexlify(p[pos + 1:]) + + for tv in test_vectors: + k = tv[0] + secret = unhexlify(tv[1]) + max_perms = 10 + for perm, shares_idx in enumerate(permutations(range(2, len(tv)), k)): + if perm > max_perms: + break + shares = [ get_share(tv[x]) for x in shares_idx ] + result = Shamir.combine(shares, True) + self.assertEqual(secret, result) + + def test3(self): + # Loopback split/recombine + secret = unhexlify(b("000102030405060708090a0b0c0d0e0f")) + + shares = Shamir.split(2, 3, secret) + + secret2 = Shamir.combine(shares[:2]) + self.assertEqual(secret, secret2) + + secret3 = Shamir.combine([ shares[0], shares[2] ]) + self.assertEqual(secret, secret3) + 
+ def test4(self): + # Loopback split/recombine (SSSS) + secret = unhexlify(b("000102030405060708090a0b0c0d0e0f")) + + shares = Shamir.split(2, 3, secret, ssss=True) + + secret2 = Shamir.combine(shares[:2], ssss=True) + self.assertEqual(secret, secret2) + + def test5(self): + # Detect duplicate shares + secret = unhexlify(b("000102030405060708090a0b0c0d0e0f")) + + shares = Shamir.split(2, 3, secret) + self.assertRaises(ValueError, Shamir.combine, (shares[0], shares[0])) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(GF2_Tests) + tests += list_test_cases(Element_Tests) + tests += list_test_cases(Shamir_Tests) + return tests + +if __name__ == '__main__': + suite = lambda: TestSuite(get_tests()) + main(defaultTest='suite') + diff --git a/env/Lib/site-packages/Crypto/SelfTest/Protocol/test_rfc1751.py b/env/Lib/site-packages/Crypto/SelfTest/Protocol/test_rfc1751.py new file mode 100644 index 0000000..0878cc5 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Protocol/test_rfc1751.py @@ -0,0 +1,62 @@ +# +# Test script for Crypto.Util.RFC1751. +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew Kuchling and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
class RFC1751Test_k2e(unittest.TestCase):
    """Binary key -> RFC 1751 English word list."""

    def runTest(self):
        """Check converting keys to English"""
        for hex_key, expected_words in test_data:
            raw_key = binascii.a2b_hex(b(hex_key))
            self.assertEqual(RFC1751.key_to_english(raw_key), expected_words)
def get_tests(config={}):
    """Gather the self-tests for all public-key algorithms.

    Each sub-module is imported lazily so that a failure to import one
    algorithm's tests surfaces when the suite is built.
    """
    tests = []

    from Crypto.SelfTest.PublicKey import test_DSA
    tests += test_DSA.get_tests(config=config)

    from Crypto.SelfTest.PublicKey import test_RSA
    tests += test_RSA.get_tests(config=config)

    from Crypto.SelfTest.PublicKey import test_ECC
    tests += test_ECC.get_tests(config=config)

    from Crypto.SelfTest.PublicKey import test_import_DSA
    tests += test_import_DSA.get_tests(config=config)

    from Crypto.SelfTest.PublicKey import test_import_RSA
    tests += test_import_RSA.get_tests(config=config)

    from Crypto.SelfTest.PublicKey import test_import_ECC
    tests += test_import_ECC.get_tests(config=config)

    from Crypto.SelfTest.PublicKey import test_ElGamal
    tests += test_ElGamal.get_tests(config=config)

    return tests
new file mode 100644 index 0000000..0249de9 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_DSA.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/PublicKey/test_DSA.py: Self-test for the DSA primitive +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.PublicKey.DSA""" + +import os +from Crypto.Util.py3compat import * + +import unittest +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex + +def _sws(s): + """Remove whitespace from a text or byte string""" + if isinstance(s,str): + return "".join(s.split()) + else: + return b("").join(s.split()) + +class DSATest(unittest.TestCase): + # Test vector from "Appendix 5. Example of the DSA" of + # "Digital Signature Standard (DSS)", + # U.S. Department of Commerce/National Institute of Standards and Technology + # FIPS 186-2 (+Change Notice), 2000 January 27. 
    def setUp(self):
        # Import lazily and publish the names via ``global`` so that the
        # other methods of this class (which use DSA, Random, bytes_to_long
        # and size directly) see them without top-of-file imports.
        # NOTE(review): ``inverse`` is imported here but never used in this
        # module — confirm before removing.
        global DSA, Random, bytes_to_long, size
        from Crypto.PublicKey import DSA
        from Crypto import Random
        from Crypto.Util.number import bytes_to_long, inverse, size

        # Kept as an attribute so tests call self.dsa.generate/construct.
        self.dsa = DSA
self._test_verification(dsaObj) + + def test_construct_5tuple(self): + """DSA (default implementation) constructed key (5-tuple)""" + (y, g, p, q, x) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q, self.x)] + dsaObj = self.dsa.construct((y, g, p, q, x)) + self._test_signing(dsaObj) + self._test_verification(dsaObj) + + def test_construct_bad_key4(self): + (y, g, p, q) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q)] + tup = (y, g, p+1, q) + self.assertRaises(ValueError, self.dsa.construct, tup) + + tup = (y, g, p, q+1) + self.assertRaises(ValueError, self.dsa.construct, tup) + + tup = (y, 1, p, q) + self.assertRaises(ValueError, self.dsa.construct, tup) + + def test_construct_bad_key5(self): + (y, g, p, q, x) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q, self.x)] + tup = (y, g, p, q, x+1) + self.assertRaises(ValueError, self.dsa.construct, tup) + + tup = (y, g, p, q, q+10) + self.assertRaises(ValueError, self.dsa.construct, tup) + + def _check_private_key(self, dsaObj): + # Check capabilities + self.assertEqual(1, dsaObj.has_private()) + self.assertEqual(1, dsaObj.can_sign()) + self.assertEqual(0, dsaObj.can_encrypt()) + + # Sanity check key data + self.assertEqual(1, dsaObj.p > dsaObj.q) # p > q + self.assertEqual(160, size(dsaObj.q)) # size(q) == 160 bits + self.assertEqual(0, (dsaObj.p - 1) % dsaObj.q) # q is a divisor of p-1 + self.assertEqual(dsaObj.y, pow(dsaObj.g, dsaObj.x, dsaObj.p)) # y == g**x mod p + self.assertEqual(1, 0 < dsaObj.x < dsaObj.q) # 0 < x < q + + def _check_public_key(self, dsaObj): + k = bytes_to_long(a2b_hex(self.k)) + m_hash = bytes_to_long(a2b_hex(self.m_hash)) + + # Check capabilities + self.assertEqual(0, dsaObj.has_private()) + self.assertEqual(1, dsaObj.can_sign()) + self.assertEqual(0, dsaObj.can_encrypt()) + + # Check that private parameters are all missing + self.assertEqual(0, hasattr(dsaObj, 'x')) + + # Sanity check key data + 
self.assertEqual(1, dsaObj.p > dsaObj.q) # p > q + self.assertEqual(160, size(dsaObj.q)) # size(q) == 160 bits + self.assertEqual(0, (dsaObj.p - 1) % dsaObj.q) # q is a divisor of p-1 + + # Public-only key objects should raise an error when .sign() is called + self.assertRaises(TypeError, dsaObj._sign, m_hash, k) + + # Check __eq__ and __ne__ + self.assertEqual(dsaObj.public_key() == dsaObj.public_key(),True) # assert_ + self.assertEqual(dsaObj.public_key() != dsaObj.public_key(),False) # failIf + + self.assertEqual(dsaObj.public_key(), dsaObj.publickey()) + + def _test_signing(self, dsaObj): + k = bytes_to_long(a2b_hex(self.k)) + m_hash = bytes_to_long(a2b_hex(self.m_hash)) + r = bytes_to_long(a2b_hex(self.r)) + s = bytes_to_long(a2b_hex(self.s)) + (r_out, s_out) = dsaObj._sign(m_hash, k) + self.assertEqual((r, s), (r_out, s_out)) + + def _test_verification(self, dsaObj): + m_hash = bytes_to_long(a2b_hex(self.m_hash)) + r = bytes_to_long(a2b_hex(self.r)) + s = bytes_to_long(a2b_hex(self.s)) + self.failUnless(dsaObj._verify(m_hash, (r, s))) + self.failIf(dsaObj._verify(m_hash + 1, (r, s))) + + def test_repr(self): + (y, g, p, q) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q)] + dsaObj = self.dsa.construct((y, g, p, q)) + repr(dsaObj) + + +class DSADomainTest(unittest.TestCase): + + def test_domain1(self): + """Verify we can generate new keys in a given domain""" + dsa_key_1 = DSA.generate(1024) + domain_params = dsa_key_1.domain() + + dsa_key_2 = DSA.generate(1024, domain=domain_params) + self.assertEqual(dsa_key_1.p, dsa_key_2.p) + self.assertEqual(dsa_key_1.q, dsa_key_2.q) + self.assertEqual(dsa_key_1.g, dsa_key_2.g) + + self.assertEqual(dsa_key_1.domain(), dsa_key_2.domain()) + + def _get_weak_domain(self): + + from Crypto.Math.Numbers import Integer + from Crypto.Math import Primality + + p = Integer(4) + while p.size_in_bits() != 1024 or Primality.test_probable_prime(p) != Primality.PROBABLY_PRIME: + q1 = 
Integer.random(exact_bits=80) + q2 = Integer.random(exact_bits=80) + q = q1 * q2 + z = Integer.random(exact_bits=1024-160) + p = z * q + 1 + + h = Integer(2) + g = 1 + while g == 1: + g = pow(h, z, p) + h += 1 + + return (p, q, g) + + + def test_generate_error_weak_domain(self): + """Verify that domain parameters with composite q are rejected""" + + domain_params = self._get_weak_domain() + self.assertRaises(ValueError, DSA.generate, 1024, domain=domain_params) + + + def test_construct_error_weak_domain(self): + """Verify that domain parameters with composite q are rejected""" + + from Crypto.Math.Numbers import Integer + + p, q, g = self._get_weak_domain() + y = pow(g, 89, p) + self.assertRaises(ValueError, DSA.construct, (y, g, p, q)) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(DSATest) + tests += list_test_cases(DSADomainTest) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_ECC.py b/env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_ECC.py new file mode 100644 index 0000000..ea07a9d --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_ECC.py @@ -0,0 +1,859 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import unittest +import time +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors + +from Crypto.PublicKey import ECC +from Crypto.PublicKey.ECC import EccPoint, _curves, EccKey + +from Crypto.Math.Numbers import Integer + +class TestEccPoint(unittest.TestCase): + + def test_mix(self): + + p1 = ECC.generate(curve='P-256').pointQ + p2 = ECC.generate(curve='P-384').pointQ + + try: + p1 + p2 + assert(False) + except ValueError as e: + assert "not on the same curve" in str(e) + + try: + p1 += p2 + assert(False) + except ValueError as e: + assert "not on the same curve" in str(e) + + def test_repr(self): + p1 = ECC.construct(curve='P-256', + d=75467964919405407085864614198393977741148485328036093939970922195112333446269, + point_x=20573031766139722500939782666697015100983491952082159880539639074939225934381, + point_y=108863130203210779921520632367477406025152638284581252625277850513266505911389) + self.assertEqual(repr(p1), "EccKey(curve='NIST P-256', point_x=20573031766139722500939782666697015100983491952082159880539639074939225934381, 
point_y=108863130203210779921520632367477406025152638284581252625277850513266505911389, d=75467964919405407085864614198393977741148485328036093939970922195112333446269)") + + +class TestEccPoint_NIST_P256(unittest.TestCase): + """Tests defined in section 4.3 of https://www.nsa.gov/ia/_files/nist-routines.pdf""" + + pointS = EccPoint( + 0xde2444bebc8d36e682edd27e0f271508617519b3221a8fa0b77cab3989da97c9, + 0xc093ae7ff36e5380fc01a5aad1e66659702de80f53cec576b6350b243042a256) + + pointT = EccPoint( + 0x55a8b00f8da1d44e62f6b3b25316212e39540dc861c89575bb8cf92e35e0986b, + 0x5421c3209c2d6c704835d82ac4c3dd90f61a8a52598b9e7ab656e9d8c8b24316) + + def test_set(self): + pointW = EccPoint(0, 0) + pointW.set(self.pointS) + self.assertEqual(pointW, self.pointS) + + def test_copy(self): + pointW = self.pointS.copy() + self.assertEqual(pointW, self.pointS) + pointW.set(self.pointT) + self.assertEqual(pointW, self.pointT) + self.assertNotEqual(self.pointS, self.pointT) + + def test_negate(self): + negS = -self.pointS + sum = self.pointS + negS + self.assertEqual(sum, self.pointS.point_at_infinity()) + + def test_addition(self): + pointRx = 0x72b13dd4354b6b81745195e98cc5ba6970349191ac476bd4553cf35a545a067e + pointRy = 0x8d585cbb2e1327d75241a8a122d7620dc33b13315aa5c9d46d013011744ac264 + + pointR = self.pointS + self.pointT + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pai = pointR.point_at_infinity() + + # S + 0 + pointR = self.pointS + pai + self.assertEqual(pointR, self.pointS) + + # 0 + S + pointR = pai + self.pointS + self.assertEqual(pointR, self.pointS) + + # 0 + 0 + pointR = pai + pai + self.assertEqual(pointR, pai) + + def test_inplace_addition(self): + pointRx = 0x72b13dd4354b6b81745195e98cc5ba6970349191ac476bd4553cf35a545a067e + pointRy = 0x8d585cbb2e1327d75241a8a122d7620dc33b13315aa5c9d46d013011744ac264 + + pointR = self.pointS.copy() + pointR += self.pointT + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pai = 
pointR.point_at_infinity() + + # S + 0 + pointR = self.pointS.copy() + pointR += pai + self.assertEqual(pointR, self.pointS) + + # 0 + S + pointR = pai.copy() + pointR += self.pointS + self.assertEqual(pointR, self.pointS) + + # 0 + 0 + pointR = pai.copy() + pointR += pai + self.assertEqual(pointR, pai) + + def test_doubling(self): + pointRx = 0x7669e6901606ee3ba1a8eef1e0024c33df6c22f3b17481b82a860ffcdb6127b0 + pointRy = 0xfa878162187a54f6c39f6ee0072f33de389ef3eecd03023de10ca2c1db61d0c7 + + pointR = self.pointS.copy() + pointR.double() + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + # 2*0 + pai = self.pointS.point_at_infinity() + pointR = pai.copy() + pointR.double() + self.assertEqual(pointR, pai) + + # S + S + pointR = self.pointS.copy() + pointR += pointR + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_scalar_multiply(self): + d = 0xc51e4753afdec1e6b6c6a5b992f43f8dd0c7a8933072708b6522468b2ffb06fd + pointRx = 0x51d08d5f2d4278882946d88d83c97d11e62becc3cfc18bedacc89ba34eeca03f + pointRy = 0x75ee68eb8bf626aa5b673ab51f6e744e06f8fcf8a6c0cf3035beca956a7b41d5 + + pointR = self.pointS * d + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + # 0*S + pai = self.pointS.point_at_infinity() + pointR = self.pointS * 0 + self.assertEqual(pointR, pai) + + # -1*S + self.assertRaises(ValueError, lambda: self.pointS * -1) + + # Reverse order + pointR = d * self.pointS + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pointR = Integer(d) * self.pointS + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + + def test_joing_scalar_multiply(self): + d = 0xc51e4753afdec1e6b6c6a5b992f43f8dd0c7a8933072708b6522468b2ffb06fd + e = 0xd37f628ece72a462f0145cbefe3f0b355ee8332d37acdd83a358016aea029db7 + pointRx = 0xd867b4679221009234939221b8046245efcf58413daacbeff857b8588341f6b8 + pointRy = 
0xf2504055c03cede12d22720dad69c745106b6607ec7e50dd35d54bd80f615275 + + t = self.pointS * d + + pointR = self.pointS * d + self.pointT * e + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_sizes(self): + self.assertEqual(self.pointS.size_in_bits(), 256) + self.assertEqual(self.pointS.size_in_bytes(), 32) + +class TestEccPoint_NIST_P384(unittest.TestCase): + """Tests defined in section 4.4 of https://www.nsa.gov/ia/_files/nist-routines.pdf""" + + pointS = EccPoint( + 0xfba203b81bbd23f2b3be971cc23997e1ae4d89e69cb6f92385dda82768ada415ebab4167459da98e62b1332d1e73cb0e, + 0x5ffedbaefdeba603e7923e06cdb5d0c65b22301429293376d5c6944e3fa6259f162b4788de6987fd59aed5e4b5285e45, + "p384") + + pointT = EccPoint( + 0xaacc05202e7fda6fc73d82f0a66220527da8117ee8f8330ead7d20ee6f255f582d8bd38c5a7f2b40bcdb68ba13d81051, + 0x84009a263fefba7c2c57cffa5db3634d286131afc0fca8d25afa22a7b5dce0d9470da89233cee178592f49b6fecb5092, + "p384") + + def test_set(self): + pointW = EccPoint(0, 0, "p384") + pointW.set(self.pointS) + self.assertEqual(pointW, self.pointS) + + def test_copy(self): + pointW = self.pointS.copy() + self.assertEqual(pointW, self.pointS) + pointW.set(self.pointT) + self.assertEqual(pointW, self.pointT) + self.assertNotEqual(self.pointS, self.pointT) + + def test_negate(self): + negS = -self.pointS + sum = self.pointS + negS + self.assertEqual(sum, self.pointS.point_at_infinity()) + + def test_addition(self): + pointRx = 0x12dc5ce7acdfc5844d939f40b4df012e68f865b89c3213ba97090a247a2fc009075cf471cd2e85c489979b65ee0b5eed + pointRy = 0x167312e58fe0c0afa248f2854e3cddcb557f983b3189b67f21eee01341e7e9fe67f6ee81b36988efa406945c8804a4b0 + + pointR = self.pointS + self.pointT + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pai = pointR.point_at_infinity() + + # S + 0 + pointR = self.pointS + pai + self.assertEqual(pointR, self.pointS) + + # 0 + S + pointR = pai + self.pointS + self.assertEqual(pointR, self.pointS) + + # 0 + 
0 + pointR = pai + pai + self.assertEqual(pointR, pai) + + def _test_inplace_addition(self): + pointRx = 0x72b13dd4354b6b81745195e98cc5ba6970349191ac476bd4553cf35a545a067e + pointRy = 0x8d585cbb2e1327d75241a8a122d7620dc33b13315aa5c9d46d013011744ac264 + + pointR = self.pointS.copy() + pointR += self.pointT + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pai = pointR.point_at_infinity() + + # S + 0 + pointR = self.pointS.copy() + pointR += pai + self.assertEqual(pointR, self.pointS) + + # 0 + S + pointR = pai.copy() + pointR += self.pointS + self.assertEqual(pointR, self.pointS) + + # 0 + 0 + pointR = pai.copy() + pointR += pai + self.assertEqual(pointR, pai) + + def test_doubling(self): + pointRx = 0x2a2111b1e0aa8b2fc5a1975516bc4d58017ff96b25e1bdff3c229d5fac3bacc319dcbec29f9478f42dee597b4641504c + pointRy = 0xfa2e3d9dc84db8954ce8085ef28d7184fddfd1344b4d4797343af9b5f9d837520b450f726443e4114bd4e5bdb2f65ddd + + pointR = self.pointS.copy() + pointR.double() + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + # 2*0 + pai = self.pointS.point_at_infinity() + pointR = pai.copy() + pointR.double() + self.assertEqual(pointR, pai) + + # S + S + pointR = self.pointS.copy() + pointR += pointR + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_scalar_multiply(self): + d = 0xa4ebcae5a665983493ab3e626085a24c104311a761b5a8fdac052ed1f111a5c44f76f45659d2d111a61b5fdd97583480 + pointRx = 0xe4f77e7ffeb7f0958910e3a680d677a477191df166160ff7ef6bb5261f791aa7b45e3e653d151b95dad3d93ca0290ef2 + pointRy = 0xac7dee41d8c5f4a7d5836960a773cfc1376289d3373f8cf7417b0c6207ac32e913856612fc9ff2e357eb2ee05cf9667f + + pointR = self.pointS * d + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + # 0*S + pai = self.pointS.point_at_infinity() + pointR = self.pointS * 0 + self.assertEqual(pointR, pai) + + # -1*S + self.assertRaises(ValueError, lambda: self.pointS * -1) + + def 
test_joing_scalar_multiply(self): + d = 0xa4ebcae5a665983493ab3e626085a24c104311a761b5a8fdac052ed1f111a5c44f76f45659d2d111a61b5fdd97583480 + e = 0xafcf88119a3a76c87acbd6008e1349b29f4ba9aa0e12ce89bcfcae2180b38d81ab8cf15095301a182afbc6893e75385d + pointRx = 0x917ea28bcd641741ae5d18c2f1bd917ba68d34f0f0577387dc81260462aea60e2417b8bdc5d954fc729d211db23a02dc + pointRy = 0x1a29f7ce6d074654d77b40888c73e92546c8f16a5ff6bcbd307f758d4aee684beff26f6742f597e2585c86da908f7186 + + t = self.pointS * d + + pointR = self.pointS * d + self.pointT * e + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_sizes(self): + self.assertEqual(self.pointS.size_in_bits(), 384) + self.assertEqual(self.pointS.size_in_bytes(), 48) + + +class TestEccPoint_NIST_P521(unittest.TestCase): + """Tests defined in section 4.5 of https://www.nsa.gov/ia/_files/nist-routines.pdf""" + + pointS = EccPoint( + 0x000001d5c693f66c08ed03ad0f031f937443458f601fd098d3d0227b4bf62873af50740b0bb84aa157fc847bcf8dc16a8b2b8bfd8e2d0a7d39af04b089930ef6dad5c1b4, + 0x00000144b7770963c63a39248865ff36b074151eac33549b224af5c8664c54012b818ed037b2b7c1a63ac89ebaa11e07db89fcee5b556e49764ee3fa66ea7ae61ac01823, + "p521") + + pointT = EccPoint( + 0x000000f411f2ac2eb971a267b80297ba67c322dba4bb21cec8b70073bf88fc1ca5fde3ba09e5df6d39acb2c0762c03d7bc224a3e197feaf760d6324006fe3be9a548c7d5, + 0x000001fdf842769c707c93c630df6d02eff399a06f1b36fb9684f0b373ed064889629abb92b1ae328fdb45534268384943f0e9222afe03259b32274d35d1b9584c65e305, + "p521") + + def test_set(self): + pointW = EccPoint(0, 0) + pointW.set(self.pointS) + self.assertEqual(pointW, self.pointS) + + def test_copy(self): + pointW = self.pointS.copy() + self.assertEqual(pointW, self.pointS) + pointW.set(self.pointT) + self.assertEqual(pointW, self.pointT) + self.assertNotEqual(self.pointS, self.pointT) + + def test_negate(self): + negS = -self.pointS + sum = self.pointS + negS + self.assertEqual(sum, self.pointS.point_at_infinity()) + + def 
test_addition(self): + pointRx = 0x000001264ae115ba9cbc2ee56e6f0059e24b52c8046321602c59a339cfb757c89a59c358a9a8e1f86d384b3f3b255ea3f73670c6dc9f45d46b6a196dc37bbe0f6b2dd9e9 + pointRy = 0x00000062a9c72b8f9f88a271690bfa017a6466c31b9cadc2fc544744aeb817072349cfddc5ad0e81b03f1897bd9c8c6efbdf68237dc3bb00445979fb373b20c9a967ac55 + + pointR = self.pointS + self.pointT + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pai = pointR.point_at_infinity() + + # S + 0 + pointR = self.pointS + pai + self.assertEqual(pointR, self.pointS) + + # 0 + S + pointR = pai + self.pointS + self.assertEqual(pointR, self.pointS) + + # 0 + 0 + pointR = pai + pai + self.assertEqual(pointR, pai) + + def test_inplace_addition(self): + pointRx = 0x000001264ae115ba9cbc2ee56e6f0059e24b52c8046321602c59a339cfb757c89a59c358a9a8e1f86d384b3f3b255ea3f73670c6dc9f45d46b6a196dc37bbe0f6b2dd9e9 + pointRy = 0x00000062a9c72b8f9f88a271690bfa017a6466c31b9cadc2fc544744aeb817072349cfddc5ad0e81b03f1897bd9c8c6efbdf68237dc3bb00445979fb373b20c9a967ac55 + + pointR = self.pointS.copy() + pointR += self.pointT + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pai = pointR.point_at_infinity() + + # S + 0 + pointR = self.pointS.copy() + pointR += pai + self.assertEqual(pointR, self.pointS) + + # 0 + S + pointR = pai.copy() + pointR += self.pointS + self.assertEqual(pointR, self.pointS) + + # 0 + 0 + pointR = pai.copy() + pointR += pai + self.assertEqual(pointR, pai) + + def test_doubling(self): + pointRx = 0x0000012879442f2450c119e7119a5f738be1f1eba9e9d7c6cf41b325d9ce6d643106e9d61124a91a96bcf201305a9dee55fa79136dc700831e54c3ca4ff2646bd3c36bc6 + pointRy = 0x0000019864a8b8855c2479cbefe375ae553e2393271ed36fadfc4494fc0583f6bd03598896f39854abeae5f9a6515a021e2c0eef139e71de610143f53382f4104dccb543 + + pointR = self.pointS.copy() + pointR.double() + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + # 2*0 + pai = self.pointS.point_at_infinity() + 
pointR = pai.copy() + pointR.double() + self.assertEqual(pointR, pai) + + # S + S + pointR = self.pointS.copy() + pointR += pointR + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_scalar_multiply(self): + d = 0x000001eb7f81785c9629f136a7e8f8c674957109735554111a2a866fa5a166699419bfa9936c78b62653964df0d6da940a695c7294d41b2d6600de6dfcf0edcfc89fdcb1 + pointRx = 0x00000091b15d09d0ca0353f8f96b93cdb13497b0a4bb582ae9ebefa35eee61bf7b7d041b8ec34c6c00c0c0671c4ae063318fb75be87af4fe859608c95f0ab4774f8c95bb + pointRy = 0x00000130f8f8b5e1abb4dd94f6baaf654a2d5810411e77b7423965e0c7fd79ec1ae563c207bd255ee9828eb7a03fed565240d2cc80ddd2cecbb2eb50f0951f75ad87977f + + pointR = self.pointS * d + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + # 0*S + pai = self.pointS.point_at_infinity() + pointR = self.pointS * 0 + self.assertEqual(pointR, pai) + + # -1*S + self.assertRaises(ValueError, lambda: self.pointS * -1) + + def test_joing_scalar_multiply(self): + d = 0x000001eb7f81785c9629f136a7e8f8c674957109735554111a2a866fa5a166699419bfa9936c78b62653964df0d6da940a695c7294d41b2d6600de6dfcf0edcfc89fdcb1 + e = 0x00000137e6b73d38f153c3a7575615812608f2bab3229c92e21c0d1c83cfad9261dbb17bb77a63682000031b9122c2f0cdab2af72314be95254de4291a8f85f7c70412e3 + pointRx = 0x0000009d3802642b3bea152beb9e05fba247790f7fc168072d363340133402f2585588dc1385d40ebcb8552f8db02b23d687cae46185b27528adb1bf9729716e4eba653d + pointRy = 0x0000000fe44344e79da6f49d87c1063744e5957d9ac0a505bafa8281c9ce9ff25ad53f8da084a2deb0923e46501de5797850c61b229023dd9cf7fc7f04cd35ebb026d89d + + t = self.pointS * d + + pointR = self.pointS * d + pointR += self.pointT * e + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_sizes(self): + self.assertEqual(self.pointS.size_in_bits(), 521) + self.assertEqual(self.pointS.size_in_bytes(), 66) + + +class TestEccPoint_PAI_P256(unittest.TestCase): + """Test vectors from 
http://point-at-infinity.org/ecc/nisttv""" + + curve = _curves['p256'] + pointG = EccPoint(curve.Gx, curve.Gy, "p256") + + +tv_pai = load_test_vectors(("PublicKey", "ECC"), + "point-at-infinity.org-P256.txt", + "P-256 tests from point-at-infinity.org", + {"k": lambda k: int(k), + "x": lambda x: int(x, 16), + "y": lambda y: int(y, 16)}) or [] +for tv in tv_pai: + def new_test(self, scalar=tv.k, x=tv.x, y=tv.y): + result = self.pointG * scalar + self.assertEqual(result.x, x) + self.assertEqual(result.y, y) + setattr(TestEccPoint_PAI_P256, "test_%d" % tv.count, new_test) + + +class TestEccPoint_PAI_P384(unittest.TestCase): + """Test vectors from http://point-at-infinity.org/ecc/nisttv""" + + curve = _curves['p384'] + pointG = EccPoint(curve.Gx, curve.Gy, "p384") + + +tv_pai = load_test_vectors(("PublicKey", "ECC"), + "point-at-infinity.org-P384.txt", + "P-384 tests from point-at-infinity.org", + {"k" : lambda k: int(k), + "x" : lambda x: int(x, 16), + "y" : lambda y: int(y, 16)}) or [] +for tv in tv_pai: + def new_test(self, scalar=tv.k, x=tv.x, y=tv.y): + result = self.pointG * scalar + self.assertEqual(result.x, x) + self.assertEqual(result.y, y) + setattr(TestEccPoint_PAI_P384, "test_%d" % tv.count, new_test) + + +class TestEccPoint_PAI_P521(unittest.TestCase): + """Test vectors from http://point-at-infinity.org/ecc/nisttv""" + + curve = _curves['p521'] + pointG = EccPoint(curve.Gx, curve.Gy, "p521") + + +tv_pai = load_test_vectors(("PublicKey", "ECC"), + "point-at-infinity.org-P521.txt", + "P-521 tests from point-at-infinity.org", + {"k": lambda k: int(k), + "x": lambda x: int(x, 16), + "y": lambda y: int(y, 16)}) or [] +for tv in tv_pai: + def new_test(self, scalar=tv.k, x=tv.x, y=tv.y): + result = self.pointG * scalar + self.assertEqual(result.x, x) + self.assertEqual(result.y, y) + setattr(TestEccPoint_PAI_P521, "test_%d" % tv.count, new_test) + + +class TestEccKey_P256(unittest.TestCase): + + def test_private_key(self): + + key = EccKey(curve="P-256", d=1) + 
self.assertEqual(key.d, 1) + self.failUnless(key.has_private()) + self.assertEqual(key.pointQ.x, _curves['p256'].Gx) + self.assertEqual(key.pointQ.y, _curves['p256'].Gy) + + point = EccPoint(_curves['p256'].Gx, _curves['p256'].Gy) + key = EccKey(curve="P-256", d=1, point=point) + self.assertEqual(key.d, 1) + self.failUnless(key.has_private()) + self.assertEqual(key.pointQ, point) + + # Other names + key = EccKey(curve="secp256r1", d=1) + key = EccKey(curve="prime256v1", d=1) + + def test_public_key(self): + + point = EccPoint(_curves['p256'].Gx, _curves['p256'].Gy) + key = EccKey(curve="P-256", point=point) + self.failIf(key.has_private()) + self.assertEqual(key.pointQ, point) + + def test_public_key_derived(self): + + priv_key = EccKey(curve="P-256", d=3) + pub_key = priv_key.public_key() + self.failIf(pub_key.has_private()) + self.assertEqual(priv_key.pointQ, pub_key.pointQ) + + def test_invalid_curve(self): + self.assertRaises(ValueError, lambda: EccKey(curve="P-257", d=1)) + + def test_invalid_d(self): + self.assertRaises(ValueError, lambda: EccKey(curve="P-256", d=0)) + self.assertRaises(ValueError, lambda: EccKey(curve="P-256", d=_curves['p256'].order)) + + def test_equality(self): + + private_key = ECC.construct(d=3, curve="P-256") + private_key2 = ECC.construct(d=3, curve="P-256") + private_key3 = ECC.construct(d=4, curve="P-256") + + public_key = private_key.public_key() + public_key2 = private_key2.public_key() + public_key3 = private_key3.public_key() + + self.assertEqual(private_key, private_key2) + self.assertNotEqual(private_key, private_key3) + + self.assertEqual(public_key, public_key2) + self.assertNotEqual(public_key, public_key3) + + self.assertNotEqual(public_key, private_key) + + +class TestEccKey_P384(unittest.TestCase): + + def test_private_key(self): + + p384 = _curves['p384'] + + key = EccKey(curve="P-384", d=1) + self.assertEqual(key.d, 1) + self.failUnless(key.has_private()) + self.assertEqual(key.pointQ.x, p384.Gx) + 
self.assertEqual(key.pointQ.y, p384.Gy) + + point = EccPoint(p384.Gx, p384.Gy, "p384") + key = EccKey(curve="P-384", d=1, point=point) + self.assertEqual(key.d, 1) + self.failUnless(key.has_private()) + self.assertEqual(key.pointQ, point) + + # Other names + key = EccKey(curve="p384", d=1) + key = EccKey(curve="secp384r1", d=1) + key = EccKey(curve="prime384v1", d=1) + + def test_public_key(self): + + p384 = _curves['p384'] + point = EccPoint(p384.Gx, p384.Gy, 'p384') + key = EccKey(curve="P-384", point=point) + self.failIf(key.has_private()) + self.assertEqual(key.pointQ, point) + + def test_public_key_derived(self): + + priv_key = EccKey(curve="P-384", d=3) + pub_key = priv_key.public_key() + self.failIf(pub_key.has_private()) + self.assertEqual(priv_key.pointQ, pub_key.pointQ) + + def test_invalid_curve(self): + self.assertRaises(ValueError, lambda: EccKey(curve="P-385", d=1)) + + def test_invalid_d(self): + self.assertRaises(ValueError, lambda: EccKey(curve="P-384", d=0)) + self.assertRaises(ValueError, lambda: EccKey(curve="P-384", + d=_curves['p384'].order)) + + def test_equality(self): + + private_key = ECC.construct(d=3, curve="P-384") + private_key2 = ECC.construct(d=3, curve="P-384") + private_key3 = ECC.construct(d=4, curve="P-384") + + public_key = private_key.public_key() + public_key2 = private_key2.public_key() + public_key3 = private_key3.public_key() + + self.assertEqual(private_key, private_key2) + self.assertNotEqual(private_key, private_key3) + + self.assertEqual(public_key, public_key2) + self.assertNotEqual(public_key, public_key3) + + self.assertNotEqual(public_key, private_key) + + +class TestEccKey_P521(unittest.TestCase): + + def test_private_key(self): + + p521 = _curves['p521'] + + key = EccKey(curve="P-521", d=1) + self.assertEqual(key.d, 1) + self.failUnless(key.has_private()) + self.assertEqual(key.pointQ.x, p521.Gx) + self.assertEqual(key.pointQ.y, p521.Gy) + + point = EccPoint(p521.Gx, p521.Gy, "p521") + key = EccKey(curve="P-521", 
d=1, point=point) + self.assertEqual(key.d, 1) + self.failUnless(key.has_private()) + self.assertEqual(key.pointQ, point) + + # Other names + key = EccKey(curve="p521", d=1) + key = EccKey(curve="secp521r1", d=1) + key = EccKey(curve="prime521v1", d=1) + + def test_public_key(self): + + p521 = _curves['p521'] + point = EccPoint(p521.Gx, p521.Gy, 'p521') + key = EccKey(curve="P-384", point=point) + self.failIf(key.has_private()) + self.assertEqual(key.pointQ, point) + + def test_public_key_derived(self): + + priv_key = EccKey(curve="P-521", d=3) + pub_key = priv_key.public_key() + self.failIf(pub_key.has_private()) + self.assertEqual(priv_key.pointQ, pub_key.pointQ) + + def test_invalid_curve(self): + self.assertRaises(ValueError, lambda: EccKey(curve="P-522", d=1)) + + def test_invalid_d(self): + self.assertRaises(ValueError, lambda: EccKey(curve="P-521", d=0)) + self.assertRaises(ValueError, lambda: EccKey(curve="P-521", + d=_curves['p521'].order)) + + def test_equality(self): + + private_key = ECC.construct(d=3, curve="P-521") + private_key2 = ECC.construct(d=3, curve="P-521") + private_key3 = ECC.construct(d=4, curve="P-521") + + public_key = private_key.public_key() + public_key2 = private_key2.public_key() + public_key3 = private_key3.public_key() + + self.assertEqual(private_key, private_key2) + self.assertNotEqual(private_key, private_key3) + + self.assertEqual(public_key, public_key2) + self.assertNotEqual(public_key, public_key3) + + self.assertNotEqual(public_key, private_key) + + +class TestEccModule_P256(unittest.TestCase): + + def test_generate(self): + + key = ECC.generate(curve="P-256") + self.failUnless(key.has_private()) + self.assertEqual(key.pointQ, EccPoint(_curves['p256'].Gx, + _curves['p256'].Gy) * key.d, + "p256") + + # Other names + ECC.generate(curve="secp256r1") + ECC.generate(curve="prime256v1") + + def test_construct(self): + + key = ECC.construct(curve="P-256", d=1) + self.failUnless(key.has_private()) + self.assertEqual(key.pointQ, 
_curves['p256'].G) + + key = ECC.construct(curve="P-256", point_x=_curves['p256'].Gx, + point_y=_curves['p256'].Gy) + self.failIf(key.has_private()) + self.assertEqual(key.pointQ, _curves['p256'].G) + + # Other names + ECC.construct(curve="p256", d=1) + ECC.construct(curve="secp256r1", d=1) + ECC.construct(curve="prime256v1", d=1) + + def test_negative_construct(self): + coord = dict(point_x=10, point_y=4) + coordG = dict(point_x=_curves['p256'].Gx, point_y=_curves['p256'].Gy) + + self.assertRaises(ValueError, ECC.construct, curve="P-256", **coord) + self.assertRaises(ValueError, ECC.construct, curve="P-256", d=2, **coordG) + + +class TestEccModule_P384(unittest.TestCase): + + def test_generate(self): + + curve = _curves['p384'] + key = ECC.generate(curve="P-384") + self.failUnless(key.has_private()) + self.assertEqual(key.pointQ, EccPoint(curve.Gx, curve.Gy, "p384") * key.d) + + # Other names + ECC.generate(curve="secp384r1") + ECC.generate(curve="prime384v1") + + def test_construct(self): + + curve = _curves['p384'] + key = ECC.construct(curve="P-384", d=1) + self.failUnless(key.has_private()) + self.assertEqual(key.pointQ, _curves['p384'].G) + + key = ECC.construct(curve="P-384", point_x=curve.Gx, point_y=curve.Gy) + self.failIf(key.has_private()) + self.assertEqual(key.pointQ, curve.G) + + # Other names + ECC.construct(curve="p384", d=1) + ECC.construct(curve="secp384r1", d=1) + ECC.construct(curve="prime384v1", d=1) + + def test_negative_construct(self): + coord = dict(point_x=10, point_y=4) + coordG = dict(point_x=_curves['p384'].Gx, point_y=_curves['p384'].Gy) + + self.assertRaises(ValueError, ECC.construct, curve="P-384", **coord) + self.assertRaises(ValueError, ECC.construct, curve="P-384", d=2, **coordG) + + +class TestEccModule_P521(unittest.TestCase): + + def test_generate(self): + + curve = _curves['p521'] + key = ECC.generate(curve="P-521") + self.failUnless(key.has_private()) + self.assertEqual(key.pointQ, EccPoint(curve.Gx, curve.Gy, "p521") * 
key.d) + + # Other names + ECC.generate(curve="secp521r1") + ECC.generate(curve="prime521v1") + + def test_construct(self): + + curve = _curves['p521'] + key = ECC.construct(curve="P-521", d=1) + self.failUnless(key.has_private()) + self.assertEqual(key.pointQ, _curves['p521'].G) + + key = ECC.construct(curve="P-521", point_x=curve.Gx, point_y=curve.Gy) + self.failIf(key.has_private()) + self.assertEqual(key.pointQ, curve.G) + + # Other names + ECC.construct(curve="p521", d=1) + ECC.construct(curve="secp521r1", d=1) + ECC.construct(curve="prime521v1", d=1) + + def test_negative_construct(self): + coord = dict(point_x=10, point_y=4) + coordG = dict(point_x=_curves['p521'].Gx, point_y=_curves['p521'].Gy) + + self.assertRaises(ValueError, ECC.construct, curve="P-521", **coord) + self.assertRaises(ValueError, ECC.construct, curve="P-521", d=2, **coordG) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(TestEccPoint) + tests += list_test_cases(TestEccPoint_NIST_P256) + tests += list_test_cases(TestEccPoint_NIST_P384) + tests += list_test_cases(TestEccPoint_NIST_P521) + tests += list_test_cases(TestEccPoint_PAI_P256) + tests += list_test_cases(TestEccPoint_PAI_P384) + tests += list_test_cases(TestEccPoint_PAI_P521) + tests += list_test_cases(TestEccKey_P256) + tests += list_test_cases(TestEccKey_P384) + tests += list_test_cases(TestEccKey_P521) + tests += list_test_cases(TestEccModule_P256) + tests += list_test_cases(TestEccModule_P384) + tests += list_test_cases(TestEccModule_P521) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_ElGamal.py b/env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_ElGamal.py new file mode 100644 index 0000000..0af6738 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_ElGamal.py @@ -0,0 +1,217 @@ +# -*- coding: utf-8 -*- +# +# 
SelfTest/PublicKey/test_ElGamal.py: Self-test for the ElGamal primitive +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.PublicKey.ElGamal""" + +__revision__ = "$Id$" + +import unittest +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex +from Crypto import Random +from Crypto.PublicKey import ElGamal +from Crypto.Util.number import bytes_to_long +from Crypto.Util.py3compat import * + +class ElGamalTest(unittest.TestCase): + + # + # Test vectors + # + # There seem to be no real ElGamal test vectors available in the + # public domain. The following test vectors have been generated + # with libgcrypt 1.5.0. 
+ # + # Encryption + tve=[ + { + # 256 bits + 'p' :'BA4CAEAAED8CBE952AFD2126C63EB3B345D65C2A0A73D2A3AD4138B6D09BD933', + 'g' :'05', + 'y' :'60D063600ECED7C7C55146020E7A31C4476E9793BEAED420FEC9E77604CAE4EF', + 'x' :'1D391BA2EE3C37FE1BA175A69B2C73A11238AD77675932', + 'k' :'F5893C5BAB4131264066F57AB3D8AD89E391A0B68A68A1', + 'pt' :'48656C6C6F207468657265', + 'ct1':'32BFD5F487966CEA9E9356715788C491EC515E4ED48B58F0F00971E93AAA5EC7', + 'ct2':'7BE8FBFF317C93E82FCEF9BD515284BA506603FEA25D01C0CB874A31F315EE68' + }, + + { + # 512 bits + 'p' :'F1B18AE9F7B4E08FDA9A04832F4E919D89462FD31BF12F92791A93519F75076D6CE3942689CDFF2F344CAFF0F82D01864F69F3AECF566C774CBACF728B81A227', + 'g' :'07', + 'y' :'688628C676E4F05D630E1BE39D0066178CA7AA83836B645DE5ADD359B4825A12B02EF4252E4E6FA9BEC1DB0BE90F6D7C8629CABB6E531F472B2664868156E20C', + 'x' :'14E60B1BDFD33436C0DA8A22FDC14A2CCDBBED0627CE68', + 'k' :'38DBF14E1F319BDA9BAB33EEEADCAF6B2EA5250577ACE7', + 'pt' :'48656C6C6F207468657265', + 'ct1':'290F8530C2CC312EC46178724F196F308AD4C523CEABB001FACB0506BFED676083FE0F27AC688B5C749AB3CB8A80CD6F7094DBA421FB19442F5A413E06A9772B', + 'ct2':'1D69AAAD1DC50493FB1B8E8721D621D683F3BF1321BE21BC4A43E11B40C9D4D9C80DE3AAC2AB60D31782B16B61112E68220889D53C4C3136EE6F6CE61F8A23A0' + } + ] + + # Signature + tvs=[ + { + # 256 bits + 'p' :'D2F3C41EA66530838A704A48FFAC9334F4701ECE3A97CEE4C69DD01AE7129DD7', + 'g' :'05', + 'y' :'C3F9417DC0DAFEA6A05C1D2333B7A95E63B3F4F28CC962254B3256984D1012E7', + 'x' :'165E4A39BE44D5A2D8B1332D416BC559616F536BC735BB', + 'k' :'C7F0C794A7EAD726E25A47FF8928013680E73C51DD3D7D99BFDA8F492585928F', + 'h' :'48656C6C6F207468657265', + 'sig1':'35CA98133779E2073EF31165AFCDEB764DD54E96ADE851715495F9C635E1E7C2', + 'sig2':'0135B88B1151279FE5D8078D4FC685EE81177EE9802AB123A73925FC1CB059A7', + }, + { + # 512 bits + 'p' :'E24CF3A4B8A6AF749DCA6D714282FE4AABEEE44A53BB6ED15FBE32B5D3C3EF9CC4124A2ECA331F3C1C1B667ACA3766825217E7B5F9856648D95F05330C6A19CF', + 'g' :'0B', + 'y' 
:'2AD3A1049CA5D4ED207B2431C79A8719BB4073D4A94E450EA6CEE8A760EB07ADB67C0D52C275EE85D7B52789061EE45F2F37D9B2AE522A51C28329766BFE68AC', + 'x' :'16CBB4F46D9ECCF24FF9F7E63CAA3BD8936341555062AB', + 'k' :'8A3D89A4E429FD2476D7D717251FB79BF900FFE77444E6BB8299DC3F84D0DD57ABAB50732AE158EA52F5B9E7D8813E81FD9F79470AE22F8F1CF9AEC820A78C69', + 'h' :'48656C6C6F207468657265', + 'sig1':'BE001AABAFFF976EC9016198FBFEA14CBEF96B000CCC0063D3324016F9E91FE80D8F9325812ED24DDB2B4D4CF4430B169880B3CE88313B53255BD4EC0378586F', + 'sig2':'5E266F3F837BA204E3BBB6DBECC0611429D96F8C7CE8F4EFDF9D4CB681C2A954468A357BF4242CEC7418B51DFC081BCD21299EF5B5A0DDEF3A139A1817503DDE', + } + ] + + def test_generate_180(self): + self._test_random_key(180) + + def test_encryption(self): + for tv in self.tve: + d = self.convert_tv(tv, True) + key = ElGamal.construct(d['key']) + ct = key._encrypt(d['pt'], d['k']) + self.assertEquals(ct[0], d['ct1']) + self.assertEquals(ct[1], d['ct2']) + + def test_decryption(self): + for tv in self.tve: + d = self.convert_tv(tv, True) + key = ElGamal.construct(d['key']) + pt = key._decrypt((d['ct1'], d['ct2'])) + self.assertEquals(pt, d['pt']) + + def test_signing(self): + for tv in self.tvs: + d = self.convert_tv(tv, True) + key = ElGamal.construct(d['key']) + sig1, sig2 = key._sign(d['h'], d['k']) + self.assertEquals(sig1, d['sig1']) + self.assertEquals(sig2, d['sig2']) + + def test_verification(self): + for tv in self.tvs: + d = self.convert_tv(tv, True) + key = ElGamal.construct(d['key']) + # Positive test + res = key._verify( d['h'], (d['sig1'],d['sig2']) ) + self.failUnless(res) + # Negative test + res = key._verify( d['h'], (d['sig1']+1,d['sig2']) ) + self.failIf(res) + + def test_bad_key3(self): + tup = tup0 = list(self.convert_tv(self.tvs[0], 1)['key'])[:3] + tup[0] += 1 # p += 1 (not prime) + self.assertRaises(ValueError, ElGamal.construct, tup) + + tup = tup0 + tup[1] = 1 # g = 1 + self.assertRaises(ValueError, ElGamal.construct, tup) + + tup = tup0 + tup[2] = tup[0]*2 # y 
= 2*p + self.assertRaises(ValueError, ElGamal.construct, tup) + + def test_bad_key4(self): + tup = tup0 = list(self.convert_tv(self.tvs[0], 1)['key']) + tup[3] += 1 # x += 1 + self.assertRaises(ValueError, ElGamal.construct, tup) + + def convert_tv(self, tv, as_longs=0): + """Convert a test vector from textual form (hexadecimal ascii + to either integers or byte strings.""" + key_comps = 'p','g','y','x' + tv2 = {} + for c in tv.keys(): + tv2[c] = a2b_hex(tv[c]) + if as_longs or c in key_comps or c in ('sig1','sig2'): + tv2[c] = bytes_to_long(tv2[c]) + tv2['key']=[] + for c in key_comps: + tv2['key'] += [tv2[c]] + del tv2[c] + return tv2 + + def _test_random_key(self, bits): + elgObj = ElGamal.generate(bits, Random.new().read) + self._check_private_key(elgObj) + self._exercise_primitive(elgObj) + pub = elgObj.publickey() + self._check_public_key(pub) + self._exercise_public_primitive(elgObj) + + def _check_private_key(self, elgObj): + + # Check capabilities + self.failUnless(elgObj.has_private()) + + # Sanity check key data + self.failUnless(1 +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.PublicKey.RSA""" + +__revision__ = "$Id$" + +import os +import pickle +from pickle import PicklingError +from Crypto.Util.py3compat import * + +import unittest +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex + +class RSATest(unittest.TestCase): + # Test vectors from "RSA-OAEP and RSA-PSS test vectors (.zip file)" + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # See RSADSI's PKCS#1 page at + # http://www.rsa.com/rsalabs/node.asp?id=2125 + + # from oaep-int.txt + + # TODO: PyCrypto treats the message as starting *after* the leading "00" + # TODO: That behaviour should probably be changed in the future. + plaintext = """ + eb 7a 19 ac e9 e3 00 63 50 e3 29 50 4b 45 e2 + ca 82 31 0b 26 dc d8 7d 5c 68 f1 ee a8 f5 52 67 + c3 1b 2e 8b b4 25 1f 84 d7 e0 b2 c0 46 26 f5 af + f9 3e dc fb 25 c9 c2 b3 ff 8a e1 0e 83 9a 2d db + 4c dc fe 4f f4 77 28 b4 a1 b7 c1 36 2b aa d2 9a + b4 8d 28 69 d5 02 41 21 43 58 11 59 1b e3 92 f9 + 82 fb 3e 87 d0 95 ae b4 04 48 db 97 2f 3a c1 4f + 7b c2 75 19 52 81 ce 32 d2 f1 b7 6d 4d 35 3e 2d + """ + + ciphertext = """ + 12 53 e0 4d c0 a5 39 7b b4 4a 7a b8 7e 9b f2 a0 + 39 a3 3d 1e 99 6f c8 2a 94 cc d3 00 74 c9 5d f7 + 63 72 20 17 06 9e 52 68 da 5d 1c 0b 4f 87 2c f6 + 53 c1 1d f8 23 14 a6 79 68 df ea e2 8d ef 04 bb + 6d 84 b1 c3 1d 65 4a 19 70 e5 78 3b d6 eb 96 a0 + 24 c2 ca 2f 4a 90 fe 9f 2e f5 c9 c1 40 e5 bb 48 + da 95 36 ad 87 00 c8 4f c9 13 0a de a7 4e 55 8d + 51 a7 4d df 85 d8 b5 0d e9 68 38 d6 06 3e 09 55 + """ + + modulus = """ + bb f8 2f 09 06 82 ce 9c 23 38 ac 2b 9d a8 71 f7 + 36 8d 07 ee d4 10 43 a4 40 d6 b6 f0 74 54 f5 1f + b8 df ba af 03 5c 02 ab 61 ea 48 ce eb 6f cd 48 + 76 ed 52 0d 60 e1 ec 46 19 71 9d 8a 5b 8b 80 7f + af b8 e0 a3 df c7 37 72 3e e6 b4 b7 d9 3a 25 84 + ee 6a 64 9d 06 09 53 74 88 34 b2 45 45 98 39 4e + e0 aa b1 2d 7b 61 a5 1f 52 7a 9a 41 f6 c1 68 7f + e2 53 72 98 ca 2a 8f 59 
46 f8 e5 fd 09 1d bd cb + """ + + e = 0x11 # public exponent + + prime_factor = """ + c9 7f b1 f0 27 f4 53 f6 34 12 33 ea aa d1 d9 35 + 3f 6c 42 d0 88 66 b1 d0 5a 0f 20 35 02 8b 9d 86 + 98 40 b4 16 66 b4 2e 92 ea 0d a3 b4 32 04 b5 cf + ce 33 52 52 4d 04 16 a5 a4 41 e7 00 af 46 15 03 + """ + + def setUp(self): + global RSA, Random, bytes_to_long + from Crypto.PublicKey import RSA + from Crypto import Random + from Crypto.Util.number import bytes_to_long, inverse + self.n = bytes_to_long(a2b_hex(self.modulus)) + self.p = bytes_to_long(a2b_hex(self.prime_factor)) + + # Compute q, d, and u from n, e, and p + self.q = self.n // self.p + self.d = inverse(self.e, (self.p-1)*(self.q-1)) + self.u = inverse(self.p, self.q) # u = e**-1 (mod q) + + self.rsa = RSA + + def test_generate_1arg(self): + """RSA (default implementation) generated key (1 argument)""" + rsaObj = self.rsa.generate(1024) + self._check_private_key(rsaObj) + self._exercise_primitive(rsaObj) + pub = rsaObj.public_key() + self._check_public_key(pub) + self._exercise_public_primitive(rsaObj) + + def test_generate_2arg(self): + """RSA (default implementation) generated key (2 arguments)""" + rsaObj = self.rsa.generate(1024, Random.new().read) + self._check_private_key(rsaObj) + self._exercise_primitive(rsaObj) + pub = rsaObj.public_key() + self._check_public_key(pub) + self._exercise_public_primitive(rsaObj) + + def test_generate_3args(self): + rsaObj = self.rsa.generate(1024, Random.new().read,e=65537) + self._check_private_key(rsaObj) + self._exercise_primitive(rsaObj) + pub = rsaObj.public_key() + self._check_public_key(pub) + self._exercise_public_primitive(rsaObj) + self.assertEqual(65537,rsaObj.e) + + def test_construct_2tuple(self): + """RSA (default implementation) constructed key (2-tuple)""" + pub = self.rsa.construct((self.n, self.e)) + self._check_public_key(pub) + self._check_encryption(pub) + + def test_construct_3tuple(self): + """RSA (default implementation) constructed key (3-tuple)""" + 
rsaObj = self.rsa.construct((self.n, self.e, self.d)) + self._check_encryption(rsaObj) + self._check_decryption(rsaObj) + + def test_construct_4tuple(self): + """RSA (default implementation) constructed key (4-tuple)""" + rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p)) + self._check_encryption(rsaObj) + self._check_decryption(rsaObj) + + def test_construct_5tuple(self): + """RSA (default implementation) constructed key (5-tuple)""" + rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p, self.q)) + self._check_private_key(rsaObj) + self._check_encryption(rsaObj) + self._check_decryption(rsaObj) + + def test_construct_6tuple(self): + """RSA (default implementation) constructed key (6-tuple)""" + rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p, self.q, self.u)) + self._check_private_key(rsaObj) + self._check_encryption(rsaObj) + self._check_decryption(rsaObj) + + def test_construct_bad_key2(self): + tup = (self.n, 1) + self.assertRaises(ValueError, self.rsa.construct, tup) + + # An even modulus is wrong + tup = (self.n+1, self.e) + self.assertRaises(ValueError, self.rsa.construct, tup) + + def test_construct_bad_key3(self): + tup = (self.n, self.e, self.d+1) + self.assertRaises(ValueError, self.rsa.construct, tup) + + def test_construct_bad_key5(self): + tup = (self.n, self.e, self.d, self.p, self.p) + self.assertRaises(ValueError, self.rsa.construct, tup) + + tup = (self.p*self.p, self.e, self.p, self.p) + self.assertRaises(ValueError, self.rsa.construct, tup) + + tup = (self.p*self.p, 3, self.p, self.q) + self.assertRaises(ValueError, self.rsa.construct, tup) + + def test_construct_bad_key6(self): + tup = (self.n, self.e, self.d, self.p, self.q, 10) + self.assertRaises(ValueError, self.rsa.construct, tup) + + from Crypto.Util.number import inverse + tup = (self.n, self.e, self.d, self.p, self.q, inverse(self.q, self.p)) + self.assertRaises(ValueError, self.rsa.construct, tup) + + def test_factoring(self): + rsaObj = 
self.rsa.construct([self.n, self.e, self.d]) + self.failUnless(rsaObj.p==self.p or rsaObj.p==self.q) + self.failUnless(rsaObj.q==self.p or rsaObj.q==self.q) + self.failUnless(rsaObj.q*rsaObj.p == self.n) + + self.assertRaises(ValueError, self.rsa.construct, [self.n, self.e, self.n-1]) + + def test_repr(self): + rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p, self.q)) + repr(rsaObj) + + def test_serialization(self): + """RSA keys are unpickable""" + + rsa_key = self.rsa.generate(1024) + self.assertRaises(PicklingError, pickle.dumps, rsa_key) + + def test_raw_rsa_boundary(self): + # The argument of every RSA raw operation (encrypt/decrypt) must be + # non-negative and no larger than the modulus + rsa_obj = self.rsa.generate(1024) + + self.assertRaises(ValueError, rsa_obj._decrypt, rsa_obj.n) + self.assertRaises(ValueError, rsa_obj._encrypt, rsa_obj.n) + + self.assertRaises(ValueError, rsa_obj._decrypt, -1) + self.assertRaises(ValueError, rsa_obj._encrypt, -1) + + def test_size(self): + pub = self.rsa.construct((self.n, self.e)) + self.assertEquals(pub.size_in_bits(), 1024) + self.assertEquals(pub.size_in_bytes(), 128) + + def _check_private_key(self, rsaObj): + from Crypto.Math.Numbers import Integer + + # Check capabilities + self.assertEqual(1, rsaObj.has_private()) + + # Sanity check key data + self.assertEqual(rsaObj.n, rsaObj.p * rsaObj.q) # n = pq + lcm = int(Integer(rsaObj.p-1).lcm(rsaObj.q-1)) + self.assertEqual(1, rsaObj.d * rsaObj.e % lcm) # ed = 1 (mod LCM(p-1, q-1)) + self.assertEqual(1, rsaObj.p * rsaObj.u % rsaObj.q) # pu = 1 (mod q) + self.assertEqual(1, rsaObj.p > 1) # p > 1 + self.assertEqual(1, rsaObj.q > 1) # q > 1 + self.assertEqual(1, rsaObj.e > 1) # e > 1 + self.assertEqual(1, rsaObj.d > 1) # d > 1 + + def _check_public_key(self, rsaObj): + ciphertext = a2b_hex(self.ciphertext) + + # Check capabilities + self.assertEqual(0, rsaObj.has_private()) + + # Check rsaObj.[ne] -> rsaObj.[ne] mapping + self.assertEqual(rsaObj.n, rsaObj.n) + 
self.assertEqual(rsaObj.e, rsaObj.e) + + # Check that private parameters are all missing + self.assertEqual(0, hasattr(rsaObj, 'd')) + self.assertEqual(0, hasattr(rsaObj, 'p')) + self.assertEqual(0, hasattr(rsaObj, 'q')) + self.assertEqual(0, hasattr(rsaObj, 'u')) + + # Sanity check key data + self.assertEqual(1, rsaObj.e > 1) # e > 1 + + # Public keys should not be able to sign or decrypt + self.assertRaises(TypeError, rsaObj._decrypt, + bytes_to_long(ciphertext)) + + # Check __eq__ and __ne__ + self.assertEqual(rsaObj.public_key() == rsaObj.public_key(),True) # assert_ + self.assertEqual(rsaObj.public_key() != rsaObj.public_key(),False) # failIf + + self.assertEqual(rsaObj.publickey(), rsaObj.public_key()) + + def _exercise_primitive(self, rsaObj): + # Since we're using a randomly-generated key, we can't check the test + # vector, but we can make sure encryption and decryption are inverse + # operations. + ciphertext = bytes_to_long(a2b_hex(self.ciphertext)) + + # Test decryption + plaintext = rsaObj._decrypt(ciphertext) + + # Test encryption (2 arguments) + new_ciphertext2 = rsaObj._encrypt(plaintext) + self.assertEqual(ciphertext, new_ciphertext2) + + def _exercise_public_primitive(self, rsaObj): + plaintext = a2b_hex(self.plaintext) + + # Test encryption (2 arguments) + new_ciphertext2 = rsaObj._encrypt(bytes_to_long(plaintext)) + + def _check_encryption(self, rsaObj): + plaintext = a2b_hex(self.plaintext) + ciphertext = a2b_hex(self.ciphertext) + + # Test encryption + new_ciphertext2 = rsaObj._encrypt(bytes_to_long(plaintext)) + self.assertEqual(bytes_to_long(ciphertext), new_ciphertext2) + + def _check_decryption(self, rsaObj): + plaintext = bytes_to_long(a2b_hex(self.plaintext)) + ciphertext = bytes_to_long(a2b_hex(self.ciphertext)) + + # Test plain decryption + new_plaintext = rsaObj._decrypt(ciphertext) + self.assertEqual(plaintext, new_plaintext) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(RSATest) + return tests + +if __name__ 
== '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_import_DSA.py b/env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_import_DSA.py new file mode 100644 index 0000000..2a12ea2 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_import_DSA.py @@ -0,0 +1,554 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/PublicKey/test_import_DSA.py: Self-test for importing DSA keys +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +import unittest +import re + +from Crypto.PublicKey import DSA +from Crypto.SelfTest.st_common import * +from Crypto.Util.py3compat import * + +from binascii import unhexlify + +class ImportKeyTests(unittest.TestCase): + + y = 92137165128186062214622779787483327510946462589285775188003362705875131352591574106484271700740858696583623951844732128165434284507709057439633739849986759064015013893156866539696757799934634945787496920169462601722830899660681779448742875054459716726855443681559131362852474817534616736104831095601710736729 + p = 162452170958135306109773853318304545923250830605675936228618290525164105310663722368377131295055868997377338797580997938253236213714988311430600065853662861806894003694743806769284131194035848116051021923956699231855223389086646903420682639786976554552864568460372266462812137447840653688476258666833303658691 + q = 988791743931120302950649732173330531512663554851 + g = 85583152299197514738065570254868711517748965097380456700369348466136657764813442044039878840094809620913085570225318356734366886985903212775602770761953571967834823306046501307810937486758039063386311593890777319935391363872375452381836756832784184928202587843258855704771836753434368484556809100537243908232 + x = 540873410045082450874416847965843801027716145253 + + def setUp(self): + + # It is easier to write test vectors in text form, + # and convert them to byte strigs dynamically here + for mname, mvalue in ImportKeyTests.__dict__.items(): + if mname[:4] in ('der_', 'pem_', 'ssh_'): + if mname[:4] == 'der_': + mvalue = unhexlify(tobytes(mvalue)) + mvalue = tobytes(mvalue) + setattr(self, mname, mvalue) + + # 1. 
SubjectPublicKeyInfo + der_public=\ + '308201b73082012b06072a8648ce3804013082011e02818100e756ee1717f4b6'+\ + '794c7c214724a19763742c45572b4b3f8ff3b44f3be9f44ce039a2757695ec91'+\ + '5697da74ef914fcd1b05660e2419c761d639f45d2d79b802dbd23e7ab8b81b47'+\ + '9a380e1f30932584ba2a0b955032342ebc83cb5ca906e7b0d7cd6fe656cecb4c'+\ + '8b5a77123a8c6750a481e3b06057aff6aa6eba620b832d60c3021500ad32f48c'+\ + 'd3ae0c45a198a61fa4b5e20320763b2302818079dfdc3d614fe635fceb7eaeae'+\ + '3718dc2efefb45282993ac6749dc83c223d8c1887296316b3b0b54466cf444f3'+\ + '4b82e3554d0b90a778faaf1306f025dae6a3e36c7f93dd5bac4052b92370040a'+\ + 'ca70b8d5820599711900efbc961812c355dd9beffe0981da85c5548074b41c56'+\ + 'ae43fd300d89262e4efd89943f99a651b03888038185000281810083352a69a1'+\ + '32f34843d2a0eb995bff4e2f083a73f0049d2c91ea2f0ce43d144abda48199e4'+\ + 'b003c570a8af83303d45105f606c5c48d925a40ed9c2630c2fa4cdbf838539de'+\ + 'b9a29f919085f2046369f627ca84b2cb1e2c7940564b670f963ab1164d4e2ca2'+\ + 'bf6ffd39f12f548928bf4d2d1b5e6980b4f1be4c92a91986fba559' + + def testImportKey1(self): + key_obj = DSA.importKey(self.der_public) + self.failIf(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + + def testExportKey1(self): + tup = (self.y, self.g, self.p, self.q) + key = DSA.construct(tup) + encoded = key.export_key('DER') + self.assertEqual(self.der_public, encoded) + + # 2. 
+ pem_public="""\ +-----BEGIN PUBLIC KEY----- +MIIBtzCCASsGByqGSM44BAEwggEeAoGBAOdW7hcX9LZ5THwhRyShl2N0LEVXK0s/ +j/O0Tzvp9EzgOaJ1dpXskVaX2nTvkU/NGwVmDiQZx2HWOfRdLXm4AtvSPnq4uBtH +mjgOHzCTJYS6KguVUDI0LryDy1ypBuew181v5lbOy0yLWncSOoxnUKSB47BgV6/2 +qm66YguDLWDDAhUArTL0jNOuDEWhmKYfpLXiAyB2OyMCgYB539w9YU/mNfzrfq6u +NxjcLv77RSgpk6xnSdyDwiPYwYhyljFrOwtURmz0RPNLguNVTQuQp3j6rxMG8CXa +5qPjbH+T3VusQFK5I3AECspwuNWCBZlxGQDvvJYYEsNV3Zvv/gmB2oXFVIB0tBxW +rkP9MA2JJi5O/YmUP5mmUbA4iAOBhQACgYEAgzUqaaEy80hD0qDrmVv/Ti8IOnPw +BJ0skeovDOQ9FEq9pIGZ5LADxXCor4MwPUUQX2BsXEjZJaQO2cJjDC+kzb+DhTne +uaKfkZCF8gRjafYnyoSyyx4seUBWS2cPljqxFk1OLKK/b/058S9UiSi/TS0bXmmA +tPG+TJKpGYb7pVk= +-----END PUBLIC KEY-----""" + + def testImportKey2(self): + for pem in (self.pem_public, tostr(self.pem_public)): + key_obj = DSA.importKey(pem) + self.failIf(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + + def testExportKey2(self): + tup = (self.y, self.g, self.p, self.q) + key = DSA.construct(tup) + encoded = key.export_key('PEM') + self.assertEqual(self.pem_public, encoded) + + # 3. 
OpenSSL/OpenSSH format + der_private=\ + '308201bb02010002818100e756ee1717f4b6794c7c214724a19763742c45572b'+\ + '4b3f8ff3b44f3be9f44ce039a2757695ec915697da74ef914fcd1b05660e2419'+\ + 'c761d639f45d2d79b802dbd23e7ab8b81b479a380e1f30932584ba2a0b955032'+\ + '342ebc83cb5ca906e7b0d7cd6fe656cecb4c8b5a77123a8c6750a481e3b06057'+\ + 'aff6aa6eba620b832d60c3021500ad32f48cd3ae0c45a198a61fa4b5e2032076'+\ + '3b2302818079dfdc3d614fe635fceb7eaeae3718dc2efefb45282993ac6749dc'+\ + '83c223d8c1887296316b3b0b54466cf444f34b82e3554d0b90a778faaf1306f0'+\ + '25dae6a3e36c7f93dd5bac4052b92370040aca70b8d5820599711900efbc9618'+\ + '12c355dd9beffe0981da85c5548074b41c56ae43fd300d89262e4efd89943f99'+\ + 'a651b038880281810083352a69a132f34843d2a0eb995bff4e2f083a73f0049d'+\ + '2c91ea2f0ce43d144abda48199e4b003c570a8af83303d45105f606c5c48d925'+\ + 'a40ed9c2630c2fa4cdbf838539deb9a29f919085f2046369f627ca84b2cb1e2c'+\ + '7940564b670f963ab1164d4e2ca2bf6ffd39f12f548928bf4d2d1b5e6980b4f1'+\ + 'be4c92a91986fba55902145ebd9a3f0b82069d98420986b314215025756065' + + def testImportKey3(self): + key_obj = DSA.importKey(self.der_private) + self.failUnless(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + self.assertEqual(self.x, key_obj.x) + + def testExportKey3(self): + tup = (self.y, self.g, self.p, self.q, self.x) + key = DSA.construct(tup) + encoded = key.export_key('DER', pkcs8=False) + self.assertEqual(self.der_private, encoded) + + # 4. 
+ pem_private="""\ +-----BEGIN DSA PRIVATE KEY----- +MIIBuwIBAAKBgQDnVu4XF/S2eUx8IUckoZdjdCxFVytLP4/ztE876fRM4DmidXaV +7JFWl9p075FPzRsFZg4kGcdh1jn0XS15uALb0j56uLgbR5o4Dh8wkyWEuioLlVAy +NC68g8tcqQbnsNfNb+ZWzstMi1p3EjqMZ1CkgeOwYFev9qpuumILgy1gwwIVAK0y +9IzTrgxFoZimH6S14gMgdjsjAoGAed/cPWFP5jX8636urjcY3C7++0UoKZOsZ0nc +g8Ij2MGIcpYxazsLVEZs9ETzS4LjVU0LkKd4+q8TBvAl2uaj42x/k91brEBSuSNw +BArKcLjVggWZcRkA77yWGBLDVd2b7/4JgdqFxVSAdLQcVq5D/TANiSYuTv2JlD+Z +plGwOIgCgYEAgzUqaaEy80hD0qDrmVv/Ti8IOnPwBJ0skeovDOQ9FEq9pIGZ5LAD +xXCor4MwPUUQX2BsXEjZJaQO2cJjDC+kzb+DhTneuaKfkZCF8gRjafYnyoSyyx4s +eUBWS2cPljqxFk1OLKK/b/058S9UiSi/TS0bXmmAtPG+TJKpGYb7pVkCFF69mj8L +ggadmEIJhrMUIVAldWBl +-----END DSA PRIVATE KEY-----""" + + def testImportKey4(self): + for pem in (self.pem_private, tostr(self.pem_private)): + key_obj = DSA.importKey(pem) + self.failUnless(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + self.assertEqual(self.x, key_obj.x) + + def testExportKey4(self): + tup = (self.y, self.g, self.p, self.q, self.x) + key = DSA.construct(tup) + encoded = key.export_key('PEM', pkcs8=False) + self.assertEqual(self.pem_private, encoded) + + # 5. 
PKCS8 (unencrypted) + der_pkcs8=\ + '3082014a0201003082012b06072a8648ce3804013082011e02818100e756ee17'+\ + '17f4b6794c7c214724a19763742c45572b4b3f8ff3b44f3be9f44ce039a27576'+\ + '95ec915697da74ef914fcd1b05660e2419c761d639f45d2d79b802dbd23e7ab8'+\ + 'b81b479a380e1f30932584ba2a0b955032342ebc83cb5ca906e7b0d7cd6fe656'+\ + 'cecb4c8b5a77123a8c6750a481e3b06057aff6aa6eba620b832d60c3021500ad'+\ + '32f48cd3ae0c45a198a61fa4b5e20320763b2302818079dfdc3d614fe635fceb'+\ + '7eaeae3718dc2efefb45282993ac6749dc83c223d8c1887296316b3b0b54466c'+\ + 'f444f34b82e3554d0b90a778faaf1306f025dae6a3e36c7f93dd5bac4052b923'+\ + '70040aca70b8d5820599711900efbc961812c355dd9beffe0981da85c5548074'+\ + 'b41c56ae43fd300d89262e4efd89943f99a651b03888041602145ebd9a3f0b82'+\ + '069d98420986b314215025756065' + + def testImportKey5(self): + key_obj = DSA.importKey(self.der_pkcs8) + self.failUnless(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + self.assertEqual(self.x, key_obj.x) + + def testExportKey5(self): + tup = (self.y, self.g, self.p, self.q, self.x) + key = DSA.construct(tup) + encoded = key.export_key('DER') + self.assertEqual(self.der_pkcs8, encoded) + encoded = key.export_key('DER', pkcs8=True) + self.assertEqual(self.der_pkcs8, encoded) + + # 6. 
+ pem_pkcs8="""\ +-----BEGIN PRIVATE KEY----- +MIIBSgIBADCCASsGByqGSM44BAEwggEeAoGBAOdW7hcX9LZ5THwhRyShl2N0LEVX +K0s/j/O0Tzvp9EzgOaJ1dpXskVaX2nTvkU/NGwVmDiQZx2HWOfRdLXm4AtvSPnq4 +uBtHmjgOHzCTJYS6KguVUDI0LryDy1ypBuew181v5lbOy0yLWncSOoxnUKSB47Bg +V6/2qm66YguDLWDDAhUArTL0jNOuDEWhmKYfpLXiAyB2OyMCgYB539w9YU/mNfzr +fq6uNxjcLv77RSgpk6xnSdyDwiPYwYhyljFrOwtURmz0RPNLguNVTQuQp3j6rxMG +8CXa5qPjbH+T3VusQFK5I3AECspwuNWCBZlxGQDvvJYYEsNV3Zvv/gmB2oXFVIB0 +tBxWrkP9MA2JJi5O/YmUP5mmUbA4iAQWAhRevZo/C4IGnZhCCYazFCFQJXVgZQ== +-----END PRIVATE KEY-----""" + + def testImportKey6(self): + for pem in (self.pem_pkcs8, tostr(self.pem_pkcs8)): + key_obj = DSA.importKey(pem) + self.failUnless(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + self.assertEqual(self.x, key_obj.x) + + def testExportKey6(self): + tup = (self.y, self.g, self.p, self.q, self.x) + key = DSA.construct(tup) + encoded = key.export_key('PEM') + self.assertEqual(self.pem_pkcs8, encoded) + encoded = key.export_key('PEM', pkcs8=True) + self.assertEqual(self.pem_pkcs8, encoded) + + # 7. 
OpenSSH/RFC4253 + ssh_pub="""ssh-dss AAAAB3NzaC1kc3MAAACBAOdW7hcX9LZ5THwhRyShl2N0LEVXK0s/j/O0Tzvp9EzgOaJ1dpXskVaX2nTvkU/NGwVmDiQZx2HWOfRdLXm4AtvSPnq4uBtHmjgOHzCTJYS6KguVUDI0LryDy1ypBuew181v5lbOy0yLWncSOoxnUKSB47BgV6/2qm66YguDLWDDAAAAFQCtMvSM064MRaGYph+kteIDIHY7IwAAAIB539w9YU/mNfzrfq6uNxjcLv77RSgpk6xnSdyDwiPYwYhyljFrOwtURmz0RPNLguNVTQuQp3j6rxMG8CXa5qPjbH+T3VusQFK5I3AECspwuNWCBZlxGQDvvJYYEsNV3Zvv/gmB2oXFVIB0tBxWrkP9MA2JJi5O/YmUP5mmUbA4iAAAAIEAgzUqaaEy80hD0qDrmVv/Ti8IOnPwBJ0skeovDOQ9FEq9pIGZ5LADxXCor4MwPUUQX2BsXEjZJaQO2cJjDC+kzb+DhTneuaKfkZCF8gRjafYnyoSyyx4seUBWS2cPljqxFk1OLKK/b/058S9UiSi/TS0bXmmAtPG+TJKpGYb7pVk=""" + + def testImportKey7(self): + for ssh in (self.ssh_pub, tostr(self.ssh_pub)): + key_obj = DSA.importKey(ssh) + self.failIf(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + + def testExportKey7(self): + tup = (self.y, self.g, self.p, self.q) + key = DSA.construct(tup) + encoded = key.export_key('OpenSSH') + self.assertEqual(self.ssh_pub, encoded) + + # 8. 
Encrypted OpenSSL/OpenSSH + pem_private_encrypted="""\ +-----BEGIN DSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-128-CBC,70B6908939D65E9F2EB999E8729788CE + +4V6GHRDpCrdZ8MBjbyp5AlGUrjvr2Pn2e2zVxy5RBt4FBj9/pa0ae0nnyUPMLSUU +kKyOR0topRYTVRLElm4qVrb5uNZ3hRwfbklr+pSrB7O9eHz9V5sfOQxyODS07JxK +k1OdOs70/ouMXLF9EWfAZOmWUccZKHNblUwg1p1UrZIz5jXw4dUE/zqhvXh6d+iC +ADsICaBCjCrRQJKDp50h3+ndQjkYBKVH+pj8TiQ79U7lAvdp3+iMghQN6YXs9mdI +gFpWw/f97oWM4GHZFqHJ+VSMNFjBiFhAvYV587d7Lk4dhD8sCfbxj42PnfRgUItc +nnPqHxmhMQozBWzYM4mQuo3XbF2WlsNFbOzFVyGhw1Bx1s91qvXBVWJh2ozrW0s6 +HYDV7ZkcTml/4kjA/d+mve6LZ8kuuR1qCiZx6rkffhh1gDN/1Xz3HVvIy/dQ+h9s +5zp7PwUoWbhqp3WCOr156P6gR8qo7OlT6wMh33FSXK/mxikHK136fV2shwTKQVII +rJBvXpj8nACUmi7scKuTWGeUoXa+dwTZVVe+b+L2U1ZM7+h/neTJiXn7u99PFUwu +xVJtxaV37m3aXxtCsPnbBg== +-----END DSA PRIVATE KEY-----""" + + def testImportKey8(self): + for pem in (self.pem_private_encrypted, tostr(self.pem_private_encrypted)): + key_obj = DSA.importKey(pem, "PWDTEST") + self.failUnless(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + self.assertEqual(self.x, key_obj.x) + + def testExportKey8(self): + tup = (self.y, self.g, self.p, self.q, self.x) + key = DSA.construct(tup) + encoded = key.export_key('PEM', pkcs8=False, passphrase="PWDTEST") + key = DSA.importKey(encoded, "PWDTEST") + self.assertEqual(self.y, key.y) + self.assertEqual(self.p, key.p) + self.assertEqual(self.q, key.q) + self.assertEqual(self.g, key.g) + self.assertEqual(self.x, key.x) + + # 9. 
Encrypted PKCS8 + # pbeWithMD5AndDES-CBC + pem_pkcs8_encrypted="""\ +-----BEGIN ENCRYPTED PRIVATE KEY----- +MIIBcTAbBgkqhkiG9w0BBQMwDgQI0GC3BJ/jSw8CAggABIIBUHc1cXZpExIE9tC7 +7ryiW+5ihtF2Ekurq3e408GYSAu5smJjN2bvQXmzRFBz8W38K8eMf1sbWroZ4+zn +kZSbb9nSm5kAa8lR2+oF2k+WRswMR/PTC3f/D9STO2X0QxdrzKgIHEcSGSHp5jTx +aVvbkCDHo9vhBTl6S3ogZ48As/MEro76+9igUwJ1jNhIQZPJ7e20QH5qDpQFFJN4 +CKl2ENSEuwGiqBszItFy4dqH0g63ZGZV/xt9wSO9Rd7SK/EbA/dklOxBa5Y/VItM +gnIhs9XDMoGYyn6F023EicNJm6g/bVQk81BTTma4tm+12TKGdYm+QkeZvCOMZylr +Wv67cKwO3cAXt5C3QXMDgYR64XvuaT5h7C0igMp2afSXJlnbHEbFxQVJlv83T4FM +eZ4k+NQDbEL8GiHmFxzDWQAuPPZKJWEEEV2p/To+WOh+kSDHQw== +-----END ENCRYPTED PRIVATE KEY-----""" + + def testImportKey9(self): + for pem in (self.pem_pkcs8_encrypted, tostr(self.pem_pkcs8_encrypted)): + key_obj = DSA.importKey(pem, "PWDTEST") + self.failUnless(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + self.assertEqual(self.x, key_obj.x) + + # 10. 
Encrypted PKCS8 + # pkcs5PBES2 / + # pkcs5PBKDF2 (rounds=1000, salt=D725BF1B6B8239F4) / + # des-EDE3-CBC (iv=27A1C66C42AFEECE) + # + der_pkcs8_encrypted=\ + '30820196304006092a864886f70d01050d3033301b06092a864886f70d01050c'+\ + '300e0408d725bf1b6b8239f4020203e8301406082a864886f70d0307040827a1'+\ + 'c66c42afeece048201505cacfde7bf8edabb3e0d387950dc872662ea7e9b1ed4'+\ + '400d2e7e6186284b64668d8d0328c33a9d9397e6f03df7cb68268b0a06b4e22f'+\ + '7d132821449ecf998a8b696dbc6dd2b19e66d7eb2edfeb4153c1771d49702395'+\ + '4f36072868b5fcccf93413a5ac4b2eb47d4b3f681c6bd67ae363ed776f45ae47'+\ + '174a00098a7c930a50f820b227ddf50f9742d8e950d02586ff2dac0e3c372248'+\ + 'e5f9b6a7a02f4004f20c87913e0f7b52bccc209b95d478256a890b31d4c9adec'+\ + '21a4d157a179a93a3dad06f94f3ce486b46dfa7fc15fd852dd7680bbb2f17478'+\ + '7e71bd8dbaf81eca7518d76c1d26256e95424864ba45ca5d47d7c5a421be02fa'+\ + 'b94ab01e18593f66cf9094eb5c94b9ecf3aa08b854a195cf87612fbe5e96c426'+\ + '2b0d573e52dc71ba3f5e468c601e816c49b7d32c698b22175e89aaef0c443770'+\ + '5ef2f88a116d99d8e2869a4fd09a771b84b49e4ccb79aadcb1c9' + + def testImportKey10(self): + key_obj = DSA.importKey(self.der_pkcs8_encrypted, "PWDTEST") + self.failUnless(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + self.assertEqual(self.x, key_obj.x) + + def testExportKey10(self): + tup = (self.y, self.g, self.p, self.q, self.x) + key = DSA.construct(tup) + randfunc = BytesIO(unhexlify(b("27A1C66C42AFEECE") + b("D725BF1B6B8239F4"))).read + encoded = key.export_key('DER', pkcs8=True, passphrase="PWDTEST", randfunc=randfunc) + self.assertEqual(self.der_pkcs8_encrypted, encoded) + + # ---- + + def testImportError1(self): + self.assertRaises(ValueError, DSA.importKey, self.der_pkcs8_encrypted, "wrongpwd") + + def testExportError2(self): + tup = (self.y, self.g, self.p, self.q, self.x) + key = DSA.construct(tup) + self.assertRaises(ValueError, 
key.export_key, 'DER', pkcs8=False, passphrase="PWDTEST") + + def test_import_key(self): + """Verify importKey is an alias to import_key""" + + key_obj = DSA.import_key(self.der_public) + self.failIf(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + + def test_exportKey(self): + tup = (self.y, self.g, self.p, self.q, self.x) + key = DSA.construct(tup) + self.assertEquals(key.exportKey(), key.export_key()) + + + def test_import_empty(self): + self.assertRaises(ValueError, DSA.import_key, b'') + + +class ImportKeyFromX509Cert(unittest.TestCase): + + def test_x509v1(self): + + # Sample V1 certificate with a 1024 bit DSA key + x509_v1_cert = """ +-----BEGIN CERTIFICATE----- +MIIDUjCCArsCAQIwDQYJKoZIhvcNAQEFBQAwfjENMAsGA1UEChMEQWNtZTELMAkG +A1UECxMCUkQxHDAaBgkqhkiG9w0BCQEWDXNwYW1AYWNtZS5vcmcxEzARBgNVBAcT +Ck1ldHJvcG9saXMxETAPBgNVBAgTCE5ldyBZb3JrMQswCQYDVQQGEwJVUzENMAsG +A1UEAxMEdGVzdDAeFw0xNDA3MTEyMDM4NDNaFw0xNzA0MDYyMDM4NDNaME0xCzAJ +BgNVBAYTAlVTMREwDwYDVQQIEwhOZXcgWW9yazENMAsGA1UEChMEQWNtZTELMAkG +A1UECxMCUkQxDzANBgNVBAMTBnBvbGFuZDCCAbYwggErBgcqhkjOOAQBMIIBHgKB +gQDOrN4Ox4+t3T6wKeHfhzArhcrNEFMQ4Ss+4PIKyimDy9Bn64WPkL1B/9dvYIga +23GLu6tVJmXo6EdJnVOHEMhr99EeOwuDWWeP7Awq7RSlKEejokr4BEzMTW/tExSD +cO6/GI7xzh0eTH+VTTPDfyrJMYCkh0rJAfCP+5xrmPNetwIVALtXYOV1yoRrzJ2Q +M5uEjidH6GiZAoGAfUqA1SAm5g5U68SILMVX9l5rq0OpB0waBMpJQ31/R/yXNDqo +c3gGWZTOJFU4IzwNpGhrGNADUByz/lc1SAOAdEJIr0JVrhbGewQjB4pWqoLGbBKz +RoavTNDc/zD7SYa12evWDHADwvlXoeQg+lWop1zS8OqaDC7aLGKpWN3/m8kDgYQA +AoGAKoirPAfcp1rbbl4y2FFAIktfW8f4+T7d2iKSg73aiVfujhNOt1Zz1lfC0NI2 +eonLWO3tAM4XGKf1TLjb5UXngGn40okPsaA81YE6ZIKm20ywjlOY3QkAEdMaLVY3 +9PJvM8RGB9m7pLKxyHfGMfF40MVN4222zKeGp7xhM0CNiCUwDQYJKoZIhvcNAQEF +BQADgYEAfbNZfpYa2KlALEM1FZnwvQDvJHntHz8LdeJ4WM7CXDlKi67wY2HKM30w +s2xej75imkVOFd1kF2d0A8sjfriXLVIt1Hwq9ANZomhu4Edx0xpH8tqdh/bDtnM2 +TmduZNY9OWkb07h0CtWD6Zt8fhRllVsSSrlWd/2or7FXNC5weFQ= +-----END 
CERTIFICATE----- + """.strip() + + # DSA public key as dumped by openssl + y_str = """ +2a:88:ab:3c:07:dc:a7:5a:db:6e:5e:32:d8:51:40: +22:4b:5f:5b:c7:f8:f9:3e:dd:da:22:92:83:bd:da: +89:57:ee:8e:13:4e:b7:56:73:d6:57:c2:d0:d2:36: +7a:89:cb:58:ed:ed:00:ce:17:18:a7:f5:4c:b8:db: +e5:45:e7:80:69:f8:d2:89:0f:b1:a0:3c:d5:81:3a: +64:82:a6:db:4c:b0:8e:53:98:dd:09:00:11:d3:1a: +2d:56:37:f4:f2:6f:33:c4:46:07:d9:bb:a4:b2:b1: +c8:77:c6:31:f1:78:d0:c5:4d:e3:6d:b6:cc:a7:86: +a7:bc:61:33:40:8d:88:25 + """ + p_str = """ +00:ce:ac:de:0e:c7:8f:ad:dd:3e:b0:29:e1:df:87: +30:2b:85:ca:cd:10:53:10:e1:2b:3e:e0:f2:0a:ca: +29:83:cb:d0:67:eb:85:8f:90:bd:41:ff:d7:6f:60: +88:1a:db:71:8b:bb:ab:55:26:65:e8:e8:47:49:9d: +53:87:10:c8:6b:f7:d1:1e:3b:0b:83:59:67:8f:ec: +0c:2a:ed:14:a5:28:47:a3:a2:4a:f8:04:4c:cc:4d: +6f:ed:13:14:83:70:ee:bf:18:8e:f1:ce:1d:1e:4c: +7f:95:4d:33:c3:7f:2a:c9:31:80:a4:87:4a:c9:01: +f0:8f:fb:9c:6b:98:f3:5e:b7 + """ + q_str = """ +00:bb:57:60:e5:75:ca:84:6b:cc:9d:90:33:9b:84: +8e:27:47:e8:68:99 + """ + g_str = """ +7d:4a:80:d5:20:26:e6:0e:54:eb:c4:88:2c:c5:57: +f6:5e:6b:ab:43:a9:07:4c:1a:04:ca:49:43:7d:7f: +47:fc:97:34:3a:a8:73:78:06:59:94:ce:24:55:38: +23:3c:0d:a4:68:6b:18:d0:03:50:1c:b3:fe:57:35: +48:03:80:74:42:48:af:42:55:ae:16:c6:7b:04:23: +07:8a:56:aa:82:c6:6c:12:b3:46:86:af:4c:d0:dc: +ff:30:fb:49:86:b5:d9:eb:d6:0c:70:03:c2:f9:57: +a1:e4:20:fa:55:a8:a7:5c:d2:f0:ea:9a:0c:2e:da: +2c:62:a9:58:dd:ff:9b:c9 + """ + + key = DSA.importKey(x509_v1_cert) + for comp_name in ('y', 'p', 'q', 'g'): + comp_str = locals()[comp_name + "_str"] + comp = int(re.sub("[^0-9a-f]", "", comp_str), 16) + self.assertEqual(getattr(key, comp_name), comp) + self.failIf(key.has_private()) + + def test_x509v3(self): + + # Sample V3 certificate with a 1024 bit DSA key + x509_v3_cert = """ +-----BEGIN CERTIFICATE----- +MIIFhjCCA26gAwIBAgIBAzANBgkqhkiG9w0BAQsFADBhMQswCQYDVQQGEwJVUzEL +MAkGA1UECAwCTUQxEjAQBgNVBAcMCUJhbHRpbW9yZTEQMA4GA1UEAwwHVGVzdCBD 
+QTEfMB0GCSqGSIb3DQEJARYQdGVzdEBleGFtcGxlLmNvbTAeFw0xNDA3MTMyMDUz +MjBaFw0xNzA0MDgyMDUzMjBaMEAxCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJNRDES +MBAGA1UEBwwJQmFsdGltb3JlMRAwDgYDVQQDDAdhdXN0cmlhMIIBtjCCASsGByqG +SM44BAEwggEeAoGBALfd8gyEpVPA0ZI69Kp3nyJcu5N0ZZ3K1K9hleQLNqKEcZOh +7a/C2J1TPdmHTLJ0rAwBZ1nWxnARSgRphziGDFspKCYQwYcSMz8KoFgvXbXpuchy +oFACiQ2LqZnc5MakuLQtLcQciSYGYj3zmZdYMoa904F1aDWr+DxQI6DVC3/bAhUA +hqXMCJ6fQK3G2O9S3/CC/yVZXCsCgYBRXROl3R2khX7l10LQjDEgo3B1IzjXU/jP +McMBl6XO+nBJXxr/scbq8Ajiv7LTnGpSjgryHtvfj887kfvo8QbSS3kp3vq5uSqI +ui7E7r3jguWaLj616AG1HWOctXJUjqsiabZwsp2h09gHTzmHEXBOmiARu8xFxKAH +xsuo7onAbwOBhAACgYBylWjWSnKHE8mHx1A5m/0GQx6xnhWIe3+MJAnEhRGxA2J4 +SCsfWU0OwglIQToh1z5uUU9oDi9cYgNPBevOFRnDhc2yaJY6VAYnI+D+6J5IU6Yd +0iaG/iSc4sV4bFr0axcPpse3SN0XaQxiKeSFBfFnoMqL+dd9Gb3QPZSllBcVD6OB +1TCB0jAdBgNVHQ4EFgQUx5wN0Puotv388M9Tp/fsPbZpzAUwHwYDVR0jBBgwFoAU +a0hkif3RMaraiWtsOOZZlLu9wJwwCQYDVR0TBAIwADALBgNVHQ8EBAMCBeAwSgYD +VR0RBEMwQYILZXhhbXBsZS5jb22CD3d3dy5leGFtcGxlLmNvbYIQbWFpbC5leGFt +cGxlLmNvbYIPZnRwLmV4YW1wbGUuY29tMCwGCWCGSAGG+EIBDQQfFh1PcGVuU1NM +IEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTANBgkqhkiG9w0BAQsFAAOCAgEAyWf1TiJI +aNEIA9o/PG8/JiGASTS2/HBVTJbkq03k6NkJVk/GxC1DPziTUJ+CdWlHWcAi1EOW +Ach3QxNDRrVfCOfCMDgElIO1094/reJgdFYG00LRi8QkRJuxANV7YS4tLudhyHJC +kR2lhdMNmEuzWK+s2y+5cLrdm7qdvdENQCcV67uvGPx4sc+EaE7x13SczKjWBtbo +QCs6JTOW+EkPRl4Zo27K4OIZ43/J+GxvwU9QUVH3wPVdbbLNw+QeTFBYMTEcxyc4 +kv50HPBFaithziXBFyvdIs19FjkFzu0Uz/e0zb1+vMzQlJMD94HVOrMnIj5Sb2cL +KKdYXS4uhxFJmdV091Xur5JkYYwEzuaGav7J3zOzYutrIGTgDluLCvA+VQkRcTsy +jZ065SkY/v+38QHp+cmm8WRluupJTs8wYzVp6Fu0iFaaK7ztFmaZmHpiPIfDFjva +aCIgzzT5NweJd/b71A2SyzHXJ14zBXsr1PMylMp2TpHIidhuuNuQL6I0HaollB4M +Z3FsVBMhVDw4Z76qnFPr8mZE2tar33hSlJI/3pS/bBiukuBk8U7VB0X8OqaUnP3C +7b2Z4G8GtqDVcKGMzkvMjT4n9rKd/Le+qHSsQOGO9W/0LB7UDAZSwUsfAPnoBgdS +5t9tIomLCOstByXi+gGZue1TcdCa3Ph4kO0= +-----END CERTIFICATE----- + """.strip() + + # DSA public key as dumped by openssl + y_str = """ +72:95:68:d6:4a:72:87:13:c9:87:c7:50:39:9b:fd: 
+06:43:1e:b1:9e:15:88:7b:7f:8c:24:09:c4:85:11: +b1:03:62:78:48:2b:1f:59:4d:0e:c2:09:48:41:3a: +21:d7:3e:6e:51:4f:68:0e:2f:5c:62:03:4f:05:eb: +ce:15:19:c3:85:cd:b2:68:96:3a:54:06:27:23:e0: +fe:e8:9e:48:53:a6:1d:d2:26:86:fe:24:9c:e2:c5: +78:6c:5a:f4:6b:17:0f:a6:c7:b7:48:dd:17:69:0c: +62:29:e4:85:05:f1:67:a0:ca:8b:f9:d7:7d:19:bd: +d0:3d:94:a5:94:17:15:0f + """ + p_str = """ +00:b7:dd:f2:0c:84:a5:53:c0:d1:92:3a:f4:aa:77: +9f:22:5c:bb:93:74:65:9d:ca:d4:af:61:95:e4:0b: +36:a2:84:71:93:a1:ed:af:c2:d8:9d:53:3d:d9:87: +4c:b2:74:ac:0c:01:67:59:d6:c6:70:11:4a:04:69: +87:38:86:0c:5b:29:28:26:10:c1:87:12:33:3f:0a: +a0:58:2f:5d:b5:e9:b9:c8:72:a0:50:02:89:0d:8b: +a9:99:dc:e4:c6:a4:b8:b4:2d:2d:c4:1c:89:26:06: +62:3d:f3:99:97:58:32:86:bd:d3:81:75:68:35:ab: +f8:3c:50:23:a0:d5:0b:7f:db + """ + q_str = """ +00:86:a5:cc:08:9e:9f:40:ad:c6:d8:ef:52:df:f0: +82:ff:25:59:5c:2b + """ + g_str = """ +51:5d:13:a5:dd:1d:a4:85:7e:e5:d7:42:d0:8c:31: +20:a3:70:75:23:38:d7:53:f8:cf:31:c3:01:97:a5: +ce:fa:70:49:5f:1a:ff:b1:c6:ea:f0:08:e2:bf:b2: +d3:9c:6a:52:8e:0a:f2:1e:db:df:8f:cf:3b:91:fb: +e8:f1:06:d2:4b:79:29:de:fa:b9:b9:2a:88:ba:2e: +c4:ee:bd:e3:82:e5:9a:2e:3e:b5:e8:01:b5:1d:63: +9c:b5:72:54:8e:ab:22:69:b6:70:b2:9d:a1:d3:d8: +07:4f:39:87:11:70:4e:9a:20:11:bb:cc:45:c4:a0: +07:c6:cb:a8:ee:89:c0:6f + """ + + key = DSA.importKey(x509_v3_cert) + for comp_name in ('y', 'p', 'q', 'g'): + comp_str = locals()[comp_name + "_str"] + comp = int(re.sub("[^0-9a-f]", "", comp_str), 16) + self.assertEqual(getattr(key, comp_name), comp) + self.failIf(key.has_private()) + + +if __name__ == '__main__': + unittest.main() + +def get_tests(config={}): + tests = [] + tests += list_test_cases(ImportKeyTests) + tests += list_test_cases(ImportKeyFromX509Cert) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + diff --git a/env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_import_ECC.py 
b/env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_import_ECC.py new file mode 100644 index 0000000..bce8d6f --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_import_ECC.py @@ -0,0 +1,1346 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import os +import errno +import warnings +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Util.py3compat import bord, tostr, FileNotFoundError +from Crypto.Util.number import bytes_to_long +from Crypto.Hash import SHAKE128 + +from Crypto.PublicKey import ECC + +try: + import pycryptodome_test_vectors # type: ignore + test_vectors_available = True +except ImportError: + test_vectors_available = False + + +class MissingTestVectorException(ValueError): + pass + + +def load_file(file_name, mode="rb"): + results = None + + try: + if not test_vectors_available: + raise FileNotFoundError(errno.ENOENT, + os.strerror(errno.ENOENT), + file_name) + + dir_comps = ("PublicKey", "ECC") + init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) + full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) + with open(full_file_name, mode) as file_in: + results = file_in.read() + + except FileNotFoundError: + warnings.warn("Warning: skipping extended tests for ECC", + UserWarning, + stacklevel=2) + + if results is None: + raise MissingTestVectorException("Missing %s" % file_name) + + return results + + +def compact(lines): + ext = b"".join(lines) + return unhexlify(tostr(ext).replace(" ", "").replace(":", "")) + + +def create_ref_keys_p256(): + key_len = 32 + key_lines = load_file("ecc_p256.txt").splitlines() + private_key_d = bytes_to_long(compact(key_lines[2:5])) + public_key_xy = compact(key_lines[6:11]) + assert bord(public_key_xy[0]) == 4 # Uncompressed + public_key_x = bytes_to_long(public_key_xy[1:key_len+1]) + public_key_y = bytes_to_long(public_key_xy[key_len+1:]) + + return (ECC.construct(curve="P-256", d=private_key_d), + ECC.construct(curve="P-256", point_x=public_key_x, point_y=public_key_y)) + +def create_ref_keys_p384(): + key_len = 48 + key_lines = load_file("ecc_p384.txt").splitlines() + 
private_key_d = bytes_to_long(compact(key_lines[2:6])) + public_key_xy = compact(key_lines[7:14]) + assert bord(public_key_xy[0]) == 4 # Uncompressed + public_key_x = bytes_to_long(public_key_xy[1:key_len+1]) + public_key_y = bytes_to_long(public_key_xy[key_len+1:]) + + return (ECC.construct(curve="P-384", d=private_key_d), + ECC.construct(curve="P-384", point_x=public_key_x, point_y=public_key_y)) + +def create_ref_keys_p521(): + key_len = 66 + key_lines = load_file("ecc_p521.txt").splitlines() + private_key_d = bytes_to_long(compact(key_lines[2:7])) + public_key_xy = compact(key_lines[8:17]) + assert bord(public_key_xy[0]) == 4 # Uncompressed + public_key_x = bytes_to_long(public_key_xy[1:key_len+1]) + public_key_y = bytes_to_long(public_key_xy[key_len+1:]) + + return (ECC.construct(curve="P-521", d=private_key_d), + ECC.construct(curve="P-521", point_x=public_key_x, point_y=public_key_y)) + +# Create reference key pair +# ref_private, ref_public = create_ref_keys_p521() + +def get_fixed_prng(): + return SHAKE128.new().update(b"SEED").read + + +class TestImport(unittest.TestCase): + + def test_empty(self): + self.assertRaises(ValueError, ECC.import_key, b"") + + +class TestImport_P256(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestImport_P256, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_p256() + + def test_import_public_der(self): + key_file = load_file("ecc_p256_public.der") + + key = ECC._import_subjectPublicKeyInfo(key_file) + self.assertEqual(self.ref_public, key) + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_private_der(self): + key_file = load_file("ecc_p256_private.der") + + key = ECC._import_private_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = 
ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_clear(self): + key_file = load_file("ecc_p256_private_p8_clear.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_in_pem_clear(self): + key_file = load_file("ecc_p256_private_p8_clear.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_1(self): + key_file = load_file("ecc_p256_private_p8.der") + + key = ECC._import_der(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_2(self): + key_file = load_file("ecc_p256_private_p8.pem") + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_der(self): + key_file = load_file("ecc_p256_x509.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_public_pem(self): + key_file = load_file("ecc_p256_public.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_private_pem(self): + key_file = load_file("ecc_p256_private.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pem_with_ecparams(self): + key_file = load_file("ecc_p256_private_ecparams.pem") + key = ECC.import_key(key_file) + # We just check if the import succeeds + + def test_import_private_pem_encrypted(self): + for algo in "des3", "aes128", "aes192", "aes256", "aes256_gcm": + key_file = load_file("ecc_p256_private_enc_%s.pem" % algo) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + 
key = ECC.import_key(tostr(key_file), b"secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_pem(self): + key_file = load_file("ecc_p256_x509.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_openssh_public(self): + key_file = load_file("ecc_p256_public_openssh.txt") + + key = ECC._import_openssh_public(key_file) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_openssh_private_clear(self): + key_file = load_file("ecc_p256_private_openssh.pem") + key_file_old = load_file("ecc_p256_private_openssh_old.pem") + + key = ECC.import_key(key_file) + key_old = ECC.import_key(key_file_old) + self.assertEqual(key, key_old) + + def test_import_openssh_private_password(self): + key_file = load_file("ecc_p256_private_openssh_pwd.pem") + key_file_old = load_file("ecc_p256_private_openssh_pwd_old.pem") + + key = ECC.import_key(key_file, b"password") + key_old = ECC.import_key(key_file_old) + self.assertEqual(key, key_old) + + +class TestImport_P384(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestImport_P384, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_p384() + + def test_import_public_der(self): + key_file = load_file("ecc_p384_public.der") + + key = ECC._import_subjectPublicKeyInfo(key_file) + self.assertEqual(self.ref_public, key) + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_private_der(self): + key_file = load_file("ecc_p384_private.der") + + key = ECC._import_private_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def 
test_import_private_pkcs8_clear(self): + key_file = load_file("ecc_p384_private_p8_clear.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_in_pem_clear(self): + key_file = load_file("ecc_p384_private_p8_clear.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_1(self): + key_file = load_file("ecc_p384_private_p8.der") + + key = ECC._import_der(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_2(self): + key_file = load_file("ecc_p384_private_p8.pem") + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_der(self): + key_file = load_file("ecc_p384_x509.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_public_pem(self): + key_file = load_file("ecc_p384_public.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_private_pem(self): + key_file = load_file("ecc_p384_private.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pem_encrypted(self): + for algo in "des3", "aes128", "aes192", "aes256", "aes256_gcm": + key_file = load_file("ecc_p384_private_enc_%s.pem" % algo) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(tostr(key_file), b"secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_pem(self): + key_file = load_file("ecc_p384_x509.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def 
test_import_openssh_public(self): + key_file = load_file("ecc_p384_public_openssh.txt") + + key = ECC._import_openssh_public(key_file) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_openssh_private_clear(self): + key_file = load_file("ecc_p384_private_openssh.pem") + key_file_old = load_file("ecc_p384_private_openssh_old.pem") + + key = ECC.import_key(key_file) + key_old = ECC.import_key(key_file_old) + self.assertEqual(key, key_old) + + def test_import_openssh_private_password(self): + key_file = load_file("ecc_p384_private_openssh_pwd.pem") + key_file_old = load_file("ecc_p384_private_openssh_pwd_old.pem") + + key = ECC.import_key(key_file, b"password") + key_old = ECC.import_key(key_file_old) + self.assertEqual(key, key_old) + + +class TestImport_P521(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestImport_P521, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_p521() + + def test_import_public_der(self): + key_file = load_file("ecc_p521_public.der") + + key = ECC._import_subjectPublicKeyInfo(key_file) + self.assertEqual(self.ref_public, key) + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_private_der(self): + key_file = load_file("ecc_p521_private.der") + + key = ECC._import_private_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_clear(self): + key_file = load_file("ecc_p521_private_p8_clear.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def 
test_import_private_pkcs8_in_pem_clear(self): + key_file = load_file("ecc_p521_private_p8_clear.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_1(self): + key_file = load_file("ecc_p521_private_p8.der") + + key = ECC._import_der(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_2(self): + key_file = load_file("ecc_p521_private_p8.pem") + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_der(self): + key_file = load_file("ecc_p521_x509.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_public_pem(self): + key_file = load_file("ecc_p521_public.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_private_pem(self): + key_file = load_file("ecc_p521_private.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pem_encrypted(self): + for algo in "des3", "aes128", "aes192", "aes256", "aes256_gcm": + key_file = load_file("ecc_p521_private_enc_%s.pem" % algo) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(tostr(key_file), b"secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_pem(self): + key_file = load_file("ecc_p521_x509.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_openssh_public(self): + key_file = load_file("ecc_p521_public_openssh.txt") + + key = ECC._import_openssh_public(key_file) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def 
test_import_openssh_private_clear(self): + key_file = load_file("ecc_p521_private_openssh.pem") + key_file_old = load_file("ecc_p521_private_openssh_old.pem") + + key = ECC.import_key(key_file) + key_old = ECC.import_key(key_file_old) + self.assertEqual(key, key_old) + + def test_import_openssh_private_password(self): + key_file = load_file("ecc_p521_private_openssh_pwd.pem") + key_file_old = load_file("ecc_p521_private_openssh_pwd_old.pem") + + key = ECC.import_key(key_file, b"password") + key_old = ECC.import_key(key_file_old) + self.assertEqual(key, key_old) + + +class TestExport_P256(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestExport_P256, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_p256() + + def test_export_public_der_uncompressed(self): + key_file = load_file("ecc_p256_public.der") + + encoded = self.ref_public._export_subjectPublicKeyInfo(False) + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_public_der_compressed(self): + key_file = load_file("ecc_p256_public.der") + pub_key = ECC.import_key(key_file) + key_file_compressed = pub_key.export_key(format="DER", compress=True) + + key_file_compressed_ref = load_file("ecc_p256_public_compressed.der") + self.assertEqual(key_file_compressed, key_file_compressed_ref) + + def test_export_private_der(self): + key_file = load_file("ecc_p256_private.der") + + encoded = self.ref_private._export_private_der() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", use_pkcs8=False) + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_clear(self): + key_file = load_file("ecc_p256_private_p8_clear.der") + + encoded = self.ref_private._export_pkcs8() + self.assertEqual(key_file, 
encoded) + + # --- + + encoded = self.ref_private.export_key(format="DER") + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_encrypted(self): + encoded = self.ref_private._export_pkcs8(passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) + + decoded = ECC._import_pkcs8(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_public_pem_uncompressed(self): + key_file = load_file("ecc_p256_public.pem", "rt").strip() + + encoded = self.ref_private._export_public_pem(False) + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_public.export_key(format="PEM") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="PEM", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_public_pem_compressed(self): + key_file = load_file("ecc_p256_public.pem", "rt").strip() + pub_key = ECC.import_key(key_file) + + key_file_compressed = pub_key.export_key(format="PEM", compress=True) + key_file_compressed_ref = load_file("ecc_p256_public_compressed.pem", "rt").strip() + + self.assertEqual(key_file_compressed, key_file_compressed_ref) + + def test_export_private_pem_clear(self): + key_file = load_file("ecc_p256_private.pem", "rt").strip() + + encoded = self.ref_private._export_private_pem(None) + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", use_pkcs8=False) + self.assertEqual(key_file, encoded) + + def test_export_private_pem_encrypted(self): + encoded = self.ref_private._export_private_pem(passphrase=b"secret") + + # This should prove that the 
output is password-protected + self.assertRaises(ValueError, ECC.import_key, encoded) + + assert "EC PRIVATE KEY" in encoded + + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", + passphrase="secret", + use_pkcs8=False) + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_private_pkcs8_and_pem_1(self): + # PKCS8 inside PEM with both unencrypted + key_file = load_file("ecc_p256_private_p8_clear.pem", "rt").strip() + + encoded = self.ref_private._export_private_clear_pkcs8_in_clear_pem() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM") + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_and_pem_2(self): + # PKCS8 inside PEM with PKCS8 encryption + encoded = self.ref_private._export_private_encrypted_pkcs8_in_clear_pem("secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC.import_key, encoded) + + assert "ENCRYPTED PRIVATE KEY" in encoded + + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_openssh_uncompressed(self): + key_file = load_file("ecc_p256_public_openssh.txt", "rt") + + encoded = self.ref_public._export_openssh(False) + self.assertEquals(key_file, encoded) + + # --- + + encoded = self.ref_public.export_key(format="OpenSSH") + self.assertEquals(key_file, encoded) + + encoded = self.ref_public.export_key(format="OpenSSH", compress=False) + self.assertEquals(key_file, encoded) + + def test_export_openssh_compressed(self): + key_file = 
load_file("ecc_p256_public_openssh.txt", "rt") + pub_key = ECC.import_key(key_file) + + key_file_compressed = pub_key.export_key(format="OpenSSH", compress=True) + assert len(key_file) > len(key_file_compressed) + self.assertEquals(pub_key, ECC.import_key(key_file_compressed)) + + def test_prng(self): + # Test that password-protected containers use the provided PRNG + encoded1 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + self.assertEquals(encoded1, encoded2) + + # --- + + encoded1 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + self.assertEquals(encoded1, encoded2) + + def test_byte_or_string_passphrase(self): + encoded1 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase=b"secret", + randfunc=get_fixed_prng()) + self.assertEquals(encoded1, encoded2) + + def test_error_params1(self): + # Unknown format + self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") + + # Missing 'protection' parameter when PKCS#8 is used + self.ref_private.export_key(format="PEM", passphrase="secret", + use_pkcs8=False) + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="secret") + + # DER format but no PKCS#8 + self.assertRaises(ValueError, self.ref_private.export_key, format="DER", + passphrase="secret", + use_pkcs8=False, + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # Incorrect parameters for public keys + 
self.assertRaises(ValueError, self.ref_public.export_key, format="DER", + use_pkcs8=False) + + # Empty password + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", use_pkcs8=False) + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # No private keys with OpenSSH + self.assertRaises(ValueError, self.ref_private.export_key, format="OpenSSH", + passphrase="secret") + + def test_unsupported_curve(self): + + # openssl ecparam -name secp224r1 -genkey -noout -out strange-curve.pem -conv_form uncompressed + curve = """-----BEGIN EC PRIVATE KEY----- +MGgCAQEEHEi7xTHW+5oT8wgpjoEKV7uwMuY8rt2YUZe4j1SgBwYFK4EEACGhPAM6 +AATJgfOG+Bnki8robpNM8MtArji43GU9up4B0x9sVhqB+fZP+hXgV9ITN7YX4E/k +gVnJp9EBND/tHQ== +-----END EC PRIVATE KEY-----""" + + from Crypto.PublicKey.ECC import UnsupportedEccFeature + try: + ECC.import_key(curve) + except UnsupportedEccFeature as uef: + assert("1.3.132.0.33" in str(uef)) + else: + assert(False) + + def test_compressed_curve(self): + + # Compressed P-256 curve (Y-point is even) + pem1 = """-----BEGIN EC PRIVATE KEY----- + MFcCAQEEIHTuc09jC51xXomV6MVCDN+DpAAvSmaJWZPTEHM6D5H1oAoGCCqGSM49 + AwEHoSQDIgACWFuGbHe8yJ43rir7PMTE9w8vHz0BSpXHq90Xi7/s+a0= + -----END EC PRIVATE KEY-----""" + + # Compressed P-256 curve (Y-point is odd) + pem2 = """-----BEGIN EC PRIVATE KEY----- + MFcCAQEEIFggiPN9SQP+FAPTCPp08fRUz7rHp2qNBRcBJ1DXhb3ZoAoGCCqGSM49 + AwEHoSQDIgADLpph1trTIlVfa8NJvlMUPyWvL+wP+pW3BJITUL/wj9A= + -----END EC PRIVATE KEY-----""" + + key1 = ECC.import_key(pem1) + low16 = int(key1.pointQ.y % 65536) + self.assertEqual(low16, 0xA6FC) + + key2 = ECC.import_key(pem2) + low16 = int(key2.pointQ.y % 65536) + self.assertEqual(low16, 0x6E57) + + +class TestExport_P384(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestExport_P384, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = 
create_ref_keys_p384() + + def test_export_public_der_uncompressed(self): + key_file = load_file("ecc_p384_public.der") + + encoded = self.ref_public._export_subjectPublicKeyInfo(False) + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_public_der_compressed(self): + key_file = load_file("ecc_p384_public.der") + pub_key = ECC.import_key(key_file) + key_file_compressed = pub_key.export_key(format="DER", compress=True) + + key_file_compressed_ref = load_file("ecc_p384_public_compressed.der") + self.assertEqual(key_file_compressed, key_file_compressed_ref) + + def test_export_private_der(self): + key_file = load_file("ecc_p384_private.der") + + encoded = self.ref_private._export_private_der() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", use_pkcs8=False) + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_clear(self): + key_file = load_file("ecc_p384_private_p8_clear.der") + + encoded = self.ref_private._export_pkcs8() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="DER") + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_encrypted(self): + encoded = self.ref_private._export_pkcs8(passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) + + decoded = ECC._import_pkcs8(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def 
test_export_public_pem_uncompressed(self): + key_file = load_file("ecc_p384_public.pem", "rt").strip() + + encoded = self.ref_private._export_public_pem(False) + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_public.export_key(format="PEM") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="PEM", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_public_pem_compressed(self): + key_file = load_file("ecc_p384_public.pem", "rt").strip() + pub_key = ECC.import_key(key_file) + + key_file_compressed = pub_key.export_key(format="PEM", compress=True) + key_file_compressed_ref = load_file("ecc_p384_public_compressed.pem", "rt").strip() + + self.assertEqual(key_file_compressed, key_file_compressed_ref) + + def test_export_private_pem_clear(self): + key_file = load_file("ecc_p384_private.pem", "rt").strip() + + encoded = self.ref_private._export_private_pem(None) + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", use_pkcs8=False) + self.assertEqual(key_file, encoded) + + def test_export_private_pem_encrypted(self): + encoded = self.ref_private._export_private_pem(passphrase=b"secret") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC.import_key, encoded) + + assert "EC PRIVATE KEY" in encoded + + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", + passphrase="secret", + use_pkcs8=False) + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_private_pkcs8_and_pem_1(self): + # PKCS8 inside PEM with both unencrypted + key_file = load_file("ecc_p384_private_p8_clear.pem", "rt").strip() + + encoded = self.ref_private._export_private_clear_pkcs8_in_clear_pem() + self.assertEqual(key_file, encoded) + + # --- + + encoded = 
self.ref_private.export_key(format="PEM") + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_and_pem_2(self): + # PKCS8 inside PEM with PKCS8 encryption + encoded = self.ref_private._export_private_encrypted_pkcs8_in_clear_pem("secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC.import_key, encoded) + + assert "ENCRYPTED PRIVATE KEY" in encoded + + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_openssh_uncompressed(self): + key_file = load_file("ecc_p384_public_openssh.txt", "rt") + + encoded = self.ref_public._export_openssh(False) + self.assertEquals(key_file, encoded) + + # --- + + encoded = self.ref_public.export_key(format="OpenSSH") + self.assertEquals(key_file, encoded) + + encoded = self.ref_public.export_key(format="OpenSSH", compress=False) + self.assertEquals(key_file, encoded) + + def test_export_openssh_compressed(self): + key_file = load_file("ecc_p384_public_openssh.txt", "rt") + pub_key = ECC.import_key(key_file) + + key_file_compressed = pub_key.export_key(format="OpenSSH", compress=True) + assert len(key_file) > len(key_file_compressed) + self.assertEquals(pub_key, ECC.import_key(key_file_compressed)) + + def test_prng(self): + # Test that password-protected containers use the provided PRNG + encoded1 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + self.assertEquals(encoded1, encoded2) + 
+ # --- + + encoded1 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + self.assertEquals(encoded1, encoded2) + + def test_byte_or_string_passphrase(self): + encoded1 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase=b"secret", + randfunc=get_fixed_prng()) + self.assertEquals(encoded1, encoded2) + + def test_error_params1(self): + # Unknown format + self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") + + # Missing 'protection' parameter when PKCS#8 is used + self.ref_private.export_key(format="PEM", passphrase="secret", + use_pkcs8=False) + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="secret") + + # DER format but no PKCS#8 + self.assertRaises(ValueError, self.ref_private.export_key, format="DER", + passphrase="secret", + use_pkcs8=False, + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # Incorrect parameters for public keys + self.assertRaises(ValueError, self.ref_public.export_key, format="DER", + use_pkcs8=False) + + # Empty password + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", use_pkcs8=False) + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # No private keys with OpenSSH + self.assertRaises(ValueError, self.ref_private.export_key, format="OpenSSH", + passphrase="secret") + + def test_compressed_curve(self): + + # Compressed P-384 curve (Y-point is even) + # openssl ecparam -name secp384p1 -genkey -noout -conv_form compressed -out /tmp/a.pem + # openssl ec -in /tmp/a.pem -text -noout + pem1 = """-----BEGIN EC PRIVATE 
KEY----- +MIGkAgEBBDAM0lEIhvXuekK2SWtdbgOcZtBaxa9TxfpO/GcDFZLCJ3JVXaTgwken +QT+C+XLtD6WgBwYFK4EEACKhZANiAATs0kZMhFDu8DoBC21jrSDPyAUn4aXZ/DM4 +ylhDfWmb4LEbeszXceIzfhIUaaGs5y1xXaqf5KXTiAAYx2pKUzAAM9lcGUHCGKJG +k4AgUmVJON29XoUilcFrzjDmuye3B6Q= +-----END EC PRIVATE KEY-----""" + + # Compressed P-384 curve (Y-point is odd) + pem2 = """-----BEGIN EC PRIVATE KEY----- +MIGkAgEBBDDHPFTslYLltE16fHdSDTtE/2HTmd3M8mqy5MttAm4wZ833KXiGS9oe +kFdx9sNV0KygBwYFK4EEACKhZANiAASLIE5RqVMtNhtBH/u/p/ifqOAlKnK/+RrQ +YC46ZRsnKNayw3wATdPjgja7L/DSII3nZK0G6KOOVwJBznT/e+zudUJYhZKaBLRx +/bgXyxUtYClOXxb1Y/5N7txLstYRyP0= +-----END EC PRIVATE KEY-----""" + + key1 = ECC.import_key(pem1) + low16 = int(key1.pointQ.y % 65536) + self.assertEqual(low16, 0x07a4) + + key2 = ECC.import_key(pem2) + low16 = int(key2.pointQ.y % 65536) + self.assertEqual(low16, 0xc8fd) + + +class TestExport_P521(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestExport_P521, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_p521() + + def test_export_public_der_uncompressed(self): + key_file = load_file("ecc_p521_public.der") + + encoded = self.ref_public._export_subjectPublicKeyInfo(False) + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_public_der_compressed(self): + key_file = load_file("ecc_p521_public.der") + pub_key = ECC.import_key(key_file) + key_file_compressed = pub_key.export_key(format="DER", compress=True) + + key_file_compressed_ref = load_file("ecc_p521_public_compressed.der") + self.assertEqual(key_file_compressed, key_file_compressed_ref) + + def test_export_private_der(self): + key_file = load_file("ecc_p521_private.der") + + encoded = self.ref_private._export_private_der() + self.assertEqual(key_file, encoded) + + # --- + + encoded = 
self.ref_private.export_key(format="DER", use_pkcs8=False) + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_clear(self): + key_file = load_file("ecc_p521_private_p8_clear.der") + + encoded = self.ref_private._export_pkcs8() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="DER") + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_encrypted(self): + encoded = self.ref_private._export_pkcs8(passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) + + decoded = ECC._import_pkcs8(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_public_pem_uncompressed(self): + key_file = load_file("ecc_p521_public.pem", "rt").strip() + + encoded = self.ref_private._export_public_pem(False) + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_public.export_key(format="PEM") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="PEM", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_public_pem_compressed(self): + key_file = load_file("ecc_p521_public.pem", "rt").strip() + pub_key = ECC.import_key(key_file) + + key_file_compressed = pub_key.export_key(format="PEM", compress=True) + key_file_compressed_ref = load_file("ecc_p521_public_compressed.pem", "rt").strip() + + self.assertEqual(key_file_compressed, key_file_compressed_ref) + + def test_export_private_pem_clear(self): + key_file = load_file("ecc_p521_private.pem", "rt").strip() + + encoded = self.ref_private._export_private_pem(None) + self.assertEqual(key_file, encoded) 
+ + # --- + + encoded = self.ref_private.export_key(format="PEM", use_pkcs8=False) + self.assertEqual(key_file, encoded) + + def test_export_private_pem_encrypted(self): + encoded = self.ref_private._export_private_pem(passphrase=b"secret") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC.import_key, encoded) + + assert "EC PRIVATE KEY" in encoded + + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", + passphrase="secret", + use_pkcs8=False) + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_private_pkcs8_and_pem_1(self): + # PKCS8 inside PEM with both unencrypted + key_file = load_file("ecc_p521_private_p8_clear.pem", "rt").strip() + + encoded = self.ref_private._export_private_clear_pkcs8_in_clear_pem() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM") + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_and_pem_2(self): + # PKCS8 inside PEM with PKCS8 encryption + encoded = self.ref_private._export_private_encrypted_pkcs8_in_clear_pem("secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC.import_key, encoded) + + assert "ENCRYPTED PRIVATE KEY" in encoded + + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_openssh_uncompressed(self): + key_file = load_file("ecc_p521_public_openssh.txt", "rt") + + encoded = self.ref_public._export_openssh(False) + self.assertEquals(key_file, encoded) + + # --- 
+ + encoded = self.ref_public.export_key(format="OpenSSH") + self.assertEquals(key_file, encoded) + + encoded = self.ref_public.export_key(format="OpenSSH", compress=False) + self.assertEquals(key_file, encoded) + + def test_export_openssh_compressed(self): + key_file = load_file("ecc_p521_public_openssh.txt", "rt") + pub_key = ECC.import_key(key_file) + + key_file_compressed = pub_key.export_key(format="OpenSSH", compress=True) + assert len(key_file) > len(key_file_compressed) + self.assertEquals(pub_key, ECC.import_key(key_file_compressed)) + + def test_prng(self): + # Test that password-protected containers use the provided PRNG + encoded1 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + self.assertEquals(encoded1, encoded2) + + # --- + + encoded1 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + self.assertEquals(encoded1, encoded2) + + def test_byte_or_string_passphrase(self): + encoded1 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase=b"secret", + randfunc=get_fixed_prng()) + self.assertEquals(encoded1, encoded2) + + def test_error_params1(self): + # Unknown format + self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") + + # Missing 'protection' parameter when PKCS#8 is used + self.ref_private.export_key(format="PEM", passphrase="secret", + use_pkcs8=False) + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + 
passphrase="secret") + + # DER format but no PKCS#8 + self.assertRaises(ValueError, self.ref_private.export_key, format="DER", + passphrase="secret", + use_pkcs8=False, + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # Incorrect parameters for public keys + self.assertRaises(ValueError, self.ref_public.export_key, format="DER", + use_pkcs8=False) + + # Empty password + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", use_pkcs8=False) + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # No private keys with OpenSSH + self.assertRaises(ValueError, self.ref_private.export_key, format="OpenSSH", + passphrase="secret") + + def test_compressed_curve(self): + + # Compressed P-521 curve (Y-point is even) + # openssl ecparam -name secp521r1 -genkey -noout -conv_form compressed -out /tmp/a.pem + # openssl ec -in /tmp/a.pem -text -noout + pem1 = """-----BEGIN EC PRIVATE KEY----- +MIHcAgEBBEIAnm1CEjVjvNfXEN730p+D6su5l+mOztdc5XmTEoti+s2R4GQ4mAv3 +0zYLvyklvOHw0+yy8d0cyGEJGb8T3ZVKmg2gBwYFK4EEACOhgYkDgYYABAHzjTI1 +ckxQ3Togi0LAxiG0PucdBBBs5oIy3df95xv6SInp70z+4qQ2EltEmdNMssH8eOrl +M5CYdZ6nbcHMVaJUvQEzTrYxvFjOgJiOd+E9eBWbLkbMNqsh1UKVO6HbMbW0ohCI +uGxO8tM6r3w89/qzpG2SvFM/fvv3mIR30wSZDD84qA== +-----END EC PRIVATE KEY-----""" + + # Compressed P-521 curve (Y-point is odd) + pem2 = """-----BEGIN EC PRIVATE KEY----- +MIHcAgEBBEIB84OfhJluLBRLn3+cC/RQ37C2SfQVP/t0gQK2tCsTf5avRcWYRrOJ +PmX9lNnkC0Hobd75QFRmdxrB0Wd1/M4jZOWgBwYFK4EEACOhgYkDgYYABAAMZcdJ +1YLCGHt3bHCEzdidVy6+brlJIbv1aQ9fPQLF7WKNv4c8w3H8d5a2+SDZilBOsk5c +6cNJDMz2ExWQvxl4CwDJtJGt1+LHVKFGy73NANqVxMbRu+2F8lOxkNp/ziFTbVyV +vv6oYkMIIi7r5oQWAiQDrR2mlrrFDL9V7GH/r8SWQw== +-----END EC PRIVATE KEY-----""" + + key1 = ECC.import_key(pem1) + low16 = int(key1.pointQ.y % 65536) + self.assertEqual(low16, 0x38a8) + + key2 = ECC.import_key(pem2) + low16 = int(key2.pointQ.y % 65536) + self.assertEqual(low16, 
0x9643) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(TestImport) + try: + tests += list_test_cases(TestImport_P256) + tests += list_test_cases(TestImport_P384) + tests += list_test_cases(TestImport_P521) + tests += list_test_cases(TestExport_P256) + tests += list_test_cases(TestExport_P384) + tests += list_test_cases(TestExport_P521) + except MissingTestVectorException: + pass + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_import_RSA.py b/env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_import_RSA.py new file mode 100644 index 0000000..a2963c6 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/PublicKey/test_import_RSA.py @@ -0,0 +1,585 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/PublicKey/test_importKey.py: Self-test for importing RSA keys +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +import os +import re +import errno +import warnings +import unittest + +from Crypto.PublicKey import RSA +from Crypto.SelfTest.st_common import a2b_hex, list_test_cases +from Crypto.Util.py3compat import b, tostr, FileNotFoundError +from Crypto.Util.number import inverse +from Crypto.Util import asn1 + +try: + import pycryptodome_test_vectors # type: ignore + test_vectors_available = True +except ImportError: + test_vectors_available = False + + +def load_file(file_name, mode="rb"): + results = None + + try: + if not test_vectors_available: + raise FileNotFoundError(errno.ENOENT, + os.strerror(errno.ENOENT), + file_name) + + dir_comps = ("PublicKey", "RSA") + init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) + full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) + with open(full_file_name, mode) as file_in: + results = file_in.read() + + except FileNotFoundError: + warnings.warn("Warning: skipping extended tests for RSA", + UserWarning, + stacklevel=2) + + return results + + +def der2pem(der, text='PUBLIC'): + import binascii + chunks = [binascii.b2a_base64(der[i:i+48]) for i in range(0, len(der), 48)] + pem = b('-----BEGIN %s KEY-----\n' % text) + pem += b('').join(chunks) + pem += b('-----END %s KEY-----' % text) + return pem + + +class ImportKeyTests(unittest.TestCase): + # 512-bit RSA key generated with openssl + rsaKeyPEM = u'''-----BEGIN RSA PRIVATE KEY----- +MIIBOwIBAAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+TLr7UkvEtFrRhDDKMtuII +q19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQJACUSDEp8RTe32ftq8IwG8 +Wojl5mAd1wFiIOrZ/Uv8b963WJOJiuQcVN29vxU5+My9GPZ7RA3hrDBEAoHUDPrI +OQIhAPIPLz4dphiD9imAkivY31Rc5AfHJiQRA7XixTcjEkojAiEAyh/pJHks/Mlr ++rdPNEpotBjfV4M4BkgGAA/ipcmaAjcCIQCHvhwwKVBLzzTscT2HeUdEeBMoiXXK +JACAr3sJQJGxIQIgarRp+m1WSKV1MciwMaTOnbU7wxFs9DP1pva76lYBzgUCIQC9 +n0CnZCJ6IZYqSt0H5N7+Q+2Ro64nuwV/OSQfM6sBwQ== +-----END RSA PRIVATE KEY-----''' + + # As above, but this 
is actually an unencrypted PKCS#8 key + rsaKeyPEM8 = u'''-----BEGIN PRIVATE KEY----- +MIIBVQIBADANBgkqhkiG9w0BAQEFAASCAT8wggE7AgEAAkEAvx4nkAqgiyNRGlwS +ga5tkzEsPv6RP5MuvtSS8S0WtGEMMoy24girX0WsvilQgzKY8xIsGfeEkt7fQPDj +wZAzhQIDAQABAkAJRIMSnxFN7fZ+2rwjAbxaiOXmYB3XAWIg6tn9S/xv3rdYk4mK +5BxU3b2/FTn4zL0Y9ntEDeGsMEQCgdQM+sg5AiEA8g8vPh2mGIP2KYCSK9jfVFzk +B8cmJBEDteLFNyMSSiMCIQDKH+kkeSz8yWv6t080Smi0GN9XgzgGSAYAD+KlyZoC +NwIhAIe+HDApUEvPNOxxPYd5R0R4EyiJdcokAICvewlAkbEhAiBqtGn6bVZIpXUx +yLAxpM6dtTvDEWz0M/Wm9rvqVgHOBQIhAL2fQKdkInohlipK3Qfk3v5D7ZGjrie7 +BX85JB8zqwHB +-----END PRIVATE KEY-----''' + + # The same RSA private key as in rsaKeyPEM, but now encrypted + rsaKeyEncryptedPEM = ( + + # PEM encryption + # With DES and passphrase 'test' + ('test', u'''-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-CBC,AF8F9A40BD2FA2FC + +Ckl9ex1kaVEWhYC2QBmfaF+YPiR4NFkRXA7nj3dcnuFEzBnY5XULupqQpQI3qbfA +u8GYS7+b3toWWiHZivHbAAUBPDIZG9hKDyB9Sq2VMARGsX1yW1zhNvZLIiVJzUHs +C6NxQ1IJWOXzTew/xM2I26kPwHIvadq+/VaT8gLQdjdH0jOiVNaevjWnLgrn1mLP +BCNRMdcexozWtAFNNqSzfW58MJL2OdMi21ED184EFytIc1BlB+FZiGZduwKGuaKy +9bMbdb/1PSvsSzPsqW7KSSrTw6MgJAFJg6lzIYvR5F4poTVBxwBX3+EyEmShiaNY +IRX3TgQI0IjrVuLmvlZKbGWP18FXj7I7k9tSsNOOzllTTdq3ny5vgM3A+ynfAaxp +dysKznQ6P+IoqML1WxAID4aGRMWka+uArOJ148Rbj9s= +-----END RSA PRIVATE KEY-----'''), + + # PKCS8 encryption + ('winter', u'''-----BEGIN ENCRYPTED PRIVATE KEY----- +MIIBpjBABgkqhkiG9w0BBQ0wMzAbBgkqhkiG9w0BBQwwDgQIeZIsbW3O+JcCAggA +MBQGCCqGSIb3DQMHBAgSM2p0D8FilgSCAWBhFyP2tiGKVpGj3mO8qIBzinU60ApR +3unvP+N6j7LVgnV2lFGaXbJ6a1PbQXe+2D6DUyBLo8EMXrKKVLqOMGkFMHc0UaV6 +R6MmrsRDrbOqdpTuVRW+NVd5J9kQQh4xnfU/QrcPPt7vpJvSf4GzG0n666Ki50OV +M/feuVlIiyGXY6UWdVDpcOV72cq02eNUs/1JWdh2uEBvA9fCL0c07RnMrdT+CbJQ +NjJ7f8ULtp7xvR9O3Al/yJ4Wv3i4VxF1f3MCXzhlUD4I0ONlr0kJWgeQ80q/cWhw +ntvgJwnCn2XR1h6LA8Wp+0ghDTsL2NhJpWd78zClGhyU4r3hqu1XDjoXa7YCXCix +jCV15+ViDJzlNCwg+W6lRg18sSLkCT7alviIE0U5tHc6UPbbHwT5QqAxAABaP+nZ +CGqJGyiwBzrKebjgSm/KRd4C91XqcsysyH2kKPfT51MLAoD4xelOURBP +-----END 
ENCRYPTED PRIVATE KEY-----''' + ), + ) + + rsaPublicKeyPEM = u'''-----BEGIN PUBLIC KEY----- +MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+T +Lr7UkvEtFrRhDDKMtuIIq19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQ== +-----END PUBLIC KEY-----''' + + # Obtained using 'ssh-keygen -i -m PKCS8 -f rsaPublicKeyPEM' + rsaPublicKeyOpenSSH = b('''ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAQQC/HieQCqCLI1EaXBKBrm2TMSw+/pE/ky6+1JLxLRa0YQwyjLbiCKtfRay+KVCDMpjzEiwZ94SS3t9A8OPBkDOF comment\n''') + + # The private key, in PKCS#1 format encoded with DER + rsaKeyDER = a2b_hex( + '''3082013b020100024100bf1e27900aa08b23511a5c1281ae6d93312c3efe + 913f932ebed492f12d16b4610c328cb6e208ab5f45acbe2950833298f312 + 2c19f78492dedf40f0e3c190338502030100010240094483129f114dedf6 + 7edabc2301bc5a88e5e6601dd7016220ead9fd4bfc6fdeb75893898ae41c + 54ddbdbf1539f8ccbd18f67b440de1ac30440281d40cfac839022100f20f + 2f3e1da61883f62980922bd8df545ce407c726241103b5e2c53723124a23 + 022100ca1fe924792cfcc96bfab74f344a68b418df578338064806000fe2 + a5c99a023702210087be1c3029504bcf34ec713d877947447813288975ca + 240080af7b094091b12102206ab469fa6d5648a57531c8b031a4ce9db53b + c3116cf433f5a6f6bbea5601ce05022100bd9f40a764227a21962a4add07 + e4defe43ed91a3ae27bb057f39241f33ab01c1 + '''.replace(" ","")) + + # The private key, in unencrypted PKCS#8 format encoded with DER + rsaKeyDER8 = a2b_hex( + '''30820155020100300d06092a864886f70d01010105000482013f3082013 + b020100024100bf1e27900aa08b23511a5c1281ae6d93312c3efe913f932 + ebed492f12d16b4610c328cb6e208ab5f45acbe2950833298f3122c19f78 + 492dedf40f0e3c190338502030100010240094483129f114dedf67edabc2 + 301bc5a88e5e6601dd7016220ead9fd4bfc6fdeb75893898ae41c54ddbdb + f1539f8ccbd18f67b440de1ac30440281d40cfac839022100f20f2f3e1da + 61883f62980922bd8df545ce407c726241103b5e2c53723124a23022100c + a1fe924792cfcc96bfab74f344a68b418df578338064806000fe2a5c99a0 + 23702210087be1c3029504bcf34ec713d877947447813288975ca240080a + f7b094091b12102206ab469fa6d5648a57531c8b031a4ce9db53bc3116cf + 
433f5a6f6bbea5601ce05022100bd9f40a764227a21962a4add07e4defe4 + 3ed91a3ae27bb057f39241f33ab01c1 + '''.replace(" ","")) + + rsaPublicKeyDER = a2b_hex( + '''305c300d06092a864886f70d0101010500034b003048024100bf1e27900a + a08b23511a5c1281ae6d93312c3efe913f932ebed492f12d16b4610c328c + b6e208ab5f45acbe2950833298f3122c19f78492dedf40f0e3c190338502 + 03010001 + '''.replace(" ","")) + + n = int('BF 1E 27 90 0A A0 8B 23 51 1A 5C 12 81 AE 6D 93 31 2C 3E FE 91 3F 93 2E BE D4 92 F1 2D 16 B4 61 0C 32 8C B6 E2 08 AB 5F 45 AC BE 29 50 83 32 98 F3 12 2C 19 F7 84 92 DE DF 40 F0 E3 C1 90 33 85'.replace(" ",""),16) + e = 65537 + d = int('09 44 83 12 9F 11 4D ED F6 7E DA BC 23 01 BC 5A 88 E5 E6 60 1D D7 01 62 20 EA D9 FD 4B FC 6F DE B7 58 93 89 8A E4 1C 54 DD BD BF 15 39 F8 CC BD 18 F6 7B 44 0D E1 AC 30 44 02 81 D4 0C FA C8 39'.replace(" ",""),16) + p = int('00 F2 0F 2F 3E 1D A6 18 83 F6 29 80 92 2B D8 DF 54 5C E4 07 C7 26 24 11 03 B5 E2 C5 37 23 12 4A 23'.replace(" ",""),16) + q = int('00 CA 1F E9 24 79 2C FC C9 6B FA B7 4F 34 4A 68 B4 18 DF 57 83 38 06 48 06 00 0F E2 A5 C9 9A 02 37'.replace(" ",""),16) + + # This is q^{-1} mod p). fastmath and slowmath use pInv (p^{-1} + # mod q) instead! 
+ qInv = int('00 BD 9F 40 A7 64 22 7A 21 96 2A 4A DD 07 E4 DE FE 43 ED 91 A3 AE 27 BB 05 7F 39 24 1F 33 AB 01 C1'.replace(" ",""),16) + pInv = inverse(p,q) + + def testImportKey1(self): + """Verify import of RSAPrivateKey DER SEQUENCE""" + key = RSA.importKey(self.rsaKeyDER) + self.failUnless(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey2(self): + """Verify import of SubjectPublicKeyInfo DER SEQUENCE""" + key = RSA.importKey(self.rsaPublicKeyDER) + self.failIf(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + + def testImportKey3unicode(self): + """Verify import of RSAPrivateKey DER SEQUENCE, encoded with PEM as unicode""" + key = RSA.importKey(self.rsaKeyPEM) + self.assertEqual(key.has_private(),True) # assert_ + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey3bytes(self): + """Verify import of RSAPrivateKey DER SEQUENCE, encoded with PEM as byte string""" + key = RSA.importKey(b(self.rsaKeyPEM)) + self.assertEqual(key.has_private(),True) # assert_ + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey4unicode(self): + """Verify import of RSAPrivateKey DER SEQUENCE, encoded with PEM as unicode""" + key = RSA.importKey(self.rsaPublicKeyPEM) + self.assertEqual(key.has_private(),False) # failIf + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + + def testImportKey4bytes(self): + """Verify import of SubjectPublicKeyInfo DER SEQUENCE, encoded with PEM as byte string""" + key = RSA.importKey(b(self.rsaPublicKeyPEM)) + self.assertEqual(key.has_private(),False) # failIf + 
self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + + def testImportKey5(self): + """Verifies that the imported key is still a valid RSA pair""" + key = RSA.importKey(self.rsaKeyPEM) + idem = key._encrypt(key._decrypt(89)) + self.assertEqual(idem, 89) + + def testImportKey6(self): + """Verifies that the imported key is still a valid RSA pair""" + key = RSA.importKey(self.rsaKeyDER) + idem = key._encrypt(key._decrypt(65)) + self.assertEqual(idem, 65) + + def testImportKey7(self): + """Verify import of OpenSSH public key""" + key = RSA.importKey(self.rsaPublicKeyOpenSSH) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + + def testImportKey8(self): + """Verify import of encrypted PrivateKeyInfo DER SEQUENCE""" + for t in self.rsaKeyEncryptedPEM: + key = RSA.importKey(t[1], t[0]) + self.failUnless(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey9(self): + """Verify import of unencrypted PrivateKeyInfo DER SEQUENCE""" + key = RSA.importKey(self.rsaKeyDER8) + self.failUnless(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey10(self): + """Verify import of unencrypted PrivateKeyInfo DER SEQUENCE, encoded with PEM""" + key = RSA.importKey(self.rsaKeyPEM8) + self.failUnless(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey11(self): + """Verify import of RSAPublicKey DER SEQUENCE""" + der = asn1.DerSequence([17, 3]).encode() + key = RSA.importKey(der) + self.assertEqual(key.n, 17) + self.assertEqual(key.e, 3) + + def testImportKey12(self): + """Verify import of 
RSAPublicKey DER SEQUENCE, encoded with PEM""" + der = asn1.DerSequence([17, 3]).encode() + pem = der2pem(der) + key = RSA.importKey(pem) + self.assertEqual(key.n, 17) + self.assertEqual(key.e, 3) + + def test_import_key_windows_cr_lf(self): + pem_cr_lf = "\r\n".join(self.rsaKeyPEM.splitlines()) + key = RSA.importKey(pem_cr_lf) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def test_import_empty(self): + self.assertRaises(ValueError, RSA.import_key, b"") + + ### + def testExportKey1(self): + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + derKey = key.export_key("DER") + self.assertEqual(derKey, self.rsaKeyDER) + + def testExportKey2(self): + key = RSA.construct([self.n, self.e]) + derKey = key.export_key("DER") + self.assertEqual(derKey, self.rsaPublicKeyDER) + + def testExportKey3(self): + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + pemKey = key.export_key("PEM") + self.assertEqual(pemKey, b(self.rsaKeyPEM)) + + def testExportKey4(self): + key = RSA.construct([self.n, self.e]) + pemKey = key.export_key("PEM") + self.assertEqual(pemKey, b(self.rsaPublicKeyPEM)) + + def testExportKey5(self): + key = RSA.construct([self.n, self.e]) + openssh_1 = key.export_key("OpenSSH").split() + openssh_2 = self.rsaPublicKeyOpenSSH.split() + self.assertEqual(openssh_1[0], openssh_2[0]) + self.assertEqual(openssh_1[1], openssh_2[1]) + + def testExportKey7(self): + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + derKey = key.export_key("DER", pkcs=8) + self.assertEqual(derKey, self.rsaKeyDER8) + + def testExportKey8(self): + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + pemKey = key.export_key("PEM", pkcs=8) + self.assertEqual(pemKey, b(self.rsaKeyPEM8)) + + def testExportKey9(self): + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, 
self.pInv]) + self.assertRaises(ValueError, key.export_key, "invalid-format") + + def testExportKey10(self): + # Export and re-import the encrypted key. It must match. + # PEM envelope, PKCS#1, old PEM encryption + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + outkey = key.export_key('PEM', 'test') + self.failUnless(tostr(outkey).find('4,ENCRYPTED')!=-1) + self.failUnless(tostr(outkey).find('BEGIN RSA PRIVATE KEY')!=-1) + inkey = RSA.importKey(outkey, 'test') + self.assertEqual(key.n, inkey.n) + self.assertEqual(key.e, inkey.e) + self.assertEqual(key.d, inkey.d) + + def testExportKey11(self): + # Export and re-import the encrypted key. It must match. + # PEM envelope, PKCS#1, old PEM encryption + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + outkey = key.export_key('PEM', 'test', pkcs=1) + self.failUnless(tostr(outkey).find('4,ENCRYPTED')!=-1) + self.failUnless(tostr(outkey).find('BEGIN RSA PRIVATE KEY')!=-1) + inkey = RSA.importKey(outkey, 'test') + self.assertEqual(key.n, inkey.n) + self.assertEqual(key.e, inkey.e) + self.assertEqual(key.d, inkey.d) + + def testExportKey12(self): + # Export and re-import the encrypted key. It must match. + # PEM envelope, PKCS#8, old PEM encryption + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + outkey = key.export_key('PEM', 'test', pkcs=8) + self.failUnless(tostr(outkey).find('4,ENCRYPTED')!=-1) + self.failUnless(tostr(outkey).find('BEGIN PRIVATE KEY')!=-1) + inkey = RSA.importKey(outkey, 'test') + self.assertEqual(key.n, inkey.n) + self.assertEqual(key.e, inkey.e) + self.assertEqual(key.d, inkey.d) + + def testExportKey13(self): + # Export and re-import the encrypted key. It must match. 
+ # PEM envelope, PKCS#8, PKCS#8 encryption + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + outkey = key.export_key('PEM', 'test', pkcs=8, + protection='PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC') + self.failUnless(tostr(outkey).find('4,ENCRYPTED')==-1) + self.failUnless(tostr(outkey).find('BEGIN ENCRYPTED PRIVATE KEY')!=-1) + inkey = RSA.importKey(outkey, 'test') + self.assertEqual(key.n, inkey.n) + self.assertEqual(key.e, inkey.e) + self.assertEqual(key.d, inkey.d) + + def testExportKey14(self): + # Export and re-import the encrypted key. It must match. + # DER envelope, PKCS#8, PKCS#8 encryption + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + outkey = key.export_key('DER', 'test', pkcs=8) + inkey = RSA.importKey(outkey, 'test') + self.assertEqual(key.n, inkey.n) + self.assertEqual(key.e, inkey.e) + self.assertEqual(key.d, inkey.d) + + def testExportKey15(self): + # Verify that an error condition is detected when trying to + # use a password with DER encoding and PKCS#1. 
+ key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + self.assertRaises(ValueError, key.export_key, 'DER', 'test', 1) + + def test_import_key(self): + """Verify that import_key is an alias to importKey""" + key = RSA.import_key(self.rsaPublicKeyDER) + self.failIf(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + + def test_exportKey(self): + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + self.assertEqual(key.export_key(), key.exportKey()) + + +class ImportKeyFromX509Cert(unittest.TestCase): + + def test_x509v1(self): + + # Sample V1 certificate with a 1024 bit RSA key + x509_v1_cert = """ +-----BEGIN CERTIFICATE----- +MIICOjCCAaMCAQEwDQYJKoZIhvcNAQEEBQAwfjENMAsGA1UEChMEQWNtZTELMAkG +A1UECxMCUkQxHDAaBgkqhkiG9w0BCQEWDXNwYW1AYWNtZS5vcmcxEzARBgNVBAcT +Ck1ldHJvcG9saXMxETAPBgNVBAgTCE5ldyBZb3JrMQswCQYDVQQGEwJVUzENMAsG +A1UEAxMEdGVzdDAeFw0xNDA3MTExOTU3MjRaFw0xNzA0MDYxOTU3MjRaME0xCzAJ +BgNVBAYTAlVTMREwDwYDVQQIEwhOZXcgWW9yazENMAsGA1UEChMEQWNtZTELMAkG +A1UECxMCUkQxDzANBgNVBAMTBmxhdHZpYTCBnzANBgkqhkiG9w0BAQEFAAOBjQAw +gYkCgYEAyG+kytdRj3TFbRmHDYp3TXugVQ81chew0qeOxZWOz80IjtWpgdOaCvKW +NCuc8wUR9BWrEQW+39SaRMLiQfQtyFSQZijc3nsEBu/Lo4uWZ0W/FHDRVSvkJA/V +Ex5NL5ikI+wbUeCV5KajGNDalZ8F1pk32+CBs8h1xNx5DyxuEHUCAwEAATANBgkq +hkiG9w0BAQQFAAOBgQCVQF9Y//Q4Psy+umEM38pIlbZ2hxC5xNz/MbVPwuCkNcGn +KYNpQJP+JyVTsPpO8RLZsAQDzRueMI3S7fbbwTzAflN0z19wvblvu93xkaBytVok +9VBAH28olVhy9b1MMeg2WOt5sUEQaFNPnwwsyiY9+HsRpvpRnPSQF+kyYVsshQ== +-----END CERTIFICATE----- + """.strip() + + # RSA public key as dumped by openssl + exponent = 65537 + modulus_str = """ +00:c8:6f:a4:ca:d7:51:8f:74:c5:6d:19:87:0d:8a: +77:4d:7b:a0:55:0f:35:72:17:b0:d2:a7:8e:c5:95: +8e:cf:cd:08:8e:d5:a9:81:d3:9a:0a:f2:96:34:2b: +9c:f3:05:11:f4:15:ab:11:05:be:df:d4:9a:44:c2: +e2:41:f4:2d:c8:54:90:66:28:dc:de:7b:04:06:ef: +cb:a3:8b:96:67:45:bf:14:70:d1:55:2b:e4:24:0f: +d5:13:1e:4d:2f:98:a4:23:ec:1b:51:e0:95:e4:a6: 
+a3:18:d0:da:95:9f:05:d6:99:37:db:e0:81:b3:c8: +75:c4:dc:79:0f:2c:6e:10:75 + """ + modulus = int(re.sub("[^0-9a-f]","", modulus_str), 16) + + key = RSA.importKey(x509_v1_cert) + self.assertEqual(key.e, exponent) + self.assertEqual(key.n, modulus) + self.failIf(key.has_private()) + + def test_x509v3(self): + + # Sample V3 certificate with a 1024 bit RSA key + x509_v3_cert = """ +-----BEGIN CERTIFICATE----- +MIIEcjCCAlqgAwIBAgIBATANBgkqhkiG9w0BAQsFADBhMQswCQYDVQQGEwJVUzEL +MAkGA1UECAwCTUQxEjAQBgNVBAcMCUJhbHRpbW9yZTEQMA4GA1UEAwwHVGVzdCBD +QTEfMB0GCSqGSIb3DQEJARYQdGVzdEBleGFtcGxlLmNvbTAeFw0xNDA3MTIwOTM1 +MTJaFw0xNzA0MDcwOTM1MTJaMEQxCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJNRDES +MBAGA1UEBwwJQmFsdGltb3JlMRQwEgYDVQQDDAtUZXN0IFNlcnZlcjCBnzANBgkq +hkiG9w0BAQEFAAOBjQAwgYkCgYEA/S7GJV2OcFdyNMQ4K75KrYFtMEn3VnEFdPHa +jyS37XlMxSh0oS4GeTGVUCJInl5Cpsv8WQdh03FfeOdvzp5IZ46OcjeOPiWnmjgl +2G5j7e2bDH7RSchGV+OD6Fb1Agvuu2/9iy8fdf3rPQ/7eAddzKUrzwacVbnW+tg2 +QtSXKRcCAwEAAaOB1TCB0jAdBgNVHQ4EFgQU/WwCX7FfWMIPDFfJ+I8a2COG+l8w +HwYDVR0jBBgwFoAUa0hkif3RMaraiWtsOOZZlLu9wJwwCQYDVR0TBAIwADALBgNV +HQ8EBAMCBeAwSgYDVR0RBEMwQYILZXhhbXBsZS5jb22CD3d3dy5leGFtcGxlLmNv +bYIQbWFpbC5leGFtcGxlLmNvbYIPZnRwLmV4YW1wbGUuY29tMCwGCWCGSAGG+EIB +DQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTANBgkqhkiG9w0BAQsF +AAOCAgEAvO6xfdsGbnoK4My3eJthodTAjMjPwFVY133LH04QLcCv54TxKhtUg1fi +PgdjVe1HpTytPBfXy2bSZbXAN0abZCtw1rYrnn7o1g2pN8iypVq3zVn0iMTzQzxs +zEPO3bpR/UhNSf90PmCsS5rqZpAAnXSaAy1ClwHWk/0eG2pYkhE1m1ABVMN2lsAW +e9WxGk6IFqaI9O37NYQwmEypMs4DC+ECJEvbPFiqi3n0gbXCZJJ6omDA5xJldaYK +Oa7KR3s/qjBsu9UAiWpLBuFoSTHIF2aeRKRFmUdmzwo43eVPep65pY6eQ4AdL2RF +rqEuINbGlzI5oQyYhu71IwB+iPZXaZZPlwjLgOsuad/p2hOgDb5WxUi8FnDPursQ +ujfpIpmrOP/zpvvQWnwePI3lI+5n41kTBSbefXEdv6rXpHk3QRzB90uPxnXPdxSC +16ASA8bQT5an/1AgoE3k9CrcD2K0EmgaX0YI0HUhkyzbkg34EhpWJ6vvRUbRiNRo +9cIbt/ya9Y9u0Ja8GLXv6dwX0l0IdJMkL8KifXUFAVCujp1FBrr/gdmwQn8itANy ++qbnWSxmOvtaY0zcaFAcONuHva0h51/WqXOMO1eb8PhR4HIIYU8p1oBwQp7dSni8 +THDi1F+GG5PsymMDj5cWK42f+QzjVw5PrVmFqqrrEoMlx8DWh5Y= +-----END 
CERTIFICATE----- +""".strip() + + # RSA public key as dumped by openssl + exponent = 65537 + modulus_str = """ +00:fd:2e:c6:25:5d:8e:70:57:72:34:c4:38:2b:be: +4a:ad:81:6d:30:49:f7:56:71:05:74:f1:da:8f:24: +b7:ed:79:4c:c5:28:74:a1:2e:06:79:31:95:50:22: +48:9e:5e:42:a6:cb:fc:59:07:61:d3:71:5f:78:e7: +6f:ce:9e:48:67:8e:8e:72:37:8e:3e:25:a7:9a:38: +25:d8:6e:63:ed:ed:9b:0c:7e:d1:49:c8:46:57:e3: +83:e8:56:f5:02:0b:ee:bb:6f:fd:8b:2f:1f:75:fd: +eb:3d:0f:fb:78:07:5d:cc:a5:2b:cf:06:9c:55:b9: +d6:fa:d8:36:42:d4:97:29:17 + """ + modulus = int(re.sub("[^0-9a-f]","", modulus_str), 16) + + key = RSA.importKey(x509_v3_cert) + self.assertEqual(key.e, exponent) + self.assertEqual(key.n, modulus) + self.failIf(key.has_private()) + + +class TestImport_2048(unittest.TestCase): + + def test_import_openssh_public(self): + key_file_ref = load_file("rsa2048_private.pem") + key_file = load_file("rsa2048_public_openssh.txt") + + # Skip test if test vectors are not installed + if None in (key_file_ref, key_file): + return + + key_ref = RSA.import_key(key_file_ref).public_key() + key = RSA.import_key(key_file) + self.assertEqual(key_ref, key) + + def test_import_openssh_private_clear(self): + key_file = load_file("rsa2048_private_openssh.pem") + key_file_old = load_file("rsa2048_private_openssh_old.pem") + + # Skip test if test vectors are not installed + if None in (key_file_old, key_file): + return + + key = RSA.import_key(key_file) + key_old = RSA.import_key(key_file_old) + + self.assertEqual(key, key_old) + + def test_import_openssh_private_password(self): + key_file = load_file("rsa2048_private_openssh_pwd.pem") + key_file_old = load_file("rsa2048_private_openssh_pwd_old.pem") + + # Skip test if test vectors are not installed + if None in (key_file_old, key_file): + return + + key = RSA.import_key(key_file, b"password") + key_old = RSA.import_key(key_file_old) + self.assertEqual(key, key_old) + + +if __name__ == '__main__': + unittest.main() + + +def get_tests(config={}): + tests = [] + 
tests += list_test_cases(ImportKeyTests) + tests += list_test_cases(ImportKeyFromX509Cert) + tests += list_test_cases(TestImport_2048) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Random/__init__.py b/env/Lib/site-packages/Crypto/SelfTest/Random/__init__.py new file mode 100644 index 0000000..53061cc --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Random/__init__.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Random/__init__.py: Self-test for random number generation modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test for random number generators""" + +__revision__ = "$Id$" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Random import test_random; tests += test_random.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Random/test_random.py b/env/Lib/site-packages/Crypto/SelfTest/Random/test_random.py new file mode 100644 index 0000000..8fadc53 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Random/test_random.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_generic.py: Self-test for the Crypto.Random.new() function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.Random.new()""" + +import sys +import unittest +from Crypto.Util.py3compat import b + +class SimpleTest(unittest.TestCase): + def runTest(self): + """Crypto.Random.new()""" + # Import the Random module and try to use it + from Crypto import Random + randobj = Random.new() + x = randobj.read(16) + y = randobj.read(16) + self.assertNotEqual(x, y) + z = Random.get_random_bytes(16) + self.assertNotEqual(x, z) + self.assertNotEqual(y, z) + # Test the Random.random module, which + # implements a subset of Python's random API + # Not implemented: + # seed(), getstate(), setstate(), jumpahead() + # random(), uniform(), triangular(), betavariate() + # expovariate(), gammavariate(), gauss(), + # longnormvariate(), normalvariate(), + # vonmisesvariate(), paretovariate() + # weibullvariate() + # WichmannHill(), whseed(), SystemRandom() + from Crypto.Random import random + x = random.getrandbits(16*8) + y = random.getrandbits(16*8) + self.assertNotEqual(x, y) + # Test randrange + if x>y: + start = y + stop = x + else: + start = x + stop = y + for step in range(1,10): + x = random.randrange(start,stop,step) + y = random.randrange(start,stop,step) + self.assertNotEqual(x, y) + self.assertEqual(start <= x < stop, True) + self.assertEqual(start <= y < stop, True) + self.assertEqual((x - start) % step, 0) + self.assertEqual((y - start) % step, 0) + for i in range(10): + self.assertEqual(random.randrange(1,2), 1) + self.assertRaises(ValueError, random.randrange, start, start) + self.assertRaises(ValueError, random.randrange, stop, start, step) + self.assertRaises(TypeError, random.randrange, start, stop, step, step) + self.assertRaises(TypeError, random.randrange, start, stop, "1") + self.assertRaises(TypeError, random.randrange, "1", stop, step) + self.assertRaises(TypeError, random.randrange, 1, "2", step) + self.assertRaises(ValueError, random.randrange, start, stop, 0) + # Test 
randint + x = random.randint(start,stop) + y = random.randint(start,stop) + self.assertNotEqual(x, y) + self.assertEqual(start <= x <= stop, True) + self.assertEqual(start <= y <= stop, True) + for i in range(10): + self.assertEqual(random.randint(1,1), 1) + self.assertRaises(ValueError, random.randint, stop, start) + self.assertRaises(TypeError, random.randint, start, stop, step) + self.assertRaises(TypeError, random.randint, "1", stop) + self.assertRaises(TypeError, random.randint, 1, "2") + # Test choice + seq = range(10000) + x = random.choice(seq) + y = random.choice(seq) + self.assertNotEqual(x, y) + self.assertEqual(x in seq, True) + self.assertEqual(y in seq, True) + for i in range(10): + self.assertEqual(random.choice((1,2,3)) in (1,2,3), True) + self.assertEqual(random.choice([1,2,3]) in [1,2,3], True) + if sys.version_info[0] == 3: + self.assertEqual(random.choice(bytearray(b('123'))) in bytearray(b('123')), True) + self.assertEqual(1, random.choice([1])) + self.assertRaises(IndexError, random.choice, []) + self.assertRaises(TypeError, random.choice, 1) + # Test shuffle. Lacks random parameter to specify function. 
+ # Make copies of seq + seq = range(500) + x = list(seq) + y = list(seq) + random.shuffle(x) + random.shuffle(y) + self.assertNotEqual(x, y) + self.assertEqual(len(seq), len(x)) + self.assertEqual(len(seq), len(y)) + for i in range(len(seq)): + self.assertEqual(x[i] in seq, True) + self.assertEqual(y[i] in seq, True) + self.assertEqual(seq[i] in x, True) + self.assertEqual(seq[i] in y, True) + z = [1] + random.shuffle(z) + self.assertEqual(z, [1]) + if sys.version_info[0] == 3: + z = bytearray(b('12')) + random.shuffle(z) + self.assertEqual(b('1') in z, True) + self.assertRaises(TypeError, random.shuffle, b('12')) + self.assertRaises(TypeError, random.shuffle, 1) + self.assertRaises(TypeError, random.shuffle, "11") + self.assertRaises(TypeError, random.shuffle, (1,2)) + # 2to3 wraps a list() around it, alas - but I want to shoot + # myself in the foot here! :D + # if sys.version_info[0] == 3: + # self.assertRaises(TypeError, random.shuffle, range(3)) + # Test sample + x = random.sample(seq, 20) + y = random.sample(seq, 20) + self.assertNotEqual(x, y) + for i in range(20): + self.assertEqual(x[i] in seq, True) + self.assertEqual(y[i] in seq, True) + z = random.sample([1], 1) + self.assertEqual(z, [1]) + z = random.sample((1,2,3), 1) + self.assertEqual(z[0] in (1,2,3), True) + z = random.sample("123", 1) + self.assertEqual(z[0] in "123", True) + z = random.sample(range(3), 1) + self.assertEqual(z[0] in range(3), True) + if sys.version_info[0] == 3: + z = random.sample(b("123"), 1) + self.assertEqual(z[0] in b("123"), True) + z = random.sample(bytearray(b("123")), 1) + self.assertEqual(z[0] in bytearray(b("123")), True) + self.assertRaises(TypeError, random.sample, 1) + +def get_tests(config={}): + return [SimpleTest()] + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Signature/__init__.py 
b/env/Lib/site-packages/Crypto/SelfTest/Signature/__init__.py new file mode 100644 index 0000000..88c7f34 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Signature/__init__.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Signature/__init__.py: Self-test for signature modules +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test for signature modules""" + +import os + +def get_tests(config={}): + tests = [] + from . import test_pkcs1_15; tests += test_pkcs1_15.get_tests(config=config) + from . import test_pss; tests += test_pss.get_tests(config=config) + from . 
import test_dss; tests += test_dss.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Signature/test_dss.py b/env/Lib/site-packages/Crypto/SelfTest/Signature/test_dss.py new file mode 100644 index 0000000..1d23e09 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Signature/test_dss.py @@ -0,0 +1,1132 @@ +# +# SelfTest/Signature/test_dss.py: Self-test for DSS signatures +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import re +import unittest +from binascii import hexlify, unhexlify + +from Crypto.Util.py3compat import tobytes, bord, bchr + +from Crypto.Hash import (SHA1, SHA224, SHA256, SHA384, SHA512, SHA3_256, + SHA3_384, SHA3_512) +from Crypto.Signature import DSS +from Crypto.PublicKey import DSA, ECC +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors, load_test_vectors_wycheproof +from Crypto.Util.number import bytes_to_long, long_to_bytes + + +def t2b(hexstring): + ws = hexstring.replace(" ", "").replace("\n", "") + return unhexlify(tobytes(ws)) + + +def t2l(hexstring): + ws = hexstring.replace(" ", "").replace("\n", "") + return int(ws, 16) + + +def load_hash_by_name(hash_name): + return __import__("Crypto.Hash." + hash_name, globals(), locals(), ["new"]) + + +class StrRNG: + + def __init__(self, randomness): + length = len(randomness) + self._idx = 0 + # Fix required to get the right K (see how randint() works!) 
+ self._randomness = long_to_bytes(bytes_to_long(randomness) - 1, length) + + def __call__(self, n): + out = self._randomness[self._idx:self._idx + n] + self._idx += n + return out + + +class FIPS_DSA_Tests(unittest.TestCase): + + # 1st 1024 bit key from SigGen.txt + P = 0xa8f9cd201e5e35d892f85f80e4db2599a5676a3b1d4f190330ed3256b26d0e80a0e49a8fffaaad2a24f472d2573241d4d6d6c7480c80b4c67bb4479c15ada7ea8424d2502fa01472e760241713dab025ae1b02e1703a1435f62ddf4ee4c1b664066eb22f2e3bf28bb70a2a76e4fd5ebe2d1229681b5b06439ac9c7e9d8bde283 + Q = 0xf85f0f83ac4df7ea0cdf8f469bfeeaea14156495 + G = 0x2b3152ff6c62f14622b8f48e59f8af46883b38e79b8c74deeae9df131f8b856e3ad6c8455dab87cc0da8ac973417ce4f7878557d6cdf40b35b4a0ca3eb310c6a95d68ce284ad4e25ea28591611ee08b8444bd64b25f3f7c572410ddfb39cc728b9c936f85f419129869929cdb909a6a3a99bbe089216368171bd0ba81de4fe33 + X = 0xc53eae6d45323164c7d07af5715703744a63fc3a + Y = 0x313fd9ebca91574e1c2eebe1517c57e0c21b0209872140c5328761bbb2450b33f1b18b409ce9ab7c4cd8fda3391e8e34868357c199e16a6b2eba06d6749def791d79e95d3a4d09b24c392ad89dbf100995ae19c01062056bb14bce005e8731efde175f95b975089bdcdaea562b32786d96f5a31aedf75364008ad4fffebb970b + + key_pub = DSA.construct((Y, G, P, Q)) + key_priv = DSA.construct((Y, G, P, Q, X)) + + def shortDescription(self): + return "FIPS DSA Tests" + + def test_loopback(self): + hashed_msg = SHA512.new(b"test") + signer = DSS.new(self.key_priv, 'fips-186-3') + signature = signer.sign(hashed_msg) + + verifier = DSS.new(self.key_pub, 'fips-186-3') + verifier.verify(hashed_msg, signature) + + def test_negative_unapproved_hashes(self): + """Verify that unapproved hashes are rejected""" + + from Crypto.Hash import RIPEMD160 + + self.description = "Unapproved hash (RIPEMD160) test" + hash_obj = RIPEMD160.new() + signer = DSS.new(self.key_priv, 'fips-186-3') + self.assertRaises(ValueError, signer.sign, hash_obj) + self.assertRaises(ValueError, signer.verify, hash_obj, b"\x00" * 40) + + def test_negative_unknown_modes_encodings(self): + 
"""Verify that unknown modes/encodings are rejected""" + + self.description = "Unknown mode test" + self.assertRaises(ValueError, DSS.new, self.key_priv, 'fips-186-0') + + self.description = "Unknown encoding test" + self.assertRaises(ValueError, DSS.new, self.key_priv, 'fips-186-3', 'xml') + + def test_asn1_encoding(self): + """Verify ASN.1 encoding""" + + self.description = "ASN.1 encoding test" + hash_obj = SHA1.new() + signer = DSS.new(self.key_priv, 'fips-186-3', 'der') + signature = signer.sign(hash_obj) + + # Verify that output looks like a DER SEQUENCE + self.assertEqual(bord(signature[0]), 48) + signer.verify(hash_obj, signature) + + # Verify that ASN.1 parsing fails as expected + signature = bchr(7) + signature[1:] + self.assertRaises(ValueError, signer.verify, hash_obj, signature) + + def test_sign_verify(self): + """Verify public/private method""" + + self.description = "can_sign() test" + signer = DSS.new(self.key_priv, 'fips-186-3') + self.failUnless(signer.can_sign()) + + signer = DSS.new(self.key_pub, 'fips-186-3') + self.failIf(signer.can_sign()) + + +class FIPS_DSA_Tests_KAT(unittest.TestCase): + pass + + +test_vectors_verify = load_test_vectors(("Signature", "DSA"), + "FIPS_186_3_SigVer.rsp", + "Signature Verification 186-3", + {'result': lambda x: x}) or [] + +for idx, tv in enumerate(test_vectors_verify): + + if isinstance(tv, str): + res = re.match(r"\[mod = L=([0-9]+), N=([0-9]+), ([a-zA-Z0-9-]+)\]", tv) + assert(res) + hash_name = res.group(3).replace("-", "") + hash_module = load_hash_by_name(hash_name) + continue + + if hasattr(tv, "p"): + modulus = tv.p + generator = tv.g + suborder = tv.q + continue + + hash_obj = hash_module.new(tv.msg) + + comps = [bytes_to_long(x) for x in (tv.y, generator, modulus, suborder)] + key = DSA.construct(comps, False) # type: ignore + verifier = DSS.new(key, 'fips-186-3') + + def positive_test(self, verifier=verifier, hash_obj=hash_obj, signature=tv.r+tv.s): + verifier.verify(hash_obj, signature) + + def 
negative_test(self, verifier=verifier, hash_obj=hash_obj, signature=tv.r+tv.s): + self.assertRaises(ValueError, verifier.verify, hash_obj, signature) + + if tv.result == 'p': + setattr(FIPS_DSA_Tests_KAT, "test_verify_positive_%d" % idx, positive_test) + else: + setattr(FIPS_DSA_Tests_KAT, "test_verify_negative_%d" % idx, negative_test) + + +test_vectors_sign = load_test_vectors(("Signature", "DSA"), + "FIPS_186_3_SigGen.txt", + "Signature Creation 186-3", + {}) or [] + +for idx, tv in enumerate(test_vectors_sign): + + if isinstance(tv, str): + res = re.match(r"\[mod = L=([0-9]+), N=([0-9]+), ([a-zA-Z0-9-]+)\]", tv) + assert(res) + hash_name = res.group(3).replace("-", "") + hash_module = load_hash_by_name(hash_name) + continue + + if hasattr(tv, "p"): + modulus = tv.p + generator = tv.g + suborder = tv.q + continue + + hash_obj = hash_module.new(tv.msg) + comps_dsa = [bytes_to_long(x) for x in (tv.y, generator, modulus, suborder, tv.x)] + key = DSA.construct(comps_dsa, False) # type: ignore + signer = DSS.new(key, 'fips-186-3', randfunc=StrRNG(tv.k)) + + def new_test(self, signer=signer, hash_obj=hash_obj, signature=tv.r+tv.s): + self.assertEqual(signer.sign(hash_obj), signature) + setattr(FIPS_DSA_Tests_KAT, "test_sign_%d" % idx, new_test) + + +class FIPS_ECDSA_Tests(unittest.TestCase): + + key_priv = ECC.generate(curve="P-256") + key_pub = key_priv.public_key() + + def shortDescription(self): + return "FIPS ECDSA Tests" + + def test_loopback(self): + hashed_msg = SHA512.new(b"test") + signer = DSS.new(self.key_priv, 'fips-186-3') + signature = signer.sign(hashed_msg) + + verifier = DSS.new(self.key_pub, 'fips-186-3') + verifier.verify(hashed_msg, signature) + + def test_negative_unapproved_hashes(self): + """Verify that unapproved hashes are rejected""" + + from Crypto.Hash import SHA1 + + self.description = "Unapproved hash (SHA-1) test" + hash_obj = SHA1.new() + signer = DSS.new(self.key_priv, 'fips-186-3') + self.assertRaises(ValueError, signer.sign, 
hash_obj) + self.assertRaises(ValueError, signer.verify, hash_obj, b"\x00" * 40) + + def test_sign_verify(self): + """Verify public/private method""" + + self.description = "can_sign() test" + signer = DSS.new(self.key_priv, 'fips-186-3') + self.failUnless(signer.can_sign()) + + signer = DSS.new(self.key_pub, 'fips-186-3') + self.failIf(signer.can_sign()) + + def test_negative_unknown_modes_encodings(self): + """Verify that unknown modes/encodings are rejected""" + + self.description = "Unknown mode test" + self.assertRaises(ValueError, DSS.new, self.key_priv, 'fips-186-0') + + self.description = "Unknown encoding test" + self.assertRaises(ValueError, DSS.new, self.key_priv, 'fips-186-3', 'xml') + + def test_asn1_encoding(self): + """Verify ASN.1 encoding""" + + self.description = "ASN.1 encoding test" + hash_obj = SHA256.new() + signer = DSS.new(self.key_priv, 'fips-186-3', 'der') + signature = signer.sign(hash_obj) + + # Verify that output looks like a DER SEQUENCE + self.assertEqual(bord(signature[0]), 48) + signer.verify(hash_obj, signature) + + # Verify that ASN.1 parsing fails as expected + signature = bchr(7) + signature[1:] + self.assertRaises(ValueError, signer.verify, hash_obj, signature) + + +class FIPS_ECDSA_Tests_KAT(unittest.TestCase): + pass + + +test_vectors_verify = load_test_vectors(("Signature", "ECDSA"), + "SigVer.rsp", + "ECDSA Signature Verification 186-3", + {'result': lambda x: x, + 'qx': lambda x: int(x, 16), + 'qy': lambda x: int(x, 16), + }) or [] + +for idx, tv in enumerate(test_vectors_verify): + + if isinstance(tv, str): + res = re.match(r"\[(P-[0-9]+),(SHA-[0-9]+)\]", tv) + assert res + curve_name = res.group(1) + hash_name = res.group(2).replace("-", "") + hash_module = load_hash_by_name(hash_name) + continue + + hash_obj = hash_module.new(tv.msg) + ecc_key = ECC.construct(curve=curve_name, point_x=tv.qx, point_y=tv.qy) + verifier = DSS.new(ecc_key, 'fips-186-3') + + def positive_test(self, verifier=verifier, hash_obj=hash_obj, 
signature=tv.r+tv.s): + verifier.verify(hash_obj, signature) + + def negative_test(self, verifier=verifier, hash_obj=hash_obj, signature=tv.r+tv.s): + self.assertRaises(ValueError, verifier.verify, hash_obj, signature) + + if tv.result.startswith('p'): + setattr(FIPS_ECDSA_Tests_KAT, "test_verify_positive_%d" % idx, positive_test) + else: + setattr(FIPS_ECDSA_Tests_KAT, "test_verify_negative_%d" % idx, negative_test) + + +test_vectors_sign = load_test_vectors(("Signature", "ECDSA"), + "SigGen.txt", + "ECDSA Signature Verification 186-3", + {'d': lambda x: int(x, 16)}) or [] + +for idx, tv in enumerate(test_vectors_sign): + + if isinstance(tv, str): + res = re.match(r"\[(P-[0-9]+),(SHA-[0-9]+)\]", tv) + assert res + curve_name = res.group(1) + hash_name = res.group(2).replace("-", "") + hash_module = load_hash_by_name(hash_name) + continue + + hash_obj = hash_module.new(tv.msg) + ecc_key = ECC.construct(curve=curve_name, d=tv.d) + signer = DSS.new(ecc_key, 'fips-186-3', randfunc=StrRNG(tv.k)) + + def sign_test(self, signer=signer, hash_obj=hash_obj, signature=tv.r+tv.s): + self.assertEqual(signer.sign(hash_obj), signature) + setattr(FIPS_ECDSA_Tests_KAT, "test_sign_%d" % idx, sign_test) + + +class Det_DSA_Tests(unittest.TestCase): + """Tests from rfc6979""" + + # Each key is (p, q, g, x, y, desc) + keys = [ + ( + """ + 86F5CA03DCFEB225063FF830A0C769B9DD9D6153AD91D7CE27F787C43278B447 + E6533B86B18BED6E8A48B784A14C252C5BE0DBF60B86D6385BD2F12FB763ED88 + 73ABFD3F5BA2E0A8C0A59082EAC056935E529DAF7C610467899C77ADEDFC846C + 881870B7B19B2B58F9BE0521A17002E3BDD6B86685EE90B3D9A1B02B782B1779""", + "996F967F6C8E388D9E28D01E205FBA957A5698B1", + """ + 07B0F92546150B62514BB771E2A0C0CE387F03BDA6C56B505209FF25FD3C133D + 89BBCD97E904E09114D9A7DEFDEADFC9078EA544D2E401AEECC40BB9FBBF78FD + 87995A10A1C27CB7789B594BA7EFB5C4326A9FE59A070E136DB77175464ADCA4 + 17BE5DCE2F40D10A46A3A3943F26AB7FD9C0398FF8C76EE0A56826A8A88F1DBD""", + "411602CB19A6CCC34494D79D98EF1E7ED5AF25F7", + """ + 
5DF5E01DED31D0297E274E1691C192FE5868FEF9E19A84776454B100CF16F653 + 92195A38B90523E2542EE61871C0440CB87C322FC4B4D2EC5E1E7EC766E1BE8D + 4CE935437DC11C3C8FD426338933EBFE739CB3465F4D3668C5E473508253B1E6 + 82F65CBDC4FAE93C2EA212390E54905A86E2223170B44EAA7DA5DD9FFCFB7F3B""", + "DSA1024" + ), + ( + """ + 9DB6FB5951B66BB6FE1E140F1D2CE5502374161FD6538DF1648218642F0B5C48 + C8F7A41AADFA187324B87674FA1822B00F1ECF8136943D7C55757264E5A1A44F + FE012E9936E00C1D3E9310B01C7D179805D3058B2A9F4BB6F9716BFE6117C6B5 + B3CC4D9BE341104AD4A80AD6C94E005F4B993E14F091EB51743BF33050C38DE2 + 35567E1B34C3D6A5C0CEAA1A0F368213C3D19843D0B4B09DCB9FC72D39C8DE41 + F1BF14D4BB4563CA28371621CAD3324B6A2D392145BEBFAC748805236F5CA2FE + 92B871CD8F9C36D3292B5509CA8CAA77A2ADFC7BFD77DDA6F71125A7456FEA15 + 3E433256A2261C6A06ED3693797E7995FAD5AABBCFBE3EDA2741E375404AE25B""", + "F2C3119374CE76C9356990B465374A17F23F9ED35089BD969F61C6DDE9998C1F", + """ + 5C7FF6B06F8F143FE8288433493E4769C4D988ACE5BE25A0E24809670716C613 + D7B0CEE6932F8FAA7C44D2CB24523DA53FBE4F6EC3595892D1AA58C4328A06C4 + 6A15662E7EAA703A1DECF8BBB2D05DBE2EB956C142A338661D10461C0D135472 + 085057F3494309FFA73C611F78B32ADBB5740C361C9F35BE90997DB2014E2EF5 + AA61782F52ABEB8BD6432C4DD097BC5423B285DAFB60DC364E8161F4A2A35ACA + 3A10B1C4D203CC76A470A33AFDCBDD92959859ABD8B56E1725252D78EAC66E71 + BA9AE3F1DD2487199874393CD4D832186800654760E1E34C09E4D155179F9EC0 + DC4473F996BDCE6EED1CABED8B6F116F7AD9CF505DF0F998E34AB27514B0FFE7""", + "69C7548C21D0DFEA6B9A51C9EAD4E27C33D3B3F180316E5BCAB92C933F0E4DBC", + """ + 667098C654426C78D7F8201EAC6C203EF030D43605032C2F1FA937E5237DBD94 + 9F34A0A2564FE126DC8B715C5141802CE0979C8246463C40E6B6BDAA2513FA61 + 1728716C2E4FD53BC95B89E69949D96512E873B9C8F8DFD499CC312882561ADE + CB31F658E934C0C197F2C4D96B05CBAD67381E7B768891E4DA3843D24D94CDFB + 5126E9B8BF21E8358EE0E0A30EF13FD6A664C0DCE3731F7FB49A4845A4FD8254 + 687972A2D382599C9BAC4E0ED7998193078913032558134976410B89D2C171D1 + 23AC35FD977219597AA7D15C1A9A428E59194F75C721EBCBCFAE44696A499AFA + 
74E04299F132026601638CB87AB79190D4A0986315DA8EEC6561C938996BEADF""", + "DSA2048" + ), + ] + + # This is a sequence of items: + # message, k, r, s, hash module + signatures = [ + ( + "sample", + "7BDB6B0FF756E1BB5D53583EF979082F9AD5BD5B", + "2E1A0C2562B2912CAAF89186FB0F42001585DA55", + "29EFB6B0AFF2D7A68EB70CA313022253B9A88DF5", + SHA1, + 'DSA1024' + ), + ( + "sample", + "562097C06782D60C3037BA7BE104774344687649", + "4BC3B686AEA70145856814A6F1BB53346F02101E", + "410697B92295D994D21EDD2F4ADA85566F6F94C1", + SHA224, + 'DSA1024' + ), + ( + "sample", + "519BA0546D0C39202A7D34D7DFA5E760B318BCFB", + "81F2F5850BE5BC123C43F71A3033E9384611C545", + "4CDD914B65EB6C66A8AAAD27299BEE6B035F5E89", + SHA256, + 'DSA1024' + ), + ( + "sample", + "95897CD7BBB944AA932DBC579C1C09EB6FCFC595", + "07F2108557EE0E3921BC1774F1CA9B410B4CE65A", + "54DF70456C86FAC10FAB47C1949AB83F2C6F7595", + SHA384, + 'DSA1024' + ), + ( + "sample", + "09ECE7CA27D0F5A4DD4E556C9DF1D21D28104F8B", + "16C3491F9B8C3FBBDD5E7A7B667057F0D8EE8E1B", + "02C36A127A7B89EDBB72E4FFBC71DABC7D4FC69C", + SHA512, + 'DSA1024' + ), + ( + "test", + "5C842DF4F9E344EE09F056838B42C7A17F4A6433", + "42AB2052FD43E123F0607F115052A67DCD9C5C77", + "183916B0230D45B9931491D4C6B0BD2FB4AAF088", + SHA1, + 'DSA1024' + ), + ( + "test", + "4598B8EFC1A53BC8AECD58D1ABBB0C0C71E67297", + "6868E9964E36C1689F6037F91F28D5F2C30610F2", + "49CEC3ACDC83018C5BD2674ECAAD35B8CD22940F", + SHA224, + 'DSA1024' + ), + ( + "test", + "5A67592E8128E03A417B0484410FB72C0B630E1A", + "22518C127299B0F6FDC9872B282B9E70D0790812", + "6837EC18F150D55DE95B5E29BE7AF5D01E4FE160", + SHA256, + 'DSA1024' + ), + ( + "test", + "220156B761F6CA5E6C9F1B9CF9C24BE25F98CD89", + "854CF929B58D73C3CBFDC421E8D5430CD6DB5E66", + "91D0E0F53E22F898D158380676A871A157CDA622", + SHA384, + 'DSA1024' + ), + ( + "test", + "65D2C2EEB175E370F28C75BFCDC028D22C7DBE9C", + "8EA47E475BA8AC6F2D821DA3BD212D11A3DEB9A0", + "7C670C7AD72B6C050C109E1790008097125433E8", + SHA512, + 'DSA1024' + ), + ( + "sample", + 
"888FA6F7738A41BDC9846466ABDB8174C0338250AE50CE955CA16230F9CBD53E", + "3A1B2DBD7489D6ED7E608FD036C83AF396E290DBD602408E8677DAABD6E7445A", + "D26FCBA19FA3E3058FFC02CA1596CDBB6E0D20CB37B06054F7E36DED0CDBBCCF", + SHA1, + 'DSA2048' + ), + ( + "sample", + "BC372967702082E1AA4FCE892209F71AE4AD25A6DFD869334E6F153BD0C4D806", + "DC9F4DEADA8D8FF588E98FED0AB690FFCE858DC8C79376450EB6B76C24537E2C", + "A65A9C3BC7BABE286B195D5DA68616DA8D47FA0097F36DD19F517327DC848CEC", + SHA224, + 'DSA2048' + ), + ( + "sample", + "8926A27C40484216F052F4427CFD5647338B7B3939BC6573AF4333569D597C52", + "EACE8BDBBE353C432A795D9EC556C6D021F7A03F42C36E9BC87E4AC7932CC809", + "7081E175455F9247B812B74583E9E94F9EA79BD640DC962533B0680793A38D53", + SHA256, + 'DSA2048' + ), + ( + "sample", + "C345D5AB3DA0A5BCB7EC8F8FB7A7E96069E03B206371EF7D83E39068EC564920", + "B2DA945E91858834FD9BF616EBAC151EDBC4B45D27D0DD4A7F6A22739F45C00B", + "19048B63D9FD6BCA1D9BAE3664E1BCB97F7276C306130969F63F38FA8319021B", + SHA384, + 'DSA2048' + ), + ( + "sample", + "5A12994431785485B3F5F067221517791B85A597B7A9436995C89ED0374668FC", + "2016ED092DC5FB669B8EFB3D1F31A91EECB199879BE0CF78F02BA062CB4C942E", + "D0C76F84B5F091E141572A639A4FB8C230807EEA7D55C8A154A224400AFF2351", + SHA512, + 'DSA2048' + ), + ( + "test", + "6EEA486F9D41A037B2C640BC5645694FF8FF4B98D066A25F76BE641CCB24BA4F", + "C18270A93CFC6063F57A4DFA86024F700D980E4CF4E2CB65A504397273D98EA0", + "414F22E5F31A8B6D33295C7539C1C1BA3A6160D7D68D50AC0D3A5BEAC2884FAA", + SHA1, + 'DSA2048' + ), + ( + "test", + "06BD4C05ED74719106223BE33F2D95DA6B3B541DAD7BFBD7AC508213B6DA6670", + "272ABA31572F6CC55E30BF616B7A265312018DD325BE031BE0CC82AA17870EA3", + "E9CC286A52CCE201586722D36D1E917EB96A4EBDB47932F9576AC645B3A60806", + SHA224, + 'DSA2048' + ), + ( + "test", + "1D6CE6DDA1C5D37307839CD03AB0A5CBB18E60D800937D67DFB4479AAC8DEAD7", + "8190012A1969F9957D56FCCAAD223186F423398D58EF5B3CEFD5A4146A4476F0", + "7452A53F7075D417B4B013B278D1BB8BBD21863F5E7B1CEE679CF2188E1AB19E", + SHA256, + 'DSA2048' + ), + ( 
+ "test", + "206E61F73DBE1B2DC8BE736B22B079E9DACD974DB00EEBBC5B64CAD39CF9F91C", + "239E66DDBE8F8C230A3D071D601B6FFBDFB5901F94D444C6AF56F732BEB954BE", + "6BD737513D5E72FE85D1C750E0F73921FE299B945AAD1C802F15C26A43D34961", + SHA384, + 'DSA2048' + ), + ( + "test", + "AFF1651E4CD6036D57AA8B2A05CCF1A9D5A40166340ECBBDC55BE10B568AA0AA", + "89EC4BB1400ECCFF8E7D9AA515CD1DE7803F2DAFF09693EE7FD1353E90A68307", + "C9F0BDABCC0D880BB137A994CC7F3980CE91CC10FAF529FC46565B15CEA854E1", + SHA512, + 'DSA2048' + ) + ] + + def setUp(self): + # Convert DSA key components from hex strings to integers + # Each key is (p, q, g, x, y, desc) + + from collections import namedtuple + + TestKey = namedtuple('TestKey', 'p q g x y') + new_keys = {} + for k in self.keys: + tk = TestKey(*[t2l(y) for y in k[:-1]]) + new_keys[k[-1]] = tk + self.keys = new_keys + + # Convert signature encoding + TestSig = namedtuple('TestSig', 'message nonce result module test_key') + new_signatures = [] + for message, nonce, r, s, module, test_key in self.signatures: + tsig = TestSig( + tobytes(message), + t2l(nonce), + t2b(r) + t2b(s), + module, + self.keys[test_key] + ) + new_signatures.append(tsig) + self.signatures = new_signatures + + def test1(self): + q = 0x4000000000000000000020108A2E0CC0D99F8A5EF + x = 0x09A4D6792295A7F730FC3F2B49CBC0F62E862272F + p = 2 * q + 1 + y = pow(2, x, p) + key = DSA.construct([pow(y, 2, p), 2, p, q, x], False) + signer = DSS.new(key, 'deterministic-rfc6979') + + # Test _int2octets + self.assertEqual(hexlify(signer._int2octets(x)), + b'009a4d6792295a7f730fc3f2b49cbc0f62e862272f') + + # Test _bits2octets + h1 = SHA256.new(b"sample").digest() + self.assertEqual(hexlify(signer._bits2octets(h1)), + b'01795edf0d54db760f156d0dac04c0322b3a204224') + + def test2(self): + + for sig in self.signatures: + tk = sig.test_key + key = DSA.construct([tk.y, tk.g, tk.p, tk.q, tk.x], False) + signer = DSS.new(key, 'deterministic-rfc6979') + + hash_obj = sig.module.new(sig.message) + result = 
signer.sign(hash_obj) + self.assertEqual(sig.result, result) + + +class Det_ECDSA_Tests(unittest.TestCase): + + key_priv_p256 = ECC.construct(curve="P-256", d=0xC9AFA9D845BA75166B5C215767B1D6934E50C3DB36E89B127B8A622B120F6721) + key_pub_p256 = key_priv_p256.public_key() + + key_priv_p384 = ECC.construct(curve="P-384", d=0x6B9D3DAD2E1B8C1C05B19875B6659F4DE23C3B667BF297BA9AA47740787137D896D5724E4C70A825F872C9EA60D2EDF5) + key_pub_p384 = key_priv_p384.public_key() + + key_priv_p521 = ECC.construct(curve="P-521", d=0x0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538) + key_pub_p521 = key_priv_p521.public_key() + + # This is a sequence of items: + # message, k, r, s, hash module + # taken from RFC6979 + signatures_p256_ = ( + ( + "sample", + "882905F1227FD620FBF2ABF21244F0BA83D0DC3A9103DBBEE43A1FB858109DB4", + "61340C88C3AAEBEB4F6D667F672CA9759A6CCAA9FA8811313039EE4A35471D32", + "6D7F147DAC089441BB2E2FE8F7A3FA264B9C475098FDCF6E00D7C996E1B8B7EB", + SHA1 + ), + ( + "sample", + "103F90EE9DC52E5E7FB5132B7033C63066D194321491862059967C715985D473", + "53B2FFF5D1752B2C689DF257C04C40A587FABABB3F6FC2702F1343AF7CA9AA3F", + "B9AFB64FDC03DC1A131C7D2386D11E349F070AA432A4ACC918BEA988BF75C74C", + SHA224 + ), + ( + "sample", + "A6E3C57DD01ABE90086538398355DD4C3B17AA873382B0F24D6129493D8AAD60", + "EFD48B2AACB6A8FD1140DD9CD45E81D69D2C877B56AAF991C34D0EA84EAF3716", + "F7CB1C942D657C41D436C7A1B6E29F65F3E900DBB9AFF4064DC4AB2F843ACDA8", + SHA256 + ), + ( + "sample", + "09F634B188CEFD98E7EC88B1AA9852D734D0BC272F7D2A47DECC6EBEB375AAD4", + "0EAFEA039B20E9B42309FB1D89E213057CBF973DC0CFC8F129EDDDC800EF7719", + "4861F0491E6998B9455193E34E7B0D284DDD7149A74B95B9261F13ABDE940954", + SHA384 + ), + ( + "sample", + "5FA81C63109BADB88C1F367B47DA606DA28CAD69AA22C4FE6AD7DF73A7173AA5", + "8496A60B5E9B47C825488827E0495B0E3FA109EC4568FD3F8D1097678EB97F00", + "2362AB1ADBE2B8ADF9CB9EDAB740EA6049C028114F2460F96554F61FAE3302FE", + 
SHA512 + ), + ( + "test", + "8C9520267C55D6B980DF741E56B4ADEE114D84FBFA2E62137954164028632A2E", + "0CBCC86FD6ABD1D99E703E1EC50069EE5C0B4BA4B9AC60E409E8EC5910D81A89", + "01B9D7B73DFAA60D5651EC4591A0136F87653E0FD780C3B1BC872FFDEAE479B1", + SHA1 + ), + ( + "test", + "669F4426F2688B8BE0DB3A6BD1989BDAEFFF84B649EEB84F3DD26080F667FAA7", + "C37EDB6F0AE79D47C3C27E962FA269BB4F441770357E114EE511F662EC34A692", + "C820053A05791E521FCAAD6042D40AEA1D6B1A540138558F47D0719800E18F2D", + SHA224 + ), + ( + "test", + "D16B6AE827F17175E040871A1C7EC3500192C4C92677336EC2537ACAEE0008E0", + "F1ABB023518351CD71D881567B1EA663ED3EFCF6C5132B354F28D3B0B7D38367", + "019F4113742A2B14BD25926B49C649155F267E60D3814B4C0CC84250E46F0083", + SHA256 + ), + ( + "test", + "16AEFFA357260B04B1DD199693960740066C1A8F3E8EDD79070AA914D361B3B8", + "83910E8B48BB0C74244EBDF7F07A1C5413D61472BD941EF3920E623FBCCEBEB6", + "8DDBEC54CF8CD5874883841D712142A56A8D0F218F5003CB0296B6B509619F2C", + SHA384 + ), + ( + "test", + "6915D11632ACA3C40D5D51C08DAF9C555933819548784480E93499000D9F0B7F", + "461D93F31B6540894788FD206C07CFA0CC35F46FA3C91816FFF1040AD1581A04", + "39AF9F15DE0DB8D97E72719C74820D304CE5226E32DEDAE67519E840D1194E55", + SHA512 + ) + ) + + signatures_p384_ = ( + ( + "sample", + "4471EF7518BB2C7C20F62EAE1C387AD0C5E8E470995DB4ACF694466E6AB096630F29E5938D25106C3C340045A2DB01A7", + "EC748D839243D6FBEF4FC5C4859A7DFFD7F3ABDDF72014540C16D73309834FA37B9BA002899F6FDA3A4A9386790D4EB2", + "A3BCFA947BEEF4732BF247AC17F71676CB31A847B9FF0CBC9C9ED4C1A5B3FACF26F49CA031D4857570CCB5CA4424A443", + SHA1 + ), + ( + "sample", + "A4E4D2F0E729EB786B31FC20AD5D849E304450E0AE8E3E341134A5C1AFA03CAB8083EE4E3C45B06A5899EA56C51B5879", + "42356E76B55A6D9B4631C865445DBE54E056D3B3431766D0509244793C3F9366450F76EE3DE43F5A125333A6BE060122", + "9DA0C81787064021E78DF658F2FBB0B042BF304665DB721F077A4298B095E4834C082C03D83028EFBF93A3C23940CA8D", + SHA224 + ), + ( + "sample", + 
"180AE9F9AEC5438A44BC159A1FCB277C7BE54FA20E7CF404B490650A8ACC414E375572342863C899F9F2EDF9747A9B60", + "21B13D1E013C7FA1392D03C5F99AF8B30C570C6F98D4EA8E354B63A21D3DAA33BDE1E888E63355D92FA2B3C36D8FB2CD", + "F3AA443FB107745BF4BD77CB3891674632068A10CA67E3D45DB2266FA7D1FEEBEFDC63ECCD1AC42EC0CB8668A4FA0AB0", + SHA256 + ), + ( + "sample", + "94ED910D1A099DAD3254E9242AE85ABDE4BA15168EAF0CA87A555FD56D10FBCA2907E3E83BA95368623B8C4686915CF9", + "94EDBB92A5ECB8AAD4736E56C691916B3F88140666CE9FA73D64C4EA95AD133C81A648152E44ACF96E36DD1E80FABE46", + "99EF4AEB15F178CEA1FE40DB2603138F130E740A19624526203B6351D0A3A94FA329C145786E679E7B82C71A38628AC8", + SHA384 + ), + ( + "sample", + "92FC3C7183A883E24216D1141F1A8976C5B0DD797DFA597E3D7B32198BD35331A4E966532593A52980D0E3AAA5E10EC3", + "ED0959D5880AB2D869AE7F6C2915C6D60F96507F9CB3E047C0046861DA4A799CFE30F35CC900056D7C99CD7882433709", + "512C8CCEEE3890A84058CE1E22DBC2198F42323CE8ACA9135329F03C068E5112DC7CC3EF3446DEFCEB01A45C2667FDD5", + SHA512 + ), + ( + "test", + "66CC2C8F4D303FC962E5FF6A27BD79F84EC812DDAE58CF5243B64A4AD8094D47EC3727F3A3C186C15054492E30698497", + "4BC35D3A50EF4E30576F58CD96CE6BF638025EE624004A1F7789A8B8E43D0678ACD9D29876DAF46638645F7F404B11C7", + "D5A6326C494ED3FF614703878961C0FDE7B2C278F9A65FD8C4B7186201A2991695BA1C84541327E966FA7B50F7382282", + SHA1 + ), + ( + "test", + "18FA39DB95AA5F561F30FA3591DC59C0FA3653A80DAFFA0B48D1A4C6DFCBFF6E3D33BE4DC5EB8886A8ECD093F2935726", + "E8C9D0B6EA72A0E7837FEA1D14A1A9557F29FAA45D3E7EE888FC5BF954B5E62464A9A817C47FF78B8C11066B24080E72", + "07041D4A7A0379AC7232FF72E6F77B6DDB8F09B16CCE0EC3286B2BD43FA8C6141C53EA5ABEF0D8231077A04540A96B66", + SHA224 + ), + ( + "test", + "0CFAC37587532347DC3389FDC98286BBA8C73807285B184C83E62E26C401C0FAA48DD070BA79921A3457ABFF2D630AD7", + "6D6DEFAC9AB64DABAFE36C6BF510352A4CC27001263638E5B16D9BB51D451559F918EEDAF2293BE5B475CC8F0188636B", + "2D46F3BECBCC523D5F1A1256BF0C9B024D879BA9E838144C8BA6BAEB4B53B47D51AB373F9845C0514EEFB14024787265", + SHA256 + ), + ( + 
"test", + "015EE46A5BF88773ED9123A5AB0807962D193719503C527B031B4C2D225092ADA71F4A459BC0DA98ADB95837DB8312EA", + "8203B63D3C853E8D77227FB377BCF7B7B772E97892A80F36AB775D509D7A5FEB0542A7F0812998DA8F1DD3CA3CF023DB", + "DDD0760448D42D8A43AF45AF836FCE4DE8BE06B485E9B61B827C2F13173923E06A739F040649A667BF3B828246BAA5A5", + SHA384 + ), + ( + "test", + "3780C4F67CB15518B6ACAE34C9F83568D2E12E47DEAB6C50A4E4EE5319D1E8CE0E2CC8A136036DC4B9C00E6888F66B6C", + "A0D5D090C9980FAF3C2CE57B7AE951D31977DD11C775D314AF55F76C676447D06FB6495CD21B4B6E340FC236584FB277", + "976984E59B4C77B0E8E4460DCA3D9F20E07B9BB1F63BEEFAF576F6B2E8B224634A2092CD3792E0159AD9CEE37659C736", + SHA512 + ), + ) + + signatures_p521_ = ( + ( + "sample", + "0089C071B419E1C2820962321787258469511958E80582E95D8378E0C2CCDB3CB42BEDE42F50E3FA3C71F5A76724281D31D9C89F0F91FC1BE4918DB1C03A5838D0F9", + "00343B6EC45728975EA5CBA6659BBB6062A5FF89EEA58BE3C80B619F322C87910FE092F7D45BB0F8EEE01ED3F20BABEC079D202AE677B243AB40B5431D497C55D75D", + "00E7B0E675A9B24413D448B8CC119D2BF7B2D2DF032741C096634D6D65D0DBE3D5694625FB9E8104D3B842C1B0E2D0B98BEA19341E8676AEF66AE4EBA3D5475D5D16", + SHA1 + ), + ( + "sample", + "0121415EC2CD7726330A61F7F3FA5DE14BE9436019C4DB8CB4041F3B54CF31BE0493EE3F427FB906393D895A19C9523F3A1D54BB8702BD4AA9C99DAB2597B92113F3", + "01776331CFCDF927D666E032E00CF776187BC9FDD8E69D0DABB4109FFE1B5E2A30715F4CC923A4A5E94D2503E9ACFED92857B7F31D7152E0F8C00C15FF3D87E2ED2E", + "0050CB5265417FE2320BBB5A122B8E1A32BD699089851128E360E620A30C7E17BA41A666AF126CE100E5799B153B60528D5300D08489CA9178FB610A2006C254B41F", + SHA224 + ), + ( + "sample", + "00EDF38AFCAAECAB4383358B34D67C9F2216C8382AAEA44A3DAD5FDC9C32575761793FEF24EB0FC276DFC4F6E3EC476752F043CF01415387470BCBD8678ED2C7E1A0", + "01511BB4D675114FE266FC4372B87682BAECC01D3CC62CF2303C92B3526012659D16876E25C7C1E57648F23B73564D67F61C6F14D527D54972810421E7D87589E1A7", + 
"004A171143A83163D6DF460AAF61522695F207A58B95C0644D87E52AA1A347916E4F7A72930B1BC06DBE22CE3F58264AFD23704CBB63B29B931F7DE6C9D949A7ECFC", + SHA256 + ), + ( + "sample", + "01546A108BC23A15D6F21872F7DED661FA8431DDBD922D0DCDB77CC878C8553FFAD064C95A920A750AC9137E527390D2D92F153E66196966EA554D9ADFCB109C4211", + "01EA842A0E17D2DE4F92C15315C63DDF72685C18195C2BB95E572B9C5136CA4B4B576AD712A52BE9730627D16054BA40CC0B8D3FF035B12AE75168397F5D50C67451", + "01F21A3CEE066E1961025FB048BD5FE2B7924D0CD797BABE0A83B66F1E35EEAF5FDE143FA85DC394A7DEE766523393784484BDF3E00114A1C857CDE1AA203DB65D61", + SHA384 + ), + ( + "sample", + "01DAE2EA071F8110DC26882D4D5EAE0621A3256FC8847FB9022E2B7D28E6F10198B1574FDD03A9053C08A1854A168AA5A57470EC97DD5CE090124EF52A2F7ECBFFD3", + "00C328FAFCBD79DD77850370C46325D987CB525569FB63C5D3BC53950E6D4C5F174E25A1EE9017B5D450606ADD152B534931D7D4E8455CC91F9B15BF05EC36E377FA", + "00617CCE7CF5064806C467F678D3B4080D6F1CC50AF26CA209417308281B68AF282623EAA63E5B5C0723D8B8C37FF0777B1A20F8CCB1DCCC43997F1EE0E44DA4A67A", + SHA512 + ), + ( + "test", + "00BB9F2BF4FE1038CCF4DABD7139A56F6FD8BB1386561BD3C6A4FC818B20DF5DDBA80795A947107A1AB9D12DAA615B1ADE4F7A9DC05E8E6311150F47F5C57CE8B222", + "013BAD9F29ABE20DE37EBEB823C252CA0F63361284015A3BF430A46AAA80B87B0693F0694BD88AFE4E661FC33B094CD3B7963BED5A727ED8BD6A3A202ABE009D0367", + "01E9BB81FF7944CA409AD138DBBEE228E1AFCC0C890FC78EC8604639CB0DBDC90F717A99EAD9D272855D00162EE9527567DD6A92CBD629805C0445282BBC916797FF", + SHA1 + ), + ( + "test", + "0040D09FCF3C8A5F62CF4FB223CBBB2B9937F6B0577C27020A99602C25A01136987E452988781484EDBBCF1C47E554E7FC901BC3085E5206D9F619CFF07E73D6F706", + "01C7ED902E123E6815546065A2C4AF977B22AA8EADDB68B2C1110E7EA44D42086BFE4A34B67DDC0E17E96536E358219B23A706C6A6E16BA77B65E1C595D43CAE17FB", + "0177336676304FCB343CE028B38E7B4FBA76C1C1B277DA18CAD2A8478B2A9A9F5BEC0F3BA04F35DB3E4263569EC6AADE8C92746E4C82F8299AE1B8F1739F8FD519A4", + SHA224 + ), + ( + "test", + 
"001DE74955EFAABC4C4F17F8E84D881D1310B5392D7700275F82F145C61E843841AF09035BF7A6210F5A431A6A9E81C9323354A9E69135D44EBD2FCAA7731B909258", + "000E871C4A14F993C6C7369501900C4BC1E9C7B0B4BA44E04868B30B41D8071042EB28C4C250411D0CE08CD197E4188EA4876F279F90B3D8D74A3C76E6F1E4656AA8", + "00CD52DBAA33B063C3A6CD8058A1FB0A46A4754B034FCC644766CA14DA8CA5CA9FDE00E88C1AD60CCBA759025299079D7A427EC3CC5B619BFBC828E7769BCD694E86", + SHA256 + ), + ( + "test", + "01F1FC4A349A7DA9A9E116BFDD055DC08E78252FF8E23AC276AC88B1770AE0B5DCEB1ED14A4916B769A523CE1E90BA22846AF11DF8B300C38818F713DADD85DE0C88", + "014BEE21A18B6D8B3C93FAB08D43E739707953244FDBE924FA926D76669E7AC8C89DF62ED8975C2D8397A65A49DCC09F6B0AC62272741924D479354D74FF6075578C", + "0133330865C067A0EAF72362A65E2D7BC4E461E8C8995C3B6226A21BD1AA78F0ED94FE536A0DCA35534F0CD1510C41525D163FE9D74D134881E35141ED5E8E95B979", + SHA384 + ), + ( + "test", + "016200813020EC986863BEDFC1B121F605C1215645018AEA1A7B215A564DE9EB1B38A67AA1128B80CE391C4FB71187654AAA3431027BFC7F395766CA988C964DC56D", + "013E99020ABF5CEE7525D16B69B229652AB6BDF2AFFCAEF38773B4B7D08725F10CDB93482FDCC54EDCEE91ECA4166B2A7C6265EF0CE2BD7051B7CEF945BABD47EE6D", + "01FBD0013C674AA79CB39849527916CE301C66EA7CE8B80682786AD60F98F7E78A19CA69EFF5C57400E3B3A0AD66CE0978214D13BAF4E9AC60752F7B155E2DE4DCE3", + SHA512 + ), + ) + + signatures_p256 = [] + for a, b, c, d, e in signatures_p256_: + new_tv = (tobytes(a), unhexlify(b), unhexlify(c), unhexlify(d), e) + signatures_p256.append(new_tv) + + signatures_p384 = [] + for a, b, c, d, e in signatures_p384_: + new_tv = (tobytes(a), unhexlify(b), unhexlify(c), unhexlify(d), e) + signatures_p384.append(new_tv) + + signatures_p521 = [] + for a, b, c, d, e in signatures_p521_: + new_tv = (tobytes(a), unhexlify(b), unhexlify(c), unhexlify(d), e) + signatures_p521.append(new_tv) + + def shortDescription(self): + return "Deterministic ECDSA Tests" + + def test_loopback_p256(self): + hashed_msg = SHA512.new(b"test") + signer = DSS.new(self.key_priv_p256, 
'deterministic-rfc6979') + signature = signer.sign(hashed_msg) + + verifier = DSS.new(self.key_pub_p256, 'deterministic-rfc6979') + verifier.verify(hashed_msg, signature) + + def test_loopback_p384(self): + hashed_msg = SHA512.new(b"test") + signer = DSS.new(self.key_priv_p384, 'deterministic-rfc6979') + signature = signer.sign(hashed_msg) + + verifier = DSS.new(self.key_pub_p384, 'deterministic-rfc6979') + verifier.verify(hashed_msg, signature) + + def test_loopback_p521(self): + hashed_msg = SHA512.new(b"test") + signer = DSS.new(self.key_priv_p521, 'deterministic-rfc6979') + signature = signer.sign(hashed_msg) + + verifier = DSS.new(self.key_pub_p521, 'deterministic-rfc6979') + verifier.verify(hashed_msg, signature) + + def test_data_rfc6979_p256(self): + signer = DSS.new(self.key_priv_p256, 'deterministic-rfc6979') + for message, k, r, s, module in self.signatures_p256: + hash_obj = module.new(message) + result = signer.sign(hash_obj) + self.assertEqual(r + s, result) + + def test_data_rfc6979_p384(self): + signer = DSS.new(self.key_priv_p384, 'deterministic-rfc6979') + for message, k, r, s, module in self.signatures_p384: + hash_obj = module.new(message) + result = signer.sign(hash_obj) + self.assertEqual(r + s, result) + + def test_data_rfc6979_p521(self): + signer = DSS.new(self.key_priv_p521, 'deterministic-rfc6979') + for message, k, r, s, module in self.signatures_p521: + hash_obj = module.new(message) + result = signer.sign(hash_obj) + self.assertEqual(r + s, result) + + +def get_hash_module(hash_name): + if hash_name == "SHA-512": + hash_module = SHA512 + elif hash_name == "SHA-512/224": + hash_module = SHA512.new(truncate="224") + elif hash_name == "SHA-512/256": + hash_module = SHA512.new(truncate="256") + elif hash_name == "SHA-384": + hash_module = SHA384 + elif hash_name == "SHA-256": + hash_module = SHA256 + elif hash_name == "SHA-224": + hash_module = SHA224 + elif hash_name == "SHA-1": + hash_module = SHA1 + else: + raise ValueError("Unknown 
hash algorithm: " + hash_name) + return hash_module + + +class TestVectorsDSAWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings, slow_tests): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._slow_tests = slow_tests + self._id = "None" + self.tv = [] + + def setUp(self): + + def filter_dsa(group): + return DSA.import_key(group['keyPem']) + + def filter_sha(group): + return get_hash_module(group['sha']) + + def filter_type(group): + sig_type = group['type'] + if sig_type != 'DsaVerify': + raise ValueError("Unknown signature type " + sig_type) + return sig_type + + result = load_test_vectors_wycheproof(("Signature", "wycheproof"), + "dsa_test.json", + "Wycheproof DSA signature", + group_tag={'key': filter_dsa, + 'hash_module': filter_sha, + 'sig_type': filter_type}) + self.tv += result + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_verify(self, tv): + self._id = "Wycheproof DSA Test #" + str(tv.id) + + hashed_msg = tv.hash_module.new(tv.msg) + signer = DSS.new(tv.key, 'fips-186-3', encoding='der') + try: + signature = signer.verify(hashed_msg, tv.sig) + except ValueError as e: + if tv.warning: + return + assert not tv.valid + else: + assert tv.valid + self.warn(tv) + + def runTest(self): + for tv in self.tv: + self.test_verify(tv) + + +class TestVectorsECDSAWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings, slow_tests): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._slow_tests = slow_tests + self._id = "None" + + def add_tests(self, filename): + + def filter_ecc(group): + # These are the only curves we accept to skip + if group['key']['curve'] in ('secp224r1', 'secp224k1', 'secp256k1', + 'brainpoolP224r1', 'brainpoolP224t1', + 'brainpoolP256r1', 
'brainpoolP256t1', + 'brainpoolP320r1', 'brainpoolP320t1', + 'brainpoolP384r1', 'brainpoolP384t1', + 'brainpoolP512r1', 'brainpoolP512t1', + ): + return None + return ECC.import_key(group['keyPem']) + + def filter_sha(group): + return get_hash_module(group['sha']) + + def filter_encoding(group): + encoding_name = group['type'] + if encoding_name == "EcdsaVerify": + return "der" + elif encoding_name == "EcdsaP1363Verify": + return "binary" + else: + raise ValueError("Unknown signature type " + encoding_name) + + result = load_test_vectors_wycheproof(("Signature", "wycheproof"), + filename, + "Wycheproof ECDSA signature (%s)" % filename, + group_tag={'key': filter_ecc, + 'hash_module': filter_sha, + 'encoding': filter_encoding, + }) + self.tv += result + + def setUp(self): + self.tv = [] + self.add_tests("ecdsa_secp224r1_sha224_p1363_test.json") + self.add_tests("ecdsa_secp224r1_sha224_test.json") + if self._slow_tests: + self.add_tests("ecdsa_secp224r1_sha256_p1363_test.json") + self.add_tests("ecdsa_secp224r1_sha256_test.json") + self.add_tests("ecdsa_secp224r1_sha3_224_test.json") + self.add_tests("ecdsa_secp224r1_sha3_256_test.json") + self.add_tests("ecdsa_secp224r1_sha3_512_test.json") + self.add_tests("ecdsa_secp224r1_sha512_p1363_test.json") + self.add_tests("ecdsa_secp224r1_sha512_test.json") + self.add_tests("ecdsa_secp256r1_sha256_p1363_test.json") + self.add_tests("ecdsa_secp256r1_sha256_test.json") + self.add_tests("ecdsa_secp256r1_sha3_256_test.json") + self.add_tests("ecdsa_secp256r1_sha3_512_test.json") + self.add_tests("ecdsa_secp256r1_sha512_p1363_test.json") + self.add_tests("ecdsa_secp256r1_sha512_test.json") + if self._slow_tests: + self.add_tests("ecdsa_secp384r1_sha3_384_test.json") + self.add_tests("ecdsa_secp384r1_sha3_512_test.json") + self.add_tests("ecdsa_secp384r1_sha384_p1363_test.json") + self.add_tests("ecdsa_secp384r1_sha384_test.json") + self.add_tests("ecdsa_secp384r1_sha512_p1363_test.json") + 
self.add_tests("ecdsa_secp384r1_sha512_test.json") + if self._slow_tests: + self.add_tests("ecdsa_secp521r1_sha3_512_test.json") + self.add_tests("ecdsa_secp521r1_sha512_p1363_test.json") + self.add_tests("ecdsa_secp521r1_sha512_test.json") + self.add_tests("ecdsa_test.json") + self.add_tests("ecdsa_webcrypto_test.json") + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_verify(self, tv): + self._id = "Wycheproof ECDSA Test #%d (%s, %s)" % (tv.id, tv.comment, tv.filename) + + # Skip tests with unsupported curves + if tv.key is None: + return + + hashed_msg = tv.hash_module.new(tv.msg) + signer = DSS.new(tv.key, 'fips-186-3', encoding=tv.encoding) + try: + signature = signer.verify(hashed_msg, tv.sig) + except ValueError as e: + if tv.warning: + return + if tv.comment == "k*G has a large x-coordinate": + return + assert not tv.valid + else: + assert tv.valid + self.warn(tv) + + def runTest(self): + for tv in self.tv: + self.test_verify(tv) + + +def get_tests(config={}): + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(FIPS_DSA_Tests) + tests += list_test_cases(FIPS_ECDSA_Tests) + tests += list_test_cases(Det_DSA_Tests) + tests += list_test_cases(Det_ECDSA_Tests) + + slow_tests = not config.get('slow_tests') + if slow_tests: + tests += list_test_cases(FIPS_DSA_Tests_KAT) + tests += list_test_cases(FIPS_ECDSA_Tests_KAT) + + tests += [TestVectorsDSAWycheproof(wycheproof_warnings, slow_tests)] + tests += [TestVectorsECDSAWycheproof(wycheproof_warnings, slow_tests)] + + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Signature/test_pkcs1_15.py b/env/Lib/site-packages/Crypto/SelfTest/Signature/test_pkcs1_15.py new 
file mode 100644 index 0000000..8e2c6ee --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Signature/test_pkcs1_15.py @@ -0,0 +1,348 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import json +import unittest +from binascii import unhexlify + +from Crypto.Util.py3compat import bchr +from Crypto.Util.number import bytes_to_long +from Crypto.Util.strxor import strxor +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors, load_test_vectors_wycheproof + +from Crypto.Hash import (SHA1, SHA224, SHA256, SHA384, SHA512, SHA3_384, + SHA3_224, SHA3_256, SHA3_512) +from Crypto.PublicKey import RSA +from Crypto.Signature import pkcs1_15 +from Crypto.Signature import PKCS1_v1_5 + +from Crypto.Util._file_system import pycryptodome_filename +from Crypto.Util.strxor import strxor + + +def load_hash_by_name(hash_name): + return __import__("Crypto.Hash." + hash_name, globals(), locals(), ["new"]) + + +class FIPS_PKCS1_Verify_Tests(unittest.TestCase): + + def shortDescription(self): + return "FIPS PKCS1 Tests (Verify)" + + def test_can_sign(self): + test_public_key = RSA.generate(1024).public_key() + verifier = pkcs1_15.new(test_public_key) + self.assertEqual(verifier.can_sign(), False) + + +class FIPS_PKCS1_Verify_Tests_KAT(unittest.TestCase): + pass + + +test_vectors_verify = load_test_vectors(("Signature", "PKCS1-v1.5"), + "SigVer15_186-3.rsp", + "Signature Verification 186-3", + {'shaalg': lambda x: x, + 'd': lambda x: int(x), + 'result': lambda x: x}) or [] + + +for count, tv in enumerate(test_vectors_verify): + if isinstance(tv, str): + continue + if hasattr(tv, "n"): + modulus = tv.n + continue + + hash_module = load_hash_by_name(tv.shaalg.upper()) + hash_obj = hash_module.new(tv.msg) + public_key = RSA.construct([bytes_to_long(x) for x in (modulus, tv.e)]) # type: ignore + verifier = pkcs1_15.new(public_key) + + def positive_test(self, hash_obj=hash_obj, verifier=verifier, signature=tv.s): + verifier.verify(hash_obj, signature) + + def negative_test(self, hash_obj=hash_obj, verifier=verifier, signature=tv.s): + 
self.assertRaises(ValueError, verifier.verify, hash_obj, signature) + + if tv.result == 'f': + setattr(FIPS_PKCS1_Verify_Tests_KAT, "test_negative_%d" % count, negative_test) + else: + setattr(FIPS_PKCS1_Verify_Tests_KAT, "test_positive_%d" % count, positive_test) + + +class FIPS_PKCS1_Sign_Tests(unittest.TestCase): + + def shortDescription(self): + return "FIPS PKCS1 Tests (Sign)" + + def test_can_sign(self): + test_private_key = RSA.generate(1024) + signer = pkcs1_15.new(test_private_key) + self.assertEqual(signer.can_sign(), True) + + +class FIPS_PKCS1_Sign_Tests_KAT(unittest.TestCase): + pass + + +test_vectors_sign = load_test_vectors(("Signature", "PKCS1-v1.5"), + "SigGen15_186-2.txt", + "Signature Generation 186-2", + {'shaalg': lambda x: x}) or [] + +test_vectors_sign += load_test_vectors(("Signature", "PKCS1-v1.5"), + "SigGen15_186-3.txt", + "Signature Generation 186-3", + {'shaalg': lambda x: x}) or [] + +for count, tv in enumerate(test_vectors_sign): + if isinstance(tv, str): + continue + if hasattr(tv, "n"): + modulus = tv.n + continue + if hasattr(tv, "e"): + private_key = RSA.construct([bytes_to_long(x) for x in (modulus, tv.e, tv.d)]) # type: ignore + signer = pkcs1_15.new(private_key) + continue + + hash_module = load_hash_by_name(tv.shaalg.upper()) + hash_obj = hash_module.new(tv.msg) + + def new_test(self, hash_obj=hash_obj, signer=signer, result=tv.s): + signature = signer.sign(hash_obj) + self.assertEqual(signature, result) + + setattr(FIPS_PKCS1_Sign_Tests_KAT, "test_%d" % count, new_test) + + +class PKCS1_15_NoParams(unittest.TestCase): + """Verify that PKCS#1 v1.5 signatures pass even without NULL parameters in + the algorithm identifier (PyCrypto/LP bug #1119552).""" + + rsakey = """-----BEGIN RSA PRIVATE KEY----- + MIIBOwIBAAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+TLr7UkvEtFrRhDDKMtuII + q19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQJACUSDEp8RTe32ftq8IwG8 + Wojl5mAd1wFiIOrZ/Uv8b963WJOJiuQcVN29vxU5+My9GPZ7RA3hrDBEAoHUDPrI + 
OQIhAPIPLz4dphiD9imAkivY31Rc5AfHJiQRA7XixTcjEkojAiEAyh/pJHks/Mlr + +rdPNEpotBjfV4M4BkgGAA/ipcmaAjcCIQCHvhwwKVBLzzTscT2HeUdEeBMoiXXK + JACAr3sJQJGxIQIgarRp+m1WSKV1MciwMaTOnbU7wxFs9DP1pva76lYBzgUCIQC9 + n0CnZCJ6IZYqSt0H5N7+Q+2Ro64nuwV/OSQfM6sBwQ== + -----END RSA PRIVATE KEY-----""" + + msg = b"This is a test\x0a" + + # PKCS1 v1.5 signature of the message computed using SHA-1. + # The digestAlgorithm SEQUENCE does NOT contain the NULL parameter. + sig_str = "a287a13517f716e72fb14eea8e33a8db4a4643314607e7ca3e3e28"\ + "1893db74013dda8b855fd99f6fecedcb25fcb7a434f35cd0a101f8"\ + "b19348e0bd7b6f152dfc" + signature = unhexlify(sig_str) + + def runTest(self): + verifier = pkcs1_15.new(RSA.importKey(self.rsakey)) + hashed = SHA1.new(self.msg) + verifier.verify(hashed, self.signature) + + +class PKCS1_Legacy_Module_Tests(unittest.TestCase): + """Verify that the legacy module Crypto.Signature.PKCS1_v1_5 + behaves as expected. The only difference is that the verify() + method returns True/False and does not raise exceptions.""" + + def shortDescription(self): + return "Test legacy Crypto.Signature.PKCS1_v1_5" + + def runTest(self): + key = RSA.importKey(PKCS1_15_NoParams.rsakey) + hashed = SHA1.new(b"Test") + good_signature = PKCS1_v1_5.new(key).sign(hashed) + verifier = PKCS1_v1_5.new(key.public_key()) + + self.assertEqual(verifier.verify(hashed, good_signature), True) + + # Flip a few bits in the signature + bad_signature = strxor(good_signature, bchr(1) * len(good_signature)) + self.assertEqual(verifier.verify(hashed, bad_signature), False) + + +class PKCS1_All_Hashes_Tests(unittest.TestCase): + + def shortDescription(self): + return "Test PKCS#1v1.5 signature in combination with all hashes" + + def runTest(self): + + key = RSA.generate(1024) + signer = pkcs1_15.new(key) + hash_names = ("MD2", "MD4", "MD5", "RIPEMD160", "SHA1", + "SHA224", "SHA256", "SHA384", "SHA512", + "SHA3_224", "SHA3_256", "SHA3_384", "SHA3_512") + + for name in hash_names: + hashed = 
load_hash_by_name(name).new(b"Test") + signer.sign(hashed) + + from Crypto.Hash import BLAKE2b, BLAKE2s + for hash_size in (20, 32, 48, 64): + hashed_b = BLAKE2b.new(digest_bytes=hash_size, data=b"Test") + signer.sign(hashed_b) + for hash_size in (16, 20, 28, 32): + hashed_s = BLAKE2s.new(digest_bytes=hash_size, data=b"Test") + signer.sign(hashed_s) + + +class TestVectorsWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._id = "None" + + def setUp(self): + self.tv = [] + self.add_tests("rsa_sig_gen_misc_test.json") + self.add_tests("rsa_signature_2048_sha224_test.json") + self.add_tests("rsa_signature_2048_sha256_test.json") + self.add_tests("rsa_signature_2048_sha384_test.json") + self.add_tests("rsa_signature_2048_sha3_224_test.json") + self.add_tests("rsa_signature_2048_sha3_256_test.json") + self.add_tests("rsa_signature_2048_sha3_384_test.json") + self.add_tests("rsa_signature_2048_sha3_512_test.json") + self.add_tests("rsa_signature_2048_sha512_test.json") + self.add_tests("rsa_signature_2048_sha512_224_test.json") + self.add_tests("rsa_signature_2048_sha512_256_test.json") + self.add_tests("rsa_signature_3072_sha256_test.json") + self.add_tests("rsa_signature_3072_sha384_test.json") + self.add_tests("rsa_signature_3072_sha3_256_test.json") + self.add_tests("rsa_signature_3072_sha3_384_test.json") + self.add_tests("rsa_signature_3072_sha3_512_test.json") + self.add_tests("rsa_signature_3072_sha512_test.json") + self.add_tests("rsa_signature_3072_sha512_256_test.json") + self.add_tests("rsa_signature_4096_sha384_test.json") + self.add_tests("rsa_signature_4096_sha512_test.json") + self.add_tests("rsa_signature_4096_sha512_256_test.json") + self.add_tests("rsa_signature_test.json") + + def add_tests(self, filename): + + def filter_rsa(group): + return RSA.import_key(group['keyPem']) + + def filter_sha(group): + hash_name = group['sha'] + if 
hash_name == "SHA-512": + return SHA512 + elif hash_name == "SHA-512/224": + return SHA512.new(truncate="224") + elif hash_name == "SHA-512/256": + return SHA512.new(truncate="256") + elif hash_name == "SHA3-512": + return SHA3_512 + elif hash_name == "SHA-384": + return SHA384 + elif hash_name == "SHA3-384": + return SHA3_384 + elif hash_name == "SHA-256": + return SHA256 + elif hash_name == "SHA3-256": + return SHA3_256 + elif hash_name == "SHA-224": + return SHA224 + elif hash_name == "SHA3-224": + return SHA3_224 + elif hash_name == "SHA-1": + return SHA1 + else: + raise ValueError("Unknown hash algorithm: " + hash_name) + + def filter_type(group): + type_name = group['type'] + if type_name not in ("RsassaPkcs1Verify", "RsassaPkcs1Generate"): + raise ValueError("Unknown type name " + type_name) + + result = load_test_vectors_wycheproof(("Signature", "wycheproof"), + filename, + "Wycheproof PKCS#1v1.5 signature (%s)" % filename, + group_tag={'rsa_key': filter_rsa, + 'hash_mod': filter_sha, + 'type': filter_type}) + return result + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_verify(self, tv): + self._id = "Wycheproof RSA PKCS$#1 Test #" + str(tv.id) + + hashed_msg = tv.hash_module.new(tv.msg) + signer = pkcs1_15.new(tv.key) + try: + signature = signer.verify(hashed_msg, tv.sig) + except ValueError as e: + if tv.warning: + return + assert not tv.valid + else: + assert tv.valid + self.warn(tv) + + def runTest(self): + for tv in self.tv: + self.test_verify(tv) + + +def get_tests(config={}): + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(FIPS_PKCS1_Verify_Tests) + tests += list_test_cases(FIPS_PKCS1_Sign_Tests) + tests += list_test_cases(PKCS1_15_NoParams) + tests += list_test_cases(PKCS1_Legacy_Module_Tests) + tests += 
list_test_cases(PKCS1_All_Hashes_Tests) + tests += [ TestVectorsWycheproof(wycheproof_warnings) ] + + if config.get('slow_tests'): + tests += list_test_cases(FIPS_PKCS1_Verify_Tests_KAT) + tests += list_test_cases(FIPS_PKCS1_Sign_Tests_KAT) + + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Signature/test_pss.py b/env/Lib/site-packages/Crypto/SelfTest/Signature/test_pss.py new file mode 100644 index 0000000..535474b --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Signature/test_pss.py @@ -0,0 +1,377 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import unittest + +from Crypto.Util.py3compat import b, bchr +from Crypto.Util.number import bytes_to_long +from Crypto.Util.strxor import strxor +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors, load_test_vectors_wycheproof + +from Crypto.Hash import SHA1, SHA224, SHA256, SHA384, SHA512 +from Crypto.PublicKey import RSA +from Crypto.Signature import pss +from Crypto.Signature import PKCS1_PSS + +from Crypto.Signature.pss import MGF1 + + +def load_hash_by_name(hash_name): + return __import__("Crypto.Hash." 
+ hash_name, globals(), locals(), ["new"]) + + +class PRNG(object): + + def __init__(self, stream): + self.stream = stream + self.idx = 0 + + def __call__(self, rnd_size): + result = self.stream[self.idx:self.idx + rnd_size] + self.idx += rnd_size + return result + + +class PSS_Tests(unittest.TestCase): + + rsa_key = b'-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEAsvI34FgiTK8+txBvmooNGpNwk23YTU51dwNZi5yha3W4lA/Q\nvcZrDalkmD7ekWQwnduxVKa6pRSI13KBgeUOIqJoGXSWhntEtY3FEwvWOHW5AE7Q\njUzTzCiYT6TVaCcpa/7YLai+p6ai2g5f5Zfh4jSawa9uYeuggFygQq4IVW796MgV\nyqxYMM/arEj+/sKz3Viua9Rp9fFosertCYCX4DUTgW0mX9bwEnEOgjSI3pLOPXz1\n8vx+DRZS5wMCmwCUa0sKonLn3cAUPq+sGix7+eo7T0Z12MU8ud7IYVX/75r3cXiF\nPaYE2q8Le0kgOApIXbb+x74x0rNgyIh1yGygkwIDAQABAoIBABz4t1A0pLT6qHI2\nEIOaNz3mwhK0dZEqkz0GB1Dhtoax5ATgvKCFB98J3lYB08IBURe1snOsnMpOVUtg\naBRSM+QqnCUG6bnzKjAkuFP5liDE+oNQv1YpKp9CsUovuzdmI8Au3ewihl+ZTIN2\nUVNYMEOR1b5m+z2SSwWNOYsiJwpBrT7zkpdlDyjat7FiiPhMMIMXjhQFVxURMIcB\njUBtPzGvV/PG90cVDWi1wRGeeP1dDqti/jsnvykQ15KW1MqGrpeNKRmDdTy/Ucl1\nWIoYklKw3U456lgZ/rDTDB818+Tlnk35z4yF7d5ANPM8CKfqOPcnO1BCKVFzf4eq\n54wvUtkCgYEA1Zv2lp06l7rXMsvNtyYQjbFChezRDRnPwZmN4NCdRtTgGG1G0Ryd\nYz6WWoPGqZp0b4LAaaHd3W2GTcpXF8WXMKfMX1W+tMAxMozfsXRKMcHoypwuS5wT\nfJRXJCG4pvd57AB0iVUEJW2we+uGKU5Zxcx//id2nXGCpoRyViIplQsCgYEA1nVC\neHupHChht0Fh4N09cGqZHZzuwXjOUMzR3Vsfz+4WzVS3NvIgN4g5YgmQFOeKwo5y\niRq5yvubcNdFvf85eHWClg0zPAyxJCVUWigCrrOanGEhJo6re4idJvNVzu4Ucg0v\n6B3SJ1HsCda+ZSNz24bSyqRep8A+RoAaoVSFx5kCgYEAn3RvXPs9s+obnqWYiPF3\nRe5etE6Vt2vfNKwFxx6zaR6bsmBQjuUHcABWiHb6I71S0bMPI0tbrWGG8ibrYKl1\nNTLtUvVVCOS3VP7oNTWT9RTFTAnOXU7DFSo+6o/poWn3r36ff6zhDXeWWMr2OXtt\ndEQ1/2lCGEGVv+v61eVmmQUCgYABFHITPTwqwiFL1O5zPWnzyPWgaovhOYSAb6eW\n38CXQXGn8wdBJZL39J2lWrr4//l45VK6UgIhfYbY2JynSkO10ZGow8RARygVMILu\nOUlaK9lZdDvAf/NpGdUAvzTtZ9F+iYZ2OsA2JnlzyzsGM1l//3vMPWukmJk3ral0\nqoJJ8QKBgGRG3eVHnIegBbFVuMDp2NTcfuSuDVUQ1fGAwtPiFa8u81IodJnMk2pq\niXu2+0ytNA/M+SVrAnE2AgIzcaJbtr0p2srkuVM7KMWnG1vWFNjtXN8fAhf/joOv\nD+NmPL/N4uE57e40tbiU/H7KdyZaDt+5QiTmdhuyAe6CBjKsF2jy\n-----END
 RSA PRIVATE KEY-----' + msg = b'AAA' + tag = b'\x00[c5\xd8\xb0\x8b!D\x81\x83\x07\xc0\xdd\xb9\xb4\xb2`\x92\xe7\x02\xf1\xe1P\xea\xc3\xf0\xe3>\xddX5\xdd\x8e\xc5\x89\xef\xf3\xc2\xdc\xfeP\x02\x7f\x12+\xc9\xaf\xbb\xec\xfe\xb0\xa5\xb9\x08\x11P\x8fL\xee5\x9b\xb0k{=_\xd2\x14\xfb\x01R\xb7\xfe\x14}b\x03\x8d5Y\x89~}\xfc\xf2l\xd01-\xbd\xeb\x11\xcdV\x11\xe9l\x19k/o5\xa2\x0f\x15\xe7Q$\t=\xec\x1dAB\x19\xa5P\x9a\xaf\xa3G\x86"\xd6~\xf0j\xfcqkbs\x13\x84b\xe4\xbdm(\xed`\xa4F\xfb\x8f.\xe1\x8c)/_\x9eS\x98\xa4v\xb8\xdc\xfe\xf7/D\x18\x19\xb3T\x97:\xe2\x96s\xe8<\xa2\xb4\xb9\xf8/' + + def test_positive_1(self): + key = RSA.import_key(self.rsa_key) + h = SHA256.new(self.msg) + verifier = pss.new(key) + verifier.verify(h, self.tag) + + def test_negative_1(self): + key = RSA.import_key(self.rsa_key) + h = SHA256.new(self.msg + b'A') + verifier = pss.new(key) + tag = bytearray(self.tag) + self.assertRaises(ValueError, verifier.verify, h, tag) + + def test_negative_2(self): + key = RSA.import_key(self.rsa_key) + h = SHA256.new(self.msg) + verifier = pss.new(key, salt_bytes=1000) + tag = bytearray(self.tag) + self.assertRaises(ValueError, verifier.verify, h, tag) + + +class FIPS_PKCS1_Verify_Tests(unittest.TestCase): + + def shortDescription(self): + return "FIPS PKCS1 Tests (Verify)" + + def verify_positive(self, hashmod, message, public_key, salt, signature): + prng = PRNG(salt) + hashed = hashmod.new(message) + verifier = pss.new(public_key, salt_bytes=len(salt), rand_func=prng) + verifier.verify(hashed, signature) + + def verify_negative(self, hashmod, message, public_key, salt, signature): + prng = PRNG(salt) + hashed = hashmod.new(message) + verifier = pss.new(public_key, salt_bytes=len(salt), rand_func=prng) + self.assertRaises(ValueError, verifier.verify, hashed, signature) + + def test_can_sign(self): + test_public_key = RSA.generate(1024).public_key() + verifier = pss.new(test_public_key) + self.assertEqual(verifier.can_sign(), False) + + +class 
FIPS_PKCS1_Verify_Tests_KAT(unittest.TestCase): + pass + + +test_vectors_verify = load_test_vectors(("Signature", "PKCS1-PSS"), + "SigVerPSS_186-3.rsp", + "Signature Verification 186-3", + {'shaalg': lambda x: x, + 'result': lambda x: x}) or [] + + +for count, tv in enumerate(test_vectors_verify): + if isinstance(tv, str): + continue + if hasattr(tv, "n"): + modulus = tv.n + continue + if hasattr(tv, "p"): + continue + + hash_module = load_hash_by_name(tv.shaalg.upper()) + hash_obj = hash_module.new(tv.msg) + public_key = RSA.construct([bytes_to_long(x) for x in (modulus, tv.e)]) # type: ignore + if tv.saltval != b("\x00"): + prng = PRNG(tv.saltval) + verifier = pss.new(public_key, salt_bytes=len(tv.saltval), rand_func=prng) + else: + verifier = pss.new(public_key, salt_bytes=0) + + def positive_test(self, hash_obj=hash_obj, verifier=verifier, signature=tv.s): + verifier.verify(hash_obj, signature) + + def negative_test(self, hash_obj=hash_obj, verifier=verifier, signature=tv.s): + self.assertRaises(ValueError, verifier.verify, hash_obj, signature) + + if tv.result == 'p': + setattr(FIPS_PKCS1_Verify_Tests_KAT, "test_positive_%d" % count, positive_test) + else: + setattr(FIPS_PKCS1_Verify_Tests_KAT, "test_negative_%d" % count, negative_test) + + +class FIPS_PKCS1_Sign_Tests(unittest.TestCase): + + def shortDescription(self): + return "FIPS PKCS1 Tests (Sign)" + + def test_can_sign(self): + test_private_key = RSA.generate(1024) + signer = pss.new(test_private_key) + self.assertEqual(signer.can_sign(), True) + + +class FIPS_PKCS1_Sign_Tests_KAT(unittest.TestCase): + pass + + +test_vectors_sign = load_test_vectors(("Signature", "PKCS1-PSS"), + "SigGenPSS_186-2.txt", + "Signature Generation 186-2", + {'shaalg': lambda x: x}) or [] + +test_vectors_sign += load_test_vectors(("Signature", "PKCS1-PSS"), + "SigGenPSS_186-3.txt", + "Signature Generation 186-3", + {'shaalg': lambda x: x}) or [] + +for count, tv in enumerate(test_vectors_sign): + if isinstance(tv, str): + 
continue + if hasattr(tv, "n"): + modulus = tv.n + continue + if hasattr(tv, "e"): + private_key = RSA.construct([bytes_to_long(x) for x in (modulus, tv.e, tv.d)]) # type: ignore + continue + + hash_module = load_hash_by_name(tv.shaalg.upper()) + hash_obj = hash_module.new(tv.msg) + if tv.saltval != b("\x00"): + prng = PRNG(tv.saltval) + signer = pss.new(private_key, salt_bytes=len(tv.saltval), rand_func=prng) + else: + signer = pss.new(private_key, salt_bytes=0) + + def new_test(self, hash_obj=hash_obj, signer=signer, result=tv.s): + signature = signer.sign(hash_obj) + self.assertEqual(signature, result) + + setattr(FIPS_PKCS1_Sign_Tests_KAT, "test_%d" % count, new_test) + + +class PKCS1_Legacy_Module_Tests(unittest.TestCase): + """Verify that the legacy module Crypto.Signature.PKCS1_PSS + behaves as expected. The only difference is that the verify() + method returns True/False and does not raise exceptions.""" + + def shortDescription(self): + return "Test legacy Crypto.Signature.PKCS1_PSS" + + def runTest(self): + key = RSA.generate(1024) + hashed = SHA1.new(b("Test")) + good_signature = PKCS1_PSS.new(key).sign(hashed) + verifier = PKCS1_PSS.new(key.public_key()) + + self.assertEqual(verifier.verify(hashed, good_signature), True) + + # Flip a few bits in the signature + bad_signature = strxor(good_signature, bchr(1) * len(good_signature)) + self.assertEqual(verifier.verify(hashed, bad_signature), False) + + +class PKCS1_All_Hashes_Tests(unittest.TestCase): + + def shortDescription(self): + return "Test PKCS#1 PSS signature in combination with all hashes" + + def runTest(self): + + key = RSA.generate(1280) + signer = pss.new(key) + hash_names = ("MD2", "MD4", "MD5", "RIPEMD160", "SHA1", + "SHA224", "SHA256", "SHA384", "SHA512", + "SHA3_224", "SHA3_256", "SHA3_384", "SHA3_512") + + for name in hash_names: + hashed = load_hash_by_name(name).new(b("Test")) + signer.sign(hashed) + + from Crypto.Hash import BLAKE2b, BLAKE2s + for hash_size in (20, 32, 48, 64): + 
hashed_b = BLAKE2b.new(digest_bytes=hash_size, data=b("Test")) + signer.sign(hashed_b) + for hash_size in (16, 20, 28, 32): + hashed_s = BLAKE2s.new(digest_bytes=hash_size, data=b("Test")) + signer.sign(hashed_s) + + +def get_hash_module(hash_name): + if hash_name == "SHA-512": + hash_module = SHA512 + elif hash_name == "SHA-512/224": + hash_module = SHA512.new(truncate="224") + elif hash_name == "SHA-512/256": + hash_module = SHA512.new(truncate="256") + elif hash_name == "SHA-384": + hash_module = SHA384 + elif hash_name == "SHA-256": + hash_module = SHA256 + elif hash_name == "SHA-224": + hash_module = SHA224 + elif hash_name == "SHA-1": + hash_module = SHA1 + else: + raise ValueError("Unknown hash algorithm: " + hash_name) + return hash_module + + +class TestVectorsPSSWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._id = "None" + + def add_tests(self, filename): + + def filter_rsa(group): + return RSA.import_key(group['keyPem']) + + def filter_sha(group): + return get_hash_module(group['sha']) + + def filter_type(group): + type_name = group['type'] + if type_name not in ("RsassaPssVerify", ): + raise ValueError("Unknown type name " + type_name) + + def filter_slen(group): + return group['sLen'] + + def filter_mgf(group): + mgf = group['mgf'] + if mgf not in ("MGF1", ): + raise ValueError("Unknown MGF " + mgf) + mgf1_hash = get_hash_module(group['mgfSha']) + + def mgf(x, y, mh=mgf1_hash): + return MGF1(x, y, mh) + + return mgf + + result = load_test_vectors_wycheproof(("Signature", "wycheproof"), + filename, + "Wycheproof PSS signature (%s)" % filename, + group_tag={'key': filter_rsa, + 'hash_module': filter_sha, + 'sLen': filter_slen, + 'mgf': filter_mgf, + 'type': filter_type}) + return result + + def setUp(self): + self.tv = [] + self.add_tests("rsa_pss_2048_sha1_mgf1_20_test.json") + self.add_tests("rsa_pss_2048_sha256_mgf1_0_test.json") + 
self.add_tests("rsa_pss_2048_sha256_mgf1_32_test.json") + self.add_tests("rsa_pss_2048_sha512_256_mgf1_28_test.json") + self.add_tests("rsa_pss_2048_sha512_256_mgf1_32_test.json") + self.add_tests("rsa_pss_3072_sha256_mgf1_32_test.json") + self.add_tests("rsa_pss_4096_sha256_mgf1_32_test.json") + self.add_tests("rsa_pss_4096_sha512_mgf1_32_test.json") + self.add_tests("rsa_pss_misc_test.json") + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_verify(self, tv): + self._id = "Wycheproof RSA PSS Test #%d (%s)" % (tv.id, tv.comment) + + hashed_msg = tv.hash_module.new(tv.msg) + signer = pss.new(tv.key, mask_func=tv.mgf, salt_bytes=tv.sLen) + try: + signature = signer.verify(hashed_msg, tv.sig) + except ValueError as e: + if tv.warning: + return + assert not tv.valid + else: + assert tv.valid + self.warn(tv) + + def runTest(self): + for tv in self.tv: + self.test_verify(tv) + + +def get_tests(config={}): + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(PSS_Tests) + tests += list_test_cases(FIPS_PKCS1_Verify_Tests) + tests += list_test_cases(FIPS_PKCS1_Sign_Tests) + tests += list_test_cases(PKCS1_Legacy_Module_Tests) + tests += list_test_cases(PKCS1_All_Hashes_Tests) + + if config.get('slow_tests'): + tests += list_test_cases(FIPS_PKCS1_Verify_Tests_KAT) + tests += list_test_cases(FIPS_PKCS1_Sign_Tests_KAT) + + tests += [TestVectorsPSSWycheproof(wycheproof_warnings)] + + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Util/__init__.py b/env/Lib/site-packages/Crypto/SelfTest/Util/__init__.py new file mode 100644 index 0000000..ee993db --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Util/__init__.py @@ 
-0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/__init__.py: Self-test for utility modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test for utility modules""" + +__revision__ = "$Id$" + +import os + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Util import test_number; tests += test_number.get_tests(config=config) + from Crypto.SelfTest.Util import test_Counter; tests += test_Counter.get_tests(config=config) + from Crypto.SelfTest.Util import test_Padding; tests += test_Padding.get_tests(config=config) + from Crypto.SelfTest.Util import test_strxor; tests += test_strxor.get_tests(config=config) + from Crypto.SelfTest.Util import test_asn1; tests += test_asn1.get_tests(config=config) + from Crypto.SelfTest.Util import test_rfc1751; tests += test_rfc1751.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Util/test_Counter.py b/env/Lib/site-packages/Crypto/SelfTest/Util/test_Counter.py new file mode 100644 index 0000000..8837a32 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Util/test_Counter.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_Counter: Self-test for the Crypto.Util.Counter module +# +# Written in 2009 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-tests for Crypto.Util.Counter""" + +from Crypto.Util.py3compat import * + +import unittest + +class CounterTests(unittest.TestCase): + def setUp(self): + global Counter + from Crypto.Util import Counter + + def test_BE(self): + """Big endian""" + c = Counter.new(128) + c = Counter.new(128, little_endian=False) + + def test_LE(self): + """Little endian""" + c = Counter.new(128, little_endian=True) + + def test_nbits(self): + c = Counter.new(nbits=128) + self.assertRaises(ValueError, Counter.new, 129) + + def test_prefix(self): + c = Counter.new(128, prefix=b("xx")) + + def test_suffix(self): + c = Counter.new(128, suffix=b("xx")) + + def test_iv(self): + c = Counter.new(128, initial_value=2) + self.assertRaises(ValueError, Counter.new, 16, initial_value=0x1FFFF) + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + return list_test_cases(CounterTests) + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Util/test_Padding.py b/env/Lib/site-packages/Crypto/SelfTest/Util/test_Padding.py new file mode 100644 index 0000000..4634659 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Util/test_Padding.py @@ -0,0 +1,154 @@ +# +# SelfTest/Util/test_Padding.py: Self-test for padding functions +# +# 
=================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import unittest +from binascii import unhexlify as uh + +from Crypto.Util.py3compat import * +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Util.Padding import pad, unpad + +class PKCS7_Tests(unittest.TestCase): + + def test1(self): + padded = pad(b(""), 4) + self.failUnless(padded == uh(b("04040404"))) + padded = pad(b(""), 4, 'pkcs7') + self.failUnless(padded == uh(b("04040404"))) + back = unpad(padded, 4) + self.failUnless(back == b("")) + + def test2(self): + padded = pad(uh(b("12345678")), 4) + self.failUnless(padded == uh(b("1234567804040404"))) + back = unpad(padded, 4) + self.failUnless(back == uh(b("12345678"))) + + def test3(self): + padded = pad(uh(b("123456")), 4) + self.failUnless(padded == uh(b("12345601"))) + back = unpad(padded, 4) + self.failUnless(back == uh(b("123456"))) + + def test4(self): + padded = pad(uh(b("1234567890")), 4) + self.failUnless(padded == uh(b("1234567890030303"))) + back = unpad(padded, 4) + self.failUnless(back == uh(b("1234567890"))) + + def testn1(self): + self.assertRaises(ValueError, pad, uh(b("12")), 4, 'pkcs8') + + def testn2(self): + self.assertRaises(ValueError, unpad, b("\0\0\0"), 4) + self.assertRaises(ValueError, unpad, b(""), 4) + + def testn3(self): + self.assertRaises(ValueError, unpad, b("123456\x02"), 4) + self.assertRaises(ValueError, unpad, b("123456\x00"), 4) + self.assertRaises(ValueError, unpad, b("123456\x05\x05\x05\x05\x05"), 4) + +class X923_Tests(unittest.TestCase): + + def test1(self): + padded = pad(b(""), 4, 'x923') + self.failUnless(padded == uh(b("00000004"))) + back = unpad(padded, 4, 'x923') + self.failUnless(back == b("")) + + def test2(self): + padded = pad(uh(b("12345678")), 4, 'x923') + self.failUnless(padded == uh(b("1234567800000004"))) + back = unpad(padded, 4, 'x923') + self.failUnless(back == uh(b("12345678"))) + + def test3(self): + padded = pad(uh(b("123456")), 4, 'x923') + 
self.failUnless(padded == uh(b("12345601"))) + back = unpad(padded, 4, 'x923') + self.failUnless(back == uh(b("123456"))) + + def test4(self): + padded = pad(uh(b("1234567890")), 4, 'x923') + self.failUnless(padded == uh(b("1234567890000003"))) + back = unpad(padded, 4, 'x923') + self.failUnless(back == uh(b("1234567890"))) + + def testn1(self): + self.assertRaises(ValueError, unpad, b("123456\x02"), 4, 'x923') + self.assertRaises(ValueError, unpad, b("123456\x00"), 4, 'x923') + self.assertRaises(ValueError, unpad, b("123456\x00\x00\x00\x00\x05"), 4, 'x923') + self.assertRaises(ValueError, unpad, b(""), 4, 'x923') + +class ISO7816_Tests(unittest.TestCase): + + def test1(self): + padded = pad(b(""), 4, 'iso7816') + self.failUnless(padded == uh(b("80000000"))) + back = unpad(padded, 4, 'iso7816') + self.failUnless(back == b("")) + + def test2(self): + padded = pad(uh(b("12345678")), 4, 'iso7816') + self.failUnless(padded == uh(b("1234567880000000"))) + back = unpad(padded, 4, 'iso7816') + self.failUnless(back == uh(b("12345678"))) + + def test3(self): + padded = pad(uh(b("123456")), 4, 'iso7816') + self.failUnless(padded == uh(b("12345680"))) + #import pdb; pdb.set_trace() + back = unpad(padded, 4, 'iso7816') + self.failUnless(back == uh(b("123456"))) + + def test4(self): + padded = pad(uh(b("1234567890")), 4, 'iso7816') + self.failUnless(padded == uh(b("1234567890800000"))) + back = unpad(padded, 4, 'iso7816') + self.failUnless(back == uh(b("1234567890"))) + + def testn1(self): + self.assertRaises(ValueError, unpad, b("123456\x81"), 4, 'iso7816') + self.assertRaises(ValueError, unpad, b(""), 4, 'iso7816') + +def get_tests(config={}): + tests = [] + tests += list_test_cases(PKCS7_Tests) + tests += list_test_cases(X923_Tests) + tests += list_test_cases(ISO7816_Tests) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + diff --git a/env/Lib/site-packages/Crypto/SelfTest/Util/test_asn1.py 
b/env/Lib/site-packages/Crypto/SelfTest/Util/test_asn1.py new file mode 100644 index 0000000..368e47d --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Util/test_asn1.py @@ -0,0 +1,784 @@ +# +# SelfTest/Util/test_asn.py: Self-test for the Crypto.Util.asn1 module +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +"""Self-tests for Crypto.Util.asn1""" + +import unittest + +from Crypto.Util.py3compat import * +from Crypto.Util.asn1 import (DerObject, DerSetOf, DerInteger, + DerBitString, + DerObjectId, DerNull, DerOctetString, + DerSequence) + +class DerObjectTests(unittest.TestCase): + + def testObjInit1(self): + # Fail with invalid tag format (must be 1 byte) + self.assertRaises(ValueError, DerObject, b('\x00\x99')) + # Fail with invalid implicit tag (must be <0x1F) + self.assertRaises(ValueError, DerObject, 0x1F) + + # ------ + + def testObjEncode1(self): + # No payload + der = DerObject(b('\x02')) + self.assertEquals(der.encode(), b('\x02\x00')) + # Small payload (primitive) + der.payload = b('\x45') + self.assertEquals(der.encode(), b('\x02\x01\x45')) + # Invariant + self.assertEquals(der.encode(), b('\x02\x01\x45')) + # Initialize with numerical tag + der = DerObject(0x04) + der.payload = b('\x45') + self.assertEquals(der.encode(), b('\x04\x01\x45')) + # Initialize with constructed type + der = DerObject(b('\x10'), constructed=True) + self.assertEquals(der.encode(), b('\x30\x00')) + + def testObjEncode2(self): + # Initialize with payload + der = DerObject(0x03, b('\x12\x12')) + self.assertEquals(der.encode(), b('\x03\x02\x12\x12')) + + def testObjEncode3(self): + # Long payload + der = DerObject(b('\x10')) + der.payload = b("0")*128 + self.assertEquals(der.encode(), b('\x10\x81\x80' + "0"*128)) + + def testObjEncode4(self): + # Implicit tags (constructed) + der = DerObject(0x10, implicit=1, constructed=True) + der.payload = b('ppll') + self.assertEquals(der.encode(), b('\xa1\x04ppll')) + # Implicit tags (primitive) + der = DerObject(0x02, implicit=0x1E, constructed=False) + der.payload = b('ppll') + self.assertEquals(der.encode(), b('\x9E\x04ppll')) + + def testObjEncode5(self): + # Encode type with explicit tag + der = DerObject(0x10, explicit=5) + der.payload = b("xxll") + 
self.assertEqual(der.encode(), b("\xa5\x06\x10\x04xxll")) + + # ----- + + def testObjDecode1(self): + # Decode short payload + der = DerObject(0x02) + der.decode(b('\x02\x02\x01\x02')) + self.assertEquals(der.payload, b("\x01\x02")) + self.assertEquals(der._tag_octet, 0x02) + + def testObjDecode2(self): + # Decode long payload + der = DerObject(0x02) + der.decode(b('\x02\x81\x80' + "1"*128)) + self.assertEquals(der.payload, b("1")*128) + self.assertEquals(der._tag_octet, 0x02) + + def testObjDecode3(self): + # Decode payload with too much data gives error + der = DerObject(0x02) + self.assertRaises(ValueError, der.decode, b('\x02\x02\x01\x02\xFF')) + # Decode payload with too little data gives error + der = DerObject(0x02) + self.assertRaises(ValueError, der.decode, b('\x02\x02\x01')) + + def testObjDecode4(self): + # Decode implicit tag (primitive) + der = DerObject(0x02, constructed=False, implicit=0xF) + self.assertRaises(ValueError, der.decode, b('\x02\x02\x01\x02')) + der.decode(b('\x8F\x01\x00')) + self.assertEquals(der.payload, b('\x00')) + # Decode implicit tag (constructed) + der = DerObject(0x02, constructed=True, implicit=0xF) + self.assertRaises(ValueError, der.decode, b('\x02\x02\x01\x02')) + der.decode(b('\xAF\x01\x00')) + self.assertEquals(der.payload, b('\x00')) + + def testObjDecode5(self): + # Decode payload with unexpected tag gives error + der = DerObject(0x02) + self.assertRaises(ValueError, der.decode, b('\x03\x02\x01\x02')) + + def testObjDecode6(self): + # Arbitrary DER object + der = DerObject() + der.decode(b('\x65\x01\x88')) + self.assertEquals(der._tag_octet, 0x65) + self.assertEquals(der.payload, b('\x88')) + + def testObjDecode7(self): + # Decode explicit tag + der = DerObject(0x10, explicit=5) + der.decode(b("\xa5\x06\x10\x04xxll")) + self.assertEquals(der._inner_tag_octet, 0x10) + self.assertEquals(der.payload, b('xxll')) + + # Explicit tag may be 0 + der = DerObject(0x10, explicit=0) + der.decode(b("\xa0\x06\x10\x04xxll")) + 
self.assertEquals(der._inner_tag_octet, 0x10) + self.assertEquals(der.payload, b('xxll')) + + def testObjDecode8(self): + # Verify that decode returns the object + der = DerObject(0x02) + self.assertEqual(der, der.decode(b('\x02\x02\x01\x02'))) + +class DerIntegerTests(unittest.TestCase): + + def testInit1(self): + der = DerInteger(1) + self.assertEquals(der.encode(), b('\x02\x01\x01')) + + def testEncode1(self): + # Single-byte integers + # Value 0 + der = DerInteger(0) + self.assertEquals(der.encode(), b('\x02\x01\x00')) + # Value 1 + der = DerInteger(1) + self.assertEquals(der.encode(), b('\x02\x01\x01')) + # Value 127 + der = DerInteger(127) + self.assertEquals(der.encode(), b('\x02\x01\x7F')) + + def testEncode2(self): + # Multi-byte integers + # Value 128 + der = DerInteger(128) + self.assertEquals(der.encode(), b('\x02\x02\x00\x80')) + # Value 0x180 + der = DerInteger(0x180) + self.assertEquals(der.encode(), b('\x02\x02\x01\x80')) + # One very long integer + der = DerInteger(2**2048) + self.assertEquals(der.encode(), + b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + 
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00')) + + def testEncode3(self): + # Negative integers + # Value -1 + der = DerInteger(-1) + self.assertEquals(der.encode(), b('\x02\x01\xFF')) + # Value -128 + der = DerInteger(-128) + self.assertEquals(der.encode(), b('\x02\x01\x80')) + # Value + der = DerInteger(-87873) + self.assertEquals(der.encode(), b('\x02\x03\xFE\xA8\xBF')) + + def testEncode4(self): + # Explicit encoding + number = DerInteger(0x34, explicit=3) + self.assertEquals(number.encode(), b('\xa3\x03\x02\x01\x34')) + + # ----- + + def testDecode1(self): + # Single-byte integer + der = DerInteger() + # Value 0 + der.decode(b('\x02\x01\x00')) + self.assertEquals(der.value, 0) + # Value 1 + der.decode(b('\x02\x01\x01')) + self.assertEquals(der.value, 1) + # Value 127 + der.decode(b('\x02\x01\x7F')) + self.assertEquals(der.value, 127) + + def testDecode2(self): + # Multi-byte integer + der = DerInteger() + # Value 0x180L + der.decode(b('\x02\x02\x01\x80')) + self.assertEquals(der.value,0x180) + # One very long integer + der.decode( + b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + 
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00')) + self.assertEquals(der.value,2**2048) + + def testDecode3(self): + # Negative integer + der = DerInteger() + # Value -1 + der.decode(b('\x02\x01\xFF')) + self.assertEquals(der.value, -1) + # Value -32768 + der.decode(b('\x02\x02\x80\x00')) + self.assertEquals(der.value, -32768) + + def testDecode5(self): + # We still accept BER integer format + der = DerInteger() + # Redundant leading zeroes + der.decode(b('\x02\x02\x00\x01')) + self.assertEquals(der.value, 1) + # Redundant leading 0xFF + der.decode(b('\x02\x02\xFF\xFF')) + self.assertEquals(der.value, -1) + # Empty payload + der.decode(b('\x02\x00')) + self.assertEquals(der.value, 0) + + def testDecode6(self): + # Explicit encoding + number = DerInteger(explicit=3) + number.decode(b('\xa3\x03\x02\x01\x34')) + self.assertEquals(number.value, 0x34) + + def testDecode7(self): + # Verify decode returns the DerInteger + der = DerInteger() + self.assertEquals(der, der.decode(b('\x02\x01\x7F'))) + + ### + + def testStrict1(self): + number = DerInteger() + + number.decode(b'\x02\x02\x00\x01') + number.decode(b'\x02\x02\x00\x7F') + self.assertRaises(ValueError, number.decode, b'\x02\x02\x00\x01', strict=True) + self.assertRaises(ValueError, number.decode, b'\x02\x02\x00\x7F', strict=True) + + ### + + def testErrDecode1(self): + # Wide length field + der = DerInteger() + self.assertRaises(ValueError, der.decode, b('\x02\x81\x01\x01')) + + +class 
DerSequenceTests(unittest.TestCase): + + def testInit1(self): + der = DerSequence([1, DerInteger(2), b('0\x00')]) + self.assertEquals(der.encode(), b('0\x08\x02\x01\x01\x02\x01\x020\x00')) + + def testEncode1(self): + # Empty sequence + der = DerSequence() + self.assertEquals(der.encode(), b('0\x00')) + self.failIf(der.hasOnlyInts()) + # One single-byte integer (zero) + der.append(0) + self.assertEquals(der.encode(), b('0\x03\x02\x01\x00')) + self.assertEquals(der.hasInts(),1) + self.assertEquals(der.hasInts(False),1) + self.failUnless(der.hasOnlyInts()) + self.failUnless(der.hasOnlyInts(False)) + # Invariant + self.assertEquals(der.encode(), b('0\x03\x02\x01\x00')) + + def testEncode2(self): + # Indexing + der = DerSequence() + der.append(0) + der[0] = 1 + self.assertEquals(len(der),1) + self.assertEquals(der[0],1) + self.assertEquals(der[-1],1) + self.assertEquals(der.encode(), b('0\x03\x02\x01\x01')) + # + der[:] = [1] + self.assertEquals(len(der),1) + self.assertEquals(der[0],1) + self.assertEquals(der.encode(), b('0\x03\x02\x01\x01')) + + def testEncode3(self): + # One multi-byte integer (non-zero) + der = DerSequence() + der.append(0x180) + self.assertEquals(der.encode(), b('0\x04\x02\x02\x01\x80')) + + def testEncode4(self): + # One very long integer + der = DerSequence() + der.append(2**2048) + self.assertEquals(der.encode(), b('0\x82\x01\x05')+ + b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + 
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00')) + + def testEncode5(self): + der = DerSequence() + der += 1 + der += b('\x30\x00') + self.assertEquals(der.encode(), b('\x30\x05\x02\x01\x01\x30\x00')) + + def testEncode6(self): + # Two positive integers + der = DerSequence() + der.append(0x180) + der.append(0xFF) + self.assertEquals(der.encode(), b('0\x08\x02\x02\x01\x80\x02\x02\x00\xff')) + self.failUnless(der.hasOnlyInts()) + self.failUnless(der.hasOnlyInts(False)) + # Two mixed integers + der = DerSequence() + der.append(2) + der.append(-2) + self.assertEquals(der.encode(), b('0\x06\x02\x01\x02\x02\x01\xFE')) + self.assertEquals(der.hasInts(), 1) + self.assertEquals(der.hasInts(False), 2) + self.failIf(der.hasOnlyInts()) + self.failUnless(der.hasOnlyInts(False)) + # + der.append(0x01) + der[1:] = [9,8] + self.assertEquals(len(der),3) + self.assertEqual(der[1:],[9,8]) + self.assertEqual(der[1:-1],[9]) + self.assertEquals(der.encode(), b('0\x09\x02\x01\x02\x02\x01\x09\x02\x01\x08')) + + def testEncode7(self): + # One integer and another type (already encoded) + der = DerSequence() + der.append(0x180) + der.append(b('0\x03\x02\x01\x05')) + self.assertEquals(der.encode(), b('0\x09\x02\x02\x01\x800\x03\x02\x01\x05')) + self.failIf(der.hasOnlyInts()) + + def testEncode8(self): + # One integer and another type (yet to encode) + der = DerSequence() + der.append(0x180) + der.append(DerSequence([5])) + 
self.assertEquals(der.encode(), b('0\x09\x02\x02\x01\x800\x03\x02\x01\x05')) + self.failIf(der.hasOnlyInts()) + + #### + + def testDecode1(self): + # Empty sequence + der = DerSequence() + der.decode(b('0\x00')) + self.assertEquals(len(der),0) + # One single-byte integer (zero) + der.decode(b('0\x03\x02\x01\x00')) + self.assertEquals(len(der),1) + self.assertEquals(der[0],0) + # Invariant + der.decode(b('0\x03\x02\x01\x00')) + self.assertEquals(len(der),1) + self.assertEquals(der[0],0) + + def testDecode2(self): + # One single-byte integer (non-zero) + der = DerSequence() + der.decode(b('0\x03\x02\x01\x7f')) + self.assertEquals(len(der),1) + self.assertEquals(der[0],127) + + def testDecode4(self): + # One very long integer + der = DerSequence() + der.decode(b('0\x82\x01\x05')+ + b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + 
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00')) + self.assertEquals(len(der),1) + self.assertEquals(der[0],2**2048) + + def testDecode6(self): + # Two integers + der = DerSequence() + der.decode(b('0\x08\x02\x02\x01\x80\x02\x02\x00\xff')) + self.assertEquals(len(der),2) + self.assertEquals(der[0],0x180) + self.assertEquals(der[1],0xFF) + + def testDecode7(self): + # One integer and 2 other types + der = DerSequence() + der.decode(b('0\x0A\x02\x02\x01\x80\x24\x02\xb6\x63\x12\x00')) + self.assertEquals(len(der),3) + self.assertEquals(der[0],0x180) + self.assertEquals(der[1],b('\x24\x02\xb6\x63')) + self.assertEquals(der[2],b('\x12\x00')) + + def testDecode8(self): + # Only 2 other types + der = DerSequence() + der.decode(b('0\x06\x24\x02\xb6\x63\x12\x00')) + self.assertEquals(len(der),2) + self.assertEquals(der[0],b('\x24\x02\xb6\x63')) + self.assertEquals(der[1],b('\x12\x00')) + self.assertEquals(der.hasInts(), 0) + self.assertEquals(der.hasInts(False), 0) + self.failIf(der.hasOnlyInts()) + self.failIf(der.hasOnlyInts(False)) + + def testDecode9(self): + # Verify that decode returns itself + der = DerSequence() + self.assertEqual(der, der.decode(b('0\x06\x24\x02\xb6\x63\x12\x00'))) + + ### + + def testErrDecode1(self): + # Not a sequence + der = DerSequence() + self.assertRaises(ValueError, der.decode, b('')) + self.assertRaises(ValueError, der.decode, b('\x00')) + self.assertRaises(ValueError, der.decode, b('\x30')) + + def testErrDecode2(self): + der = DerSequence() + # Too much data + self.assertRaises(ValueError, der.decode, b('\x30\x00\x00')) + + def testErrDecode3(self): + # Wrong length format + der = DerSequence() + # Missing length in sub-item + self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x01\x01\x00')) + # Valid BER, but invalid DER length + self.assertRaises(ValueError, der.decode, b('\x30\x81\x03\x02\x01\x01')) + self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x81\x01\x01')) + + def test_expected_nr_elements(self): + der_bin = 
DerSequence([1, 2, 3]).encode() + + DerSequence().decode(der_bin, nr_elements=3) + DerSequence().decode(der_bin, nr_elements=(2,3)) + self.assertRaises(ValueError, DerSequence().decode, der_bin, nr_elements=1) + self.assertRaises(ValueError, DerSequence().decode, der_bin, nr_elements=(4,5)) + + def test_expected_only_integers(self): + + der_bin1 = DerSequence([1, 2, 3]).encode() + der_bin2 = DerSequence([1, 2, DerSequence([3, 4])]).encode() + + DerSequence().decode(der_bin1, only_ints_expected=True) + DerSequence().decode(der_bin1, only_ints_expected=False) + DerSequence().decode(der_bin2, only_ints_expected=False) + self.assertRaises(ValueError, DerSequence().decode, der_bin2, only_ints_expected=True) + + +class DerOctetStringTests(unittest.TestCase): + + def testInit1(self): + der = DerOctetString(b('\xFF')) + self.assertEquals(der.encode(), b('\x04\x01\xFF')) + + def testEncode1(self): + # Empty sequence + der = DerOctetString() + self.assertEquals(der.encode(), b('\x04\x00')) + # Small payload + der.payload = b('\x01\x02') + self.assertEquals(der.encode(), b('\x04\x02\x01\x02')) + + #### + + def testDecode1(self): + # Empty sequence + der = DerOctetString() + der.decode(b('\x04\x00')) + self.assertEquals(der.payload, b('')) + # Small payload + der.decode(b('\x04\x02\x01\x02')) + self.assertEquals(der.payload, b('\x01\x02')) + + def testDecode2(self): + # Verify that decode returns the object + der = DerOctetString() + self.assertEqual(der, der.decode(b('\x04\x00'))) + + def testErrDecode1(self): + # No leftovers allowed + der = DerOctetString() + self.assertRaises(ValueError, der.decode, b('\x04\x01\x01\xff')) + +class DerNullTests(unittest.TestCase): + + def testEncode1(self): + der = DerNull() + self.assertEquals(der.encode(), b('\x05\x00')) + + #### + + def testDecode1(self): + # Empty sequence + der = DerNull() + self.assertEquals(der, der.decode(b('\x05\x00'))) + +class DerObjectIdTests(unittest.TestCase): + + def testInit1(self): + der = 
DerObjectId("1.1") + self.assertEquals(der.encode(), b('\x06\x01)')) + + def testEncode1(self): + der = DerObjectId('1.2.840.113549.1.1.1') + self.assertEquals(der.encode(), b('\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x01\x01')) + # + der = DerObjectId() + der.value = '1.2.840.113549.1.1.1' + self.assertEquals(der.encode(), b('\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x01\x01')) + + #### + + def testDecode1(self): + # Empty sequence + der = DerObjectId() + der.decode(b('\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x01\x01')) + self.assertEquals(der.value, '1.2.840.113549.1.1.1') + + def testDecode2(self): + # Verify that decode returns the object + der = DerObjectId() + self.assertEquals(der, + der.decode(b('\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x01\x01'))) + + def testDecode3(self): + der = DerObjectId() + der.decode(b('\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x00\x01')) + self.assertEquals(der.value, '1.2.840.113549.1.0.1') + + +class DerBitStringTests(unittest.TestCase): + + def testInit1(self): + der = DerBitString(b("\xFF")) + self.assertEquals(der.encode(), b('\x03\x02\x00\xFF')) + + def testInit2(self): + der = DerBitString(DerInteger(1)) + self.assertEquals(der.encode(), b('\x03\x04\x00\x02\x01\x01')) + + def testEncode1(self): + # Empty sequence + der = DerBitString() + self.assertEquals(der.encode(), b('\x03\x01\x00')) + # Small payload + der = DerBitString(b('\x01\x02')) + self.assertEquals(der.encode(), b('\x03\x03\x00\x01\x02')) + # Small payload + der = DerBitString() + der.value = b('\x01\x02') + self.assertEquals(der.encode(), b('\x03\x03\x00\x01\x02')) + + #### + + def testDecode1(self): + # Empty sequence + der = DerBitString() + der.decode(b('\x03\x00')) + self.assertEquals(der.value, b('')) + # Small payload + der.decode(b('\x03\x03\x00\x01\x02')) + self.assertEquals(der.value, b('\x01\x02')) + + def testDecode2(self): + # Verify that decode returns the object + der = DerBitString() + self.assertEquals(der, der.decode(b('\x03\x00'))) + + +class 
DerSetOfTests(unittest.TestCase): + + def testInit1(self): + der = DerSetOf([DerInteger(1), DerInteger(2)]) + self.assertEquals(der.encode(), b('1\x06\x02\x01\x01\x02\x01\x02')) + + def testEncode1(self): + # Empty set + der = DerSetOf() + self.assertEquals(der.encode(), b('1\x00')) + # One single-byte integer (zero) + der.add(0) + self.assertEquals(der.encode(), b('1\x03\x02\x01\x00')) + # Invariant + self.assertEquals(der.encode(), b('1\x03\x02\x01\x00')) + + def testEncode2(self): + # Two integers + der = DerSetOf() + der.add(0x180) + der.add(0xFF) + self.assertEquals(der.encode(), b('1\x08\x02\x02\x00\xff\x02\x02\x01\x80')) + # Initialize with integers + der = DerSetOf([0x180, 0xFF]) + self.assertEquals(der.encode(), b('1\x08\x02\x02\x00\xff\x02\x02\x01\x80')) + + def testEncode3(self): + # One integer and another type (no matter what it is) + der = DerSetOf() + der.add(0x180) + self.assertRaises(ValueError, der.add, b('\x00\x02\x00\x00')) + + def testEncode4(self): + # Only non integers + der = DerSetOf() + der.add(b('\x01\x00')) + der.add(b('\x01\x01\x01')) + self.assertEquals(der.encode(), b('1\x05\x01\x00\x01\x01\x01')) + + #### + + def testDecode1(self): + # Empty sequence + der = DerSetOf() + der.decode(b('1\x00')) + self.assertEquals(len(der),0) + # One single-byte integer (zero) + der.decode(b('1\x03\x02\x01\x00')) + self.assertEquals(len(der),1) + self.assertEquals(list(der),[0]) + + def testDecode2(self): + # Two integers + der = DerSetOf() + der.decode(b('1\x08\x02\x02\x01\x80\x02\x02\x00\xff')) + self.assertEquals(len(der),2) + l = list(der) + self.failUnless(0x180 in l) + self.failUnless(0xFF in l) + + def testDecode3(self): + # One integer and 2 other types + der = DerSetOf() + #import pdb; pdb.set_trace() + self.assertRaises(ValueError, der.decode, + b('0\x0A\x02\x02\x01\x80\x24\x02\xb6\x63\x12\x00')) + + def testDecode4(self): + # Verify that decode returns the object + der = DerSetOf() + self.assertEquals(der, + 
der.decode(b('1\x08\x02\x02\x01\x80\x02\x02\x00\xff'))) + + ### + + def testErrDecode1(self): + # No leftovers allowed + der = DerSetOf() + self.assertRaises(ValueError, der.decode, + b('1\x08\x02\x02\x01\x80\x02\x02\x00\xff\xAA')) + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + listTests = [] + listTests += list_test_cases(DerObjectTests) + listTests += list_test_cases(DerIntegerTests) + listTests += list_test_cases(DerSequenceTests) + listTests += list_test_cases(DerOctetStringTests) + listTests += list_test_cases(DerNullTests) + listTests += list_test_cases(DerObjectIdTests) + listTests += list_test_cases(DerBitStringTests) + listTests += list_test_cases(DerSetOfTests) + return listTests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Util/test_number.py b/env/Lib/site-packages/Crypto/SelfTest/Util/test_number.py new file mode 100644 index 0000000..13f9d19 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Util/test_number.py @@ -0,0 +1,144 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_number.py: Self-test for parts of the Crypto.Util.number module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-tests for (some of) Crypto.Util.number""" + +from Crypto.Util.py3compat import * + +import unittest + +class MyError(Exception): + """Dummy exception used for tests""" + +# NB: In some places, we compare tuples instead of just output values so that +# if any inputs cause a test failure, we'll be able to tell which ones. + +class MiscTests(unittest.TestCase): + def setUp(self): + global number, math + from Crypto.Util import number + import math + + def test_ceil_div(self): + """Util.number.ceil_div""" + self.assertRaises(TypeError, number.ceil_div, "1", 1) + self.assertRaises(ZeroDivisionError, number.ceil_div, 1, 0) + self.assertRaises(ZeroDivisionError, number.ceil_div, -1, 0) + + # b = 1 + self.assertEqual(0, number.ceil_div(0, 1)) + self.assertEqual(1, number.ceil_div(1, 1)) + self.assertEqual(2, number.ceil_div(2, 1)) + self.assertEqual(3, number.ceil_div(3, 1)) + + # b = 2 + self.assertEqual(0, number.ceil_div(0, 2)) + self.assertEqual(1, number.ceil_div(1, 2)) + self.assertEqual(1, number.ceil_div(2, 2)) + self.assertEqual(2, number.ceil_div(3, 2)) + self.assertEqual(2, number.ceil_div(4, 2)) + self.assertEqual(3, number.ceil_div(5, 2)) + + # b = 3 + self.assertEqual(0, number.ceil_div(0, 3)) + self.assertEqual(1, number.ceil_div(1, 3)) + self.assertEqual(1, number.ceil_div(2, 3)) + self.assertEqual(1, number.ceil_div(3, 3)) + self.assertEqual(2, number.ceil_div(4, 3)) + self.assertEqual(2, number.ceil_div(5, 3)) + self.assertEqual(2, number.ceil_div(6, 3)) + self.assertEqual(3, number.ceil_div(7, 3)) + + # b = 4 + self.assertEqual(0, number.ceil_div(0, 4)) + self.assertEqual(1, number.ceil_div(1, 4)) + 
self.assertEqual(1, number.ceil_div(2, 4)) + self.assertEqual(1, number.ceil_div(3, 4)) + self.assertEqual(1, number.ceil_div(4, 4)) + self.assertEqual(2, number.ceil_div(5, 4)) + self.assertEqual(2, number.ceil_div(6, 4)) + self.assertEqual(2, number.ceil_div(7, 4)) + self.assertEqual(2, number.ceil_div(8, 4)) + self.assertEqual(3, number.ceil_div(9, 4)) + + def test_getStrongPrime(self): + """Util.number.getStrongPrime""" + self.assertRaises(ValueError, number.getStrongPrime, 256) + self.assertRaises(ValueError, number.getStrongPrime, 513) + bits = 512 + x = number.getStrongPrime(bits) + self.assertNotEqual(x % 2, 0) + self.assertEqual(x > (1 << bits-1)-1, 1) + self.assertEqual(x < (1 << bits), 1) + e = 2**16+1 + x = number.getStrongPrime(bits, e) + self.assertEqual(number.GCD(x-1, e), 1) + self.assertNotEqual(x % 2, 0) + self.assertEqual(x > (1 << bits-1)-1, 1) + self.assertEqual(x < (1 << bits), 1) + e = 2**16+2 + x = number.getStrongPrime(bits, e) + self.assertEqual(number.GCD((x-1)>>1, e), 1) + self.assertNotEqual(x % 2, 0) + self.assertEqual(x > (1 << bits-1)-1, 1) + self.assertEqual(x < (1 << bits), 1) + + def test_isPrime(self): + """Util.number.isPrime""" + self.assertEqual(number.isPrime(-3), False) # Regression test: negative numbers should not be prime + self.assertEqual(number.isPrime(-2), False) # Regression test: negative numbers should not be prime + self.assertEqual(number.isPrime(1), False) # Regression test: isPrime(1) caused some versions of PyCrypto to crash. 
+ self.assertEqual(number.isPrime(2), True) + self.assertEqual(number.isPrime(3), True) + self.assertEqual(number.isPrime(4), False) + self.assertEqual(number.isPrime(2**1279-1), True) + self.assertEqual(number.isPrime(-(2**1279-1)), False) # Regression test: negative numbers should not be prime + # test some known gmp pseudo-primes taken from + # http://www.trnicely.net/misc/mpzspsp.html + for composite in (43 * 127 * 211, 61 * 151 * 211, 15259 * 30517, + 346141 * 692281, 1007119 * 2014237, 3589477 * 7178953, + 4859419 * 9718837, 2730439 * 5460877, + 245127919 * 490255837, 963939391 * 1927878781, + 4186358431 * 8372716861, 1576820467 * 3153640933): + self.assertEqual(number.isPrime(int(composite)), False) + + def test_size(self): + self.assertEqual(number.size(2),2) + self.assertEqual(number.size(3),2) + self.assertEqual(number.size(0xa2),8) + self.assertEqual(number.size(0xa2ba40),8*3) + self.assertEqual(number.size(0xa2ba40ee07e3b2bd2f02ce227f36a195024486e49c19cb41bbbdfbba98b22b0e577c2eeaffa20d883a76e65e394c69d4b3c05a1e8fadda27edb2a42bc000fe888b9b32c22d15add0cd76b3e7936e19955b220dd17d4ea904b1ec102b2e4de7751222aa99151024c7cb41cc5ea21d00eeb41f7c800834d2c6e06bce3bce7ea9a5), 1024) + self.assertRaises(ValueError, number.size, -1) + + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + tests = list_test_cases(MiscTests) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/Util/test_rfc1751.py b/env/Lib/site-packages/Crypto/SelfTest/Util/test_rfc1751.py new file mode 100644 index 0000000..af0aa2b --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Util/test_rfc1751.py @@ -0,0 +1,38 @@ +import unittest + +import binascii +from Crypto.Util.RFC1751 import key_to_english, english_to_key + + +class RFC1751_Tests(unittest.TestCase): + + def test1(self): + data = [ + 
('EB33F77EE73D4053', 'TIDE ITCH SLOW REIN RULE MOT'), + ('CCAC2AED591056BE4F90FD441C534766', 'RASH BUSH MILK LOOK BAD BRIM AVID GAFF BAIT ROT POD LOVE'), + ('EFF81F9BFBC65350920CDD7416DE8009', 'TROD MUTE TAIL WARM CHAR KONG HAAG CITY BORE O TEAL AWL') + ] + + for key_hex, words in data: + key_bin = binascii.a2b_hex(key_hex) + + w2 = key_to_english(key_bin) + self.assertEqual(w2, words) + + k2 = english_to_key(words) + self.assertEqual(k2, key_bin) + + def test_error_key_to_english(self): + + self.assertRaises(ValueError, key_to_english, b'0' * 7) + + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + tests = list_test_cases(RFC1751_Tests) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/Util/test_strxor.py b/env/Lib/site-packages/Crypto/SelfTest/Util/test_strxor.py new file mode 100644 index 0000000..c91d38f --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/Util/test_strxor.py @@ -0,0 +1,280 @@ +# +# SelfTest/Util/test_strxor.py: Self-test for XORing +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import unittest +from binascii import unhexlify, hexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Util.strxor import strxor, strxor_c + + +class StrxorTests(unittest.TestCase): + + def test1(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term2 = unhexlify(b"383d4ba020573314395b") + result = unhexlify(b"c70ed123c59a7fcb6f12") + self.assertEqual(strxor(term1, term2), result) + self.assertEqual(strxor(term2, term1), result) + + def test2(self): + es = b"" + self.assertEqual(strxor(es, es), es) + + def test3(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + all_zeros = b"\x00" * len(term1) + self.assertEqual(strxor(term1, term1), all_zeros) + + def test_wrong_length(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term2 = unhexlify(b"ff339a83e5cd4cdf564990") + self.assertRaises(ValueError, strxor, term1, term2) + + def test_bytearray(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term1_ba = bytearray(term1) + term2 = unhexlify(b"383d4ba020573314395b") + result = unhexlify(b"c70ed123c59a7fcb6f12") + + self.assertEqual(strxor(term1_ba, term2), result) + + def 
test_memoryview(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term1_mv = memoryview(term1) + term2 = unhexlify(b"383d4ba020573314395b") + result = unhexlify(b"c70ed123c59a7fcb6f12") + + self.assertEqual(strxor(term1_mv, term2), result) + + def test_output_bytearray(self): + """Verify result can be stored in pre-allocated memory""" + + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term2 = unhexlify(b"383d4ba020573314395b") + original_term1 = term1[:] + original_term2 = term2[:] + expected_xor = unhexlify(b"c70ed123c59a7fcb6f12") + output = bytearray(len(term1)) + + result = strxor(term1, term2, output=output) + + self.assertEqual(result, None) + self.assertEqual(output, expected_xor) + self.assertEqual(term1, original_term1) + self.assertEqual(term2, original_term2) + + def test_output_memoryview(self): + """Verify result can be stored in pre-allocated memory""" + + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term2 = unhexlify(b"383d4ba020573314395b") + original_term1 = term1[:] + original_term2 = term2[:] + expected_xor = unhexlify(b"c70ed123c59a7fcb6f12") + output = memoryview(bytearray(len(term1))) + + result = strxor(term1, term2, output=output) + + self.assertEqual(result, None) + self.assertEqual(output, expected_xor) + self.assertEqual(term1, original_term1) + self.assertEqual(term2, original_term2) + + def test_output_overlapping_bytearray(self): + """Verify result can be stored in overlapping memory""" + + term1 = bytearray(unhexlify(b"ff339a83e5cd4cdf5649")) + term2 = unhexlify(b"383d4ba020573314395b") + original_term2 = term2[:] + expected_xor = unhexlify(b"c70ed123c59a7fcb6f12") + + result = strxor(term1, term2, output=term1) + + self.assertEqual(result, None) + self.assertEqual(term1, expected_xor) + self.assertEqual(term2, original_term2) + + def test_output_overlapping_memoryview(self): + """Verify result can be stored in overlapping memory""" + + term1 = memoryview(bytearray(unhexlify(b"ff339a83e5cd4cdf5649"))) + term2 = 
unhexlify(b"383d4ba020573314395b") + original_term2 = term2[:] + expected_xor = unhexlify(b"c70ed123c59a7fcb6f12") + + result = strxor(term1, term2, output=term1) + + self.assertEqual(result, None) + self.assertEqual(term1, expected_xor) + self.assertEqual(term2, original_term2) + + def test_output_ro_bytes(self): + """Verify result cannot be stored in read-only memory""" + + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term2 = unhexlify(b"383d4ba020573314395b") + + self.assertRaises(TypeError, strxor, term1, term2, output=term1) + + def test_output_ro_memoryview(self): + """Verify result cannot be stored in read-only memory""" + + term1 = memoryview(unhexlify(b"ff339a83e5cd4cdf5649")) + term2 = unhexlify(b"383d4ba020573314395b") + + self.assertRaises(TypeError, strxor, term1, term2, output=term1) + + def test_output_incorrect_length(self): + """Verify result cannot be stored in memory of incorrect length""" + + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term2 = unhexlify(b"383d4ba020573314395b") + output = bytearray(len(term1) - 1) + + self.assertRaises(ValueError, strxor, term1, term2, output=output) + + +class Strxor_cTests(unittest.TestCase): + + def test1(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + result = unhexlify(b"be72dbc2a48c0d9e1708") + self.assertEqual(strxor_c(term1, 65), result) + + def test2(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + self.assertEqual(strxor_c(term1, 0), term1) + + def test3(self): + self.assertEqual(strxor_c(b"", 90), b"") + + def test_wrong_range(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + self.assertRaises(ValueError, strxor_c, term1, -1) + self.assertRaises(ValueError, strxor_c, term1, 256) + + def test_bytearray(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term1_ba = bytearray(term1) + result = unhexlify(b"be72dbc2a48c0d9e1708") + + self.assertEqual(strxor_c(term1_ba, 65), result) + + def test_memoryview(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term1_mv = memoryview(term1) + 
result = unhexlify(b"be72dbc2a48c0d9e1708") + + self.assertEqual(strxor_c(term1_mv, 65), result) + + def test_output_bytearray(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + original_term1 = term1[:] + expected_result = unhexlify(b"be72dbc2a48c0d9e1708") + output = bytearray(len(term1)) + + result = strxor_c(term1, 65, output=output) + + self.assertEqual(result, None) + self.assertEqual(output, expected_result) + self.assertEqual(term1, original_term1) + + def test_output_memoryview(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + original_term1 = term1[:] + expected_result = unhexlify(b"be72dbc2a48c0d9e1708") + output = memoryview(bytearray(len(term1))) + + result = strxor_c(term1, 65, output=output) + + self.assertEqual(result, None) + self.assertEqual(output, expected_result) + self.assertEqual(term1, original_term1) + + def test_output_overlapping_bytearray(self): + """Verify result can be stored in overlapping memory""" + + term1 = bytearray(unhexlify(b"ff339a83e5cd4cdf5649")) + expected_xor = unhexlify(b"be72dbc2a48c0d9e1708") + + result = strxor_c(term1, 65, output=term1) + + self.assertEqual(result, None) + self.assertEqual(term1, expected_xor) + + def test_output_overlapping_memoryview(self): + """Verify result can be stored in overlapping memory""" + + term1 = memoryview(bytearray(unhexlify(b"ff339a83e5cd4cdf5649"))) + expected_xor = unhexlify(b"be72dbc2a48c0d9e1708") + + result = strxor_c(term1, 65, output=term1) + + self.assertEqual(result, None) + self.assertEqual(term1, expected_xor) + + def test_output_ro_bytes(self): + """Verify result cannot be stored in read-only memory""" + + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + + self.assertRaises(TypeError, strxor_c, term1, 65, output=term1) + + def test_output_ro_memoryview(self): + """Verify result cannot be stored in read-only memory""" + + term1 = memoryview(unhexlify(b"ff339a83e5cd4cdf5649")) + term2 = unhexlify(b"383d4ba020573314395b") + + self.assertRaises(TypeError, strxor_c, term1, 
65, output=term1) + + def test_output_incorrect_length(self): + """Verify result cannot be stored in memory of incorrect length""" + + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + output = bytearray(len(term1) - 1) + + self.assertRaises(ValueError, strxor_c, term1, 65, output=output) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(StrxorTests) + tests += list_test_cases(Strxor_cTests) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/env/Lib/site-packages/Crypto/SelfTest/__init__.py b/env/Lib/site-packages/Crypto/SelfTest/__init__.py new file mode 100644 index 0000000..bc34f4a --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/__init__.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/__init__.py: Self-test for PyCrypto +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self tests + +These tests should perform quickly and can ideally be used every time an +application runs. +""" + +__revision__ = "$Id$" + +import sys +import unittest +from Crypto.Util.py3compat import BytesIO + +class SelfTestError(Exception): + def __init__(self, message, result): + Exception.__init__(self, message, result) + self.message = message + self.result = result + +def run(module=None, verbosity=0, stream=None, tests=None, config=None, **kwargs): + """Execute self-tests. + + This raises SelfTestError if any test is unsuccessful. + + You may optionally pass in a sub-module of SelfTest if you only want to + perform some of the tests. For example, the following would test only the + hash modules: + + Crypto.SelfTest.run(Crypto.SelfTest.Hash) + + """ + + if config is None: + config = {} + suite = unittest.TestSuite() + if module is None: + if tests is None: + tests = get_tests(config=config) + suite.addTests(tests) + else: + if tests is None: + suite.addTests(module.get_tests(config=config)) + else: + raise ValueError("'module' and 'tests' arguments are mutually exclusive") + if stream is None: + kwargs['stream'] = BytesIO() + else: + kwargs['stream'] = stream + runner = unittest.TextTestRunner(verbosity=verbosity, **kwargs) + result = runner.run(suite) + if not result.wasSuccessful(): + if stream is None: + sys.stderr.write(kwargs['stream'].getvalue()) + raise SelfTestError("Self-test failed", result) + return result + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest import Cipher; tests += Cipher.get_tests(config=config) + from Crypto.SelfTest import Hash; tests += Hash.get_tests(config=config) + from Crypto.SelfTest import Protocol; tests += Protocol.get_tests(config=config) + from Crypto.SelfTest import PublicKey; tests += PublicKey.get_tests(config=config) + from Crypto.SelfTest import Random; tests += Random.get_tests(config=config) + from Crypto.SelfTest import 
Util; tests += Util.get_tests(config=config) + from Crypto.SelfTest import Signature; tests += Signature.get_tests(config=config) + from Crypto.SelfTest import IO; tests += IO.get_tests(config=config) + from Crypto.SelfTest import Math; tests += Math.get_tests(config=config) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/SelfTest/__main__.py b/env/Lib/site-packages/Crypto/SelfTest/__main__.py new file mode 100644 index 0000000..9ab0912 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/__main__.py @@ -0,0 +1,38 @@ +#! /usr/bin/env python +# +# __main__.py : Stand-along loader for PyCryptodome test suite +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +from __future__ import print_function + +import sys + +from Crypto import SelfTest + +slow_tests = not "--skip-slow-tests" in sys.argv +if not slow_tests: + print("Skipping slow tests") + +wycheproof_warnings = "--wycheproof-warnings" in sys.argv +if wycheproof_warnings: + print("Printing Wycheproof warnings") + +config = {'slow_tests' : slow_tests, 'wycheproof_warnings' : wycheproof_warnings } +SelfTest.run(stream=sys.stdout, verbosity=1, config=config) diff --git a/env/Lib/site-packages/Crypto/SelfTest/loader.py b/env/Lib/site-packages/Crypto/SelfTest/loader.py new file mode 100644 index 0000000..18be270 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/loader.py @@ -0,0 +1,206 @@ +# =================================================================== +# +# Copyright (c) 2016, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import os +import re +import json +import errno +import binascii +import warnings +from binascii import unhexlify +from Crypto.Util.py3compat import FileNotFoundError + + +try: + import pycryptodome_test_vectors # type: ignore + test_vectors_available = True +except ImportError: + test_vectors_available = False + + +def _load_tests(dir_comps, file_in, description, conversions): + """Load and parse a test vector file + + Return a list of objects, one per group of adjacent + KV lines or for a single line in the form "[.*]". + + For a group of lines, the object has one attribute per line. 
+ """ + + line_number = 0 + results = [] + + class TestVector(object): + def __init__(self, description, count): + self.desc = description + self.count = count + self.others = [] + + test_vector = None + count = 0 + new_group = True + + while True: + line_number += 1 + line = file_in.readline() + if not line: + if test_vector is not None: + results.append(test_vector) + break + line = line.strip() + + # Skip comments and empty lines + if line.startswith('#') or not line: + new_group = True + continue + + if line.startswith("["): + if test_vector is not None: + results.append(test_vector) + test_vector = None + results.append(line) + continue + + if new_group: + count += 1 + new_group = False + if test_vector is not None: + results.append(test_vector) + test_vector = TestVector("%s (#%d)" % (description, count), count) + + res = re.match("([A-Za-z0-9]+) = ?(.*)", line) + if not res: + test_vector.others += [line] + else: + token = res.group(1).lower() + data = res.group(2).lower() + + conversion = conversions.get(token, None) + if conversion is None: + if len(data) % 2 != 0: + data = "0" + data + setattr(test_vector, token, binascii.unhexlify(data)) + else: + setattr(test_vector, token, conversion(data)) + + # This line is ignored + return results + + +def load_test_vectors(dir_comps, file_name, description, conversions): + """Load and parse a test vector file + + This function returns a list of objects, one per group of adjacent + KV lines or for a single line in the form "[.*]". + + For a group of lines, the object has one attribute per line. 
+ """ + + results = None + + try: + if not test_vectors_available: + raise FileNotFoundError(errno.ENOENT, + os.strerror(errno.ENOENT), + file_name) + + description = "%s test (%s)" % (description, file_name) + + init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) + full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) + with open(full_file_name) as file_in: + results = _load_tests(dir_comps, file_in, description, conversions) + + except FileNotFoundError: + warnings.warn("Warning: skipping extended tests for " + description, + UserWarning, + stacklevel=2) + + return results + + +def load_test_vectors_wycheproof(dir_comps, file_name, description, + root_tag={}, group_tag={}, unit_tag={}): + + result = [] + try: + if not test_vectors_available: + raise FileNotFoundError(errno.ENOENT, + os.strerror(errno.ENOENT), + file_name) + + init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) + full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) + with open(full_file_name) as file_in: + tv_tree = json.load(file_in) + + except FileNotFoundError: + warnings.warn("Warning: skipping extended tests for " + description, + UserWarning, + stacklevel=2) + return result + + class TestVector(object): + pass + + common_root = {} + for k, v in root_tag.items(): + common_root[k] = v(tv_tree) + + for group in tv_tree['testGroups']: + + common_group = {} + for k, v in group_tag.items(): + common_group[k] = v(group) + + for test in group['tests']: + tv = TestVector() + + for k, v in common_root.items(): + setattr(tv, k, v) + for k, v in common_group.items(): + setattr(tv, k, v) + + tv.id = test['tcId'] + tv.comment = test['comment'] + for attr in 'key', 'iv', 'aad', 'msg', 'ct', 'tag', 'label', 'ikm', 'salt', 'info', 'okm', 'sig': + if attr in test: + setattr(tv, attr, unhexlify(test[attr])) + tv.filename = file_name + + for k, v in unit_tag.items(): + setattr(tv, k, v(test)) + + tv.valid = test['result'] != "invalid" + 
tv.warning = test['result'] == "acceptable" + result.append(tv) + + return result + diff --git a/env/Lib/site-packages/Crypto/SelfTest/st_common.py b/env/Lib/site-packages/Crypto/SelfTest/st_common.py new file mode 100644 index 0000000..e098d81 --- /dev/null +++ b/env/Lib/site-packages/Crypto/SelfTest/st_common.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/st_common.py: Common functions for SelfTest modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Common functions for SelfTest modules""" + +import unittest +import binascii +from Crypto.Util.py3compat import b + + +def list_test_cases(class_): + """Return a list of TestCase instances given a TestCase class + + This is useful when you have defined test* methods on your TestCase class. 
+ """ + return unittest.TestLoader().loadTestsFromTestCase(class_) + +def strip_whitespace(s): + """Remove whitespace from a text or byte string""" + if isinstance(s,str): + return b("".join(s.split())) + else: + return b("").join(s.split()) + +def a2b_hex(s): + """Convert hexadecimal to binary, ignoring whitespace""" + return binascii.a2b_hex(strip_whitespace(s)) + +def b2a_hex(s): + """Convert binary to hexadecimal""" + # For completeness + return binascii.b2a_hex(s) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/env/Lib/site-packages/Crypto/Signature/DSS.py b/env/Lib/site-packages/Crypto/Signature/DSS.py new file mode 100644 index 0000000..4092d42 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Signature/DSS.py @@ -0,0 +1,416 @@ +# +# Signature/DSS.py : DSS.py +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +__all__ = ['new'] + + +from Crypto.Util.asn1 import DerSequence +from Crypto.Util.number import long_to_bytes +from Crypto.Math.Numbers import Integer + +from Crypto.Hash import HMAC +from Crypto.PublicKey.ECC import EccKey +from Crypto.PublicKey.DSA import DsaKey + + +class DssSigScheme(object): + """A (EC)DSA signature object. + Do not instantiate directly. + Use :func:`Crypto.Signature.DSS.new`. + """ + + def __init__(self, key, encoding, order): + """Create a new Digital Signature Standard (DSS) object. + + Do not instantiate this object directly, + use `Crypto.Signature.DSS.new` instead. + """ + + self._key = key + self._encoding = encoding + self._order = order + + self._order_bits = self._order.size_in_bits() + self._order_bytes = (self._order_bits - 1) // 8 + 1 + + def can_sign(self): + """Return ``True`` if this signature object can be used + for signing messages.""" + + return self._key.has_private() + + def _compute_nonce(self, msg_hash): + raise NotImplementedError("To be provided by subclasses") + + def _valid_hash(self, msg_hash): + raise NotImplementedError("To be provided by subclasses") + + def sign(self, msg_hash): + """Produce the DSA/ECDSA signature of a message. + + :parameter msg_hash: + The hash that was carried out over the message. + The object belongs to the :mod:`Crypto.Hash` package. 
+ + Under mode *'fips-186-3'*, the hash must be a FIPS + approved secure hash (SHA-1 or a member of the SHA-2 family), + of cryptographic strength appropriate for the DSA key. + For instance, a 3072/256 DSA key can only be used + in combination with SHA-512. + :type msg_hash: hash object + + :return: The signature as a *byte string* + :raise ValueError: if the hash algorithm is incompatible to the (EC)DSA key + :raise TypeError: if the (EC)DSA key has no private half + """ + + if not self._valid_hash(msg_hash): + raise ValueError("Hash is not sufficiently strong") + + # Generate the nonce k (critical!) + nonce = self._compute_nonce(msg_hash) + + # Perform signature using the raw API + z = Integer.from_bytes(msg_hash.digest()[:self._order_bytes]) + sig_pair = self._key._sign(z, nonce) + + # Encode the signature into a single byte string + if self._encoding == 'binary': + output = b"".join([long_to_bytes(x, self._order_bytes) + for x in sig_pair]) + else: + # Dss-sig ::= SEQUENCE { + # r INTEGER, + # s INTEGER + # } + # Ecdsa-Sig-Value ::= SEQUENCE { + # r INTEGER, + # s INTEGER + # } + output = DerSequence(sig_pair).encode() + + return output + + def verify(self, msg_hash, signature): + """Check if a certain (EC)DSA signature is authentic. + + :parameter msg_hash: + The hash that was carried out over the message. + This is an object belonging to the :mod:`Crypto.Hash` module. + + Under mode *'fips-186-3'*, the hash must be a FIPS + approved secure hash (SHA-1 or a member of the SHA-2 family), + of cryptographic strength appropriate for the DSA key. + For instance, a 3072/256 DSA key can only be used in + combination with SHA-512. 
+ :type msg_hash: hash object + + :parameter signature: + The signature that needs to be validated + :type signature: byte string + + :raise ValueError: if the signature is not authentic + """ + + if not self._valid_hash(msg_hash): + raise ValueError("Hash is not sufficiently strong") + + if self._encoding == 'binary': + if len(signature) != (2 * self._order_bytes): + raise ValueError("The signature is not authentic (length)") + r_prime, s_prime = [Integer.from_bytes(x) + for x in (signature[:self._order_bytes], + signature[self._order_bytes:])] + else: + try: + der_seq = DerSequence().decode(signature, strict=True) + except (ValueError, IndexError): + raise ValueError("The signature is not authentic (DER)") + if len(der_seq) != 2 or not der_seq.hasOnlyInts(): + raise ValueError("The signature is not authentic (DER content)") + r_prime, s_prime = Integer(der_seq[0]), Integer(der_seq[1]) + + if not (0 < r_prime < self._order) or not (0 < s_prime < self._order): + raise ValueError("The signature is not authentic (d)") + + z = Integer.from_bytes(msg_hash.digest()[:self._order_bytes]) + result = self._key._verify(z, (r_prime, s_prime)) + if not result: + raise ValueError("The signature is not authentic") + # Make PyCrypto code to fail + return False + + +class DeterministicDsaSigScheme(DssSigScheme): + # Also applicable to ECDSA + + def __init__(self, key, encoding, order, private_key): + super(DeterministicDsaSigScheme, self).__init__(key, encoding, order) + self._private_key = private_key + + def _bits2int(self, bstr): + """See 2.3.2 in RFC6979""" + + result = Integer.from_bytes(bstr) + q_len = self._order.size_in_bits() + b_len = len(bstr) * 8 + if b_len > q_len: + # Only keep leftmost q_len bits + result >>= (b_len - q_len) + return result + + def _int2octets(self, int_mod_q): + """See 2.3.3 in RFC6979""" + + assert 0 < int_mod_q < self._order + return long_to_bytes(int_mod_q, self._order_bytes) + + def _bits2octets(self, bstr): + """See 2.3.4 in RFC6979""" + + z1 
= self._bits2int(bstr) + if z1 < self._order: + z2 = z1 + else: + z2 = z1 - self._order + return self._int2octets(z2) + + def _compute_nonce(self, mhash): + """Generate k in a deterministic way""" + + # See section 3.2 in RFC6979.txt + # Step a + h1 = mhash.digest() + # Step b + mask_v = b'\x01' * mhash.digest_size + # Step c + nonce_k = b'\x00' * mhash.digest_size + + for int_oct in (b'\x00', b'\x01'): + # Step d/f + nonce_k = HMAC.new(nonce_k, + mask_v + int_oct + + self._int2octets(self._private_key) + + self._bits2octets(h1), mhash).digest() + # Step e/g + mask_v = HMAC.new(nonce_k, mask_v, mhash).digest() + + nonce = -1 + while not (0 < nonce < self._order): + # Step h.C (second part) + if nonce != -1: + nonce_k = HMAC.new(nonce_k, mask_v + b'\x00', + mhash).digest() + mask_v = HMAC.new(nonce_k, mask_v, mhash).digest() + + # Step h.A + mask_t = b"" + + # Step h.B + while len(mask_t) < self._order_bytes: + mask_v = HMAC.new(nonce_k, mask_v, mhash).digest() + mask_t += mask_v + + # Step h.C (first part) + nonce = self._bits2int(mask_t) + return nonce + + def _valid_hash(self, msg_hash): + return True + + +class FipsDsaSigScheme(DssSigScheme): + + #: List of L (bit length of p) and N (bit length of q) combinations + #: that are allowed by FIPS 186-3. The security level is provided in + #: Table 2 of FIPS 800-57 (rev3). 
+ _fips_186_3_L_N = ( + (1024, 160), # 80 bits (SHA-1 or stronger) + (2048, 224), # 112 bits (SHA-224 or stronger) + (2048, 256), # 128 bits (SHA-256 or stronger) + (3072, 256) # 256 bits (SHA-512) + ) + + def __init__(self, key, encoding, order, randfunc): + super(FipsDsaSigScheme, self).__init__(key, encoding, order) + self._randfunc = randfunc + + L = Integer(key.p).size_in_bits() + if (L, self._order_bits) not in self._fips_186_3_L_N: + error = ("L/N (%d, %d) is not compliant to FIPS 186-3" + % (L, self._order_bits)) + raise ValueError(error) + + def _compute_nonce(self, msg_hash): + # hash is not used + return Integer.random_range(min_inclusive=1, + max_exclusive=self._order, + randfunc=self._randfunc) + + def _valid_hash(self, msg_hash): + """Verify that SHA-1, SHA-2 or SHA-3 are used""" + return (msg_hash.oid == "1.3.14.3.2.26" or + msg_hash.oid.startswith("2.16.840.1.101.3.4.2.")) + + +class FipsEcDsaSigScheme(DssSigScheme): + + def __init__(self, key, encoding, order, randfunc): + super(FipsEcDsaSigScheme, self).__init__(key, encoding, order) + self._randfunc = randfunc + + def _compute_nonce(self, msg_hash): + return Integer.random_range(min_inclusive=1, + max_exclusive=self._key._curve.order, + randfunc=self._randfunc) + + def _valid_hash(self, msg_hash): + """Verify that SHA-[23] (256|384|512) bits are used to + match the security of P-256 (128 bits), P-384 (192 bits) + or P-521 (256 bits)""" + + modulus_bits = self._key.pointQ.size_in_bits() + + sha256 = ( "2.16.840.1.101.3.4.2.1", "2.16.840.1.101.3.4.2.8" ) + sha384 = ( "2.16.840.1.101.3.4.2.2", "2.16.840.1.101.3.4.2.9" ) + sha512 = ( "2.16.840.1.101.3.4.2.3", "2.16.840.1.101.3.4.2.10") + + if msg_hash.oid in sha256: + return modulus_bits <= 256 + elif msg_hash.oid in sha384: + return modulus_bits <= 384 + else: + return msg_hash.oid in sha512 + + +def new(key, mode, encoding='binary', randfunc=None): + """Create a signature object :class:`DSS_SigScheme` that + can perform (EC)DSA signature or 
verification. + + .. note:: + Refer to `NIST SP 800 Part 1 Rev 4`_ (or newer release) for an + overview of the recommended key lengths. + + :parameter key: + The key to use for computing the signature (*private* keys only) + or verifying one: it must be either + :class:`Crypto.PublicKey.DSA` or :class:`Crypto.PublicKey.ECC`. + + For DSA keys, let ``L`` and ``N`` be the bit lengths of the modulus ``p`` + and of ``q``: the pair ``(L,N)`` must appear in the following list, + in compliance to section 4.2 of `FIPS 186-4`_: + + - (1024, 160) *legacy only; do not create new signatures with this* + - (2048, 224) *deprecated; do not create new signatures with this* + - (2048, 256) + - (3072, 256) + + For ECC, only keys over P-256, P384, and P-521 are accepted. + :type key: + a key object + + :parameter mode: + The parameter can take these values: + + - *'fips-186-3'*. The signature generation is randomized and carried out + according to `FIPS 186-3`_: the nonce ``k`` is taken from the RNG. + - *'deterministic-rfc6979'*. The signature generation is not + randomized. See RFC6979_. + :type mode: + string + + :parameter encoding: + How the signature is encoded. This value determines the output of + :meth:`sign` and the input to :meth:`verify`. + + The following values are accepted: + + - *'binary'* (default), the signature is the raw concatenation + of ``r`` and ``s``. It is defined in the IEEE P.1363 standard. + + For DSA, the size in bytes of the signature is ``N/4`` bytes + (e.g. 64 for ``N=256``). + + For ECDSA, the signature is always twice the length of a point + coordinate (e.g. 64 bytes for P-256). + + - *'der'*, the signature is a ASN.1 DER SEQUENCE + with two INTEGERs (``r`` and ``s``). It is defined in RFC3279_. + The size of the signature is variable. + :type encoding: string + + :parameter randfunc: + A function that returns random *byte strings*, of a given length. + If omitted, the internal RNG is used. + Only applicable for the *'fips-186-3'* mode. 
+ :type randfunc: callable + + .. _FIPS 186-3: http://csrc.nist.gov/publications/fips/fips186-3/fips_186-3.pdf + .. _FIPS 186-4: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf + .. _NIST SP 800 Part 1 Rev 4: http://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-57pt1r4.pdf + .. _RFC6979: http://tools.ietf.org/html/rfc6979 + .. _RFC3279: https://tools.ietf.org/html/rfc3279#section-2.2.2 + """ + + # The goal of the 'mode' parameter is to avoid to + # have the current version of the standard as default. + # + # Over time, such version will be superseded by (for instance) + # FIPS 186-4 and it will be odd to have -3 as default. + + if encoding not in ('binary', 'der'): + raise ValueError("Unknown encoding '%s'" % encoding) + + if isinstance(key, EccKey): + order = key._curve.order + private_key_attr = 'd' + elif isinstance(key, DsaKey): + order = Integer(key.q) + private_key_attr = 'x' + else: + raise ValueError("Unsupported key type " + str(type(key))) + + if key.has_private(): + private_key = getattr(key, private_key_attr) + else: + private_key = None + + if mode == 'deterministic-rfc6979': + return DeterministicDsaSigScheme(key, encoding, order, private_key) + elif mode == 'fips-186-3': + if isinstance(key, EccKey): + return FipsEcDsaSigScheme(key, encoding, order, randfunc) + else: + return FipsDsaSigScheme(key, encoding, order, randfunc) + else: + raise ValueError("Unknown DSS mode '%s'" % mode) diff --git a/env/Lib/site-packages/Crypto/Signature/DSS.pyi b/env/Lib/site-packages/Crypto/Signature/DSS.pyi new file mode 100644 index 0000000..08cad81 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Signature/DSS.pyi @@ -0,0 +1,27 @@ +from typing import Union, Optional, Callable +from typing_extensions import Protocol + +from Crypto.PublicKey.DSA import DsaKey +from Crypto.PublicKey.ECC import EccKey + +class Hash(Protocol): + def digest(self) -> bytes: ... 
+ +__all__ = ['new'] + +class DssSigScheme: + def __init__(self, key: Union[DsaKey, EccKey], encoding: str, order: int) -> None: ... + def can_sign(self) -> bool: ... + def sign(self, msg_hash: Hash) -> bytes: ... + def verify(self, msg_hash: Hash, signature: bytes) -> bool: ... + +class DeterministicDsaSigScheme(DssSigScheme): + def __init__(self, key, encoding, order, private_key) -> None: ... + +class FipsDsaSigScheme(DssSigScheme): + def __init__(self, key: DsaKey, encoding: str, order: int, randfunc: Callable) -> None: ... + +class FipsEcDsaSigScheme(DssSigScheme): + def __init__(self, key: EccKey, encoding: str, order: int, randfunc: Callable) -> None: ... + +def new(key: Union[DsaKey, EccKey], mode: str, encoding: Optional[str]='binary', randfunc: Optional[Callable]=None) -> Union[DeterministicDsaSigScheme, FipsDsaSigScheme, FipsEcDsaSigScheme]: ... diff --git a/env/Lib/site-packages/Crypto/Signature/PKCS1_PSS.py b/env/Lib/site-packages/Crypto/Signature/PKCS1_PSS.py new file mode 100644 index 0000000..c39d388 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Signature/PKCS1_PSS.py @@ -0,0 +1,55 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +""" +Legacy module for PKCS#1 PSS signatures. + +:undocumented: __package__ +""" + +import types + +from Crypto.Signature import pss + + +def _pycrypto_verify(self, hash_object, signature): + try: + self._verify(hash_object, signature) + except (ValueError, TypeError): + return False + return True + + +def new(rsa_key, mgfunc=None, saltLen=None, randfunc=None): + pkcs1 = pss.new(rsa_key, mask_func=mgfunc, + salt_bytes=saltLen, rand_func=randfunc) + pkcs1._verify = pkcs1.verify + pkcs1.verify = types.MethodType(_pycrypto_verify, pkcs1) + return pkcs1 diff --git a/env/Lib/site-packages/Crypto/Signature/PKCS1_PSS.pyi b/env/Lib/site-packages/Crypto/Signature/PKCS1_PSS.pyi new file mode 100644 index 0000000..882cc8f --- /dev/null +++ b/env/Lib/site-packages/Crypto/Signature/PKCS1_PSS.pyi @@ -0,0 +1,7 @@ +from typing import Optional, Callable + +from Crypto.PublicKey.RSA import RsaKey +from Crypto.Signature.pss import PSS_SigScheme + + +def new(rsa_key: RsaKey, mgfunc: Optional[Callable]=None, saltLen: Optional[int]=None, randfunc: Optional[Callable]=None) -> PSS_SigScheme: ... 
diff --git a/env/Lib/site-packages/Crypto/Signature/PKCS1_v1_5.py b/env/Lib/site-packages/Crypto/Signature/PKCS1_v1_5.py new file mode 100644 index 0000000..ac888ed --- /dev/null +++ b/env/Lib/site-packages/Crypto/Signature/PKCS1_v1_5.py @@ -0,0 +1,53 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +""" +Legacy module for PKCS#1 v1.5 signatures. 
+ +:undocumented: __package__ +""" + +import types + +from Crypto.Signature import pkcs1_15 + +def _pycrypto_verify(self, hash_object, signature): + try: + self._verify(hash_object, signature) + except (ValueError, TypeError): + return False + return True + +def new(rsa_key): + pkcs1 = pkcs1_15.new(rsa_key) + pkcs1._verify = pkcs1.verify + pkcs1.verify = types.MethodType(_pycrypto_verify, pkcs1) + return pkcs1 + diff --git a/env/Lib/site-packages/Crypto/Signature/PKCS1_v1_5.pyi b/env/Lib/site-packages/Crypto/Signature/PKCS1_v1_5.pyi new file mode 100644 index 0000000..55b9637 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Signature/PKCS1_v1_5.pyi @@ -0,0 +1,6 @@ +from Crypto.PublicKey.RSA import RsaKey + +from Crypto.Signature.pkcs1_15 import PKCS115_SigScheme + + +def new(rsa_key: RsaKey) -> PKCS115_SigScheme: ... \ No newline at end of file diff --git a/env/Lib/site-packages/Crypto/Signature/__init__.py b/env/Lib/site-packages/Crypto/Signature/__init__.py new file mode 100644 index 0000000..da028a5 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Signature/__init__.py @@ -0,0 +1,36 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +"""Digital signature protocols + +A collection of standardized protocols to carry out digital signatures. +""" + +__all__ = ['PKCS1_v1_5', 'PKCS1_PSS', 'DSS', 'pkcs1_15', 'pss'] diff --git a/env/Lib/site-packages/Crypto/Signature/pkcs1_15.py b/env/Lib/site-packages/Crypto/Signature/pkcs1_15.py new file mode 100644 index 0000000..54a4bf7 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Signature/pkcs1_15.py @@ -0,0 +1,222 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import Crypto.Util.number +from Crypto.Util.number import ceil_div, bytes_to_long, long_to_bytes +from Crypto.Util.asn1 import DerSequence, DerNull, DerOctetString, DerObjectId + +class PKCS115_SigScheme: + """A signature object for ``RSASSA-PKCS1-v1_5``. + Do not instantiate directly. + Use :func:`Crypto.Signature.pkcs1_15.new`. + """ + + def __init__(self, rsa_key): + """Initialize this PKCS#1 v1.5 signature scheme object. + + :Parameters: + rsa_key : an RSA key object + Creation of signatures is only possible if this is a *private* + RSA key. Verification of signatures is always possible. + """ + self._key = rsa_key + + def can_sign(self): + """Return ``True`` if this object can be used to sign messages.""" + return self._key.has_private() + + def sign(self, msg_hash): + """Create the PKCS#1 v1.5 signature of a message. + + This function is also called ``RSASSA-PKCS1-V1_5-SIGN`` and + it is specified in + `section 8.2.1 of RFC8017 `_. + + :parameter msg_hash: + This is an object from the :mod:`Crypto.Hash` package. + It has been used to digest the message to sign. 
+ :type msg_hash: hash object + + :return: the signature encoded as a *byte string*. + :raise ValueError: if the RSA key is not long enough for the given hash algorithm. + :raise TypeError: if the RSA key has no private half. + """ + + # See 8.2.1 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits,8) # Convert from bits to bytes + + # Step 1 + em = _EMSA_PKCS1_V1_5_ENCODE(msg_hash, k) + # Step 2a (OS2IP) + em_int = bytes_to_long(em) + # Step 2b (RSASP1) + m_int = self._key._decrypt(em_int) + # Step 2c (I2OSP) + signature = long_to_bytes(m_int, k) + return signature + + def verify(self, msg_hash, signature): + """Check if the PKCS#1 v1.5 signature over a message is valid. + + This function is also called ``RSASSA-PKCS1-V1_5-VERIFY`` and + it is specified in + `section 8.2.2 of RFC8037 `_. + + :parameter msg_hash: + The hash that was carried out over the message. This is an object + belonging to the :mod:`Crypto.Hash` module. + :type parameter: hash object + + :parameter signature: + The signature that needs to be validated. + :type signature: byte string + + :raise ValueError: if the signature is not valid. + """ + + # See 8.2.2 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits, 8) # Convert from bits to bytes + + # Step 1 + if len(signature) != k: + raise ValueError("Invalid signature") + # Step 2a (O2SIP) + signature_int = bytes_to_long(signature) + # Step 2b (RSAVP1) + em_int = self._key._encrypt(signature_int) + # Step 2c (I2OSP) + em1 = long_to_bytes(em_int, k) + # Step 3 + try: + possible_em1 = [ _EMSA_PKCS1_V1_5_ENCODE(msg_hash, k, True) ] + # MD2/4/5 hashes always require NULL params in AlgorithmIdentifier. + # For all others, it is optional. 
+ try: + algorithm_is_md = msg_hash.oid.startswith('1.2.840.113549.2.') + except AttributeError: + algorithm_is_md = False + if not algorithm_is_md: # MD2/MD4/MD5 + possible_em1.append(_EMSA_PKCS1_V1_5_ENCODE(msg_hash, k, False)) + except ValueError: + raise ValueError("Invalid signature") + # Step 4 + # By comparing the full encodings (as opposed to checking each + # of its components one at a time) we avoid attacks to the padding + # scheme like Bleichenbacher's (see http://www.mail-archive.com/cryptography@metzdowd.com/msg06537). + # + if em1 not in possible_em1: + raise ValueError("Invalid signature") + pass + + +def _EMSA_PKCS1_V1_5_ENCODE(msg_hash, emLen, with_hash_parameters=True): + """ + Implement the ``EMSA-PKCS1-V1_5-ENCODE`` function, as defined + in PKCS#1 v2.1 (RFC3447, 9.2). + + ``_EMSA-PKCS1-V1_5-ENCODE`` actually accepts the message ``M`` as input, + and hash it internally. Here, we expect that the message has already + been hashed instead. + + :Parameters: + msg_hash : hash object + The hash object that holds the digest of the message being signed. + emLen : int + The length the final encoding must have, in bytes. + with_hash_parameters : bool + If True (default), include NULL parameters for the hash + algorithm in the ``digestAlgorithm`` SEQUENCE. + + :attention: the early standard (RFC2313) stated that ``DigestInfo`` + had to be BER-encoded. This means that old signatures + might have length tags in indefinite form, which + is not supported in DER. Such encoding cannot be + reproduced by this function. + + :Return: An ``emLen`` byte long string that encodes the hash. + """ + + # First, build the ASN.1 DER object DigestInfo: + # + # DigestInfo ::= SEQUENCE { + # digestAlgorithm AlgorithmIdentifier, + # digest OCTET STRING + # } + # + # where digestAlgorithm identifies the hash function and shall be an + # algorithm ID with an OID in the set PKCS1-v1-5DigestAlgorithms. 
+ # + # PKCS1-v1-5DigestAlgorithms ALGORITHM-IDENTIFIER ::= { + # { OID id-md2 PARAMETERS NULL }| + # { OID id-md5 PARAMETERS NULL }| + # { OID id-sha1 PARAMETERS NULL }| + # { OID id-sha256 PARAMETERS NULL }| + # { OID id-sha384 PARAMETERS NULL }| + # { OID id-sha512 PARAMETERS NULL } + # } + # + # Appendix B.1 also says that for SHA-1/-2 algorithms, the parameters + # should be omitted. They may be present, but when they are, they shall + # have NULL value. + + digestAlgo = DerSequence([ DerObjectId(msg_hash.oid).encode() ]) + + if with_hash_parameters: + digestAlgo.append(DerNull().encode()) + + digest = DerOctetString(msg_hash.digest()) + digestInfo = DerSequence([ + digestAlgo.encode(), + digest.encode() + ]).encode() + + # We need at least 11 bytes for the remaining data: 3 fixed bytes and + # at least 8 bytes of padding). + if emLen bytes: ... + +class PKCS115_SigScheme: + def __init__(self, rsa_key: RsaKey) -> None: ... + def can_sign(self) -> bool: ... + def sign(self, msg_hash: Hash) -> bytes: ... + def verify(self, msg_hash: Hash, signature: bytes) -> None: ... + +def _EMSA_PKCS1_V1_5_ENCODE(msg_hash: Hash, emLen: int, with_hash_parameters: Optional[bool]=True) -> bytes: ... + +def new(rsa_key: RsaKey) -> PKCS115_SigScheme: ... diff --git a/env/Lib/site-packages/Crypto/Signature/pss.py b/env/Lib/site-packages/Crypto/Signature/pss.py new file mode 100644 index 0000000..5f34ace --- /dev/null +++ b/env/Lib/site-packages/Crypto/Signature/pss.py @@ -0,0 +1,386 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. 
Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util.py3compat import bchr, bord, iter_range +import Crypto.Util.number +from Crypto.Util.number import (ceil_div, + long_to_bytes, + bytes_to_long + ) +from Crypto.Util.strxor import strxor +from Crypto import Random + + +class PSS_SigScheme: + """A signature object for ``RSASSA-PSS``. + Do not instantiate directly. + Use :func:`Crypto.Signature.pss.new`. + """ + + def __init__(self, key, mgfunc, saltLen, randfunc): + """Initialize this PKCS#1 PSS signature scheme object. + + :Parameters: + key : an RSA key object + If a private half is given, both signature and + verification are possible. + If a public half is given, only verification is possible. + mgfunc : callable + A mask generation function that accepts two parameters: + a string to use as seed, and the lenth of the mask to + generate, in bytes. + saltLen : integer + Length of the salt, in bytes. 
+ randfunc : callable + A function that returns random bytes. + """ + + self._key = key + self._saltLen = saltLen + self._mgfunc = mgfunc + self._randfunc = randfunc + + def can_sign(self): + """Return ``True`` if this object can be used to sign messages.""" + return self._key.has_private() + + def sign(self, msg_hash): + """Create the PKCS#1 PSS signature of a message. + + This function is also called ``RSASSA-PSS-SIGN`` and + it is specified in + `section 8.1.1 of RFC8017 `_. + + :parameter msg_hash: + This is an object from the :mod:`Crypto.Hash` package. + It has been used to digest the message to sign. + :type msg_hash: hash object + + :return: the signature encoded as a *byte string*. + :raise ValueError: if the RSA key is not long enough for the given hash algorithm. + :raise TypeError: if the RSA key has no private half. + """ + + # Set defaults for salt length and mask generation function + if self._saltLen is None: + sLen = msg_hash.digest_size + else: + sLen = self._saltLen + + if self._mgfunc is None: + mgf = lambda x, y: MGF1(x, y, msg_hash) + else: + mgf = self._mgfunc + + modBits = Crypto.Util.number.size(self._key.n) + + # See 8.1.1 in RFC3447 + k = ceil_div(modBits, 8) # k is length in bytes of the modulus + # Step 1 + em = _EMSA_PSS_ENCODE(msg_hash, modBits-1, self._randfunc, mgf, sLen) + # Step 2a (OS2IP) + em_int = bytes_to_long(em) + # Step 2b (RSASP1) + m_int = self._key._decrypt(em_int) + # Step 2c (I2OSP) + signature = long_to_bytes(m_int, k) + return signature + + def verify(self, msg_hash, signature): + """Check if the PKCS#1 PSS signature over a message is valid. + + This function is also called ``RSASSA-PSS-VERIFY`` and + it is specified in + `section 8.1.2 of RFC8037 `_. + + :parameter msg_hash: + The hash that was carried out over the message. This is an object + belonging to the :mod:`Crypto.Hash` module. + :type parameter: hash object + + :parameter signature: + The signature that needs to be validated. 
+ :type signature: bytes + + :raise ValueError: if the signature is not valid. + """ + + # Set defaults for salt length and mask generation function + if self._saltLen is None: + sLen = msg_hash.digest_size + else: + sLen = self._saltLen + if self._mgfunc: + mgf = self._mgfunc + else: + mgf = lambda x, y: MGF1(x, y, msg_hash) + + modBits = Crypto.Util.number.size(self._key.n) + + # See 8.1.2 in RFC3447 + k = ceil_div(modBits, 8) # Convert from bits to bytes + # Step 1 + if len(signature) != k: + raise ValueError("Incorrect signature") + # Step 2a (O2SIP) + signature_int = bytes_to_long(signature) + # Step 2b (RSAVP1) + em_int = self._key._encrypt(signature_int) + # Step 2c (I2OSP) + emLen = ceil_div(modBits - 1, 8) + em = long_to_bytes(em_int, emLen) + # Step 3/4 + _EMSA_PSS_VERIFY(msg_hash, em, modBits-1, mgf, sLen) + + +def MGF1(mgfSeed, maskLen, hash_gen): + """Mask Generation Function, described in `B.2.1 of RFC8017 + `_. + + :param mfgSeed: + seed from which the mask is generated + :type mfgSeed: byte string + + :param maskLen: + intended length in bytes of the mask + :type maskLen: integer + + :param hash_gen: + A module or a hash object from :mod:`Crypto.Hash` + :type hash_object: + + :return: the mask, as a *byte string* + """ + + T = b"" + for counter in iter_range(ceil_div(maskLen, hash_gen.digest_size)): + c = long_to_bytes(counter, 4) + hobj = hash_gen.new() + hobj.update(mgfSeed + c) + T = T + hobj.digest() + assert(len(T) >= maskLen) + return T[:maskLen] + + +def _EMSA_PSS_ENCODE(mhash, emBits, randFunc, mgf, sLen): + r""" + Implement the ``EMSA-PSS-ENCODE`` function, as defined + in PKCS#1 v2.1 (RFC3447, 9.1.1). + + The original ``EMSA-PSS-ENCODE`` actually accepts the message ``M`` + as input, and hash it internally. Here, we expect that the message + has already been hashed instead. + + :Parameters: + mhash : hash object + The hash object that holds the digest of the message being signed. 
+ emBits : int + Maximum length of the final encoding, in bits. + randFunc : callable + An RNG function that accepts as only parameter an int, and returns + a string of random bytes, to be used as salt. + mgf : callable + A mask generation function that accepts two parameters: a string to + use as seed, and the lenth of the mask to generate, in bytes. + sLen : int + Length of the salt, in bytes. + + :Return: An ``emLen`` byte long string that encodes the hash + (with ``emLen = \ceil(emBits/8)``). + + :Raise ValueError: + When digest or salt length are too big. + """ + + emLen = ceil_div(emBits, 8) + + # Bitmask of digits that fill up + lmask = 0 + for i in iter_range(8*emLen-emBits): + lmask = lmask >> 1 | 0x80 + + # Step 1 and 2 have been already done + # Step 3 + if emLen < mhash.digest_size+sLen+2: + raise ValueError("Digest or salt length are too long" + " for given key size.") + # Step 4 + salt = randFunc(sLen) + # Step 5 + m_prime = bchr(0)*8 + mhash.digest() + salt + # Step 6 + h = mhash.new() + h.update(m_prime) + # Step 7 + ps = bchr(0)*(emLen-sLen-mhash.digest_size-2) + # Step 8 + db = ps + bchr(1) + salt + # Step 9 + dbMask = mgf(h.digest(), emLen-mhash.digest_size-1) + # Step 10 + maskedDB = strxor(db, dbMask) + # Step 11 + maskedDB = bchr(bord(maskedDB[0]) & ~lmask) + maskedDB[1:] + # Step 12 + em = maskedDB + h.digest() + bchr(0xBC) + return em + + +def _EMSA_PSS_VERIFY(mhash, em, emBits, mgf, sLen): + """ + Implement the ``EMSA-PSS-VERIFY`` function, as defined + in PKCS#1 v2.1 (RFC3447, 9.1.2). + + ``EMSA-PSS-VERIFY`` actually accepts the message ``M`` as input, + and hash it internally. Here, we expect that the message has already + been hashed instead. + + :Parameters: + mhash : hash object + The hash object that holds the digest of the message to be verified. + em : string + The signature to verify, therefore proving that the sender really + signed the message that was received. + emBits : int + Length of the final encoding (em), in bits. 
+ mgf : callable + A mask generation function that accepts two parameters: a string to + use as seed, and the lenth of the mask to generate, in bytes. + sLen : int + Length of the salt, in bytes. + + :Raise ValueError: + When the encoding is inconsistent, or the digest or salt lengths + are too big. + """ + + emLen = ceil_div(emBits, 8) + + # Bitmask of digits that fill up + lmask = 0 + for i in iter_range(8*emLen-emBits): + lmask = lmask >> 1 | 0x80 + + # Step 1 and 2 have been already done + # Step 3 + if emLen < mhash.digest_size+sLen+2: + raise ValueError("Incorrect signature") + # Step 4 + if ord(em[-1:]) != 0xBC: + raise ValueError("Incorrect signature") + # Step 5 + maskedDB = em[:emLen-mhash.digest_size-1] + h = em[emLen-mhash.digest_size-1:-1] + # Step 6 + if lmask & bord(em[0]): + raise ValueError("Incorrect signature") + # Step 7 + dbMask = mgf(h, emLen-mhash.digest_size-1) + # Step 8 + db = strxor(maskedDB, dbMask) + # Step 9 + db = bchr(bord(db[0]) & ~lmask) + db[1:] + # Step 10 + if not db.startswith(bchr(0)*(emLen-mhash.digest_size-sLen-2) + bchr(1)): + raise ValueError("Incorrect signature") + # Step 11 + if sLen > 0: + salt = db[-sLen:] + else: + salt = b"" + # Step 12 + m_prime = bchr(0)*8 + mhash.digest() + salt + # Step 13 + hobj = mhash.new() + hobj.update(m_prime) + hp = hobj.digest() + # Step 14 + if h != hp: + raise ValueError("Incorrect signature") + + +def new(rsa_key, **kwargs): + """Create an object for making or verifying PKCS#1 PSS signatures. + + :parameter rsa_key: + The RSA key to use for signing or verifying the message. + This is a :class:`Crypto.PublicKey.RSA` object. + Signing is only possible when ``rsa_key`` is a **private** RSA key. + :type rsa_key: RSA object + + :Keyword Arguments: + + * *mask_func* (``callable``) -- + A function that returns the mask (as `bytes`). + It must accept two parameters: a seed (as `bytes`) + and the length of the data to return. 
+ + If not specified, it will be the function :func:`MGF1` defined in + `RFC8017 `_ and + combined with the same hash algorithm applied to the + message to sign or verify. + + If you want to use a different function, for instance still :func:`MGF1` + but together with another hash, you can do:: + + from Crypto.Hash import SHA256 + from Crypto.Signature.pss import MGF1 + mgf = lambda x, y: MGF1(x, y, SHA256) + + * *salt_bytes* (``integer``) -- + Length of the salt, in bytes. + It is a value between 0 and ``emLen - hLen - 2``, where ``emLen`` + is the size of the RSA modulus and ``hLen`` is the size of the digest + applied to the message to sign or verify. + + The salt is generated internally, you don't need to provide it. + + If not specified, the salt length will be ``hLen``. + If it is zero, the signature scheme becomes deterministic. + + Note that in some implementations such as OpenSSL the default + salt length is ``emLen - hLen - 2`` (even though it is not more + secure than ``hLen``). + + * *rand_func* (``callable``) -- + A function that returns random ``bytes``, of the desired length. + The default is :func:`Crypto.Random.get_random_bytes`. + + :return: a :class:`PSS_SigScheme` signature object + """ + + mask_func = kwargs.pop("mask_func", None) + salt_len = kwargs.pop("salt_bytes", None) + rand_func = kwargs.pop("rand_func", None) + if rand_func is None: + rand_func = Random.get_random_bytes + if kwargs: + raise ValueError("Unknown keywords: " + str(kwargs.keys())) + return PSS_SigScheme(rsa_key, mask_func, salt_len, rand_func) diff --git a/env/Lib/site-packages/Crypto/Signature/pss.pyi b/env/Lib/site-packages/Crypto/Signature/pss.pyi new file mode 100644 index 0000000..4d216ca --- /dev/null +++ b/env/Lib/site-packages/Crypto/Signature/pss.pyi @@ -0,0 +1,30 @@ +from typing import Union, Callable, Optional +from typing_extensions import Protocol + +from Crypto.PublicKey.RSA import RsaKey + + +class Hash(Protocol): + def digest(self) -> bytes: ... 
+ def update(self, bytes) -> None: ... + + +class HashModule(Protocol): + @staticmethod + def new(data: Optional[bytes]) -> Hash: ... + + +MaskFunction = Callable[[bytes, int, Union[Hash, HashModule]], bytes] +RndFunction = Callable[[int], bytes] + +class PSS_SigScheme: + def __init__(self, key: RsaKey, mgfunc: RndFunction, saltLen: int, randfunc: RndFunction) -> None: ... + def can_sign(self) -> bool: ... + def sign(self, msg_hash: Hash) -> bytes: ... + def verify(self, msg_hash: Hash, signature: bytes) -> None: ... + + +MGF1 : MaskFunction +def _EMSA_PSS_ENCODE(mhash: Hash, emBits: int, randFunc: RndFunction, mgf:MaskFunction, sLen: int) -> str: ... +def _EMSA_PSS_VERIFY(mhash: Hash, em: str, emBits: int, mgf: MaskFunction, sLen: int) -> None: ... +def new(rsa_key: RsaKey, **kwargs: Union[MaskFunction, RndFunction, int]) -> PSS_SigScheme: ... diff --git a/env/Lib/site-packages/Crypto/Util/Counter.py b/env/Lib/site-packages/Crypto/Util/Counter.py new file mode 100644 index 0000000..c67bc95 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/Counter.py @@ -0,0 +1,77 @@ +# -*- coding: ascii -*- +# +# Util/Counter.py : Fast counter for use with CTR-mode ciphers +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +def new(nbits, prefix=b"", suffix=b"", initial_value=1, little_endian=False, allow_wraparound=False): + """Create a stateful counter block function suitable for CTR encryption modes. + + Each call to the function returns the next counter block. + Each counter block is made up by three parts: + + +------+--------------+-------+ + |prefix| counter value|postfix| + +------+--------------+-------+ + + The counter value is incremented by 1 at each call. + + Args: + nbits (integer): + Length of the desired counter value, in bits. It must be a multiple of 8. + prefix (byte string): + The constant prefix of the counter block. By default, no prefix is + used. + suffix (byte string): + The constant postfix of the counter block. By default, no suffix is + used. + initial_value (integer): + The initial value of the counter. Default value is 1. + Its length in bits must not exceed the argument ``nbits``. + little_endian (boolean): + If ``True``, the counter number will be encoded in little endian format. + If ``False`` (default), in big endian format. + allow_wraparound (boolean): + This parameter is ignored. + Returns: + An object that can be passed with the :data:`counter` parameter to a CTR mode + cipher. + + It must hold that *len(prefix) + nbits//8 + len(suffix)* matches the + block size of the underlying block cipher. 
+ """ + + if (nbits % 8) != 0: + raise ValueError("'nbits' must be a multiple of 8") + + iv_bl = initial_value.bit_length() + if iv_bl > nbits: + raise ValueError("Initial value takes %d bits but it is longer than " + "the counter (%d bits)" % + (iv_bl, nbits)) + + # Ignore wraparound + return {"counter_len": nbits // 8, + "prefix": prefix, + "suffix": suffix, + "initial_value": initial_value, + "little_endian": little_endian + } diff --git a/env/Lib/site-packages/Crypto/Util/Counter.pyi b/env/Lib/site-packages/Crypto/Util/Counter.pyi new file mode 100644 index 0000000..fa2ffdd --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/Counter.pyi @@ -0,0 +1,5 @@ +from typing import Optional, Union, Dict + +def new(nbits: int, prefix: Optional[bytes]=..., suffix: Optional[bytes]=..., initial_value: Optional[int]=1, + little_endian: Optional[bool]=False, allow_wraparound: Optional[bool]=False) -> \ + Dict[str, Union[int, bytes, bool]]: ... diff --git a/env/Lib/site-packages/Crypto/Util/Padding.py b/env/Lib/site-packages/Crypto/Util/Padding.py new file mode 100644 index 0000000..da69e55 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/Padding.py @@ -0,0 +1,108 @@ +# +# Util/Padding.py : Functions to manage padding +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +__all__ = [ 'pad', 'unpad' ] + +from Crypto.Util.py3compat import * + + +def pad(data_to_pad, block_size, style='pkcs7'): + """Apply standard padding. + + Args: + data_to_pad (byte string): + The data that needs to be padded. + block_size (integer): + The block boundary to use for padding. The output length is guaranteed + to be a multiple of :data:`block_size`. + style (string): + Padding algorithm. It can be *'pkcs7'* (default), *'iso7816'* or *'x923'*. + + Return: + byte string : the original data with the appropriate padding added at the end. + """ + + padding_len = block_size-len(data_to_pad)%block_size + if style == 'pkcs7': + padding = bchr(padding_len)*padding_len + elif style == 'x923': + padding = bchr(0)*(padding_len-1) + bchr(padding_len) + elif style == 'iso7816': + padding = bchr(128) + bchr(0)*(padding_len-1) + else: + raise ValueError("Unknown padding style") + return data_to_pad + padding + + +def unpad(padded_data, block_size, style='pkcs7'): + """Remove standard padding. + + Args: + padded_data (byte string): + A piece of data with padding that needs to be stripped. 
+ block_size (integer): + The block boundary to use for padding. The input length + must be a multiple of :data:`block_size`. + style (string): + Padding algorithm. It can be *'pkcs7'* (default), *'iso7816'* or *'x923'*. + Return: + byte string : data without padding. + Raises: + ValueError: if the padding is incorrect. + """ + + pdata_len = len(padded_data) + if pdata_len == 0: + raise ValueError("Zero-length input cannot be unpadded") + if pdata_len % block_size: + raise ValueError("Input data is not padded") + if style in ('pkcs7', 'x923'): + padding_len = bord(padded_data[-1]) + if padding_len<1 or padding_len>min(block_size, pdata_len): + raise ValueError("Padding is incorrect.") + if style == 'pkcs7': + if padded_data[-padding_len:]!=bchr(padding_len)*padding_len: + raise ValueError("PKCS#7 padding is incorrect.") + else: + if padded_data[-padding_len:-1]!=bchr(0)*(padding_len-1): + raise ValueError("ANSI X.923 padding is incorrect.") + elif style == 'iso7816': + padding_len = pdata_len - padded_data.rfind(bchr(128)) + if padding_len<1 or padding_len>min(block_size, pdata_len): + raise ValueError("Padding is incorrect.") + if padding_len>1 and padded_data[1-padding_len:]!=bchr(0)*(padding_len-1): + raise ValueError("ISO 7816-4 padding is incorrect.") + else: + raise ValueError("Unknown padding style") + return padded_data[:-padding_len] + diff --git a/env/Lib/site-packages/Crypto/Util/Padding.pyi b/env/Lib/site-packages/Crypto/Util/Padding.pyi new file mode 100644 index 0000000..4d8d30d --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/Padding.pyi @@ -0,0 +1,6 @@ +from typing import Optional + +__all__ = [ 'pad', 'unpad' ] + +def pad(data_to_pad: bytes, block_size: int, style: Optional[str]='pkcs7') -> bytes: ... +def unpad(padded_data: bytes, block_size: int, style: Optional[str]='pkcs7') -> bytes: ... 
\ No newline at end of file diff --git a/env/Lib/site-packages/Crypto/Util/RFC1751.py b/env/Lib/site-packages/Crypto/Util/RFC1751.py new file mode 100644 index 0000000..9ed52d2 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/RFC1751.py @@ -0,0 +1,386 @@ +# rfc1751.py : Converts between 128-bit strings and a human-readable +# sequence of words, as defined in RFC1751: "A Convention for +# Human-Readable 128-bit Keys", by Daniel L. McDonald. +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew M. Kuchling and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +from __future__ import print_function + +import binascii + +from Crypto.Util.py3compat import bord, bchr + +binary = {0: '0000', 1: '0001', 2: '0010', 3: '0011', 4: '0100', 5: '0101', + 6: '0110', 7: '0111', 8: '1000', 9: '1001', 10: '1010', 11: '1011', + 12: '1100', 13: '1101', 14: '1110', 15: '1111'} + + +def _key2bin(s): + "Convert a key into a string of binary digits" + kl = map(lambda x: bord(x), s) + kl = map(lambda x: binary[x >> 4] + binary[x & 15], kl) + return ''.join(kl) + + +def _extract(key, start, length): + """Extract a bitstring(2.x)/bytestring(2.x) from a string of binary digits, and return its + numeric value.""" + + result = 0 + for y in key[start:start+length]: + result = result * 2 + ord(y) - 48 + return result + + +def key_to_english(key): + """Transform an arbitrary key into a string containing English words. + + Example:: + + >>> from Crypto.Util.RFC1751 import key_to_english + >>> key_to_english(b'66666666') + 'RAM LOIS GOAD CREW CARE HIT' + + Args: + key (byte string): + The key to convert. Its length must be a multiple of 8. + Return: + A string of English words. + """ + + if len(key) % 8 != 0: + raise ValueError('The length of the key must be a multiple of 8.') + + english = '' + for index in range(0, len(key), 8): # Loop over 8-byte subkeys + subkey = key[index:index + 8] + # Compute the parity of the key + skbin = _key2bin(subkey) + p = 0 + for i in range(0, 64, 2): + p = p + _extract(skbin, i, 2) + # Append parity bits to the subkey + skbin = _key2bin(subkey + bchr((p << 6) & 255)) + for i in range(0, 64, 11): + english = english + wordlist[_extract(skbin, i, 11)] + ' ' + + return english.strip() + + +def english_to_key(s): + """Transform a string into a corresponding key. 
+ + Example:: + + >>> from Crypto.Util.RFC1751 import english_to_key + >>> english_to_key('RAM LOIS GOAD CREW CARE HIT') + b'66666666' + + Args: + s (string): the string with the words separated by whitespace; + the number of words must be a multiple of 6. + Return: + A byte string. + """ + + L = s.upper().split() + key = b'' + for index in range(0, len(L), 6): + sublist = L[index:index + 6] + char = 9 * [0] + bits = 0 + for i in sublist: + index = wordlist.index(i) + shift = (8 - (bits + 11) % 8) % 8 + y = index << shift + cl, cc, cr = (y >> 16), (y >> 8) & 0xff, y & 0xff + if (shift > 5): + char[bits >> 3] = char[bits >> 3] | cl + char[(bits >> 3) + 1] = char[(bits >> 3) + 1] | cc + char[(bits >> 3) + 2] = char[(bits >> 3) + 2] | cr + elif shift > -3: + char[bits >> 3] = char[bits >> 3] | cc + char[(bits >> 3) + 1] = char[(bits >> 3) + 1] | cr + else: + char[bits >> 3] = char[bits >> 3] | cr + bits = bits + 11 + + subkey = b'' + for y in char: + subkey = subkey + bchr(y) + + # Check the parity of the resulting key + skbin = _key2bin(subkey) + p = 0 + for i in range(0, 64, 2): + p = p + _extract(skbin, i, 2) + if (p & 3) != _extract(skbin, 64, 2): + raise ValueError("Parity error in resulting key") + key = key + subkey[0:8] + return key + + +wordlist = [ + "A", "ABE", "ACE", "ACT", "AD", "ADA", "ADD", + "AGO", "AID", "AIM", "AIR", "ALL", "ALP", "AM", "AMY", "AN", "ANA", + "AND", "ANN", "ANT", "ANY", "APE", "APS", "APT", "ARC", "ARE", "ARK", + "ARM", "ART", "AS", "ASH", "ASK", "AT", "ATE", "AUG", "AUK", "AVE", + "AWE", "AWK", "AWL", "AWN", "AX", "AYE", "BAD", "BAG", "BAH", "BAM", + "BAN", "BAR", "BAT", "BAY", "BE", "BED", "BEE", "BEG", "BEN", "BET", + "BEY", "BIB", "BID", "BIG", "BIN", "BIT", "BOB", "BOG", "BON", "BOO", + "BOP", "BOW", "BOY", "BUB", "BUD", "BUG", "BUM", "BUN", "BUS", "BUT", + "BUY", "BY", "BYE", "CAB", "CAL", "CAM", "CAN", "CAP", "CAR", "CAT", + "CAW", "COD", "COG", "COL", "CON", "COO", "COP", "COT", "COW", "COY", + "CRY", "CUB", "CUE", "CUP", 
"CUR", "CUT", "DAB", "DAD", "DAM", "DAN", + "DAR", "DAY", "DEE", "DEL", "DEN", "DES", "DEW", "DID", "DIE", "DIG", + "DIN", "DIP", "DO", "DOE", "DOG", "DON", "DOT", "DOW", "DRY", "DUB", + "DUD", "DUE", "DUG", "DUN", "EAR", "EAT", "ED", "EEL", "EGG", "EGO", + "ELI", "ELK", "ELM", "ELY", "EM", "END", "EST", "ETC", "EVA", "EVE", + "EWE", "EYE", "FAD", "FAN", "FAR", "FAT", "FAY", "FED", "FEE", "FEW", + "FIB", "FIG", "FIN", "FIR", "FIT", "FLO", "FLY", "FOE", "FOG", "FOR", + "FRY", "FUM", "FUN", "FUR", "GAB", "GAD", "GAG", "GAL", "GAM", "GAP", + "GAS", "GAY", "GEE", "GEL", "GEM", "GET", "GIG", "GIL", "GIN", "GO", + "GOT", "GUM", "GUN", "GUS", "GUT", "GUY", "GYM", "GYP", "HA", "HAD", + "HAL", "HAM", "HAN", "HAP", "HAS", "HAT", "HAW", "HAY", "HE", "HEM", + "HEN", "HER", "HEW", "HEY", "HI", "HID", "HIM", "HIP", "HIS", "HIT", + "HO", "HOB", "HOC", "HOE", "HOG", "HOP", "HOT", "HOW", "HUB", "HUE", + "HUG", "HUH", "HUM", "HUT", "I", "ICY", "IDA", "IF", "IKE", "ILL", + "INK", "INN", "IO", "ION", "IQ", "IRA", "IRE", "IRK", "IS", "IT", + "ITS", "IVY", "JAB", "JAG", "JAM", "JAN", "JAR", "JAW", "JAY", "JET", + "JIG", "JIM", "JO", "JOB", "JOE", "JOG", "JOT", "JOY", "JUG", "JUT", + "KAY", "KEG", "KEN", "KEY", "KID", "KIM", "KIN", "KIT", "LA", "LAB", + "LAC", "LAD", "LAG", "LAM", "LAP", "LAW", "LAY", "LEA", "LED", "LEE", + "LEG", "LEN", "LEO", "LET", "LEW", "LID", "LIE", "LIN", "LIP", "LIT", + "LO", "LOB", "LOG", "LOP", "LOS", "LOT", "LOU", "LOW", "LOY", "LUG", + "LYE", "MA", "MAC", "MAD", "MAE", "MAN", "MAO", "MAP", "MAT", "MAW", + "MAY", "ME", "MEG", "MEL", "MEN", "MET", "MEW", "MID", "MIN", "MIT", + "MOB", "MOD", "MOE", "MOO", "MOP", "MOS", "MOT", "MOW", "MUD", "MUG", + "MUM", "MY", "NAB", "NAG", "NAN", "NAP", "NAT", "NAY", "NE", "NED", + "NEE", "NET", "NEW", "NIB", "NIL", "NIP", "NIT", "NO", "NOB", "NOD", + "NON", "NOR", "NOT", "NOV", "NOW", "NU", "NUN", "NUT", "O", "OAF", + "OAK", "OAR", "OAT", "ODD", "ODE", "OF", "OFF", "OFT", "OH", "OIL", + "OK", "OLD", "ON", "ONE", "OR", "ORB", 
"ORE", "ORR", "OS", "OTT", + "OUR", "OUT", "OVA", "OW", "OWE", "OWL", "OWN", "OX", "PA", "PAD", + "PAL", "PAM", "PAN", "PAP", "PAR", "PAT", "PAW", "PAY", "PEA", "PEG", + "PEN", "PEP", "PER", "PET", "PEW", "PHI", "PI", "PIE", "PIN", "PIT", + "PLY", "PO", "POD", "POE", "POP", "POT", "POW", "PRO", "PRY", "PUB", + "PUG", "PUN", "PUP", "PUT", "QUO", "RAG", "RAM", "RAN", "RAP", "RAT", + "RAW", "RAY", "REB", "RED", "REP", "RET", "RIB", "RID", "RIG", "RIM", + "RIO", "RIP", "ROB", "ROD", "ROE", "RON", "ROT", "ROW", "ROY", "RUB", + "RUE", "RUG", "RUM", "RUN", "RYE", "SAC", "SAD", "SAG", "SAL", "SAM", + "SAN", "SAP", "SAT", "SAW", "SAY", "SEA", "SEC", "SEE", "SEN", "SET", + "SEW", "SHE", "SHY", "SIN", "SIP", "SIR", "SIS", "SIT", "SKI", "SKY", + "SLY", "SO", "SOB", "SOD", "SON", "SOP", "SOW", "SOY", "SPA", "SPY", + "SUB", "SUD", "SUE", "SUM", "SUN", "SUP", "TAB", "TAD", "TAG", "TAN", + "TAP", "TAR", "TEA", "TED", "TEE", "TEN", "THE", "THY", "TIC", "TIE", + "TIM", "TIN", "TIP", "TO", "TOE", "TOG", "TOM", "TON", "TOO", "TOP", + "TOW", "TOY", "TRY", "TUB", "TUG", "TUM", "TUN", "TWO", "UN", "UP", + "US", "USE", "VAN", "VAT", "VET", "VIE", "WAD", "WAG", "WAR", "WAS", + "WAY", "WE", "WEB", "WED", "WEE", "WET", "WHO", "WHY", "WIN", "WIT", + "WOK", "WON", "WOO", "WOW", "WRY", "WU", "YAM", "YAP", "YAW", "YE", + "YEA", "YES", "YET", "YOU", "ABED", "ABEL", "ABET", "ABLE", "ABUT", + "ACHE", "ACID", "ACME", "ACRE", "ACTA", "ACTS", "ADAM", "ADDS", + "ADEN", "AFAR", "AFRO", "AGEE", "AHEM", "AHOY", "AIDA", "AIDE", + "AIDS", "AIRY", "AJAR", "AKIN", "ALAN", "ALEC", "ALGA", "ALIA", + "ALLY", "ALMA", "ALOE", "ALSO", "ALTO", "ALUM", "ALVA", "AMEN", + "AMES", "AMID", "AMMO", "AMOK", "AMOS", "AMRA", "ANDY", "ANEW", + "ANNA", "ANNE", "ANTE", "ANTI", "AQUA", "ARAB", "ARCH", "AREA", + "ARGO", "ARID", "ARMY", "ARTS", "ARTY", "ASIA", "ASKS", "ATOM", + "AUNT", "AURA", "AUTO", "AVER", "AVID", "AVIS", "AVON", "AVOW", + "AWAY", "AWRY", "BABE", "BABY", "BACH", "BACK", "BADE", "BAIL", + "BAIT", "BAKE", "BALD", 
"BALE", "BALI", "BALK", "BALL", "BALM", + "BAND", "BANE", "BANG", "BANK", "BARB", "BARD", "BARE", "BARK", + "BARN", "BARR", "BASE", "BASH", "BASK", "BASS", "BATE", "BATH", + "BAWD", "BAWL", "BEAD", "BEAK", "BEAM", "BEAN", "BEAR", "BEAT", + "BEAU", "BECK", "BEEF", "BEEN", "BEER", + "BEET", "BELA", "BELL", "BELT", "BEND", "BENT", "BERG", "BERN", + "BERT", "BESS", "BEST", "BETA", "BETH", "BHOY", "BIAS", "BIDE", + "BIEN", "BILE", "BILK", "BILL", "BIND", "BING", "BIRD", "BITE", + "BITS", "BLAB", "BLAT", "BLED", "BLEW", "BLOB", "BLOC", "BLOT", + "BLOW", "BLUE", "BLUM", "BLUR", "BOAR", "BOAT", "BOCA", "BOCK", + "BODE", "BODY", "BOGY", "BOHR", "BOIL", "BOLD", "BOLO", "BOLT", + "BOMB", "BONA", "BOND", "BONE", "BONG", "BONN", "BONY", "BOOK", + "BOOM", "BOON", "BOOT", "BORE", "BORG", "BORN", "BOSE", "BOSS", + "BOTH", "BOUT", "BOWL", "BOYD", "BRAD", "BRAE", "BRAG", "BRAN", + "BRAY", "BRED", "BREW", "BRIG", "BRIM", "BROW", "BUCK", "BUDD", + "BUFF", "BULB", "BULK", "BULL", "BUNK", "BUNT", "BUOY", "BURG", + "BURL", "BURN", "BURR", "BURT", "BURY", "BUSH", "BUSS", "BUST", + "BUSY", "BYTE", "CADY", "CAFE", "CAGE", "CAIN", "CAKE", "CALF", + "CALL", "CALM", "CAME", "CANE", "CANT", "CARD", "CARE", "CARL", + "CARR", "CART", "CASE", "CASH", "CASK", "CAST", "CAVE", "CEIL", + "CELL", "CENT", "CERN", "CHAD", "CHAR", "CHAT", "CHAW", "CHEF", + "CHEN", "CHEW", "CHIC", "CHIN", "CHOU", "CHOW", "CHUB", "CHUG", + "CHUM", "CITE", "CITY", "CLAD", "CLAM", "CLAN", "CLAW", "CLAY", + "CLOD", "CLOG", "CLOT", "CLUB", "CLUE", "COAL", "COAT", "COCA", + "COCK", "COCO", "CODA", "CODE", "CODY", "COED", "COIL", "COIN", + "COKE", "COLA", "COLD", "COLT", "COMA", "COMB", "COME", "COOK", + "COOL", "COON", "COOT", "CORD", "CORE", "CORK", "CORN", "COST", + "COVE", "COWL", "CRAB", "CRAG", "CRAM", "CRAY", "CREW", "CRIB", + "CROW", "CRUD", "CUBA", "CUBE", "CUFF", "CULL", "CULT", "CUNY", + "CURB", "CURD", "CURE", "CURL", "CURT", "CUTS", "DADE", "DALE", + "DAME", "DANA", "DANE", "DANG", "DANK", "DARE", "DARK", "DARN", + 
"DART", "DASH", "DATA", "DATE", "DAVE", "DAVY", "DAWN", "DAYS", + "DEAD", "DEAF", "DEAL", "DEAN", "DEAR", "DEBT", "DECK", "DEED", + "DEEM", "DEER", "DEFT", "DEFY", "DELL", "DENT", "DENY", "DESK", + "DIAL", "DICE", "DIED", "DIET", "DIME", "DINE", "DING", "DINT", + "DIRE", "DIRT", "DISC", "DISH", "DISK", "DIVE", "DOCK", "DOES", + "DOLE", "DOLL", "DOLT", "DOME", "DONE", "DOOM", "DOOR", "DORA", + "DOSE", "DOTE", "DOUG", "DOUR", "DOVE", "DOWN", "DRAB", "DRAG", + "DRAM", "DRAW", "DREW", "DRUB", "DRUG", "DRUM", "DUAL", "DUCK", + "DUCT", "DUEL", "DUET", "DUKE", "DULL", "DUMB", "DUNE", "DUNK", + "DUSK", "DUST", "DUTY", "EACH", "EARL", "EARN", "EASE", "EAST", + "EASY", "EBEN", "ECHO", "EDDY", "EDEN", "EDGE", "EDGY", "EDIT", + "EDNA", "EGAN", "ELAN", "ELBA", "ELLA", "ELSE", "EMIL", "EMIT", + "EMMA", "ENDS", "ERIC", "EROS", "EVEN", "EVER", "EVIL", "EYED", + "FACE", "FACT", "FADE", "FAIL", "FAIN", "FAIR", "FAKE", "FALL", + "FAME", "FANG", "FARM", "FAST", "FATE", "FAWN", "FEAR", "FEAT", + "FEED", "FEEL", "FEET", "FELL", "FELT", "FEND", "FERN", "FEST", + "FEUD", "FIEF", "FIGS", "FILE", "FILL", "FILM", "FIND", "FINE", + "FINK", "FIRE", "FIRM", "FISH", "FISK", "FIST", "FITS", "FIVE", + "FLAG", "FLAK", "FLAM", "FLAT", "FLAW", "FLEA", "FLED", "FLEW", + "FLIT", "FLOC", "FLOG", "FLOW", "FLUB", "FLUE", "FOAL", "FOAM", + "FOGY", "FOIL", "FOLD", "FOLK", "FOND", "FONT", "FOOD", "FOOL", + "FOOT", "FORD", "FORE", "FORK", "FORM", "FORT", "FOSS", "FOUL", + "FOUR", "FOWL", "FRAU", "FRAY", "FRED", "FREE", "FRET", "FREY", + "FROG", "FROM", "FUEL", "FULL", "FUME", "FUND", "FUNK", "FURY", + "FUSE", "FUSS", "GAFF", "GAGE", "GAIL", "GAIN", "GAIT", "GALA", + "GALE", "GALL", "GALT", "GAME", "GANG", "GARB", "GARY", "GASH", + "GATE", "GAUL", "GAUR", "GAVE", "GAWK", "GEAR", "GELD", "GENE", + "GENT", "GERM", "GETS", "GIBE", "GIFT", "GILD", "GILL", "GILT", + "GINA", "GIRD", "GIRL", "GIST", "GIVE", "GLAD", "GLEE", "GLEN", + "GLIB", "GLOB", "GLOM", "GLOW", "GLUE", "GLUM", "GLUT", "GOAD", + "GOAL", "GOAT", 
"GOER", "GOES", "GOLD", "GOLF", "GONE", "GONG", + "GOOD", "GOOF", "GORE", "GORY", "GOSH", "GOUT", "GOWN", "GRAB", + "GRAD", "GRAY", "GREG", "GREW", "GREY", "GRID", "GRIM", "GRIN", + "GRIT", "GROW", "GRUB", "GULF", "GULL", "GUNK", "GURU", "GUSH", + "GUST", "GWEN", "GWYN", "HAAG", "HAAS", "HACK", "HAIL", "HAIR", + "HALE", "HALF", "HALL", "HALO", "HALT", "HAND", "HANG", "HANK", + "HANS", "HARD", "HARK", "HARM", "HART", "HASH", "HAST", "HATE", + "HATH", "HAUL", "HAVE", "HAWK", "HAYS", "HEAD", "HEAL", "HEAR", + "HEAT", "HEBE", "HECK", "HEED", "HEEL", "HEFT", "HELD", "HELL", + "HELM", "HERB", "HERD", "HERE", "HERO", "HERS", "HESS", "HEWN", + "HICK", "HIDE", "HIGH", "HIKE", "HILL", "HILT", "HIND", "HINT", + "HIRE", "HISS", "HIVE", "HOBO", "HOCK", "HOFF", "HOLD", "HOLE", + "HOLM", "HOLT", "HOME", "HONE", "HONK", "HOOD", "HOOF", "HOOK", + "HOOT", "HORN", "HOSE", "HOST", "HOUR", "HOVE", "HOWE", "HOWL", + "HOYT", "HUCK", "HUED", "HUFF", "HUGE", "HUGH", "HUGO", "HULK", + "HULL", "HUNK", "HUNT", "HURD", "HURL", "HURT", "HUSH", "HYDE", + "HYMN", "IBIS", "ICON", "IDEA", "IDLE", "IFFY", "INCA", "INCH", + "INTO", "IONS", "IOTA", "IOWA", "IRIS", "IRMA", "IRON", "ISLE", + "ITCH", "ITEM", "IVAN", "JACK", "JADE", "JAIL", "JAKE", "JANE", + "JAVA", "JEAN", "JEFF", "JERK", "JESS", "JEST", "JIBE", "JILL", + "JILT", "JIVE", "JOAN", "JOBS", "JOCK", "JOEL", "JOEY", "JOHN", + "JOIN", "JOKE", "JOLT", "JOVE", "JUDD", "JUDE", "JUDO", "JUDY", + "JUJU", "JUKE", "JULY", "JUNE", "JUNK", "JUNO", "JURY", "JUST", + "JUTE", "KAHN", "KALE", "KANE", "KANT", "KARL", "KATE", "KEEL", + "KEEN", "KENO", "KENT", "KERN", "KERR", "KEYS", "KICK", "KILL", + "KIND", "KING", "KIRK", "KISS", "KITE", "KLAN", "KNEE", "KNEW", + "KNIT", "KNOB", "KNOT", "KNOW", "KOCH", "KONG", "KUDO", "KURD", + "KURT", "KYLE", "LACE", "LACK", "LACY", "LADY", "LAID", "LAIN", + "LAIR", "LAKE", "LAMB", "LAME", "LAND", "LANE", "LANG", "LARD", + "LARK", "LASS", "LAST", "LATE", "LAUD", "LAVA", "LAWN", "LAWS", + "LAYS", "LEAD", "LEAF", "LEAK", 
"LEAN", "LEAR", "LEEK", "LEER", + "LEFT", "LEND", "LENS", "LENT", "LEON", "LESK", "LESS", "LEST", + "LETS", "LIAR", "LICE", "LICK", "LIED", "LIEN", "LIES", "LIEU", + "LIFE", "LIFT", "LIKE", "LILA", "LILT", "LILY", "LIMA", "LIMB", + "LIME", "LIND", "LINE", "LINK", "LINT", "LION", "LISA", "LIST", + "LIVE", "LOAD", "LOAF", "LOAM", "LOAN", "LOCK", "LOFT", "LOGE", + "LOIS", "LOLA", "LONE", "LONG", "LOOK", "LOON", "LOOT", "LORD", + "LORE", "LOSE", "LOSS", "LOST", "LOUD", "LOVE", "LOWE", "LUCK", + "LUCY", "LUGE", "LUKE", "LULU", "LUND", "LUNG", "LURA", "LURE", + "LURK", "LUSH", "LUST", "LYLE", "LYNN", "LYON", "LYRA", "MACE", + "MADE", "MAGI", "MAID", "MAIL", "MAIN", "MAKE", "MALE", "MALI", + "MALL", "MALT", "MANA", "MANN", "MANY", "MARC", "MARE", "MARK", + "MARS", "MART", "MARY", "MASH", "MASK", "MASS", "MAST", "MATE", + "MATH", "MAUL", "MAYO", "MEAD", "MEAL", "MEAN", "MEAT", "MEEK", + "MEET", "MELD", "MELT", "MEMO", "MEND", "MENU", "MERT", "MESH", + "MESS", "MICE", "MIKE", "MILD", "MILE", "MILK", "MILL", "MILT", + "MIMI", "MIND", "MINE", "MINI", "MINK", "MINT", "MIRE", "MISS", + "MIST", "MITE", "MITT", "MOAN", "MOAT", "MOCK", "MODE", "MOLD", + "MOLE", "MOLL", "MOLT", "MONA", "MONK", "MONT", "MOOD", "MOON", + "MOOR", "MOOT", "MORE", "MORN", "MORT", "MOSS", "MOST", "MOTH", + "MOVE", "MUCH", "MUCK", "MUDD", "MUFF", "MULE", "MULL", "MURK", + "MUSH", "MUST", "MUTE", "MUTT", "MYRA", "MYTH", "NAGY", "NAIL", + "NAIR", "NAME", "NARY", "NASH", "NAVE", "NAVY", "NEAL", "NEAR", + "NEAT", "NECK", "NEED", "NEIL", "NELL", "NEON", "NERO", "NESS", + "NEST", "NEWS", "NEWT", "NIBS", "NICE", "NICK", "NILE", "NINA", + "NINE", "NOAH", "NODE", "NOEL", "NOLL", "NONE", "NOOK", "NOON", + "NORM", "NOSE", "NOTE", "NOUN", "NOVA", "NUDE", "NULL", "NUMB", + "OATH", "OBEY", "OBOE", "ODIN", "OHIO", "OILY", "OINT", "OKAY", + "OLAF", "OLDY", "OLGA", "OLIN", "OMAN", "OMEN", "OMIT", "ONCE", + "ONES", "ONLY", "ONTO", "ONUS", "ORAL", "ORGY", "OSLO", "OTIS", + "OTTO", "OUCH", "OUST", "OUTS", "OVAL", "OVEN", 
"OVER", "OWLY", + "OWNS", "QUAD", "QUIT", "QUOD", "RACE", "RACK", "RACY", "RAFT", + "RAGE", "RAID", "RAIL", "RAIN", "RAKE", "RANK", "RANT", "RARE", + "RASH", "RATE", "RAVE", "RAYS", "READ", "REAL", "REAM", "REAR", + "RECK", "REED", "REEF", "REEK", "REEL", "REID", "REIN", "RENA", + "REND", "RENT", "REST", "RICE", "RICH", "RICK", "RIDE", "RIFT", + "RILL", "RIME", "RING", "RINK", "RISE", "RISK", "RITE", "ROAD", + "ROAM", "ROAR", "ROBE", "ROCK", "RODE", "ROIL", "ROLL", "ROME", + "ROOD", "ROOF", "ROOK", "ROOM", "ROOT", "ROSA", "ROSE", "ROSS", + "ROSY", "ROTH", "ROUT", "ROVE", "ROWE", "ROWS", "RUBE", "RUBY", + "RUDE", "RUDY", "RUIN", "RULE", "RUNG", "RUNS", "RUNT", "RUSE", + "RUSH", "RUSK", "RUSS", "RUST", "RUTH", "SACK", "SAFE", "SAGE", + "SAID", "SAIL", "SALE", "SALK", "SALT", "SAME", "SAND", "SANE", + "SANG", "SANK", "SARA", "SAUL", "SAVE", "SAYS", "SCAN", "SCAR", + "SCAT", "SCOT", "SEAL", "SEAM", "SEAR", "SEAT", "SEED", "SEEK", + "SEEM", "SEEN", "SEES", "SELF", "SELL", "SEND", "SENT", "SETS", + "SEWN", "SHAG", "SHAM", "SHAW", "SHAY", "SHED", "SHIM", "SHIN", + "SHOD", "SHOE", "SHOT", "SHOW", "SHUN", "SHUT", "SICK", "SIDE", + "SIFT", "SIGH", "SIGN", "SILK", "SILL", "SILO", "SILT", "SINE", + "SING", "SINK", "SIRE", "SITE", "SITS", "SITU", "SKAT", "SKEW", + "SKID", "SKIM", "SKIN", "SKIT", "SLAB", "SLAM", "SLAT", "SLAY", + "SLED", "SLEW", "SLID", "SLIM", "SLIT", "SLOB", "SLOG", "SLOT", + "SLOW", "SLUG", "SLUM", "SLUR", "SMOG", "SMUG", "SNAG", "SNOB", + "SNOW", "SNUB", "SNUG", "SOAK", "SOAR", "SOCK", "SODA", "SOFA", + "SOFT", "SOIL", "SOLD", "SOME", "SONG", "SOON", "SOOT", "SORE", + "SORT", "SOUL", "SOUR", "SOWN", "STAB", "STAG", "STAN", "STAR", + "STAY", "STEM", "STEW", "STIR", "STOW", "STUB", "STUN", "SUCH", + "SUDS", "SUIT", "SULK", "SUMS", "SUNG", "SUNK", "SURE", "SURF", + "SWAB", "SWAG", "SWAM", "SWAN", "SWAT", "SWAY", "SWIM", "SWUM", + "TACK", "TACT", "TAIL", "TAKE", "TALE", "TALK", "TALL", "TANK", + "TASK", "TATE", "TAUT", "TEAL", "TEAM", "TEAR", "TECH", "TEEM", + 
"TEEN", "TEET", "TELL", "TEND", "TENT", "TERM", "TERN", "TESS", + "TEST", "THAN", "THAT", "THEE", "THEM", "THEN", "THEY", "THIN", + "THIS", "THUD", "THUG", "TICK", "TIDE", "TIDY", "TIED", "TIER", + "TILE", "TILL", "TILT", "TIME", "TINA", "TINE", "TINT", "TINY", + "TIRE", "TOAD", "TOGO", "TOIL", "TOLD", "TOLL", "TONE", "TONG", + "TONY", "TOOK", "TOOL", "TOOT", "TORE", "TORN", "TOTE", "TOUR", + "TOUT", "TOWN", "TRAG", "TRAM", "TRAY", "TREE", "TREK", "TRIG", + "TRIM", "TRIO", "TROD", "TROT", "TROY", "TRUE", "TUBA", "TUBE", + "TUCK", "TUFT", "TUNA", "TUNE", "TUNG", "TURF", "TURN", "TUSK", + "TWIG", "TWIN", "TWIT", "ULAN", "UNIT", "URGE", "USED", "USER", + "USES", "UTAH", "VAIL", "VAIN", "VALE", "VARY", "VASE", "VAST", + "VEAL", "VEDA", "VEIL", "VEIN", "VEND", "VENT", "VERB", "VERY", + "VETO", "VICE", "VIEW", "VINE", "VISE", "VOID", "VOLT", "VOTE", + "WACK", "WADE", "WAGE", "WAIL", "WAIT", "WAKE", "WALE", "WALK", + "WALL", "WALT", "WAND", "WANE", "WANG", "WANT", "WARD", "WARM", + "WARN", "WART", "WASH", "WAST", "WATS", "WATT", "WAVE", "WAVY", + "WAYS", "WEAK", "WEAL", "WEAN", "WEAR", "WEED", "WEEK", "WEIR", + "WELD", "WELL", "WELT", "WENT", "WERE", "WERT", "WEST", "WHAM", + "WHAT", "WHEE", "WHEN", "WHET", "WHOA", "WHOM", "WICK", "WIFE", + "WILD", "WILL", "WIND", "WINE", "WING", "WINK", "WINO", "WIRE", + "WISE", "WISH", "WITH", "WOLF", "WONT", "WOOD", "WOOL", "WORD", + "WORE", "WORK", "WORM", "WORN", "WOVE", "WRIT", "WYNN", "YALE", + "YANG", "YANK", "YARD", "YARN", "YAWL", "YAWN", "YEAH", "YEAR", + "YELL", "YOGA", "YOKE" ] diff --git a/env/Lib/site-packages/Crypto/Util/RFC1751.pyi b/env/Lib/site-packages/Crypto/Util/RFC1751.pyi new file mode 100644 index 0000000..6ad07ff --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/RFC1751.pyi @@ -0,0 +1,7 @@ +from typing import Dict, List + +binary: Dict[int, str] +wordlist: List[str] + +def key_to_english(key: bytes) -> str: ... +def english_to_key(s: str) -> bytes: ... 
diff --git a/env/Lib/site-packages/Crypto/Util/__init__.py b/env/Lib/site-packages/Crypto/Util/__init__.py new file mode 100644 index 0000000..f12214d --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Miscellaneous modules + +Contains useful modules that don't belong into any of the +other Crypto.* subpackages. + +======================== ============================================= +Module Description +======================== ============================================= +`Crypto.Util.number` Number-theoretic functions (primality testing, etc.) +`Crypto.Util.Counter` Fast counter functions for CTR cipher modes. +`Crypto.Util.RFC1751` Converts between 128-bit keys and human-readable + strings of words. +`Crypto.Util.asn1` Minimal support for ASN.1 DER encoding +`Crypto.Util.Padding` Set of functions for adding and removing padding. 
+======================== ============================================= + +:undocumented: _galois, _number_new, cpuid, py3compat, _raw_api +""" + +__all__ = ['RFC1751', 'number', 'strxor', 'asn1', 'Counter', 'Padding'] + diff --git a/env/Lib/site-packages/Crypto/Util/_cpu_features.py b/env/Lib/site-packages/Crypto/Util/_cpu_features.py new file mode 100644 index 0000000..b3039b5 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/_cpu_features.py @@ -0,0 +1,46 @@ +# =================================================================== +# +# Copyright (c) 2018, Helder Eijs +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +from Crypto.Util._raw_api import load_pycryptodome_raw_lib + + +_raw_cpuid_lib = load_pycryptodome_raw_lib("Crypto.Util._cpuid_c", + """ + int have_aes_ni(void); + int have_clmul(void); + """) + + +def have_aes_ni(): + return _raw_cpuid_lib.have_aes_ni() + + +def have_clmul(): + return _raw_cpuid_lib.have_clmul() diff --git a/env/Lib/site-packages/Crypto/Util/_cpu_features.pyi b/env/Lib/site-packages/Crypto/Util/_cpu_features.pyi new file mode 100644 index 0000000..10e669e --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/_cpu_features.pyi @@ -0,0 +1,2 @@ +def have_aes_ni() -> int: ... +def have_clmul() -> int: ... diff --git a/env/Lib/site-packages/Crypto/Util/_cpuid_c.pyd b/env/Lib/site-packages/Crypto/Util/_cpuid_c.pyd new file mode 100644 index 0000000000000000000000000000000000000000..76c166cabafbce5673f41403d62c9f880805f1eb GIT binary patch literal 10240 zcmeHN4{%#WdSBU+E!z^|IHgXG2?=n0 z{eAB}B{sP*H|<=fbKK7C`~K|reY^YZx8JVTUirW-md_Y7qpB)nLx5D=+V_h;Ruf~_ z%s+Gu`*G3n1w$s!@dfRhf-xZy4R4J4x&^;46behi22qH{LqaelG~c^g=neyQYP z4#vobeG3?yL-bY3KD0MX)e{&~t|Ck98cFPt0QX*l4-J{BF%Mu`5zu>0G~knbjPVk% zz+f{_%TT8jH%48fi!n})sYQjaG$2)4T>8jJctYCeH) zG`U0;z6$cbHH2x?6ohTKhtfC1c$-7%f><)e;irL)sMzh9d?Uuru$pRsfCnLLvz z?ff{Ss^{W)=dwI=v~>Kt$P|$0i8ECLC!{5b(^n55z%3NxNKT$NS6iPfI4 zDE9)`UrcMfR(X`*xE(?(2h#SZGa6YAC7yi9Y2``DS;~{!oI=xDbRcl~oJT(EI;wmK zBmHT6{|FS7xP`HuCm(Niy4^3C5{+{!z_bs%itn(+27-p5q#@LwmhM&FC<65fqE}e} z7Ed{DMKtr2tr9>fMZ^xI@bOEIAvvyv@ns3!&-Z>_He|$}xO*k)O#g_rn>r?wuE9%Tv%FNN>04%5JFCdb8?C z($a$lVvq82C?fHmk;ygI>D24mnYk>KHR$YeryPEJlyQ8)h1u2I<|u;pG<;-K+5ma?^LGHrWt;dQ(C*#czetI;go$klI2^bChGYzU$#Aa1NcdgS;#R>T(&)$V?yyLrlH^xqGDbRuv_`%%T32V4~MqdKU`#AUc%XNeARzGtyu%+3! 
z#1mpUdRbYnI%a8uYANT*mQvNhK*?#onD8^j8lGAy==9q}w+K)x<(V;oyMVh|O1Ue; z4`RwFuzn*5*X6Fby*UH-`)CM=6wW^$1$j4-So!r?46%xWs+1=h6X=`83si@UjxJ~7 zWAN)Kn50iRl_$ICtdX4M#s6J4m$QVqQ9>a>R~P2_;Y zi25u0cFW)ES>gx)kpvN%ZvY&zJcF-E^EifZ5JqqYpW<$_E(hTw>XQP>c6) zzwO6BaQJ?MK>lkxme5^1?j0&Y2I*YVR32~l-296DrKU2Ty04t)j{+^=sTqBq)Qs17 zvdr1K^i8Q;zI%peX(n1M-(k(qc$TK4NQSReO}y(m!Er!Qq?9GLI@zOy6(cSS0Q0mk zEzj!y-g!zHhHG6X;FpsAcjAToTaF`?%QRQ&GJTCNgTr>7lFa=l;xm-QiqT*GO&T28YhNoU|x_EN9fSuyZ9jWbJUuB>a|EbqVx-{wH{ z0=84gst!AB;vzt&4D8Wil4_Z>?}0;_ot!#-9*&^i+{jx%5q&e@d25R2D?ina@#mbQ zB(mc$xokZG`qG_9;HkOaLQ3PQ#y)_FnYe9{1CRm*no1&T_NTrM+Kz|r zgSu|}=XBqv?tX*rD+plf`*PYSb){WL?Ssv7rp5J6v;2C~j>(WDb~WuddzToBH_IpF zN!QV))CyBe$%uWhX~(%>NNi1-oh>D=!H!wJ2+v!S?amqTHlDD38^4{@ipk}wy)j*2 zVp3IOM?oHw7Bof(%(fi3_!;h0!WJh#Xu{!3{^j<+%*4u?WlIDnLQ`VI)SR@4Kqm^p zdkDx7ay)BJ@lTgdZo7%bir$-Ly0w^decKPcR}?F5Dak;05mbgPDt_RS_2#7QGobcE zAC%b>Pw9*gz<3$XWy!f?dfN@LUP#P|#bDkv$}_(Zc*!WotOU9|2n(~fXXE$FmLEYn zF=8FI><2JlVYTWV~Fn2JS)238QznA*W zAzB`}ET0*_{)-iKdCfYl;1#B63|E%RzvtAkD_f5po6=r!4%^thhE54CpWeCBP5Ey9 zxbg|EqEpy{@-uLDX^oh5PF@$j?P5+CKWR>L*(76|Ai+=^cv7}wXI0fBw?yPMxR zK%HW&>IVJ2E(gcv8t-@WJ_mCO|4Z0btJ|G4V9p%Q_GyLqxi?4rwq$PXMs_1A{ft9t zpj`YrDAv!7E7yf0=;OX%z#RV){2h(wlKtNjnaquEfj(ZV0c&qH*7LWtbJvP?54~}Y z!ui_Ur?Wdms-w8X-U^uGIcsZK0q)SdFY$U96L1S6I!JGq&OZjq;6FxNr>3^n-?zsGcx{?o!*re|=bpxE*W~zgo+j4@ z-T@QFXT0Z9>`@Hrv~%m$t$_`g26I;!^N31*XKRygZyN6b-QHAQyDqnG8c&z&0B;}l z+haTsSL*NHHKI<_`}Z>RYxuT~d^7}J`DLDqc%$G=S%Z4wQBK=MbI{j?mQj0qb_3+z zquu!VYQD|2pZ0)_-S-Ti0>i~}~!3Iwo6Y;;OV2bQohm|>` zBIXDbGDm#@bJSfsb{@YBXsiDOv3K*e_`3o9K12C4;Ll8TcL0sEuV+UYiN`dOPM~$6V}nB@t7Ri4|=?yVcCBQFA1}Bd^vN zcdf~0X14kwW~(b`E291u;=$3dAH!n)8nqAh37YauEDt_5m04L?0JXi4m7$Nay3&Zr zVS(=sF^6f06_AYPj{;U)R}j(1gR#_Gn0;nRg#1Y3r$SI0%S>ZTQ=le*4<%&6??fz- zJg#?ItpZ(->PAf?A4E{ysH;KyQNyUDk;lCLNIV$u`fDP+0oK;r8VX9@9FJ}CZ5F*g zG3E^gb==?89q)oB#cr<5qw`C^pL-~I7$pQ=pMZX+@NUAnwhz3kl`qSA{!ivrQr8M! 
zM2bhn6=D4F!e6cam3W?T_>p+z&UnZ#1;e41kQD7@>^*8(8;nYEU)P$@mS8A=6}7Ah zZSsWzU1Ff6$1g@m{LWyPBu4R)ZCNc!S8W#Z?h>UH@n}>GNo~=vUqlLd-qJ2cyMrO0 zB<5J`iTu`>L4>1siar`^TU3lejjb}@7Z;WZ;&J_Tsd z*A?64Td_y-hddBqTya|7mPFZHP$V5$3$RjSFBodPkZaimc{k8y3Jb| zG=dT)-py{ijC)14oOxs7CU2*x*vOj8_U>Q=d|< zNepccM#G_QayP4=CJ_#aJ=mfWIf$`^%q#YY{naIb+A{DFZV06b~n1pmXp zCjr~=yJ-*X6YN7Jn*{e5IKiKz(m2NeAHltY@Gd~}^?<+$R-qD~;C%)j0Q}q8@B{b< z0MDQf0Vjyxl^T8v;Bnk<4uem3g@;hz2Tpf~6zUn^`v6};{RsFN;4e^50%y1f(LHbs zI6=C%?Zfy9(w&ZQf^;7voFLtuZh<_(y{J!+o&Rw$Y4>ZmqHnqhy5ne9=qvd6iy-4@ zqr34_)0H%8+47$5E@876jp2B0tXy1ETPcVke>e~fZEUPu(|%`lL!}Uta1Q&rkbD~} zd&OAg@~_#7m-%8bv3o;TuK)v~SYu^88oE8^-z0YXV%6P2e>5BmcS_a%aQE%LSa;3l z#gzh%(O@S|p0!hZBU4|^*#hw!#`vXX3HtxYDElV&UD(I=TlcTq-?9I~{?ftn!J)zQ Q;K< +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import os + + +def pycryptodome_filename(dir_comps, filename): + """Return the complete file name for the module + + dir_comps : list of string + The list of directory names in the PyCryptodome package. + The first element must be "Crypto". + + filename : string + The filename (inclusing extension) in the target directory. + """ + + if dir_comps[0] != "Crypto": + raise ValueError("Only available for modules under 'Crypto'") + + dir_comps = list(dir_comps[1:]) + [filename] + + util_lib, _ = os.path.split(os.path.abspath(__file__)) + root_lib = os.path.join(util_lib, "..") + + return os.path.join(root_lib, *dir_comps) + diff --git a/env/Lib/site-packages/Crypto/Util/_file_system.pyi b/env/Lib/site-packages/Crypto/Util/_file_system.pyi new file mode 100644 index 0000000..d54a126 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/_file_system.pyi @@ -0,0 +1,4 @@ +from typing import List + + +def pycryptodome_filename(dir_comps: List[str], filename: str) -> str: ... \ No newline at end of file diff --git a/env/Lib/site-packages/Crypto/Util/_raw_api.py b/env/Lib/site-packages/Crypto/Util/_raw_api.py new file mode 100644 index 0000000..44c37c2 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/_raw_api.py @@ -0,0 +1,307 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import abc +import sys +from Crypto.Util.py3compat import byte_string +from Crypto.Util._file_system import pycryptodome_filename + +# +# List of file suffixes for Python extensions +# +if sys.version_info[0] < 3: + + import imp + extension_suffixes = [] + for ext, mod, typ in imp.get_suffixes(): + if typ == imp.C_EXTENSION: + extension_suffixes.append(ext) + +else: + + from importlib import machinery + extension_suffixes = machinery.EXTENSION_SUFFIXES + +# Which types with buffer interface we support (apart from byte strings) +_buffer_type = (bytearray, memoryview) + + +class _VoidPointer(object): + @abc.abstractmethod + def get(self): + """Return the memory location we point to""" + return + + @abc.abstractmethod + def address_of(self): + """Return a raw pointer to this pointer""" + return + + +try: + # Starting from v2.18, pycparser (used by cffi for in-line ABI mode) + # stops working correctly when PYOPTIMIZE==2 or the parameter -OO is + # passed. In that case, we fall back to ctypes. + # Note that PyPy ships with an old version of pycparser so we can keep + # using cffi there. + # See https://github.com/Legrandin/pycryptodome/issues/228 + if '__pypy__' not in sys.builtin_module_names and sys.flags.optimize == 2: + raise ImportError("CFFI with optimize=2 fails due to pycparser bug.") + + from cffi import FFI + + ffi = FFI() + null_pointer = ffi.NULL + uint8_t_type = ffi.typeof(ffi.new("const uint8_t*")) + + _Array = ffi.new("uint8_t[1]").__class__.__bases__ + + def load_lib(name, cdecl): + """Load a shared library and return a handle to it. + + @name, either an absolute path or the name of a library + in the system search path. + + @cdecl, the C function declarations. 
+ """ + + lib = ffi.dlopen(name) + ffi.cdef(cdecl) + return lib + + def c_ulong(x): + """Convert a Python integer to unsigned long""" + return x + + c_ulonglong = c_ulong + c_uint = c_ulong + + def c_size_t(x): + """Convert a Python integer to size_t""" + return x + + def create_string_buffer(init_or_size, size=None): + """Allocate the given amount of bytes (initially set to 0)""" + + if isinstance(init_or_size, bytes): + size = max(len(init_or_size) + 1, size) + result = ffi.new("uint8_t[]", size) + result[:] = init_or_size + else: + if size: + raise ValueError("Size must be specified once only") + result = ffi.new("uint8_t[]", init_or_size) + return result + + def get_c_string(c_string): + """Convert a C string into a Python byte sequence""" + return ffi.string(c_string) + + def get_raw_buffer(buf): + """Convert a C buffer into a Python byte sequence""" + return ffi.buffer(buf)[:] + + def c_uint8_ptr(data): + if isinstance(data, _buffer_type): + # This only works for cffi >= 1.7 + return ffi.cast(uint8_t_type, ffi.from_buffer(data)) + elif byte_string(data) or isinstance(data, _Array): + return data + else: + raise TypeError("Object type %s cannot be passed to C code" % type(data)) + + class VoidPointer_cffi(_VoidPointer): + """Model a newly allocated pointer to void""" + + def __init__(self): + self._pp = ffi.new("void *[1]") + + def get(self): + return self._pp[0] + + def address_of(self): + return self._pp + + def VoidPointer(): + return VoidPointer_cffi() + + backend = "cffi" + +except ImportError: + + import ctypes + from ctypes import (CDLL, c_void_p, byref, c_ulong, c_ulonglong, c_size_t, + create_string_buffer, c_ubyte, c_uint) + from ctypes.util import find_library + from ctypes import Array as _Array + + null_pointer = None + cached_architecture = [] + + def load_lib(name, cdecl): + if not cached_architecture: + # platform.architecture() creates a subprocess, so caching the + # result makes successive imports faster. 
+ import platform + cached_architecture[:] = platform.architecture() + bits, linkage = cached_architecture + if "." not in name and not linkage.startswith("Win"): + full_name = find_library(name) + if full_name is None: + raise OSError("Cannot load library '%s'" % name) + name = full_name + return CDLL(name) + + def get_c_string(c_string): + return c_string.value + + def get_raw_buffer(buf): + return buf.raw + + # ---- Get raw pointer --- + + _c_ssize_t = ctypes.c_ssize_t + + _PyBUF_SIMPLE = 0 + _PyObject_GetBuffer = ctypes.pythonapi.PyObject_GetBuffer + _PyBuffer_Release = ctypes.pythonapi.PyBuffer_Release + _py_object = ctypes.py_object + _c_ssize_p = ctypes.POINTER(_c_ssize_t) + + # See Include/object.h for CPython + # and https://github.com/pallets/click/blob/master/click/_winconsole.py + class _Py_buffer(ctypes.Structure): + _fields_ = [ + ('buf', c_void_p), + ('obj', ctypes.py_object), + ('len', _c_ssize_t), + ('itemsize', _c_ssize_t), + ('readonly', ctypes.c_int), + ('ndim', ctypes.c_int), + ('format', ctypes.c_char_p), + ('shape', _c_ssize_p), + ('strides', _c_ssize_p), + ('suboffsets', _c_ssize_p), + ('internal', c_void_p) + ] + + # Extra field for CPython 2.6/2.7 + if sys.version_info[0] == 2: + _fields_.insert(-1, ('smalltable', _c_ssize_t * 2)) + + def c_uint8_ptr(data): + if byte_string(data) or isinstance(data, _Array): + return data + elif isinstance(data, _buffer_type): + obj = _py_object(data) + buf = _Py_buffer() + _PyObject_GetBuffer(obj, byref(buf), _PyBUF_SIMPLE) + try: + buffer_type = c_ubyte * buf.len + return buffer_type.from_address(buf.buf) + finally: + _PyBuffer_Release(byref(buf)) + else: + raise TypeError("Object type %s cannot be passed to C code" % type(data)) + + # --- + + class VoidPointer_ctypes(_VoidPointer): + """Model a newly allocated pointer to void""" + + def __init__(self): + self._p = c_void_p() + + def get(self): + return self._p + + def address_of(self): + return byref(self._p) + + def VoidPointer(): + return 
VoidPointer_ctypes() + + backend = "ctypes" + del ctypes + + +class SmartPointer(object): + """Class to hold a non-managed piece of memory""" + + def __init__(self, raw_pointer, destructor): + self._raw_pointer = raw_pointer + self._destructor = destructor + + def get(self): + return self._raw_pointer + + def release(self): + rp, self._raw_pointer = self._raw_pointer, None + return rp + + def __del__(self): + try: + if self._raw_pointer is not None: + self._destructor(self._raw_pointer) + self._raw_pointer = None + except AttributeError: + pass + + +def load_pycryptodome_raw_lib(name, cdecl): + """Load a shared library and return a handle to it. + + @name, the name of the library expressed as a PyCryptodome module, + for instance Crypto.Cipher._raw_cbc. + + @cdecl, the C function declarations. + """ + + split = name.split(".") + dir_comps, basename = split[:-1], split[-1] + attempts = [] + for ext in extension_suffixes: + try: + filename = basename + ext + return load_lib(pycryptodome_filename(dir_comps, filename), + cdecl) + except OSError as exp: + attempts.append("Trying '%s': %s" % (filename, str(exp))) + raise OSError("Cannot load native module '%s': %s" % (name, ", ".join(attempts))) + + +def is_buffer(x): + """Return True if object x supports the buffer interface""" + return isinstance(x, (bytes, bytearray, memoryview)) + + +def is_writeable_buffer(x): + return (isinstance(x, bytearray) or + (isinstance(x, memoryview) and not x.readonly)) diff --git a/env/Lib/site-packages/Crypto/Util/_raw_api.pyi b/env/Lib/site-packages/Crypto/Util/_raw_api.pyi new file mode 100644 index 0000000..2bc5301 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/_raw_api.pyi @@ -0,0 +1,27 @@ +from typing import Any, Optional, Union + +def load_lib(name: str, cdecl: str) -> Any : ... +def c_ulong(x: int ) -> Any : ... +def c_ulonglong(x: int ) -> Any : ... +def c_size_t(x: int) -> Any : ... 
+def create_string_buffer(init_or_size: Union[bytes,int], size: Optional[int]) -> Any : ... +def get_c_string(c_string: Any) -> bytes : ... +def get_raw_buffer(buf: Any) -> bytes : ... +def c_uint8_ptr(data: Union[bytes, memoryview, bytearray]) -> Any : ... + +class VoidPointer(object): + def get(self) -> Any : ... + def address_of(self) -> Any : ... + +class SmartPointer(object): + def __init__(self, raw_pointer: Any, destructor: Any) -> None : ... + def get(self) -> Any : ... + def release(self) -> Any : ... + +backend : str +null_pointer : Any +ffi: Any + +def load_pycryptodome_raw_lib(name: str, cdecl: str) -> Any : ... +def is_buffer(x: Any) -> bool : ... +def is_writeable_buffer(x: Any) -> bool : ... diff --git a/env/Lib/site-packages/Crypto/Util/_strxor.pyd b/env/Lib/site-packages/Crypto/Util/_strxor.pyd new file mode 100644 index 0000000000000000000000000000000000000000..7b0b438b6e7dcf87ae2da201d61e514eaf9e8c46 GIT binary patch literal 10240 zcmeHN4{%e*nP1tGE!zkgVo`@ccn~=@P3#JbqbA@YgPg2;a^ip?OcEjs>lxJ8k}5qV zHkTBr>=f~;3rR_vq`92oZtiYw+79hy5<_}ZDI&2U&=AtQgG)Q{G`;2-`*Mcf5Xhgt z`+e^{89OPR&fH|?uIbL~`~K{=-+ue;x8Lr2QtRFOSRrG~jH;@P4FOW|XwTpOu$mZ~ zz39kn_FT!y#X~0F$;Iv4LvbM*i)@PpdW6nEI2@6Lt)dW1goRL8@ZPaO=!pcyI-9N3 zX{i3!p4j7$&svbp*Y8S?X7_=<`Qe2bf{$!ln0ZWVUzk0l@vqH1slm$3(;8f)wU>Ua zBJ(ZKrC(cMz_+tHeZ5An4|Q&*F|IbFwr0kHj});T-&@>~mziWsgtCI!3s^Zg^L1|f zHGo7jwhPnO@dCz*2uI0776n00MJZq%5Y;>7!6#{*GkGIp68Z$I8Ed1?cw8drVeF7L zAQ31XjFAs_EoN*1(N`&hXm6O_Phd>BiY&3~B(ayq@12bYnM~D~2QaM&7`-kQ3`haS zTxDQ^!DgXWpiV0uSY4-!F;0$gp~6?1JV3s97|YZ}bwO>8U}$nn@)9}(+fx^d$2vi3 zK7nsExqL9`V&p2wiQN$hYE2!69DF;ycouVnJagTf z$+u0)S0OFu#t-U7+(t(1JHh3nJhfTyr973XHmnb~Zn{fbAWw@tDZh^;8aSP>uXOzP zLdJF$yE9|&Vu2G_ zfGSy%YGJ}NHDcYg5v8T*xzaGXB)!>$6>yDLy{?&tsD$EXsG`1fM)CzriU$rh)+L?;m>R7^b1%5~5;W4dh49 zhE3!^50}rXjt$t3T>dA@`);)vdm!EJ?0^qdM-#PRvMYHq=xl%qKIe8Wqtz#mkXu|> zK4M0RsBs^oZ6&CVx#*@2C)I+7en^Ywzwe&>+6f!EJfSavQA;CSa>_ z35h4gN{rH4sXFFqMm1ORRCBrNV4&m-Uq<*usfMRp1)csG(ai$7mGkVVz}>*z&E?#k 
z<%cn46llKzg!{@+-0sc7{Q)uok;3_Vr$Bz0NUZ#p989dCpepCd#w5mOi6Ye@W1uUU z_yqiV7Wo`evI7msDeyDsF^28U)0UIKb5judly_hXPg0df#H@dE}$NHo0=|r&-0x~`^t-o>? zILdlF3?}uboXS((bk<1DN@IgwreuaaI;I>&5=3U1m;At#n4_f*E}!J`uGjGfy}>081k(Y7!3)#kcWQzDha+53^3X+mQ^0Z%!uYquybXrgeeG0*QC^w{45 z(f7aodCMJ?@sGVod(H06?Xj$(ME?NNpz<)p3=dQ&QJpA(zeJ$p=k`J^QiHySFoLkF z(TuK4KAYHj0tBh(VT zywmm^5FEZ=B~W;L_e#2pC;UTY$RJ%Sn<^9SzMEdMztB{{(|1<#!c#ztczV_@UwYPY zo~m%Rta@Fll)pU7w<;SemDgGev%XcC7?R;jHREr&Pjehl6e$(S9ZvQzVWo)6BESMI zOe=G`zxSS1hT&TGY51kA|II`(|Aylg%4M1>b(vj_FM`ANo|Vk~rxUZ3i|By!^uGY| z6Q&qTTmA!Lxn$MLB%RY@ZX-{B&*}L9Q`|j47Zmxdo~iFgUyNv$Up=|>FmR^4jEx7g z)2)+DL#OEQz8h>JRmbg!WiI~}UEq28i8E-#(ST&AI#yE)^sLS&!StxsstH|5gxKq8 zg&HUol1@SdI%f(s)dM5ch-BHuQv);%8KNG z^a8e1$f^!IbmAgFrwsJbVUlj1wC{&QyiQJoJ_|?CXddJ(poqR%@Vqt63zeT}$M`eO zQzWwcD7kD40($CBB=GdYZy=@dbmK07@wvEdkpqwd1)54SXXUHjijCO0@B$?OS+vQmZD{sP@K8 zk%>t)jU7b=Oj_I+B{1J|=<oCr?@GP4f>{O+IuZ*^1GocU6nNPwj#&wGc@UQ5ZpkI?k}W@i zbaKQxY&iggf;?sUYhXyLP9!=Pi zwryzjq!w-gpp~tm%dmu-bni}BT}e(m{)_S~aA zAJ?Aywdc3B=abs=Y3=!p_Iy@*4&r&pl*1Ug6RiWsB%$S|k^KA9TKn^6Q)}8|s(-0z zk4J4uj+qkU(<~tQMf|4Yl+qS?Vc+xSIrRlid(23?>rI;Y#58ejj49)%kSp(N>gW`6 z8ZxcDYA&qxY@}0w%jfpCdMKZ58M7i2otVZJl#B1H>OQS8ccnoWzUy*c7_ThnxNMTK z4Uj-+oqcKB#CcWq$<0xDBkoR zm~Y>1(2@baZovHp{DA>a8F0dYX57}NmzwMJ?{#^YS*X*aAA{5T{BQ6!m#%-(fO%aw z+h-Kw=f64P-I96ewdn>_dW}PApj`YZ6zj+A$`7Fky4{xynCDNyPsyGs{eK`bnYVAT zZm-pVt`$1{2dZJWt!wwuH_k(FzV_|Y*%PL|hj58q0hs4GT`u-(@E)Jy^)e>l7DRN2 zzFj*13@C&D7;Tw4m#edD8$O>dX3R?Ty7hQS%G;Xi{-8cW;yH3<;`u@Ed;~KuLCm#)hS9yi!Cf*Qu)7D@) z@hGS5qdDm7Ld&Rqd-ee2zejuU`f9~hrfzFIPR-0?_`&!NP2b%+z}CJuJ6+3wvJUX* z5^V4^n~4AW3Z}^BIIPSel`u!Jm^qdgF-QHJ(TjL9psoI6#NJJFm=;^4=P!XjHH|jm z`d3t6&^8C3@k*ClS?T(sDD}lPoxK6M3#cC?) 
zHI+F<)3PzV zJrE9di^1mJPBBX2w}!eUF@`VJmJOnG)n=jKHc?uah{eRP)E0|$ibxgDTH3`}PbeIa z#5{|Av#=#@5RureVt{OIi-~b`W9!X#Cd61@n;7eg#CihZPRt^KX~fv#Pik)ovZ`s^ z_U$n-5CoI`hPWI0;*!|Y9_kUB;+TL)7^^aQL-D{?P@1qWv^5rp^{oqZcgI-_Pk*cs{yb^3MZ z!CO_}$EBcO>Whm0P`E4N4~hu)NT0tO${G6->k)h6BCt*C)^EJMy`{Bz*>abza|J_> zkc5f%urFT0{UTd~-6U@JcWF!FCj%;(zcUi<3T=ZQ{oz0?7J1Mgh;4fSUnBIM`jqlb zV)%hjEE4V^ceCX)BqCw47kg782QjvU`NdwbGa-ro84dF=9?3Blh=yu=;zm&ZjR3#>6h^kobQpe>@&2 zFK28SD&5DH1NNhS2%O;4xK9LuKLaS>F7OY)3EqjjfcZwo=)Vw;p)Lo$A8-V9GjM`W zRp1j6@Mi#R_$ufFUJm$U)DGYT-$JFaX3b-)8kO)Gz<_}V0iQH*g5NjrLx5)toZx>N z_#|K(-kbJAKfzt7q?6!&11IeLPz>~P&90ecmnCvCgGr;klson8Lfzx{g-N^=F8$r6S5l)cqT7(m% zd(q91CrH2XeiQhoTTI&h8m`zia~!(!XjkY@@bQZv<7lJ1@nbVPY1Ha9y*=H+17a+W zW4Ez-S)Hp|5W}63U?{w;v3g_st+fr+LR`Z69Oy<;ZmjMTQ9HKhrEh|Bs9^I5~J}kR7ld*nFVlz@-D_2P+Q_9n2gYIe6;eXWBo1= 0 + + +class BytesIO_EOF(object): + """This class differs from BytesIO in that a ValueError exception is + raised whenever EOF is reached.""" + + def __init__(self, initial_bytes): + self._buffer = initial_bytes + self._index = 0 + self._bookmark = None + + def set_bookmark(self): + self._bookmark = self._index + + def data_since_bookmark(self): + assert self._bookmark is not None + return self._buffer[self._bookmark:self._index] + + def remaining_data(self): + return len(self._buffer) - self._index + + def read(self, length): + new_index = self._index + length + if new_index > len(self._buffer): + raise ValueError("Not enough data for DER decoding: expected %d bytes and found %d" % (new_index, len(self._buffer))) + + result = self._buffer[self._index:new_index] + self._index = new_index + return result + + def read_byte(self): + return bord(self.read(1)[0]) + + +class DerObject(object): + """Base class for defining a single DER object. + + This class should never be directly instantiated. + """ + + def __init__(self, asn1Id=None, payload=b'', implicit=None, + constructed=False, explicit=None): + """Initialize the DER object according to a specific ASN.1 type. 
+ + :Parameters: + asn1Id : integer + The universal DER tag number for this object + (e.g. 0x10 for a SEQUENCE). + If None, the tag is not known yet. + + payload : byte string + The initial payload of the object (that it, + the content octets). + If not specified, the payload is empty. + + implicit : integer + The IMPLICIT tag number to use for the encoded object. + It overrides the universal tag *asn1Id*. + + constructed : bool + True when the ASN.1 type is *constructed*. + False when it is *primitive*. + + explicit : integer + The EXPLICIT tag number to use for the encoded object. + """ + + if asn1Id is None: + # The tag octet will be read in with ``decode`` + self._tag_octet = None + return + asn1Id = self._convertTag(asn1Id) + + self.payload = payload + + # In a BER/DER identifier octet: + # * bits 4-0 contain the tag value + # * bit 5 is set if the type is 'constructed' + # and unset if 'primitive' + # * bits 7-6 depend on the encoding class + # + # Class | Bit 7, Bit 6 + # ---------------------------------- + # universal | 0 0 + # application | 0 1 + # context-spec | 1 0 (default for IMPLICIT/EXPLICIT) + # private | 1 1 + # + if None not in (explicit, implicit): + raise ValueError("Explicit and implicit tags are" + " mutually exclusive") + + if implicit is not None: + self._tag_octet = 0x80 | 0x20 * constructed | self._convertTag(implicit) + return + + if explicit is not None: + self._tag_octet = 0xA0 | self._convertTag(explicit) + self._inner_tag_octet = 0x20 * constructed | asn1Id + return + + self._tag_octet = 0x20 * constructed | asn1Id + + def _convertTag(self, tag): + """Check if *tag* is a real DER tag. + Convert it from a character to number if necessary. 
+ """ + if not _is_number(tag): + if len(tag) == 1: + tag = bord(tag[0]) + # Ensure that tag is a low tag + if not (_is_number(tag) and 0 <= tag < 0x1F): + raise ValueError("Wrong DER tag") + return tag + + @staticmethod + def _definite_form(length): + """Build length octets according to BER/DER + definite form. + """ + if length > 127: + encoding = long_to_bytes(length) + return bchr(len(encoding) + 128) + encoding + return bchr(length) + + def encode(self): + """Return this DER element, fully encoded as a binary byte string.""" + + # Concatenate identifier octets, length octets, + # and contents octets + + output_payload = self.payload + + # In case of an EXTERNAL tag, first encode the inner + # element. + if hasattr(self, "_inner_tag_octet"): + output_payload = (bchr(self._inner_tag_octet) + + self._definite_form(len(self.payload)) + + self.payload) + + return (bchr(self._tag_octet) + + self._definite_form(len(output_payload)) + + output_payload) + + def _decodeLen(self, s): + """Decode DER length octets from a file.""" + + length = s.read_byte() + + if length > 127: + encoded_length = s.read(length & 0x7F) + if bord(encoded_length[0]) == 0: + raise ValueError("Invalid DER: length has leading zero") + length = bytes_to_long(encoded_length) + if length <= 127: + raise ValueError("Invalid DER: length in long form but smaller than 128") + + return length + + def decode(self, der_encoded, strict=False): + """Decode a complete DER element, and re-initializes this + object with it. + + Args: + der_encoded (byte string): A complete DER element. + + Raises: + ValueError: in case of parsing errors. 
+ """ + + if not byte_string(der_encoded): + raise ValueError("Input is not a byte string") + + s = BytesIO_EOF(der_encoded) + self._decodeFromStream(s, strict) + + # There shouldn't be other bytes left + if s.remaining_data() > 0: + raise ValueError("Unexpected extra data after the DER structure") + + return self + + def _decodeFromStream(self, s, strict): + """Decode a complete DER element from a file.""" + + idOctet = s.read_byte() + if self._tag_octet is not None: + if idOctet != self._tag_octet: + raise ValueError("Unexpected DER tag") + else: + self._tag_octet = idOctet + length = self._decodeLen(s) + self.payload = s.read(length) + + # In case of an EXTERNAL tag, further decode the inner + # element. + if hasattr(self, "_inner_tag_octet"): + p = BytesIO_EOF(self.payload) + inner_octet = p.read_byte() + if inner_octet != self._inner_tag_octet: + raise ValueError("Unexpected internal DER tag") + length = self._decodeLen(p) + self.payload = p.read(length) + + # There shouldn't be other bytes left + if p.remaining_data() > 0: + raise ValueError("Unexpected extra data after the DER structure") + + +class DerInteger(DerObject): + """Class to model a DER INTEGER. + + An example of encoding is:: + + >>> from Crypto.Util.asn1 import DerInteger + >>> from binascii import hexlify, unhexlify + >>> int_der = DerInteger(9) + >>> print hexlify(int_der.encode()) + + which will show ``020109``, the DER encoding of 9. + + And for decoding:: + + >>> s = unhexlify(b'020109') + >>> try: + >>> int_der = DerInteger() + >>> int_der.decode(s) + >>> print int_der.value + >>> except ValueError: + >>> print "Not a valid DER INTEGER" + + the output will be ``9``. + + :ivar value: The integer value + :vartype value: integer + """ + + def __init__(self, value=0, implicit=None, explicit=None): + """Initialize the DER object as an INTEGER. + + :Parameters: + value : integer + The value of the integer. + + implicit : integer + The IMPLICIT tag to use for the encoded object. 
+ It overrides the universal tag for INTEGER (2). + """ + + DerObject.__init__(self, 0x02, b'', implicit, + False, explicit) + self.value = value # The integer value + + def encode(self): + """Return the DER INTEGER, fully encoded as a + binary string.""" + + number = self.value + self.payload = b'' + while True: + self.payload = bchr(int(number & 255)) + self.payload + if 128 <= number <= 255: + self.payload = bchr(0x00) + self.payload + if -128 <= number <= 255: + break + number >>= 8 + return DerObject.encode(self) + + def decode(self, der_encoded, strict=False): + """Decode a complete DER INTEGER DER, and re-initializes this + object with it. + + Args: + der_encoded (byte string): A complete INTEGER DER element. + + Raises: + ValueError: in case of parsing errors. + """ + + return DerObject.decode(self, der_encoded, strict=strict) + + def _decodeFromStream(self, s, strict): + """Decode a complete DER INTEGER from a file.""" + + # Fill up self.payload + DerObject._decodeFromStream(self, s, strict) + + if strict: + if len(self.payload) == 0: + raise ValueError("Invalid encoding for DER INTEGER: empty payload") + if len(self.payload) >= 2 and struct.unpack('>H', self.payload[:2])[0] < 0x80: + raise ValueError("Invalid encoding for DER INTEGER: leading zero") + + # Derive self.value from self.payload + self.value = 0 + bits = 1 + for i in self.payload: + self.value *= 256 + self.value += bord(i) + bits <<= 8 + if self.payload and bord(self.payload[0]) & 0x80: + self.value -= bits + + +class DerSequence(DerObject): + """Class to model a DER SEQUENCE. + + This object behaves like a dynamic Python sequence. + + Sub-elements that are INTEGERs behave like Python integers. + + Any other sub-element is a binary string encoded as a complete DER + sub-element (TLV). 
+ + An example of encoding is: + + >>> from Crypto.Util.asn1 import DerSequence, DerInteger + >>> from binascii import hexlify, unhexlify + >>> obj_der = unhexlify('070102') + >>> seq_der = DerSequence([4]) + >>> seq_der.append(9) + >>> seq_der.append(obj_der.encode()) + >>> print hexlify(seq_der.encode()) + + which will show ``3009020104020109070102``, the DER encoding of the + sequence containing ``4``, ``9``, and the object with payload ``02``. + + For decoding: + + >>> s = unhexlify(b'3009020104020109070102') + >>> try: + >>> seq_der = DerSequence() + >>> seq_der.decode(s) + >>> print len(seq_der) + >>> print seq_der[0] + >>> print seq_der[:] + >>> except ValueError: + >>> print "Not a valid DER SEQUENCE" + + the output will be:: + + 3 + 4 + [4, 9, b'\x07\x01\x02'] + + """ + + def __init__(self, startSeq=None, implicit=None): + """Initialize the DER object as a SEQUENCE. + + :Parameters: + startSeq : Python sequence + A sequence whose element are either integers or + other DER objects. + + implicit : integer + The IMPLICIT tag to use for the encoded object. + It overrides the universal tag for SEQUENCE (16). + """ + + DerObject.__init__(self, 0x10, b'', implicit, True) + if startSeq is None: + self._seq = [] + else: + self._seq = startSeq + + # A few methods to make it behave like a python sequence + + def __delitem__(self, n): + del self._seq[n] + + def __getitem__(self, n): + return self._seq[n] + + def __setitem__(self, key, value): + self._seq[key] = value + + def __setslice__(self, i, j, sequence): + self._seq[i:j] = sequence + + def __delslice__(self, i, j): + del self._seq[i:j] + + def __getslice__(self, i, j): + return self._seq[max(0, i):max(0, j)] + + def __len__(self): + return len(self._seq) + + def __iadd__(self, item): + self._seq.append(item) + return self + + def append(self, item): + self._seq.append(item) + return self + + def hasInts(self, only_non_negative=True): + """Return the number of items in this sequence that are + integers. 
+ + Args: + only_non_negative (boolean): + If ``True``, negative integers are not counted in. + """ + + items = [x for x in self._seq if _is_number(x, only_non_negative)] + return len(items) + + def hasOnlyInts(self, only_non_negative=True): + """Return ``True`` if all items in this sequence are integers + or non-negative integers. + + This function returns False is the sequence is empty, + or at least one member is not an integer. + + Args: + only_non_negative (boolean): + If ``True``, the presence of negative integers + causes the method to return ``False``.""" + return self._seq and self.hasInts(only_non_negative) == len(self._seq) + + def encode(self): + """Return this DER SEQUENCE, fully encoded as a + binary string. + + Raises: + ValueError: if some elements in the sequence are neither integers + nor byte strings. + """ + self.payload = b'' + for item in self._seq: + if byte_string(item): + self.payload += item + elif _is_number(item): + self.payload += DerInteger(item).encode() + else: + self.payload += item.encode() + return DerObject.encode(self) + + def decode(self, der_encoded, strict=False, nr_elements=None, only_ints_expected=False): + """Decode a complete DER SEQUENCE, and re-initializes this + object with it. + + Args: + der_encoded (byte string): + A complete SEQUENCE DER element. + nr_elements (None or integer or list of integers): + The number of members the SEQUENCE can have + only_ints_expected (boolean): + Whether the SEQUENCE is expected to contain only integers. + strict (boolean): + Whether decoding must check for strict DER compliancy. + + Raises: + ValueError: in case of parsing errors. + + DER INTEGERs are decoded into Python integers. Any other DER + element is not decoded. Its validity is not checked. 
+ """ + + self._nr_elements = nr_elements + result = DerObject.decode(self, der_encoded, strict=strict) + + if only_ints_expected and not self.hasOnlyInts(): + raise ValueError("Some members are not INTEGERs") + + return result + + def _decodeFromStream(self, s, strict): + """Decode a complete DER SEQUENCE from a file.""" + + self._seq = [] + + # Fill up self.payload + DerObject._decodeFromStream(self, s, strict) + + # Add one item at a time to self.seq, by scanning self.payload + p = BytesIO_EOF(self.payload) + while p.remaining_data() > 0: + p.set_bookmark() + + der = DerObject() + der._decodeFromStream(p, strict) + + # Parse INTEGERs differently + if der._tag_octet != 0x02: + self._seq.append(p.data_since_bookmark()) + else: + derInt = DerInteger() + #import pdb; pdb.set_trace() + data = p.data_since_bookmark() + derInt.decode(data, strict=strict) + self._seq.append(derInt.value) + + ok = True + if self._nr_elements is not None: + try: + ok = len(self._seq) in self._nr_elements + except TypeError: + ok = len(self._seq) == self._nr_elements + + if not ok: + raise ValueError("Unexpected number of members (%d)" + " in the sequence" % len(self._seq)) + + +class DerOctetString(DerObject): + """Class to model a DER OCTET STRING. + + An example of encoding is: + + >>> from Crypto.Util.asn1 import DerOctetString + >>> from binascii import hexlify, unhexlify + >>> os_der = DerOctetString(b'\\xaa') + >>> os_der.payload += b'\\xbb' + >>> print hexlify(os_der.encode()) + + which will show ``0402aabb``, the DER encoding for the byte string + ``b'\\xAA\\xBB'``. + + For decoding: + + >>> s = unhexlify(b'0402aabb') + >>> try: + >>> os_der = DerOctetString() + >>> os_der.decode(s) + >>> print hexlify(os_der.payload) + >>> except ValueError: + >>> print "Not a valid DER OCTET STRING" + + the output will be ``aabb``. 
+ + :ivar payload: The content of the string + :vartype payload: byte string + """ + + def __init__(self, value=b'', implicit=None): + """Initialize the DER object as an OCTET STRING. + + :Parameters: + value : byte string + The initial payload of the object. + If not specified, the payload is empty. + + implicit : integer + The IMPLICIT tag to use for the encoded object. + It overrides the universal tag for OCTET STRING (4). + """ + DerObject.__init__(self, 0x04, value, implicit, False) + + +class DerNull(DerObject): + """Class to model a DER NULL element.""" + + def __init__(self): + """Initialize the DER object as a NULL.""" + + DerObject.__init__(self, 0x05, b'', None, False) + + +class DerObjectId(DerObject): + """Class to model a DER OBJECT ID. + + An example of encoding is: + + >>> from Crypto.Util.asn1 import DerObjectId + >>> from binascii import hexlify, unhexlify + >>> oid_der = DerObjectId("1.2") + >>> oid_der.value += ".840.113549.1.1.1" + >>> print hexlify(oid_der.encode()) + + which will show ``06092a864886f70d010101``, the DER encoding for the + RSA Object Identifier ``1.2.840.113549.1.1.1``. + + For decoding: + + >>> s = unhexlify(b'06092a864886f70d010101') + >>> try: + >>> oid_der = DerObjectId() + >>> oid_der.decode(s) + >>> print oid_der.value + >>> except ValueError: + >>> print "Not a valid DER OBJECT ID" + + the output will be ``1.2.840.113549.1.1.1``. + + :ivar value: The Object ID (OID), a dot separated list of integers + :vartype value: string + """ + + def __init__(self, value='', implicit=None, explicit=None): + """Initialize the DER object as an OBJECT ID. + + :Parameters: + value : string + The initial Object Identifier (e.g. "1.2.0.0.6.2"). + implicit : integer + The IMPLICIT tag to use for the encoded object. + It overrides the universal tag for OBJECT ID (6). + explicit : integer + The EXPLICIT tag to use for the encoded object. 
+ """ + DerObject.__init__(self, 0x06, b'', implicit, False, explicit) + self.value = value + + def encode(self): + """Return the DER OBJECT ID, fully encoded as a + binary string.""" + + comps = [int(x) for x in self.value.split(".")] + if len(comps) < 2: + raise ValueError("Not a valid Object Identifier string") + self.payload = bchr(40*comps[0]+comps[1]) + for v in comps[2:]: + if v == 0: + enc = [0] + else: + enc = [] + while v: + enc.insert(0, (v & 0x7F) | 0x80) + v >>= 7 + enc[-1] &= 0x7F + self.payload += b''.join([bchr(x) for x in enc]) + return DerObject.encode(self) + + def decode(self, der_encoded, strict=False): + """Decode a complete DER OBJECT ID, and re-initializes this + object with it. + + Args: + der_encoded (byte string): + A complete DER OBJECT ID. + strict (boolean): + Whether decoding must check for strict DER compliancy. + + Raises: + ValueError: in case of parsing errors. + """ + + return DerObject.decode(self, der_encoded, strict) + + def _decodeFromStream(self, s, strict): + """Decode a complete DER OBJECT ID from a file.""" + + # Fill up self.payload + DerObject._decodeFromStream(self, s, strict) + + # Derive self.value from self.payload + p = BytesIO_EOF(self.payload) + comps = [str(x) for x in divmod(p.read_byte(), 40)] + v = 0 + while p.remaining_data(): + c = p.read_byte() + v = v*128 + (c & 0x7F) + if not (c & 0x80): + comps.append(str(v)) + v = 0 + self.value = '.'.join(comps) + + +class DerBitString(DerObject): + """Class to model a DER BIT STRING. + + An example of encoding is: + + >>> from Crypto.Util.asn1 import DerBitString + >>> from binascii import hexlify, unhexlify + >>> bs_der = DerBitString(b'\\xaa') + >>> bs_der.value += b'\\xbb' + >>> print hexlify(bs_der.encode()) + + which will show ``040300aabb``, the DER encoding for the bit string + ``b'\\xAA\\xBB'``. 
+ + For decoding: + + >>> s = unhexlify(b'040300aabb') + >>> try: + >>> bs_der = DerBitString() + >>> bs_der.decode(s) + >>> print hexlify(bs_der.value) + >>> except ValueError: + >>> print "Not a valid DER BIT STRING" + + the output will be ``aabb``. + + :ivar value: The content of the string + :vartype value: byte string + """ + + def __init__(self, value=b'', implicit=None, explicit=None): + """Initialize the DER object as a BIT STRING. + + :Parameters: + value : byte string or DER object + The initial, packed bit string. + If not specified, the bit string is empty. + implicit : integer + The IMPLICIT tag to use for the encoded object. + It overrides the universal tag for OCTET STRING (3). + explicit : integer + The EXPLICIT tag to use for the encoded object. + """ + DerObject.__init__(self, 0x03, b'', implicit, False, explicit) + + # The bitstring value (packed) + if isinstance(value, DerObject): + self.value = value.encode() + else: + self.value = value + + def encode(self): + """Return the DER BIT STRING, fully encoded as a + binary string.""" + + # Add padding count byte + self.payload = b'\x00' + self.value + return DerObject.encode(self) + + def decode(self, der_encoded, strict=False): + """Decode a complete DER BIT STRING, and re-initializes this + object with it. + + Args: + der_encoded (byte string): a complete DER BIT STRING. + strict (boolean): + Whether decoding must check for strict DER compliancy. + + Raises: + ValueError: in case of parsing errors. 
+ """ + + return DerObject.decode(self, der_encoded, strict) + + def _decodeFromStream(self, s, strict): + """Decode a complete DER BIT STRING DER from a file.""" + + # Fill-up self.payload + DerObject._decodeFromStream(self, s, strict) + + if self.payload and bord(self.payload[0]) != 0: + raise ValueError("Not a valid BIT STRING") + + # Fill-up self.value + self.value = b'' + # Remove padding count byte + if self.payload: + self.value = self.payload[1:] + + +class DerSetOf(DerObject): + """Class to model a DER SET OF. + + An example of encoding is: + + >>> from Crypto.Util.asn1 import DerBitString + >>> from binascii import hexlify, unhexlify + >>> so_der = DerSetOf([4,5]) + >>> so_der.add(6) + >>> print hexlify(so_der.encode()) + + which will show ``3109020104020105020106``, the DER encoding + of a SET OF with items 4,5, and 6. + + For decoding: + + >>> s = unhexlify(b'3109020104020105020106') + >>> try: + >>> so_der = DerSetOf() + >>> so_der.decode(s) + >>> print [x for x in so_der] + >>> except ValueError: + >>> print "Not a valid DER SET OF" + + the output will be ``[4, 5, 6]``. + """ + + def __init__(self, startSet=None, implicit=None): + """Initialize the DER object as a SET OF. + + :Parameters: + startSet : container + The initial set of integers or DER encoded objects. + implicit : integer + The IMPLICIT tag to use for the encoded object. + It overrides the universal tag for SET OF (17). + """ + DerObject.__init__(self, 0x11, b'', implicit, True) + self._seq = [] + + # All elements must be of the same type (and therefore have the + # same leading octet) + self._elemOctet = None + + if startSet: + for e in startSet: + self.add(e) + + def __getitem__(self, n): + return self._seq[n] + + def __iter__(self): + return iter(self._seq) + + def __len__(self): + return len(self._seq) + + def add(self, elem): + """Add an element to the set. + + Args: + elem (byte string or integer): + An element of the same type of objects already in the set. 
+ It can be an integer or a DER encoded object. + """ + + if _is_number(elem): + eo = 0x02 + elif isinstance(elem, DerObject): + eo = self._tag_octet + else: + eo = bord(elem[0]) + + if self._elemOctet != eo: + if self._elemOctet is not None: + raise ValueError("New element does not belong to the set") + self._elemOctet = eo + + if elem not in self._seq: + self._seq.append(elem) + + def decode(self, der_encoded, strict=False): + """Decode a complete SET OF DER element, and re-initializes this + object with it. + + DER INTEGERs are decoded into Python integers. Any other DER + element is left undecoded; its validity is not checked. + + Args: + der_encoded (byte string): a complete DER BIT SET OF. + strict (boolean): + Whether decoding must check for strict DER compliancy. + + Raises: + ValueError: in case of parsing errors. + """ + + return DerObject.decode(self, der_encoded, strict) + + def _decodeFromStream(self, s, strict): + """Decode a complete DER SET OF from a file.""" + + self._seq = [] + + # Fill up self.payload + DerObject._decodeFromStream(self, s, strict) + + # Add one item at a time to self.seq, by scanning self.payload + p = BytesIO_EOF(self.payload) + setIdOctet = -1 + while p.remaining_data() > 0: + p.set_bookmark() + + der = DerObject() + der._decodeFromStream(p, strict) + + # Verify that all members are of the same type + if setIdOctet < 0: + setIdOctet = der._tag_octet + else: + if setIdOctet != der._tag_octet: + raise ValueError("Not all elements are of the same DER type") + + # Parse INTEGERs differently + if setIdOctet != 0x02: + self._seq.append(p.data_since_bookmark()) + else: + derInt = DerInteger() + derInt.decode(p.data_since_bookmark(), strict) + self._seq.append(derInt.value) + # end + + def encode(self): + """Return this SET OF DER element, fully encoded as a + binary string. 
+ """ + + # Elements in the set must be ordered in lexicographic order + ordered = [] + for item in self._seq: + if _is_number(item): + bys = DerInteger(item).encode() + elif isinstance(item, DerObject): + bys = item.encode() + else: + bys = item + ordered.append(bys) + ordered.sort() + self.payload = b''.join(ordered) + return DerObject.encode(self) diff --git a/env/Lib/site-packages/Crypto/Util/asn1.pyi b/env/Lib/site-packages/Crypto/Util/asn1.pyi new file mode 100644 index 0000000..dac023b --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/asn1.pyi @@ -0,0 +1,74 @@ +from typing import Optional, Sequence, Union, Set, Iterable + +__all__ = ['DerObject', 'DerInteger', 'DerOctetString', 'DerNull', + 'DerSequence', 'DerObjectId', 'DerBitString', 'DerSetOf'] + +# TODO: Make the encoded DerObjects their own type, so that DerSequence and +# DerSetOf can check their contents better + +class BytesIO_EOF: + def __init__(self, initial_bytes: bytes) -> None: ... + def set_bookmark(self) -> None: ... + def data_since_bookmark(self) -> bytes: ... + def remaining_data(self) -> int: ... + def read(self, length: int) -> bytes: ... + def read_byte(self) -> bytes: ... + +class DerObject: + payload: bytes + def __init__(self, asn1Id: Optional[int]=None, payload: Optional[bytes]=..., implicit: Optional[int]=None, + constructed: Optional[bool]=False, explicit: Optional[int]=None) -> None: ... + def encode(self) -> bytes: ... + def decode(self, der_encoded: bytes, strict: Optional[bool]=False) -> DerObject: ... + +class DerInteger(DerObject): + value: int + def __init__(self, value: Optional[int]= 0, implicit: Optional[int]=None, explicit: Optional[int]=None) -> None: ... + def encode(self) -> bytes: ... + def decode(self, der_encoded: bytes, strict: Optional[bool]=False) -> DerInteger: ... + +class DerSequence(DerObject): + def __init__(self, startSeq: Optional[Sequence[Union[int, DerInteger, DerObject]]]=None, implicit: Optional[int]=None) -> None: ... 
+ def __delitem__(self, n: int) -> None: ... + def __getitem__(self, n: int) -> None: ... + def __setitem__(self, key: int, value: DerObject) -> None: ... + def __setslice__(self, i: int, j: int, sequence: Sequence) -> None: ... + def __delslice__(self, i: int, j: int) -> None: ... + def __getslice__(self, i: int, j: int) -> DerSequence: ... + def __len__(self) -> int: ... + def __iadd__(self, item: DerObject) -> DerSequence: ... + def append(self, item: DerObject) -> DerSequence: ... + def hasInts(self, only_non_negative: Optional[bool]=True) -> int: ... + def hasOnlyInts(self, only_non_negative: Optional[bool]=True) -> bool: ... + def encode(self) -> bytes: ... + def decode(self, der_encoded: bytes, strict: Optional[bool]=False, nr_elements: Optional[int]=None, only_ints_expected: Optional[bool]=False) -> DerSequence: ... + +class DerOctetString(DerObject): + payload: bytes + def __init__(self, value: Optional[bytes]=..., implicit: Optional[int]=None) -> None: ... + +class DerNull(DerObject): + def __init__(self) -> None: ... + +class DerObjectId(DerObject): + value: str + def __init__(self, value: Optional[str]=..., implicit: Optional[int]=None, explicit: Optional[int]=None) -> None: ... + def encode(self) -> bytes: ... + def decode(self, der_encoded: bytes, strict: Optional[bool]=False) -> DerObjectId: ... + +class DerBitString(DerObject): + value: bytes + def __init__(self, value: Optional[bytes]=..., implicit: Optional[int]=None, explicit: Optional[int]=None) -> None: ... + def encode(self) -> bytes: ... + def decode(self, der_encoded: bytes, strict: Optional[bool]=False) -> DerBitString: ... + +DerSetElement = Union[bytes, int] + +class DerSetOf(DerObject): + def __init__(self, startSet: Optional[Set[DerSetElement]]=None, implicit: Optional[int]=None) -> None: ... + def __getitem__(self, n: int) -> DerSetElement: ... + def __iter__(self) -> Iterable: ... + def __len__(self) -> int: ... + def add(self, elem: DerSetElement) -> None: ... 
+ def decode(self, der_encoded: bytes, strict: Optional[bool]=False) -> DerObject: ... + def encode(self) -> bytes: ... diff --git a/env/Lib/site-packages/Crypto/Util/number.py b/env/Lib/site-packages/Crypto/Util/number.py new file mode 100644 index 0000000..56df021 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/number.py @@ -0,0 +1,1469 @@ +# +# number.py : Number-theoretic functions +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew M. Kuchling, Barry A. Warsaw, and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== +# + +import math +import sys +import struct +from Crypto import Random +from Crypto.Util.py3compat import iter_range + +# Backward compatibility +_fastmath = None + + +def ceil_div(n, d): + """Return ceil(n/d), that is, the smallest integer r such that r*d >= n""" + + if d == 0: + raise ZeroDivisionError() + if (n < 0) or (d < 0): + raise ValueError("Non positive values") + r, q = divmod(n, d) + if (n != 0) and (q != 0): + r += 1 + return r + + +def size (N): + """Returns the size of the number N in bits.""" + + if N < 0: + raise ValueError("Size in bits only avialable for non-negative numbers") + + bits = 0 + while N >> bits: + bits += 1 + return bits + + +def getRandomInteger(N, randfunc=None): + """Return a random number at most N bits long. + + If :data:`randfunc` is omitted, then :meth:`Random.get_random_bytes` is used. + + .. deprecated:: 3.0 + This function is for internal use only and may be renamed or removed in + the future. Use :func:`Crypto.Random.random.getrandbits` instead. + """ + + if randfunc is None: + randfunc = Random.get_random_bytes + + S = randfunc(N>>3) + odd_bits = N % 8 + if odd_bits != 0: + rand_bits = ord(randfunc(1)) >> (8-odd_bits) + S = struct.pack('B', rand_bits) + S + value = bytes_to_long(S) + return value + +def getRandomRange(a, b, randfunc=None): + """Return a random number *n* so that *a <= n < b*. + + If :data:`randfunc` is omitted, then :meth:`Random.get_random_bytes` is used. + + .. deprecated:: 3.0 + This function is for internal use only and may be renamed or removed in + the future. Use :func:`Crypto.Random.random.randrange` instead. + """ + + range_ = b - a - 1 + bits = size(range_) + value = getRandomInteger(bits, randfunc) + while value > range_: + value = getRandomInteger(bits, randfunc) + return a + value + +def getRandomNBitInteger(N, randfunc=None): + """Return a random number with exactly N-bits, + i.e. 
a random number between 2**(N-1) and (2**N)-1. + + If :data:`randfunc` is omitted, then :meth:`Random.get_random_bytes` is used. + + .. deprecated:: 3.0 + This function is for internal use only and may be renamed or removed in + the future. + """ + + value = getRandomInteger (N-1, randfunc) + value |= 2 ** (N-1) # Ensure high bit is set + assert size(value) >= N + return value + +def GCD(x,y): + """Greatest Common Denominator of :data:`x` and :data:`y`. + """ + + x = abs(x) ; y = abs(y) + while x > 0: + x, y = y % x, x + return y + +def inverse(u, v): + """The inverse of :data:`u` *mod* :data:`v`.""" + + u3, v3 = u, v + u1, v1 = 1, 0 + while v3 > 0: + q = u3 // v3 + u1, v1 = v1, u1 - v1*q + u3, v3 = v3, u3 - v3*q + while u1<0: + u1 = u1 + v + return u1 + +# Given a number of bits to generate and a random generation function, +# find a prime number of the appropriate size. + +def getPrime(N, randfunc=None): + """Return a random N-bit prime number. + + If randfunc is omitted, then :meth:`Random.get_random_bytes` is used. + """ + if randfunc is None: + randfunc = Random.get_random_bytes + + number=getRandomNBitInteger(N, randfunc) | 1 + while (not isPrime(number, randfunc=randfunc)): + number=number+2 + return number + + +def _rabinMillerTest(n, rounds, randfunc=None): + """_rabinMillerTest(n:long, rounds:int, randfunc:callable):int + Tests if n is prime. + Returns 0 when n is definitely composite. + Returns 1 when n is probably prime. + Returns 2 when n is definitely prime. + + If randfunc is omitted, then Random.new().read is used. + + This function is for internal use only and may be renamed or removed in + the future. 
+ """ + # check special cases (n==2, n even, n < 2) + if n < 3 or (n & 1) == 0: + return n == 2 + # n might be very large so it might be beneficial to precalculate n-1 + n_1 = n - 1 + # determine m and b so that 2**b * m = n - 1 and b maximal + b = 0 + m = n_1 + while (m & 1) == 0: + b += 1 + m >>= 1 + + tested = [] + # we need to do at most n-2 rounds. + for i in iter_range (min (rounds, n-2)): + # randomly choose a < n and make sure it hasn't been tested yet + a = getRandomRange (2, n, randfunc) + while a in tested: + a = getRandomRange (2, n, randfunc) + tested.append (a) + # do the rabin-miller test + z = pow (a, m, n) # (a**m) % n + if z == 1 or z == n_1: + continue + composite = 1 + for r in iter_range(b): + z = (z * z) % n + if z == 1: + return 0 + elif z == n_1: + composite = 0 + break + if composite: + return 0 + return 1 + +def getStrongPrime(N, e=0, false_positive_prob=1e-6, randfunc=None): + r""" + Return a random strong *N*-bit prime number. + In this context, *p* is a strong prime if *p-1* and *p+1* have at + least one large prime factor. + + Args: + N (integer): the exact length of the strong prime. + It must be a multiple of 128 and > 512. + e (integer): if provided, the returned prime (minus 1) + will be coprime to *e* and thus suitable for RSA where + *e* is the public exponent. + false_positive_prob (float): + The statistical probability for the result not to be actually a + prime. It defaults to 10\ :sup:`-6`. + Note that the real probability of a false-positive is far less. This is + just the mathematically provable limit. + randfunc (callable): + A function that takes a parameter *N* and that returns + a random byte string of such length. + If omitted, :func:`Crypto.Random.get_random_bytes` is used. + Return: + The new strong prime. + + .. deprecated:: 3.0 + This function is for internal use only and may be renamed or removed in + the future. 
+ """ + + # This function was implemented following the + # instructions found in the paper: + # "FAST GENERATION OF RANDOM, STRONG RSA PRIMES" + # by Robert D. Silverman + # RSA Laboratories + # May 17, 1997 + # which by the time of writing could be freely downloaded here: + # http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.17.2713&rep=rep1&type=pdf + + if randfunc is None: + randfunc = Random.get_random_bytes + + # Use the accelerator if available + if _fastmath is not None: + return _fastmath.getStrongPrime(long(N), long(e), false_positive_prob, + randfunc) + + if (N < 512) or ((N % 128) != 0): + raise ValueError ("bits must be multiple of 128 and > 512") + + rabin_miller_rounds = int(math.ceil(-math.log(false_positive_prob)/math.log(4))) + + # calculate range for X + # lower_bound = sqrt(2) * 2^{511 + 128*x} + # upper_bound = 2^{512 + 128*x} - 1 + x = (N - 512) >> 7; + # We need to approximate the sqrt(2) in the lower_bound by an integer + # expression because floating point math overflows with these numbers + lower_bound = (14142135623730950489 * (2 ** (511 + 128*x))) // 10000000000000000000 + upper_bound = (1 << (512 + 128*x)) - 1 + # Randomly choose X in calculated range + X = getRandomRange (lower_bound, upper_bound, randfunc) + + # generate p1 and p2 + p = [0, 0] + for i in (0, 1): + # randomly choose 101-bit y + y = getRandomNBitInteger (101, randfunc) + # initialize the field for sieving + field = [0] * 5 * len (sieve_base) + # sieve the field + for prime in sieve_base: + offset = y % prime + for j in iter_range((prime - offset) % prime, len (field), prime): + field[j] = 1 + + # look for suitable p[i] starting at y + result = 0 + for j in range(len(field)): + composite = field[j] + # look for next canidate + if composite: + continue + tmp = y + j + result = _rabinMillerTest (tmp, rabin_miller_rounds) + if result > 0: + p[i] = tmp + break + if result == 0: + raise RuntimeError ("Couln't find prime in field. 
" + "Developer: Increase field_size") + + # Calculate R + # R = (p2^{-1} mod p1) * p2 - (p1^{-1} mod p2) * p1 + tmp1 = inverse (p[1], p[0]) * p[1] # (p2^-1 mod p1)*p2 + tmp2 = inverse (p[0], p[1]) * p[0] # (p1^-1 mod p2)*p1 + R = tmp1 - tmp2 # (p2^-1 mod p1)*p2 - (p1^-1 mod p2)*p1 + + # search for final prime number starting by Y0 + # Y0 = X + (R - X mod p1p2) + increment = p[0] * p[1] + X = X + (R - (X % increment)) + while 1: + is_possible_prime = 1 + # first check candidate against sieve_base + for prime in sieve_base: + if (X % prime) == 0: + is_possible_prime = 0 + break + # if e is given make sure that e and X-1 are coprime + # this is not necessarily a strong prime criterion but useful when + # creating them for RSA where the p-1 and q-1 should be coprime to + # the public exponent e + if e and is_possible_prime: + if e & 1: + if GCD(e, X-1) != 1: + is_possible_prime = 0 + else: + if GCD(e, (X-1) // 2) != 1: + is_possible_prime = 0 + + # do some Rabin-Miller-Tests + if is_possible_prime: + result = _rabinMillerTest (X, rabin_miller_rounds) + if result > 0: + break + X += increment + # abort when X has more bits than requested + # TODO: maybe we shouldn't abort but rather start over. + if X >= 1 << N: + raise RuntimeError ("Couln't find prime in field. " + "Developer: Increase field_size") + return X + +def isPrime(N, false_positive_prob=1e-6, randfunc=None): + r"""Test if a number *N* is a prime. + + Args: + false_positive_prob (float): + The statistical probability for the result not to be actually a + prime. It defaults to 10\ :sup:`-6`. + Note that the real probability of a false-positive is far less. + This is just the mathematically provable limit. + randfunc (callable): + A function that takes a parameter *N* and that returns + a random byte string of such length. + If omitted, :func:`Crypto.Random.get_random_bytes` is used. + + Return: + `True` is the input is indeed prime. 
+ """ + + if randfunc is None: + randfunc = Random.get_random_bytes + + if _fastmath is not None: + return _fastmath.isPrime(long(N), false_positive_prob, randfunc) + + if N < 3 or N & 1 == 0: + return N == 2 + for p in sieve_base: + if N == p: + return 1 + if N % p == 0: + return 0 + + rounds = int(math.ceil(-math.log(false_positive_prob)/math.log(4))) + return _rabinMillerTest(N, rounds, randfunc) + + +# Improved conversion functions contributed by Barry Warsaw, after +# careful benchmarking + +import struct + +def long_to_bytes(n, blocksize=0): + """Convert an integer to a byte string. + + In Python 3.2+, use the native method instead:: + + >>> n.to_bytes(blocksize, 'big') + + For instance:: + + >>> n = 80 + >>> n.to_bytes(2, 'big') + b'\x00P' + + If the optional :data:`blocksize` is provided and greater than zero, + the byte string is padded with binary zeros (on the front) so that + the total length of the output is a multiple of blocksize. + + If :data:`blocksize` is zero or not provided, the byte string will + be of minimal length. + """ + # after much testing, this algorithm was deemed to be the fastest + s = b'' + n = int(n) + pack = struct.pack + while n > 0: + s = pack('>I', n & 0xffffffff) + s + n = n >> 32 + # strip off leading zeros + for i in range(len(s)): + if s[i] != b'\x00'[0]: + break + else: + # only happens when n == 0 + s = b'\x00' + i = 0 + s = s[i:] + # add back some pad bytes. this could be done more efficiently w.r.t. the + # de-padding being done above, but sigh... + if blocksize > 0 and len(s) % blocksize: + s = (blocksize - len(s) % blocksize) * b'\x00' + s + return s + +def bytes_to_long(s): + """Convert a byte string to a long integer (big endian). + + In Python 3.2+, use the native method instead:: + + >>> int.from_bytes(s, 'big') + + For instance:: + + >>> int.from_bytes(b'\x00P', 'big') + 80 + + This is (essentially) the inverse of :func:`long_to_bytes`. 
+ """ + acc = 0 + + unpack = struct.unpack + + # Up to Python 2.7.4, struct.unpack can't work with bytearrays nor + # memoryviews + if sys.version_info[0:3] < (2, 7, 4): + if isinstance(s, bytearray): + s = bytes(s) + elif isinstance(s, memoryview): + s = s.tobytes() + + length = len(s) + if length % 4: + extra = (4 - length % 4) + s = b'\x00' * extra + s + length = length + extra + for i in range(0, length, 4): + acc = (acc << 32) + unpack('>I', s[i:i+4])[0] + return acc + + +# For backwards compatibility... +import warnings +def long2str(n, blocksize=0): + warnings.warn("long2str() has been replaced by long_to_bytes()") + return long_to_bytes(n, blocksize) +def str2long(s): + warnings.warn("str2long() has been replaced by bytes_to_long()") + return bytes_to_long(s) + + +# The first 10000 primes used for checking primality. +# This should be enough to eliminate most of the odd +# numbers before needing to do a Rabin-Miller test at all. +sieve_base = ( + 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, + 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, + 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, + 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, + 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, + 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, + 283, 293, 307, 311, 313, 317, 331, 337, 347, 349, + 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, + 419, 421, 431, 433, 439, 443, 449, 457, 461, 463, + 467, 479, 487, 491, 499, 503, 509, 521, 523, 541, + 547, 557, 563, 569, 571, 577, 587, 593, 599, 601, + 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, + 661, 673, 677, 683, 691, 701, 709, 719, 727, 733, + 739, 743, 751, 757, 761, 769, 773, 787, 797, 809, + 811, 821, 823, 827, 829, 839, 853, 857, 859, 863, + 877, 881, 883, 887, 907, 911, 919, 929, 937, 941, + 947, 953, 967, 971, 977, 983, 991, 997, 1009, 1013, + 1019, 1021, 1031, 1033, 1039, 1049, 1051, 1061, 1063, 1069, + 1087, 1091, 1093, 1097, 1103, 1109, 1117, 1123, 1129, 1151, + 1153, 1163, 1171, 1181, 1187, 1193, 1201, 1213, 
1217, 1223, + 1229, 1231, 1237, 1249, 1259, 1277, 1279, 1283, 1289, 1291, + 1297, 1301, 1303, 1307, 1319, 1321, 1327, 1361, 1367, 1373, + 1381, 1399, 1409, 1423, 1427, 1429, 1433, 1439, 1447, 1451, + 1453, 1459, 1471, 1481, 1483, 1487, 1489, 1493, 1499, 1511, + 1523, 1531, 1543, 1549, 1553, 1559, 1567, 1571, 1579, 1583, + 1597, 1601, 1607, 1609, 1613, 1619, 1621, 1627, 1637, 1657, + 1663, 1667, 1669, 1693, 1697, 1699, 1709, 1721, 1723, 1733, + 1741, 1747, 1753, 1759, 1777, 1783, 1787, 1789, 1801, 1811, + 1823, 1831, 1847, 1861, 1867, 1871, 1873, 1877, 1879, 1889, + 1901, 1907, 1913, 1931, 1933, 1949, 1951, 1973, 1979, 1987, + 1993, 1997, 1999, 2003, 2011, 2017, 2027, 2029, 2039, 2053, + 2063, 2069, 2081, 2083, 2087, 2089, 2099, 2111, 2113, 2129, + 2131, 2137, 2141, 2143, 2153, 2161, 2179, 2203, 2207, 2213, + 2221, 2237, 2239, 2243, 2251, 2267, 2269, 2273, 2281, 2287, + 2293, 2297, 2309, 2311, 2333, 2339, 2341, 2347, 2351, 2357, + 2371, 2377, 2381, 2383, 2389, 2393, 2399, 2411, 2417, 2423, + 2437, 2441, 2447, 2459, 2467, 2473, 2477, 2503, 2521, 2531, + 2539, 2543, 2549, 2551, 2557, 2579, 2591, 2593, 2609, 2617, + 2621, 2633, 2647, 2657, 2659, 2663, 2671, 2677, 2683, 2687, + 2689, 2693, 2699, 2707, 2711, 2713, 2719, 2729, 2731, 2741, + 2749, 2753, 2767, 2777, 2789, 2791, 2797, 2801, 2803, 2819, + 2833, 2837, 2843, 2851, 2857, 2861, 2879, 2887, 2897, 2903, + 2909, 2917, 2927, 2939, 2953, 2957, 2963, 2969, 2971, 2999, + 3001, 3011, 3019, 3023, 3037, 3041, 3049, 3061, 3067, 3079, + 3083, 3089, 3109, 3119, 3121, 3137, 3163, 3167, 3169, 3181, + 3187, 3191, 3203, 3209, 3217, 3221, 3229, 3251, 3253, 3257, + 3259, 3271, 3299, 3301, 3307, 3313, 3319, 3323, 3329, 3331, + 3343, 3347, 3359, 3361, 3371, 3373, 3389, 3391, 3407, 3413, + 3433, 3449, 3457, 3461, 3463, 3467, 3469, 3491, 3499, 3511, + 3517, 3527, 3529, 3533, 3539, 3541, 3547, 3557, 3559, 3571, + 3581, 3583, 3593, 3607, 3613, 3617, 3623, 3631, 3637, 3643, + 3659, 3671, 3673, 3677, 3691, 3697, 3701, 3709, 3719, 3727, + 
3733, 3739, 3761, 3767, 3769, 3779, 3793, 3797, 3803, 3821, + 3823, 3833, 3847, 3851, 3853, 3863, 3877, 3881, 3889, 3907, + 3911, 3917, 3919, 3923, 3929, 3931, 3943, 3947, 3967, 3989, + 4001, 4003, 4007, 4013, 4019, 4021, 4027, 4049, 4051, 4057, + 4073, 4079, 4091, 4093, 4099, 4111, 4127, 4129, 4133, 4139, + 4153, 4157, 4159, 4177, 4201, 4211, 4217, 4219, 4229, 4231, + 4241, 4243, 4253, 4259, 4261, 4271, 4273, 4283, 4289, 4297, + 4327, 4337, 4339, 4349, 4357, 4363, 4373, 4391, 4397, 4409, + 4421, 4423, 4441, 4447, 4451, 4457, 4463, 4481, 4483, 4493, + 4507, 4513, 4517, 4519, 4523, 4547, 4549, 4561, 4567, 4583, + 4591, 4597, 4603, 4621, 4637, 4639, 4643, 4649, 4651, 4657, + 4663, 4673, 4679, 4691, 4703, 4721, 4723, 4729, 4733, 4751, + 4759, 4783, 4787, 4789, 4793, 4799, 4801, 4813, 4817, 4831, + 4861, 4871, 4877, 4889, 4903, 4909, 4919, 4931, 4933, 4937, + 4943, 4951, 4957, 4967, 4969, 4973, 4987, 4993, 4999, 5003, + 5009, 5011, 5021, 5023, 5039, 5051, 5059, 5077, 5081, 5087, + 5099, 5101, 5107, 5113, 5119, 5147, 5153, 5167, 5171, 5179, + 5189, 5197, 5209, 5227, 5231, 5233, 5237, 5261, 5273, 5279, + 5281, 5297, 5303, 5309, 5323, 5333, 5347, 5351, 5381, 5387, + 5393, 5399, 5407, 5413, 5417, 5419, 5431, 5437, 5441, 5443, + 5449, 5471, 5477, 5479, 5483, 5501, 5503, 5507, 5519, 5521, + 5527, 5531, 5557, 5563, 5569, 5573, 5581, 5591, 5623, 5639, + 5641, 5647, 5651, 5653, 5657, 5659, 5669, 5683, 5689, 5693, + 5701, 5711, 5717, 5737, 5741, 5743, 5749, 5779, 5783, 5791, + 5801, 5807, 5813, 5821, 5827, 5839, 5843, 5849, 5851, 5857, + 5861, 5867, 5869, 5879, 5881, 5897, 5903, 5923, 5927, 5939, + 5953, 5981, 5987, 6007, 6011, 6029, 6037, 6043, 6047, 6053, + 6067, 6073, 6079, 6089, 6091, 6101, 6113, 6121, 6131, 6133, + 6143, 6151, 6163, 6173, 6197, 6199, 6203, 6211, 6217, 6221, + 6229, 6247, 6257, 6263, 6269, 6271, 6277, 6287, 6299, 6301, + 6311, 6317, 6323, 6329, 6337, 6343, 6353, 6359, 6361, 6367, + 6373, 6379, 6389, 6397, 6421, 6427, 6449, 6451, 6469, 6473, + 6481, 6491, 
6521, 6529, 6547, 6551, 6553, 6563, 6569, 6571, + 6577, 6581, 6599, 6607, 6619, 6637, 6653, 6659, 6661, 6673, + 6679, 6689, 6691, 6701, 6703, 6709, 6719, 6733, 6737, 6761, + 6763, 6779, 6781, 6791, 6793, 6803, 6823, 6827, 6829, 6833, + 6841, 6857, 6863, 6869, 6871, 6883, 6899, 6907, 6911, 6917, + 6947, 6949, 6959, 6961, 6967, 6971, 6977, 6983, 6991, 6997, + 7001, 7013, 7019, 7027, 7039, 7043, 7057, 7069, 7079, 7103, + 7109, 7121, 7127, 7129, 7151, 7159, 7177, 7187, 7193, 7207, + 7211, 7213, 7219, 7229, 7237, 7243, 7247, 7253, 7283, 7297, + 7307, 7309, 7321, 7331, 7333, 7349, 7351, 7369, 7393, 7411, + 7417, 7433, 7451, 7457, 7459, 7477, 7481, 7487, 7489, 7499, + 7507, 7517, 7523, 7529, 7537, 7541, 7547, 7549, 7559, 7561, + 7573, 7577, 7583, 7589, 7591, 7603, 7607, 7621, 7639, 7643, + 7649, 7669, 7673, 7681, 7687, 7691, 7699, 7703, 7717, 7723, + 7727, 7741, 7753, 7757, 7759, 7789, 7793, 7817, 7823, 7829, + 7841, 7853, 7867, 7873, 7877, 7879, 7883, 7901, 7907, 7919, + 7927, 7933, 7937, 7949, 7951, 7963, 7993, 8009, 8011, 8017, + 8039, 8053, 8059, 8069, 8081, 8087, 8089, 8093, 8101, 8111, + 8117, 8123, 8147, 8161, 8167, 8171, 8179, 8191, 8209, 8219, + 8221, 8231, 8233, 8237, 8243, 8263, 8269, 8273, 8287, 8291, + 8293, 8297, 8311, 8317, 8329, 8353, 8363, 8369, 8377, 8387, + 8389, 8419, 8423, 8429, 8431, 8443, 8447, 8461, 8467, 8501, + 8513, 8521, 8527, 8537, 8539, 8543, 8563, 8573, 8581, 8597, + 8599, 8609, 8623, 8627, 8629, 8641, 8647, 8663, 8669, 8677, + 8681, 8689, 8693, 8699, 8707, 8713, 8719, 8731, 8737, 8741, + 8747, 8753, 8761, 8779, 8783, 8803, 8807, 8819, 8821, 8831, + 8837, 8839, 8849, 8861, 8863, 8867, 8887, 8893, 8923, 8929, + 8933, 8941, 8951, 8963, 8969, 8971, 8999, 9001, 9007, 9011, + 9013, 9029, 9041, 9043, 9049, 9059, 9067, 9091, 9103, 9109, + 9127, 9133, 9137, 9151, 9157, 9161, 9173, 9181, 9187, 9199, + 9203, 9209, 9221, 9227, 9239, 9241, 9257, 9277, 9281, 9283, + 9293, 9311, 9319, 9323, 9337, 9341, 9343, 9349, 9371, 9377, + 9391, 9397, 9403, 9413, 
9419, 9421, 9431, 9433, 9437, 9439, + 9461, 9463, 9467, 9473, 9479, 9491, 9497, 9511, 9521, 9533, + 9539, 9547, 9551, 9587, 9601, 9613, 9619, 9623, 9629, 9631, + 9643, 9649, 9661, 9677, 9679, 9689, 9697, 9719, 9721, 9733, + 9739, 9743, 9749, 9767, 9769, 9781, 9787, 9791, 9803, 9811, + 9817, 9829, 9833, 9839, 9851, 9857, 9859, 9871, 9883, 9887, + 9901, 9907, 9923, 9929, 9931, 9941, 9949, 9967, 9973, 10007, + 10009, 10037, 10039, 10061, 10067, 10069, 10079, 10091, 10093, 10099, + 10103, 10111, 10133, 10139, 10141, 10151, 10159, 10163, 10169, 10177, + 10181, 10193, 10211, 10223, 10243, 10247, 10253, 10259, 10267, 10271, + 10273, 10289, 10301, 10303, 10313, 10321, 10331, 10333, 10337, 10343, + 10357, 10369, 10391, 10399, 10427, 10429, 10433, 10453, 10457, 10459, + 10463, 10477, 10487, 10499, 10501, 10513, 10529, 10531, 10559, 10567, + 10589, 10597, 10601, 10607, 10613, 10627, 10631, 10639, 10651, 10657, + 10663, 10667, 10687, 10691, 10709, 10711, 10723, 10729, 10733, 10739, + 10753, 10771, 10781, 10789, 10799, 10831, 10837, 10847, 10853, 10859, + 10861, 10867, 10883, 10889, 10891, 10903, 10909, 10937, 10939, 10949, + 10957, 10973, 10979, 10987, 10993, 11003, 11027, 11047, 11057, 11059, + 11069, 11071, 11083, 11087, 11093, 11113, 11117, 11119, 11131, 11149, + 11159, 11161, 11171, 11173, 11177, 11197, 11213, 11239, 11243, 11251, + 11257, 11261, 11273, 11279, 11287, 11299, 11311, 11317, 11321, 11329, + 11351, 11353, 11369, 11383, 11393, 11399, 11411, 11423, 11437, 11443, + 11447, 11467, 11471, 11483, 11489, 11491, 11497, 11503, 11519, 11527, + 11549, 11551, 11579, 11587, 11593, 11597, 11617, 11621, 11633, 11657, + 11677, 11681, 11689, 11699, 11701, 11717, 11719, 11731, 11743, 11777, + 11779, 11783, 11789, 11801, 11807, 11813, 11821, 11827, 11831, 11833, + 11839, 11863, 11867, 11887, 11897, 11903, 11909, 11923, 11927, 11933, + 11939, 11941, 11953, 11959, 11969, 11971, 11981, 11987, 12007, 12011, + 12037, 12041, 12043, 12049, 12071, 12073, 12097, 12101, 12107, 12109, + 
12113, 12119, 12143, 12149, 12157, 12161, 12163, 12197, 12203, 12211, + 12227, 12239, 12241, 12251, 12253, 12263, 12269, 12277, 12281, 12289, + 12301, 12323, 12329, 12343, 12347, 12373, 12377, 12379, 12391, 12401, + 12409, 12413, 12421, 12433, 12437, 12451, 12457, 12473, 12479, 12487, + 12491, 12497, 12503, 12511, 12517, 12527, 12539, 12541, 12547, 12553, + 12569, 12577, 12583, 12589, 12601, 12611, 12613, 12619, 12637, 12641, + 12647, 12653, 12659, 12671, 12689, 12697, 12703, 12713, 12721, 12739, + 12743, 12757, 12763, 12781, 12791, 12799, 12809, 12821, 12823, 12829, + 12841, 12853, 12889, 12893, 12899, 12907, 12911, 12917, 12919, 12923, + 12941, 12953, 12959, 12967, 12973, 12979, 12983, 13001, 13003, 13007, + 13009, 13033, 13037, 13043, 13049, 13063, 13093, 13099, 13103, 13109, + 13121, 13127, 13147, 13151, 13159, 13163, 13171, 13177, 13183, 13187, + 13217, 13219, 13229, 13241, 13249, 13259, 13267, 13291, 13297, 13309, + 13313, 13327, 13331, 13337, 13339, 13367, 13381, 13397, 13399, 13411, + 13417, 13421, 13441, 13451, 13457, 13463, 13469, 13477, 13487, 13499, + 13513, 13523, 13537, 13553, 13567, 13577, 13591, 13597, 13613, 13619, + 13627, 13633, 13649, 13669, 13679, 13681, 13687, 13691, 13693, 13697, + 13709, 13711, 13721, 13723, 13729, 13751, 13757, 13759, 13763, 13781, + 13789, 13799, 13807, 13829, 13831, 13841, 13859, 13873, 13877, 13879, + 13883, 13901, 13903, 13907, 13913, 13921, 13931, 13933, 13963, 13967, + 13997, 13999, 14009, 14011, 14029, 14033, 14051, 14057, 14071, 14081, + 14083, 14087, 14107, 14143, 14149, 14153, 14159, 14173, 14177, 14197, + 14207, 14221, 14243, 14249, 14251, 14281, 14293, 14303, 14321, 14323, + 14327, 14341, 14347, 14369, 14387, 14389, 14401, 14407, 14411, 14419, + 14423, 14431, 14437, 14447, 14449, 14461, 14479, 14489, 14503, 14519, + 14533, 14537, 14543, 14549, 14551, 14557, 14561, 14563, 14591, 14593, + 14621, 14627, 14629, 14633, 14639, 14653, 14657, 14669, 14683, 14699, + 14713, 14717, 14723, 14731, 14737, 14741, 14747, 14753, 
14759, 14767, + 14771, 14779, 14783, 14797, 14813, 14821, 14827, 14831, 14843, 14851, + 14867, 14869, 14879, 14887, 14891, 14897, 14923, 14929, 14939, 14947, + 14951, 14957, 14969, 14983, 15013, 15017, 15031, 15053, 15061, 15073, + 15077, 15083, 15091, 15101, 15107, 15121, 15131, 15137, 15139, 15149, + 15161, 15173, 15187, 15193, 15199, 15217, 15227, 15233, 15241, 15259, + 15263, 15269, 15271, 15277, 15287, 15289, 15299, 15307, 15313, 15319, + 15329, 15331, 15349, 15359, 15361, 15373, 15377, 15383, 15391, 15401, + 15413, 15427, 15439, 15443, 15451, 15461, 15467, 15473, 15493, 15497, + 15511, 15527, 15541, 15551, 15559, 15569, 15581, 15583, 15601, 15607, + 15619, 15629, 15641, 15643, 15647, 15649, 15661, 15667, 15671, 15679, + 15683, 15727, 15731, 15733, 15737, 15739, 15749, 15761, 15767, 15773, + 15787, 15791, 15797, 15803, 15809, 15817, 15823, 15859, 15877, 15881, + 15887, 15889, 15901, 15907, 15913, 15919, 15923, 15937, 15959, 15971, + 15973, 15991, 16001, 16007, 16033, 16057, 16061, 16063, 16067, 16069, + 16073, 16087, 16091, 16097, 16103, 16111, 16127, 16139, 16141, 16183, + 16187, 16189, 16193, 16217, 16223, 16229, 16231, 16249, 16253, 16267, + 16273, 16301, 16319, 16333, 16339, 16349, 16361, 16363, 16369, 16381, + 16411, 16417, 16421, 16427, 16433, 16447, 16451, 16453, 16477, 16481, + 16487, 16493, 16519, 16529, 16547, 16553, 16561, 16567, 16573, 16603, + 16607, 16619, 16631, 16633, 16649, 16651, 16657, 16661, 16673, 16691, + 16693, 16699, 16703, 16729, 16741, 16747, 16759, 16763, 16787, 16811, + 16823, 16829, 16831, 16843, 16871, 16879, 16883, 16889, 16901, 16903, + 16921, 16927, 16931, 16937, 16943, 16963, 16979, 16981, 16987, 16993, + 17011, 17021, 17027, 17029, 17033, 17041, 17047, 17053, 17077, 17093, + 17099, 17107, 17117, 17123, 17137, 17159, 17167, 17183, 17189, 17191, + 17203, 17207, 17209, 17231, 17239, 17257, 17291, 17293, 17299, 17317, + 17321, 17327, 17333, 17341, 17351, 17359, 17377, 17383, 17387, 17389, + 17393, 17401, 17417, 17419, 17431, 
17443, 17449, 17467, 17471, 17477, + 17483, 17489, 17491, 17497, 17509, 17519, 17539, 17551, 17569, 17573, + 17579, 17581, 17597, 17599, 17609, 17623, 17627, 17657, 17659, 17669, + 17681, 17683, 17707, 17713, 17729, 17737, 17747, 17749, 17761, 17783, + 17789, 17791, 17807, 17827, 17837, 17839, 17851, 17863, 17881, 17891, + 17903, 17909, 17911, 17921, 17923, 17929, 17939, 17957, 17959, 17971, + 17977, 17981, 17987, 17989, 18013, 18041, 18043, 18047, 18049, 18059, + 18061, 18077, 18089, 18097, 18119, 18121, 18127, 18131, 18133, 18143, + 18149, 18169, 18181, 18191, 18199, 18211, 18217, 18223, 18229, 18233, + 18251, 18253, 18257, 18269, 18287, 18289, 18301, 18307, 18311, 18313, + 18329, 18341, 18353, 18367, 18371, 18379, 18397, 18401, 18413, 18427, + 18433, 18439, 18443, 18451, 18457, 18461, 18481, 18493, 18503, 18517, + 18521, 18523, 18539, 18541, 18553, 18583, 18587, 18593, 18617, 18637, + 18661, 18671, 18679, 18691, 18701, 18713, 18719, 18731, 18743, 18749, + 18757, 18773, 18787, 18793, 18797, 18803, 18839, 18859, 18869, 18899, + 18911, 18913, 18917, 18919, 18947, 18959, 18973, 18979, 19001, 19009, + 19013, 19031, 19037, 19051, 19069, 19073, 19079, 19081, 19087, 19121, + 19139, 19141, 19157, 19163, 19181, 19183, 19207, 19211, 19213, 19219, + 19231, 19237, 19249, 19259, 19267, 19273, 19289, 19301, 19309, 19319, + 19333, 19373, 19379, 19381, 19387, 19391, 19403, 19417, 19421, 19423, + 19427, 19429, 19433, 19441, 19447, 19457, 19463, 19469, 19471, 19477, + 19483, 19489, 19501, 19507, 19531, 19541, 19543, 19553, 19559, 19571, + 19577, 19583, 19597, 19603, 19609, 19661, 19681, 19687, 19697, 19699, + 19709, 19717, 19727, 19739, 19751, 19753, 19759, 19763, 19777, 19793, + 19801, 19813, 19819, 19841, 19843, 19853, 19861, 19867, 19889, 19891, + 19913, 19919, 19927, 19937, 19949, 19961, 19963, 19973, 19979, 19991, + 19993, 19997, 20011, 20021, 20023, 20029, 20047, 20051, 20063, 20071, + 20089, 20101, 20107, 20113, 20117, 20123, 20129, 20143, 20147, 20149, + 20161, 20173, 
20177, 20183, 20201, 20219, 20231, 20233, 20249, 20261, + 20269, 20287, 20297, 20323, 20327, 20333, 20341, 20347, 20353, 20357, + 20359, 20369, 20389, 20393, 20399, 20407, 20411, 20431, 20441, 20443, + 20477, 20479, 20483, 20507, 20509, 20521, 20533, 20543, 20549, 20551, + 20563, 20593, 20599, 20611, 20627, 20639, 20641, 20663, 20681, 20693, + 20707, 20717, 20719, 20731, 20743, 20747, 20749, 20753, 20759, 20771, + 20773, 20789, 20807, 20809, 20849, 20857, 20873, 20879, 20887, 20897, + 20899, 20903, 20921, 20929, 20939, 20947, 20959, 20963, 20981, 20983, + 21001, 21011, 21013, 21017, 21019, 21023, 21031, 21059, 21061, 21067, + 21089, 21101, 21107, 21121, 21139, 21143, 21149, 21157, 21163, 21169, + 21179, 21187, 21191, 21193, 21211, 21221, 21227, 21247, 21269, 21277, + 21283, 21313, 21317, 21319, 21323, 21341, 21347, 21377, 21379, 21383, + 21391, 21397, 21401, 21407, 21419, 21433, 21467, 21481, 21487, 21491, + 21493, 21499, 21503, 21517, 21521, 21523, 21529, 21557, 21559, 21563, + 21569, 21577, 21587, 21589, 21599, 21601, 21611, 21613, 21617, 21647, + 21649, 21661, 21673, 21683, 21701, 21713, 21727, 21737, 21739, 21751, + 21757, 21767, 21773, 21787, 21799, 21803, 21817, 21821, 21839, 21841, + 21851, 21859, 21863, 21871, 21881, 21893, 21911, 21929, 21937, 21943, + 21961, 21977, 21991, 21997, 22003, 22013, 22027, 22031, 22037, 22039, + 22051, 22063, 22067, 22073, 22079, 22091, 22093, 22109, 22111, 22123, + 22129, 22133, 22147, 22153, 22157, 22159, 22171, 22189, 22193, 22229, + 22247, 22259, 22271, 22273, 22277, 22279, 22283, 22291, 22303, 22307, + 22343, 22349, 22367, 22369, 22381, 22391, 22397, 22409, 22433, 22441, + 22447, 22453, 22469, 22481, 22483, 22501, 22511, 22531, 22541, 22543, + 22549, 22567, 22571, 22573, 22613, 22619, 22621, 22637, 22639, 22643, + 22651, 22669, 22679, 22691, 22697, 22699, 22709, 22717, 22721, 22727, + 22739, 22741, 22751, 22769, 22777, 22783, 22787, 22807, 22811, 22817, + 22853, 22859, 22861, 22871, 22877, 22901, 22907, 22921, 22937, 22943, 
+ 22961, 22963, 22973, 22993, 23003, 23011, 23017, 23021, 23027, 23029, + 23039, 23041, 23053, 23057, 23059, 23063, 23071, 23081, 23087, 23099, + 23117, 23131, 23143, 23159, 23167, 23173, 23189, 23197, 23201, 23203, + 23209, 23227, 23251, 23269, 23279, 23291, 23293, 23297, 23311, 23321, + 23327, 23333, 23339, 23357, 23369, 23371, 23399, 23417, 23431, 23447, + 23459, 23473, 23497, 23509, 23531, 23537, 23539, 23549, 23557, 23561, + 23563, 23567, 23581, 23593, 23599, 23603, 23609, 23623, 23627, 23629, + 23633, 23663, 23669, 23671, 23677, 23687, 23689, 23719, 23741, 23743, + 23747, 23753, 23761, 23767, 23773, 23789, 23801, 23813, 23819, 23827, + 23831, 23833, 23857, 23869, 23873, 23879, 23887, 23893, 23899, 23909, + 23911, 23917, 23929, 23957, 23971, 23977, 23981, 23993, 24001, 24007, + 24019, 24023, 24029, 24043, 24049, 24061, 24071, 24077, 24083, 24091, + 24097, 24103, 24107, 24109, 24113, 24121, 24133, 24137, 24151, 24169, + 24179, 24181, 24197, 24203, 24223, 24229, 24239, 24247, 24251, 24281, + 24317, 24329, 24337, 24359, 24371, 24373, 24379, 24391, 24407, 24413, + 24419, 24421, 24439, 24443, 24469, 24473, 24481, 24499, 24509, 24517, + 24527, 24533, 24547, 24551, 24571, 24593, 24611, 24623, 24631, 24659, + 24671, 24677, 24683, 24691, 24697, 24709, 24733, 24749, 24763, 24767, + 24781, 24793, 24799, 24809, 24821, 24841, 24847, 24851, 24859, 24877, + 24889, 24907, 24917, 24919, 24923, 24943, 24953, 24967, 24971, 24977, + 24979, 24989, 25013, 25031, 25033, 25037, 25057, 25073, 25087, 25097, + 25111, 25117, 25121, 25127, 25147, 25153, 25163, 25169, 25171, 25183, + 25189, 25219, 25229, 25237, 25243, 25247, 25253, 25261, 25301, 25303, + 25307, 25309, 25321, 25339, 25343, 25349, 25357, 25367, 25373, 25391, + 25409, 25411, 25423, 25439, 25447, 25453, 25457, 25463, 25469, 25471, + 25523, 25537, 25541, 25561, 25577, 25579, 25583, 25589, 25601, 25603, + 25609, 25621, 25633, 25639, 25643, 25657, 25667, 25673, 25679, 25693, + 25703, 25717, 25733, 25741, 25747, 25759, 25763, 
25771, 25793, 25799, + 25801, 25819, 25841, 25847, 25849, 25867, 25873, 25889, 25903, 25913, + 25919, 25931, 25933, 25939, 25943, 25951, 25969, 25981, 25997, 25999, + 26003, 26017, 26021, 26029, 26041, 26053, 26083, 26099, 26107, 26111, + 26113, 26119, 26141, 26153, 26161, 26171, 26177, 26183, 26189, 26203, + 26209, 26227, 26237, 26249, 26251, 26261, 26263, 26267, 26293, 26297, + 26309, 26317, 26321, 26339, 26347, 26357, 26371, 26387, 26393, 26399, + 26407, 26417, 26423, 26431, 26437, 26449, 26459, 26479, 26489, 26497, + 26501, 26513, 26539, 26557, 26561, 26573, 26591, 26597, 26627, 26633, + 26641, 26647, 26669, 26681, 26683, 26687, 26693, 26699, 26701, 26711, + 26713, 26717, 26723, 26729, 26731, 26737, 26759, 26777, 26783, 26801, + 26813, 26821, 26833, 26839, 26849, 26861, 26863, 26879, 26881, 26891, + 26893, 26903, 26921, 26927, 26947, 26951, 26953, 26959, 26981, 26987, + 26993, 27011, 27017, 27031, 27043, 27059, 27061, 27067, 27073, 27077, + 27091, 27103, 27107, 27109, 27127, 27143, 27179, 27191, 27197, 27211, + 27239, 27241, 27253, 27259, 27271, 27277, 27281, 27283, 27299, 27329, + 27337, 27361, 27367, 27397, 27407, 27409, 27427, 27431, 27437, 27449, + 27457, 27479, 27481, 27487, 27509, 27527, 27529, 27539, 27541, 27551, + 27581, 27583, 27611, 27617, 27631, 27647, 27653, 27673, 27689, 27691, + 27697, 27701, 27733, 27737, 27739, 27743, 27749, 27751, 27763, 27767, + 27773, 27779, 27791, 27793, 27799, 27803, 27809, 27817, 27823, 27827, + 27847, 27851, 27883, 27893, 27901, 27917, 27919, 27941, 27943, 27947, + 27953, 27961, 27967, 27983, 27997, 28001, 28019, 28027, 28031, 28051, + 28057, 28069, 28081, 28087, 28097, 28099, 28109, 28111, 28123, 28151, + 28163, 28181, 28183, 28201, 28211, 28219, 28229, 28277, 28279, 28283, + 28289, 28297, 28307, 28309, 28319, 28349, 28351, 28387, 28393, 28403, + 28409, 28411, 28429, 28433, 28439, 28447, 28463, 28477, 28493, 28499, + 28513, 28517, 28537, 28541, 28547, 28549, 28559, 28571, 28573, 28579, + 28591, 28597, 28603, 28607, 
28619, 28621, 28627, 28631, 28643, 28649, + 28657, 28661, 28663, 28669, 28687, 28697, 28703, 28711, 28723, 28729, + 28751, 28753, 28759, 28771, 28789, 28793, 28807, 28813, 28817, 28837, + 28843, 28859, 28867, 28871, 28879, 28901, 28909, 28921, 28927, 28933, + 28949, 28961, 28979, 29009, 29017, 29021, 29023, 29027, 29033, 29059, + 29063, 29077, 29101, 29123, 29129, 29131, 29137, 29147, 29153, 29167, + 29173, 29179, 29191, 29201, 29207, 29209, 29221, 29231, 29243, 29251, + 29269, 29287, 29297, 29303, 29311, 29327, 29333, 29339, 29347, 29363, + 29383, 29387, 29389, 29399, 29401, 29411, 29423, 29429, 29437, 29443, + 29453, 29473, 29483, 29501, 29527, 29531, 29537, 29567, 29569, 29573, + 29581, 29587, 29599, 29611, 29629, 29633, 29641, 29663, 29669, 29671, + 29683, 29717, 29723, 29741, 29753, 29759, 29761, 29789, 29803, 29819, + 29833, 29837, 29851, 29863, 29867, 29873, 29879, 29881, 29917, 29921, + 29927, 29947, 29959, 29983, 29989, 30011, 30013, 30029, 30047, 30059, + 30071, 30089, 30091, 30097, 30103, 30109, 30113, 30119, 30133, 30137, + 30139, 30161, 30169, 30181, 30187, 30197, 30203, 30211, 30223, 30241, + 30253, 30259, 30269, 30271, 30293, 30307, 30313, 30319, 30323, 30341, + 30347, 30367, 30389, 30391, 30403, 30427, 30431, 30449, 30467, 30469, + 30491, 30493, 30497, 30509, 30517, 30529, 30539, 30553, 30557, 30559, + 30577, 30593, 30631, 30637, 30643, 30649, 30661, 30671, 30677, 30689, + 30697, 30703, 30707, 30713, 30727, 30757, 30763, 30773, 30781, 30803, + 30809, 30817, 30829, 30839, 30841, 30851, 30853, 30859, 30869, 30871, + 30881, 30893, 30911, 30931, 30937, 30941, 30949, 30971, 30977, 30983, + 31013, 31019, 31033, 31039, 31051, 31063, 31069, 31079, 31081, 31091, + 31121, 31123, 31139, 31147, 31151, 31153, 31159, 31177, 31181, 31183, + 31189, 31193, 31219, 31223, 31231, 31237, 31247, 31249, 31253, 31259, + 31267, 31271, 31277, 31307, 31319, 31321, 31327, 31333, 31337, 31357, + 31379, 31387, 31391, 31393, 31397, 31469, 31477, 31481, 31489, 31511, + 31513, 
31517, 31531, 31541, 31543, 31547, 31567, 31573, 31583, 31601, + 31607, 31627, 31643, 31649, 31657, 31663, 31667, 31687, 31699, 31721, + 31723, 31727, 31729, 31741, 31751, 31769, 31771, 31793, 31799, 31817, + 31847, 31849, 31859, 31873, 31883, 31891, 31907, 31957, 31963, 31973, + 31981, 31991, 32003, 32009, 32027, 32029, 32051, 32057, 32059, 32063, + 32069, 32077, 32083, 32089, 32099, 32117, 32119, 32141, 32143, 32159, + 32173, 32183, 32189, 32191, 32203, 32213, 32233, 32237, 32251, 32257, + 32261, 32297, 32299, 32303, 32309, 32321, 32323, 32327, 32341, 32353, + 32359, 32363, 32369, 32371, 32377, 32381, 32401, 32411, 32413, 32423, + 32429, 32441, 32443, 32467, 32479, 32491, 32497, 32503, 32507, 32531, + 32533, 32537, 32561, 32563, 32569, 32573, 32579, 32587, 32603, 32609, + 32611, 32621, 32633, 32647, 32653, 32687, 32693, 32707, 32713, 32717, + 32719, 32749, 32771, 32779, 32783, 32789, 32797, 32801, 32803, 32831, + 32833, 32839, 32843, 32869, 32887, 32909, 32911, 32917, 32933, 32939, + 32941, 32957, 32969, 32971, 32983, 32987, 32993, 32999, 33013, 33023, + 33029, 33037, 33049, 33053, 33071, 33073, 33083, 33091, 33107, 33113, + 33119, 33149, 33151, 33161, 33179, 33181, 33191, 33199, 33203, 33211, + 33223, 33247, 33287, 33289, 33301, 33311, 33317, 33329, 33331, 33343, + 33347, 33349, 33353, 33359, 33377, 33391, 33403, 33409, 33413, 33427, + 33457, 33461, 33469, 33479, 33487, 33493, 33503, 33521, 33529, 33533, + 33547, 33563, 33569, 33577, 33581, 33587, 33589, 33599, 33601, 33613, + 33617, 33619, 33623, 33629, 33637, 33641, 33647, 33679, 33703, 33713, + 33721, 33739, 33749, 33751, 33757, 33767, 33769, 33773, 33791, 33797, + 33809, 33811, 33827, 33829, 33851, 33857, 33863, 33871, 33889, 33893, + 33911, 33923, 33931, 33937, 33941, 33961, 33967, 33997, 34019, 34031, + 34033, 34039, 34057, 34061, 34123, 34127, 34129, 34141, 34147, 34157, + 34159, 34171, 34183, 34211, 34213, 34217, 34231, 34253, 34259, 34261, + 34267, 34273, 34283, 34297, 34301, 34303, 34313, 34319, 34327, 
34337, + 34351, 34361, 34367, 34369, 34381, 34403, 34421, 34429, 34439, 34457, + 34469, 34471, 34483, 34487, 34499, 34501, 34511, 34513, 34519, 34537, + 34543, 34549, 34583, 34589, 34591, 34603, 34607, 34613, 34631, 34649, + 34651, 34667, 34673, 34679, 34687, 34693, 34703, 34721, 34729, 34739, + 34747, 34757, 34759, 34763, 34781, 34807, 34819, 34841, 34843, 34847, + 34849, 34871, 34877, 34883, 34897, 34913, 34919, 34939, 34949, 34961, + 34963, 34981, 35023, 35027, 35051, 35053, 35059, 35069, 35081, 35083, + 35089, 35099, 35107, 35111, 35117, 35129, 35141, 35149, 35153, 35159, + 35171, 35201, 35221, 35227, 35251, 35257, 35267, 35279, 35281, 35291, + 35311, 35317, 35323, 35327, 35339, 35353, 35363, 35381, 35393, 35401, + 35407, 35419, 35423, 35437, 35447, 35449, 35461, 35491, 35507, 35509, + 35521, 35527, 35531, 35533, 35537, 35543, 35569, 35573, 35591, 35593, + 35597, 35603, 35617, 35671, 35677, 35729, 35731, 35747, 35753, 35759, + 35771, 35797, 35801, 35803, 35809, 35831, 35837, 35839, 35851, 35863, + 35869, 35879, 35897, 35899, 35911, 35923, 35933, 35951, 35963, 35969, + 35977, 35983, 35993, 35999, 36007, 36011, 36013, 36017, 36037, 36061, + 36067, 36073, 36083, 36097, 36107, 36109, 36131, 36137, 36151, 36161, + 36187, 36191, 36209, 36217, 36229, 36241, 36251, 36263, 36269, 36277, + 36293, 36299, 36307, 36313, 36319, 36341, 36343, 36353, 36373, 36383, + 36389, 36433, 36451, 36457, 36467, 36469, 36473, 36479, 36493, 36497, + 36523, 36527, 36529, 36541, 36551, 36559, 36563, 36571, 36583, 36587, + 36599, 36607, 36629, 36637, 36643, 36653, 36671, 36677, 36683, 36691, + 36697, 36709, 36713, 36721, 36739, 36749, 36761, 36767, 36779, 36781, + 36787, 36791, 36793, 36809, 36821, 36833, 36847, 36857, 36871, 36877, + 36887, 36899, 36901, 36913, 36919, 36923, 36929, 36931, 36943, 36947, + 36973, 36979, 36997, 37003, 37013, 37019, 37021, 37039, 37049, 37057, + 37061, 37087, 37097, 37117, 37123, 37139, 37159, 37171, 37181, 37189, + 37199, 37201, 37217, 37223, 37243, 37253, 
37273, 37277, 37307, 37309, + 37313, 37321, 37337, 37339, 37357, 37361, 37363, 37369, 37379, 37397, + 37409, 37423, 37441, 37447, 37463, 37483, 37489, 37493, 37501, 37507, + 37511, 37517, 37529, 37537, 37547, 37549, 37561, 37567, 37571, 37573, + 37579, 37589, 37591, 37607, 37619, 37633, 37643, 37649, 37657, 37663, + 37691, 37693, 37699, 37717, 37747, 37781, 37783, 37799, 37811, 37813, + 37831, 37847, 37853, 37861, 37871, 37879, 37889, 37897, 37907, 37951, + 37957, 37963, 37967, 37987, 37991, 37993, 37997, 38011, 38039, 38047, + 38053, 38069, 38083, 38113, 38119, 38149, 38153, 38167, 38177, 38183, + 38189, 38197, 38201, 38219, 38231, 38237, 38239, 38261, 38273, 38281, + 38287, 38299, 38303, 38317, 38321, 38327, 38329, 38333, 38351, 38371, + 38377, 38393, 38431, 38447, 38449, 38453, 38459, 38461, 38501, 38543, + 38557, 38561, 38567, 38569, 38593, 38603, 38609, 38611, 38629, 38639, + 38651, 38653, 38669, 38671, 38677, 38693, 38699, 38707, 38711, 38713, + 38723, 38729, 38737, 38747, 38749, 38767, 38783, 38791, 38803, 38821, + 38833, 38839, 38851, 38861, 38867, 38873, 38891, 38903, 38917, 38921, + 38923, 38933, 38953, 38959, 38971, 38977, 38993, 39019, 39023, 39041, + 39043, 39047, 39079, 39089, 39097, 39103, 39107, 39113, 39119, 39133, + 39139, 39157, 39161, 39163, 39181, 39191, 39199, 39209, 39217, 39227, + 39229, 39233, 39239, 39241, 39251, 39293, 39301, 39313, 39317, 39323, + 39341, 39343, 39359, 39367, 39371, 39373, 39383, 39397, 39409, 39419, + 39439, 39443, 39451, 39461, 39499, 39503, 39509, 39511, 39521, 39541, + 39551, 39563, 39569, 39581, 39607, 39619, 39623, 39631, 39659, 39667, + 39671, 39679, 39703, 39709, 39719, 39727, 39733, 39749, 39761, 39769, + 39779, 39791, 39799, 39821, 39827, 39829, 39839, 39841, 39847, 39857, + 39863, 39869, 39877, 39883, 39887, 39901, 39929, 39937, 39953, 39971, + 39979, 39983, 39989, 40009, 40013, 40031, 40037, 40039, 40063, 40087, + 40093, 40099, 40111, 40123, 40127, 40129, 40151, 40153, 40163, 40169, + 40177, 40189, 40193, 
40213, 40231, 40237, 40241, 40253, 40277, 40283, + 40289, 40343, 40351, 40357, 40361, 40387, 40423, 40427, 40429, 40433, + 40459, 40471, 40483, 40487, 40493, 40499, 40507, 40519, 40529, 40531, + 40543, 40559, 40577, 40583, 40591, 40597, 40609, 40627, 40637, 40639, + 40693, 40697, 40699, 40709, 40739, 40751, 40759, 40763, 40771, 40787, + 40801, 40813, 40819, 40823, 40829, 40841, 40847, 40849, 40853, 40867, + 40879, 40883, 40897, 40903, 40927, 40933, 40939, 40949, 40961, 40973, + 40993, 41011, 41017, 41023, 41039, 41047, 41051, 41057, 41077, 41081, + 41113, 41117, 41131, 41141, 41143, 41149, 41161, 41177, 41179, 41183, + 41189, 41201, 41203, 41213, 41221, 41227, 41231, 41233, 41243, 41257, + 41263, 41269, 41281, 41299, 41333, 41341, 41351, 41357, 41381, 41387, + 41389, 41399, 41411, 41413, 41443, 41453, 41467, 41479, 41491, 41507, + 41513, 41519, 41521, 41539, 41543, 41549, 41579, 41593, 41597, 41603, + 41609, 41611, 41617, 41621, 41627, 41641, 41647, 41651, 41659, 41669, + 41681, 41687, 41719, 41729, 41737, 41759, 41761, 41771, 41777, 41801, + 41809, 41813, 41843, 41849, 41851, 41863, 41879, 41887, 41893, 41897, + 41903, 41911, 41927, 41941, 41947, 41953, 41957, 41959, 41969, 41981, + 41983, 41999, 42013, 42017, 42019, 42023, 42043, 42061, 42071, 42073, + 42083, 42089, 42101, 42131, 42139, 42157, 42169, 42179, 42181, 42187, + 42193, 42197, 42209, 42221, 42223, 42227, 42239, 42257, 42281, 42283, + 42293, 42299, 42307, 42323, 42331, 42337, 42349, 42359, 42373, 42379, + 42391, 42397, 42403, 42407, 42409, 42433, 42437, 42443, 42451, 42457, + 42461, 42463, 42467, 42473, 42487, 42491, 42499, 42509, 42533, 42557, + 42569, 42571, 42577, 42589, 42611, 42641, 42643, 42649, 42667, 42677, + 42683, 42689, 42697, 42701, 42703, 42709, 42719, 42727, 42737, 42743, + 42751, 42767, 42773, 42787, 42793, 42797, 42821, 42829, 42839, 42841, + 42853, 42859, 42863, 42899, 42901, 42923, 42929, 42937, 42943, 42953, + 42961, 42967, 42979, 42989, 43003, 43013, 43019, 43037, 43049, 43051, + 
43063, 43067, 43093, 43103, 43117, 43133, 43151, 43159, 43177, 43189, + 43201, 43207, 43223, 43237, 43261, 43271, 43283, 43291, 43313, 43319, + 43321, 43331, 43391, 43397, 43399, 43403, 43411, 43427, 43441, 43451, + 43457, 43481, 43487, 43499, 43517, 43541, 43543, 43573, 43577, 43579, + 43591, 43597, 43607, 43609, 43613, 43627, 43633, 43649, 43651, 43661, + 43669, 43691, 43711, 43717, 43721, 43753, 43759, 43777, 43781, 43783, + 43787, 43789, 43793, 43801, 43853, 43867, 43889, 43891, 43913, 43933, + 43943, 43951, 43961, 43963, 43969, 43973, 43987, 43991, 43997, 44017, + 44021, 44027, 44029, 44041, 44053, 44059, 44071, 44087, 44089, 44101, + 44111, 44119, 44123, 44129, 44131, 44159, 44171, 44179, 44189, 44201, + 44203, 44207, 44221, 44249, 44257, 44263, 44267, 44269, 44273, 44279, + 44281, 44293, 44351, 44357, 44371, 44381, 44383, 44389, 44417, 44449, + 44453, 44483, 44491, 44497, 44501, 44507, 44519, 44531, 44533, 44537, + 44543, 44549, 44563, 44579, 44587, 44617, 44621, 44623, 44633, 44641, + 44647, 44651, 44657, 44683, 44687, 44699, 44701, 44711, 44729, 44741, + 44753, 44771, 44773, 44777, 44789, 44797, 44809, 44819, 44839, 44843, + 44851, 44867, 44879, 44887, 44893, 44909, 44917, 44927, 44939, 44953, + 44959, 44963, 44971, 44983, 44987, 45007, 45013, 45053, 45061, 45077, + 45083, 45119, 45121, 45127, 45131, 45137, 45139, 45161, 45179, 45181, + 45191, 45197, 45233, 45247, 45259, 45263, 45281, 45289, 45293, 45307, + 45317, 45319, 45329, 45337, 45341, 45343, 45361, 45377, 45389, 45403, + 45413, 45427, 45433, 45439, 45481, 45491, 45497, 45503, 45523, 45533, + 45541, 45553, 45557, 45569, 45587, 45589, 45599, 45613, 45631, 45641, + 45659, 45667, 45673, 45677, 45691, 45697, 45707, 45737, 45751, 45757, + 45763, 45767, 45779, 45817, 45821, 45823, 45827, 45833, 45841, 45853, + 45863, 45869, 45887, 45893, 45943, 45949, 45953, 45959, 45971, 45979, + 45989, 46021, 46027, 46049, 46051, 46061, 46073, 46091, 46093, 46099, + 46103, 46133, 46141, 46147, 46153, 46171, 46181, 46183, 
46187, 46199, + 46219, 46229, 46237, 46261, 46271, 46273, 46279, 46301, 46307, 46309, + 46327, 46337, 46349, 46351, 46381, 46399, 46411, 46439, 46441, 46447, + 46451, 46457, 46471, 46477, 46489, 46499, 46507, 46511, 46523, 46549, + 46559, 46567, 46573, 46589, 46591, 46601, 46619, 46633, 46639, 46643, + 46649, 46663, 46679, 46681, 46687, 46691, 46703, 46723, 46727, 46747, + 46751, 46757, 46769, 46771, 46807, 46811, 46817, 46819, 46829, 46831, + 46853, 46861, 46867, 46877, 46889, 46901, 46919, 46933, 46957, 46993, + 46997, 47017, 47041, 47051, 47057, 47059, 47087, 47093, 47111, 47119, + 47123, 47129, 47137, 47143, 47147, 47149, 47161, 47189, 47207, 47221, + 47237, 47251, 47269, 47279, 47287, 47293, 47297, 47303, 47309, 47317, + 47339, 47351, 47353, 47363, 47381, 47387, 47389, 47407, 47417, 47419, + 47431, 47441, 47459, 47491, 47497, 47501, 47507, 47513, 47521, 47527, + 47533, 47543, 47563, 47569, 47581, 47591, 47599, 47609, 47623, 47629, + 47639, 47653, 47657, 47659, 47681, 47699, 47701, 47711, 47713, 47717, + 47737, 47741, 47743, 47777, 47779, 47791, 47797, 47807, 47809, 47819, + 47837, 47843, 47857, 47869, 47881, 47903, 47911, 47917, 47933, 47939, + 47947, 47951, 47963, 47969, 47977, 47981, 48017, 48023, 48029, 48049, + 48073, 48079, 48091, 48109, 48119, 48121, 48131, 48157, 48163, 48179, + 48187, 48193, 48197, 48221, 48239, 48247, 48259, 48271, 48281, 48299, + 48311, 48313, 48337, 48341, 48353, 48371, 48383, 48397, 48407, 48409, + 48413, 48437, 48449, 48463, 48473, 48479, 48481, 48487, 48491, 48497, + 48523, 48527, 48533, 48539, 48541, 48563, 48571, 48589, 48593, 48611, + 48619, 48623, 48647, 48649, 48661, 48673, 48677, 48679, 48731, 48733, + 48751, 48757, 48761, 48767, 48779, 48781, 48787, 48799, 48809, 48817, + 48821, 48823, 48847, 48857, 48859, 48869, 48871, 48883, 48889, 48907, + 48947, 48953, 48973, 48989, 48991, 49003, 49009, 49019, 49031, 49033, + 49037, 49043, 49057, 49069, 49081, 49103, 49109, 49117, 49121, 49123, + 49139, 49157, 49169, 49171, 49177, 
49193, 49199, 49201, 49207, 49211, + 49223, 49253, 49261, 49277, 49279, 49297, 49307, 49331, 49333, 49339, + 49363, 49367, 49369, 49391, 49393, 49409, 49411, 49417, 49429, 49433, + 49451, 49459, 49463, 49477, 49481, 49499, 49523, 49529, 49531, 49537, + 49547, 49549, 49559, 49597, 49603, 49613, 49627, 49633, 49639, 49663, + 49667, 49669, 49681, 49697, 49711, 49727, 49739, 49741, 49747, 49757, + 49783, 49787, 49789, 49801, 49807, 49811, 49823, 49831, 49843, 49853, + 49871, 49877, 49891, 49919, 49921, 49927, 49937, 49939, 49943, 49957, + 49991, 49993, 49999, 50021, 50023, 50033, 50047, 50051, 50053, 50069, + 50077, 50087, 50093, 50101, 50111, 50119, 50123, 50129, 50131, 50147, + 50153, 50159, 50177, 50207, 50221, 50227, 50231, 50261, 50263, 50273, + 50287, 50291, 50311, 50321, 50329, 50333, 50341, 50359, 50363, 50377, + 50383, 50387, 50411, 50417, 50423, 50441, 50459, 50461, 50497, 50503, + 50513, 50527, 50539, 50543, 50549, 50551, 50581, 50587, 50591, 50593, + 50599, 50627, 50647, 50651, 50671, 50683, 50707, 50723, 50741, 50753, + 50767, 50773, 50777, 50789, 50821, 50833, 50839, 50849, 50857, 50867, + 50873, 50891, 50893, 50909, 50923, 50929, 50951, 50957, 50969, 50971, + 50989, 50993, 51001, 51031, 51043, 51047, 51059, 51061, 51071, 51109, + 51131, 51133, 51137, 51151, 51157, 51169, 51193, 51197, 51199, 51203, + 51217, 51229, 51239, 51241, 51257, 51263, 51283, 51287, 51307, 51329, + 51341, 51343, 51347, 51349, 51361, 51383, 51407, 51413, 51419, 51421, + 51427, 51431, 51437, 51439, 51449, 51461, 51473, 51479, 51481, 51487, + 51503, 51511, 51517, 51521, 51539, 51551, 51563, 51577, 51581, 51593, + 51599, 51607, 51613, 51631, 51637, 51647, 51659, 51673, 51679, 51683, + 51691, 51713, 51719, 51721, 51749, 51767, 51769, 51787, 51797, 51803, + 51817, 51827, 51829, 51839, 51853, 51859, 51869, 51871, 51893, 51899, + 51907, 51913, 51929, 51941, 51949, 51971, 51973, 51977, 51991, 52009, + 52021, 52027, 52051, 52057, 52067, 52069, 52081, 52103, 52121, 52127, + 52147, 52153, 
52163, 52177, 52181, 52183, 52189, 52201, 52223, 52237, + 52249, 52253, 52259, 52267, 52289, 52291, 52301, 52313, 52321, 52361, + 52363, 52369, 52379, 52387, 52391, 52433, 52453, 52457, 52489, 52501, + 52511, 52517, 52529, 52541, 52543, 52553, 52561, 52567, 52571, 52579, + 52583, 52609, 52627, 52631, 52639, 52667, 52673, 52691, 52697, 52709, + 52711, 52721, 52727, 52733, 52747, 52757, 52769, 52783, 52807, 52813, + 52817, 52837, 52859, 52861, 52879, 52883, 52889, 52901, 52903, 52919, + 52937, 52951, 52957, 52963, 52967, 52973, 52981, 52999, 53003, 53017, + 53047, 53051, 53069, 53077, 53087, 53089, 53093, 53101, 53113, 53117, + 53129, 53147, 53149, 53161, 53171, 53173, 53189, 53197, 53201, 53231, + 53233, 53239, 53267, 53269, 53279, 53281, 53299, 53309, 53323, 53327, + 53353, 53359, 53377, 53381, 53401, 53407, 53411, 53419, 53437, 53441, + 53453, 53479, 53503, 53507, 53527, 53549, 53551, 53569, 53591, 53593, + 53597, 53609, 53611, 53617, 53623, 53629, 53633, 53639, 53653, 53657, + 53681, 53693, 53699, 53717, 53719, 53731, 53759, 53773, 53777, 53783, + 53791, 53813, 53819, 53831, 53849, 53857, 53861, 53881, 53887, 53891, + 53897, 53899, 53917, 53923, 53927, 53939, 53951, 53959, 53987, 53993, + 54001, 54011, 54013, 54037, 54049, 54059, 54083, 54091, 54101, 54121, + 54133, 54139, 54151, 54163, 54167, 54181, 54193, 54217, 54251, 54269, + 54277, 54287, 54293, 54311, 54319, 54323, 54331, 54347, 54361, 54367, + 54371, 54377, 54401, 54403, 54409, 54413, 54419, 54421, 54437, 54443, + 54449, 54469, 54493, 54497, 54499, 54503, 54517, 54521, 54539, 54541, + 54547, 54559, 54563, 54577, 54581, 54583, 54601, 54617, 54623, 54629, + 54631, 54647, 54667, 54673, 54679, 54709, 54713, 54721, 54727, 54751, + 54767, 54773, 54779, 54787, 54799, 54829, 54833, 54851, 54869, 54877, + 54881, 54907, 54917, 54919, 54941, 54949, 54959, 54973, 54979, 54983, + 55001, 55009, 55021, 55049, 55051, 55057, 55061, 55073, 55079, 55103, + 55109, 55117, 55127, 55147, 55163, 55171, 55201, 55207, 55213, 55217, 
+ 55219, 55229, 55243, 55249, 55259, 55291, 55313, 55331, 55333, 55337, + 55339, 55343, 55351, 55373, 55381, 55399, 55411, 55439, 55441, 55457, + 55469, 55487, 55501, 55511, 55529, 55541, 55547, 55579, 55589, 55603, + 55609, 55619, 55621, 55631, 55633, 55639, 55661, 55663, 55667, 55673, + 55681, 55691, 55697, 55711, 55717, 55721, 55733, 55763, 55787, 55793, + 55799, 55807, 55813, 55817, 55819, 55823, 55829, 55837, 55843, 55849, + 55871, 55889, 55897, 55901, 55903, 55921, 55927, 55931, 55933, 55949, + 55967, 55987, 55997, 56003, 56009, 56039, 56041, 56053, 56081, 56087, + 56093, 56099, 56101, 56113, 56123, 56131, 56149, 56167, 56171, 56179, + 56197, 56207, 56209, 56237, 56239, 56249, 56263, 56267, 56269, 56299, + 56311, 56333, 56359, 56369, 56377, 56383, 56393, 56401, 56417, 56431, + 56437, 56443, 56453, 56467, 56473, 56477, 56479, 56489, 56501, 56503, + 56509, 56519, 56527, 56531, 56533, 56543, 56569, 56591, 56597, 56599, + 56611, 56629, 56633, 56659, 56663, 56671, 56681, 56687, 56701, 56711, + 56713, 56731, 56737, 56747, 56767, 56773, 56779, 56783, 56807, 56809, + 56813, 56821, 56827, 56843, 56857, 56873, 56891, 56893, 56897, 56909, + 56911, 56921, 56923, 56929, 56941, 56951, 56957, 56963, 56983, 56989, + 56993, 56999, 57037, 57041, 57047, 57059, 57073, 57077, 57089, 57097, + 57107, 57119, 57131, 57139, 57143, 57149, 57163, 57173, 57179, 57191, + 57193, 57203, 57221, 57223, 57241, 57251, 57259, 57269, 57271, 57283, + 57287, 57301, 57329, 57331, 57347, 57349, 57367, 57373, 57383, 57389, + 57397, 57413, 57427, 57457, 57467, 57487, 57493, 57503, 57527, 57529, + 57557, 57559, 57571, 57587, 57593, 57601, 57637, 57641, 57649, 57653, + 57667, 57679, 57689, 57697, 57709, 57713, 57719, 57727, 57731, 57737, + 57751, 57773, 57781, 57787, 57791, 57793, 57803, 57809, 57829, 57839, + 57847, 57853, 57859, 57881, 57899, 57901, 57917, 57923, 57943, 57947, + 57973, 57977, 57991, 58013, 58027, 58031, 58043, 58049, 58057, 58061, + 58067, 58073, 58099, 58109, 58111, 58129, 58147, 
58151, 58153, 58169, + 58171, 58189, 58193, 58199, 58207, 58211, 58217, 58229, 58231, 58237, + 58243, 58271, 58309, 58313, 58321, 58337, 58363, 58367, 58369, 58379, + 58391, 58393, 58403, 58411, 58417, 58427, 58439, 58441, 58451, 58453, + 58477, 58481, 58511, 58537, 58543, 58549, 58567, 58573, 58579, 58601, + 58603, 58613, 58631, 58657, 58661, 58679, 58687, 58693, 58699, 58711, + 58727, 58733, 58741, 58757, 58763, 58771, 58787, 58789, 58831, 58889, + 58897, 58901, 58907, 58909, 58913, 58921, 58937, 58943, 58963, 58967, + 58979, 58991, 58997, 59009, 59011, 59021, 59023, 59029, 59051, 59053, + 59063, 59069, 59077, 59083, 59093, 59107, 59113, 59119, 59123, 59141, + 59149, 59159, 59167, 59183, 59197, 59207, 59209, 59219, 59221, 59233, + 59239, 59243, 59263, 59273, 59281, 59333, 59341, 59351, 59357, 59359, + 59369, 59377, 59387, 59393, 59399, 59407, 59417, 59419, 59441, 59443, + 59447, 59453, 59467, 59471, 59473, 59497, 59509, 59513, 59539, 59557, + 59561, 59567, 59581, 59611, 59617, 59621, 59627, 59629, 59651, 59659, + 59663, 59669, 59671, 59693, 59699, 59707, 59723, 59729, 59743, 59747, + 59753, 59771, 59779, 59791, 59797, 59809, 59833, 59863, 59879, 59887, + 59921, 59929, 59951, 59957, 59971, 59981, 59999, 60013, 60017, 60029, + 60037, 60041, 60077, 60083, 60089, 60091, 60101, 60103, 60107, 60127, + 60133, 60139, 60149, 60161, 60167, 60169, 60209, 60217, 60223, 60251, + 60257, 60259, 60271, 60289, 60293, 60317, 60331, 60337, 60343, 60353, + 60373, 60383, 60397, 60413, 60427, 60443, 60449, 60457, 60493, 60497, + 60509, 60521, 60527, 60539, 60589, 60601, 60607, 60611, 60617, 60623, + 60631, 60637, 60647, 60649, 60659, 60661, 60679, 60689, 60703, 60719, + 60727, 60733, 60737, 60757, 60761, 60763, 60773, 60779, 60793, 60811, + 60821, 60859, 60869, 60887, 60889, 60899, 60901, 60913, 60917, 60919, + 60923, 60937, 60943, 60953, 60961, 61001, 61007, 61027, 61031, 61043, + 61051, 61057, 61091, 61099, 61121, 61129, 61141, 61151, 61153, 61169, + 61211, 61223, 61231, 61253, 
61261, 61283, 61291, 61297, 61331, 61333, + 61339, 61343, 61357, 61363, 61379, 61381, 61403, 61409, 61417, 61441, + 61463, 61469, 61471, 61483, 61487, 61493, 61507, 61511, 61519, 61543, + 61547, 61553, 61559, 61561, 61583, 61603, 61609, 61613, 61627, 61631, + 61637, 61643, 61651, 61657, 61667, 61673, 61681, 61687, 61703, 61717, + 61723, 61729, 61751, 61757, 61781, 61813, 61819, 61837, 61843, 61861, + 61871, 61879, 61909, 61927, 61933, 61949, 61961, 61967, 61979, 61981, + 61987, 61991, 62003, 62011, 62017, 62039, 62047, 62053, 62057, 62071, + 62081, 62099, 62119, 62129, 62131, 62137, 62141, 62143, 62171, 62189, + 62191, 62201, 62207, 62213, 62219, 62233, 62273, 62297, 62299, 62303, + 62311, 62323, 62327, 62347, 62351, 62383, 62401, 62417, 62423, 62459, + 62467, 62473, 62477, 62483, 62497, 62501, 62507, 62533, 62539, 62549, + 62563, 62581, 62591, 62597, 62603, 62617, 62627, 62633, 62639, 62653, + 62659, 62683, 62687, 62701, 62723, 62731, 62743, 62753, 62761, 62773, + 62791, 62801, 62819, 62827, 62851, 62861, 62869, 62873, 62897, 62903, + 62921, 62927, 62929, 62939, 62969, 62971, 62981, 62983, 62987, 62989, + 63029, 63031, 63059, 63067, 63073, 63079, 63097, 63103, 63113, 63127, + 63131, 63149, 63179, 63197, 63199, 63211, 63241, 63247, 63277, 63281, + 63299, 63311, 63313, 63317, 63331, 63337, 63347, 63353, 63361, 63367, + 63377, 63389, 63391, 63397, 63409, 63419, 63421, 63439, 63443, 63463, + 63467, 63473, 63487, 63493, 63499, 63521, 63527, 63533, 63541, 63559, + 63577, 63587, 63589, 63599, 63601, 63607, 63611, 63617, 63629, 63647, + 63649, 63659, 63667, 63671, 63689, 63691, 63697, 63703, 63709, 63719, + 63727, 63737, 63743, 63761, 63773, 63781, 63793, 63799, 63803, 63809, + 63823, 63839, 63841, 63853, 63857, 63863, 63901, 63907, 63913, 63929, + 63949, 63977, 63997, 64007, 64013, 64019, 64033, 64037, 64063, 64067, + 64081, 64091, 64109, 64123, 64151, 64153, 64157, 64171, 64187, 64189, + 64217, 64223, 64231, 64237, 64271, 64279, 64283, 64301, 64303, 64319, + 64327, 
64333, 64373, 64381, 64399, 64403, 64433, 64439, 64451, 64453, + 64483, 64489, 64499, 64513, 64553, 64567, 64577, 64579, 64591, 64601, + 64609, 64613, 64621, 64627, 64633, 64661, 64663, 64667, 64679, 64693, + 64709, 64717, 64747, 64763, 64781, 64783, 64793, 64811, 64817, 64849, + 64853, 64871, 64877, 64879, 64891, 64901, 64919, 64921, 64927, 64937, + 64951, 64969, 64997, 65003, 65011, 65027, 65029, 65033, 65053, 65063, + 65071, 65089, 65099, 65101, 65111, 65119, 65123, 65129, 65141, 65147, + 65167, 65171, 65173, 65179, 65183, 65203, 65213, 65239, 65257, 65267, + 65269, 65287, 65293, 65309, 65323, 65327, 65353, 65357, 65371, 65381, + 65393, 65407, 65413, 65419, 65423, 65437, 65447, 65449, 65479, 65497, + 65519, 65521, 65537, 65539, 65543, 65551, 65557, 65563, 65579, 65581, + 65587, 65599, 65609, 65617, 65629, 65633, 65647, 65651, 65657, 65677, + 65687, 65699, 65701, 65707, 65713, 65717, 65719, 65729, 65731, 65761, + 65777, 65789, 65809, 65827, 65831, 65837, 65839, 65843, 65851, 65867, + 65881, 65899, 65921, 65927, 65929, 65951, 65957, 65963, 65981, 65983, + 65993, 66029, 66037, 66041, 66047, 66067, 66071, 66083, 66089, 66103, + 66107, 66109, 66137, 66161, 66169, 66173, 66179, 66191, 66221, 66239, + 66271, 66293, 66301, 66337, 66343, 66347, 66359, 66361, 66373, 66377, + 66383, 66403, 66413, 66431, 66449, 66457, 66463, 66467, 66491, 66499, + 66509, 66523, 66529, 66533, 66541, 66553, 66569, 66571, 66587, 66593, + 66601, 66617, 66629, 66643, 66653, 66683, 66697, 66701, 66713, 66721, + 66733, 66739, 66749, 66751, 66763, 66791, 66797, 66809, 66821, 66841, + 66851, 66853, 66863, 66877, 66883, 66889, 66919, 66923, 66931, 66943, + 66947, 66949, 66959, 66973, 66977, 67003, 67021, 67033, 67043, 67049, + 67057, 67061, 67073, 67079, 67103, 67121, 67129, 67139, 67141, 67153, + 67157, 67169, 67181, 67187, 67189, 67211, 67213, 67217, 67219, 67231, + 67247, 67261, 67271, 67273, 67289, 67307, 67339, 67343, 67349, 67369, + 67391, 67399, 67409, 67411, 67421, 67427, 67429, 67433, 67447, 
67453, + 67477, 67481, 67489, 67493, 67499, 67511, 67523, 67531, 67537, 67547, + 67559, 67567, 67577, 67579, 67589, 67601, 67607, 67619, 67631, 67651, + 67679, 67699, 67709, 67723, 67733, 67741, 67751, 67757, 67759, 67763, + 67777, 67783, 67789, 67801, 67807, 67819, 67829, 67843, 67853, 67867, + 67883, 67891, 67901, 67927, 67931, 67933, 67939, 67943, 67957, 67961, + 67967, 67979, 67987, 67993, 68023, 68041, 68053, 68059, 68071, 68087, + 68099, 68111, 68113, 68141, 68147, 68161, 68171, 68207, 68209, 68213, + 68219, 68227, 68239, 68261, 68279, 68281, 68311, 68329, 68351, 68371, + 68389, 68399, 68437, 68443, 68447, 68449, 68473, 68477, 68483, 68489, + 68491, 68501, 68507, 68521, 68531, 68539, 68543, 68567, 68581, 68597, + 68611, 68633, 68639, 68659, 68669, 68683, 68687, 68699, 68711, 68713, + 68729, 68737, 68743, 68749, 68767, 68771, 68777, 68791, 68813, 68819, + 68821, 68863, 68879, 68881, 68891, 68897, 68899, 68903, 68909, 68917, + 68927, 68947, 68963, 68993, 69001, 69011, 69019, 69029, 69031, 69061, + 69067, 69073, 69109, 69119, 69127, 69143, 69149, 69151, 69163, 69191, + 69193, 69197, 69203, 69221, 69233, 69239, 69247, 69257, 69259, 69263, + 69313, 69317, 69337, 69341, 69371, 69379, 69383, 69389, 69401, 69403, + 69427, 69431, 69439, 69457, 69463, 69467, 69473, 69481, 69491, 69493, + 69497, 69499, 69539, 69557, 69593, 69623, 69653, 69661, 69677, 69691, + 69697, 69709, 69737, 69739, 69761, 69763, 69767, 69779, 69809, 69821, + 69827, 69829, 69833, 69847, 69857, 69859, 69877, 69899, 69911, 69929, + 69931, 69941, 69959, 69991, 69997, 70001, 70003, 70009, 70019, 70039, + 70051, 70061, 70067, 70079, 70099, 70111, 70117, 70121, 70123, 70139, + 70141, 70157, 70163, 70177, 70181, 70183, 70199, 70201, 70207, 70223, + 70229, 70237, 70241, 70249, 70271, 70289, 70297, 70309, 70313, 70321, + 70327, 70351, 70373, 70379, 70381, 70393, 70423, 70429, 70439, 70451, + 70457, 70459, 70481, 70487, 70489, 70501, 70507, 70529, 70537, 70549, + 70571, 70573, 70583, 70589, 70607, 70619, 
70621, 70627, 70639, 70657, + 70663, 70667, 70687, 70709, 70717, 70729, 70753, 70769, 70783, 70793, + 70823, 70841, 70843, 70849, 70853, 70867, 70877, 70879, 70891, 70901, + 70913, 70919, 70921, 70937, 70949, 70951, 70957, 70969, 70979, 70981, + 70991, 70997, 70999, 71011, 71023, 71039, 71059, 71069, 71081, 71089, + 71119, 71129, 71143, 71147, 71153, 71161, 71167, 71171, 71191, 71209, + 71233, 71237, 71249, 71257, 71261, 71263, 71287, 71293, 71317, 71327, + 71329, 71333, 71339, 71341, 71347, 71353, 71359, 71363, 71387, 71389, + 71399, 71411, 71413, 71419, 71429, 71437, 71443, 71453, 71471, 71473, + 71479, 71483, 71503, 71527, 71537, 71549, 71551, 71563, 71569, 71593, + 71597, 71633, 71647, 71663, 71671, 71693, 71699, 71707, 71711, 71713, + 71719, 71741, 71761, 71777, 71789, 71807, 71809, 71821, 71837, 71843, + 71849, 71861, 71867, 71879, 71881, 71887, 71899, 71909, 71917, 71933, + 71941, 71947, 71963, 71971, 71983, 71987, 71993, 71999, 72019, 72031, + 72043, 72047, 72053, 72073, 72077, 72089, 72091, 72101, 72103, 72109, + 72139, 72161, 72167, 72169, 72173, 72211, 72221, 72223, 72227, 72229, + 72251, 72253, 72269, 72271, 72277, 72287, 72307, 72313, 72337, 72341, + 72353, 72367, 72379, 72383, 72421, 72431, 72461, 72467, 72469, 72481, + 72493, 72497, 72503, 72533, 72547, 72551, 72559, 72577, 72613, 72617, + 72623, 72643, 72647, 72649, 72661, 72671, 72673, 72679, 72689, 72701, + 72707, 72719, 72727, 72733, 72739, 72763, 72767, 72797, 72817, 72823, + 72859, 72869, 72871, 72883, 72889, 72893, 72901, 72907, 72911, 72923, + 72931, 72937, 72949, 72953, 72959, 72973, 72977, 72997, 73009, 73013, + 73019, 73037, 73039, 73043, 73061, 73063, 73079, 73091, 73121, 73127, + 73133, 73141, 73181, 73189, 73237, 73243, 73259, 73277, 73291, 73303, + 73309, 73327, 73331, 73351, 73361, 73363, 73369, 73379, 73387, 73417, + 73421, 73433, 73453, 73459, 73471, 73477, 73483, 73517, 73523, 73529, + 73547, 73553, 73561, 73571, 73583, 73589, 73597, 73607, 73609, 73613, + 73637, 73643, 73651, 
73673, 73679, 73681, 73693, 73699, 73709, 73721, + 73727, 73751, 73757, 73771, 73783, 73819, 73823, 73847, 73849, 73859, + 73867, 73877, 73883, 73897, 73907, 73939, 73943, 73951, 73961, 73973, + 73999, 74017, 74021, 74027, 74047, 74051, 74071, 74077, 74093, 74099, + 74101, 74131, 74143, 74149, 74159, 74161, 74167, 74177, 74189, 74197, + 74201, 74203, 74209, 74219, 74231, 74257, 74279, 74287, 74293, 74297, + 74311, 74317, 74323, 74353, 74357, 74363, 74377, 74381, 74383, 74411, + 74413, 74419, 74441, 74449, 74453, 74471, 74489, 74507, 74509, 74521, + 74527, 74531, 74551, 74561, 74567, 74573, 74587, 74597, 74609, 74611, + 74623, 74653, 74687, 74699, 74707, 74713, 74717, 74719, 74729, 74731, + 74747, 74759, 74761, 74771, 74779, 74797, 74821, 74827, 74831, 74843, + 74857, 74861, 74869, 74873, 74887, 74891, 74897, 74903, 74923, 74929, + 74933, 74941, 74959, 75011, 75013, 75017, 75029, 75037, 75041, 75079, + 75083, 75109, 75133, 75149, 75161, 75167, 75169, 75181, 75193, 75209, + 75211, 75217, 75223, 75227, 75239, 75253, 75269, 75277, 75289, 75307, + 75323, 75329, 75337, 75347, 75353, 75367, 75377, 75389, 75391, 75401, + 75403, 75407, 75431, 75437, 75479, 75503, 75511, 75521, 75527, 75533, + 75539, 75541, 75553, 75557, 75571, 75577, 75583, 75611, 75617, 75619, + 75629, 75641, 75653, 75659, 75679, 75683, 75689, 75703, 75707, 75709, + 75721, 75731, 75743, 75767, 75773, 75781, 75787, 75793, 75797, 75821, + 75833, 75853, 75869, 75883, 75913, 75931, 75937, 75941, 75967, 75979, + 75983, 75989, 75991, 75997, 76001, 76003, 76031, 76039, 76079, 76081, + 76091, 76099, 76103, 76123, 76129, 76147, 76157, 76159, 76163, 76207, + 76213, 76231, 76243, 76249, 76253, 76259, 76261, 76283, 76289, 76303, + 76333, 76343, 76367, 76369, 76379, 76387, 76403, 76421, 76423, 76441, + 76463, 76471, 76481, 76487, 76493, 76507, 76511, 76519, 76537, 76541, + 76543, 76561, 76579, 76597, 76603, 76607, 76631, 76649, 76651, 76667, + 76673, 76679, 76697, 76717, 76733, 76753, 76757, 76771, 76777, 76781, + 
76801, 76819, 76829, 76831, 76837, 76847, 76871, 76873, 76883, 76907, + 76913, 76919, 76943, 76949, 76961, 76963, 76991, 77003, 77017, 77023, + 77029, 77041, 77047, 77069, 77081, 77093, 77101, 77137, 77141, 77153, + 77167, 77171, 77191, 77201, 77213, 77237, 77239, 77243, 77249, 77261, + 77263, 77267, 77269, 77279, 77291, 77317, 77323, 77339, 77347, 77351, + 77359, 77369, 77377, 77383, 77417, 77419, 77431, 77447, 77471, 77477, + 77479, 77489, 77491, 77509, 77513, 77521, 77527, 77543, 77549, 77551, + 77557, 77563, 77569, 77573, 77587, 77591, 77611, 77617, 77621, 77641, + 77647, 77659, 77681, 77687, 77689, 77699, 77711, 77713, 77719, 77723, + 77731, 77743, 77747, 77761, 77773, 77783, 77797, 77801, 77813, 77839, + 77849, 77863, 77867, 77893, 77899, 77929, 77933, 77951, 77969, 77977, + 77983, 77999, 78007, 78017, 78031, 78041, 78049, 78059, 78079, 78101, + 78121, 78137, 78139, 78157, 78163, 78167, 78173, 78179, 78191, 78193, + 78203, 78229, 78233, 78241, 78259, 78277, 78283, 78301, 78307, 78311, + 78317, 78341, 78347, 78367, 78401, 78427, 78437, 78439, 78467, 78479, + 78487, 78497, 78509, 78511, 78517, 78539, 78541, 78553, 78569, 78571, + 78577, 78583, 78593, 78607, 78623, 78643, 78649, 78653, 78691, 78697, + 78707, 78713, 78721, 78737, 78779, 78781, 78787, 78791, 78797, 78803, + 78809, 78823, 78839, 78853, 78857, 78877, 78887, 78889, 78893, 78901, + 78919, 78929, 78941, 78977, 78979, 78989, 79031, 79039, 79043, 79063, + 79087, 79103, 79111, 79133, 79139, 79147, 79151, 79153, 79159, 79181, + 79187, 79193, 79201, 79229, 79231, 79241, 79259, 79273, 79279, 79283, + 79301, 79309, 79319, 79333, 79337, 79349, 79357, 79367, 79379, 79393, + 79397, 79399, 79411, 79423, 79427, 79433, 79451, 79481, 79493, 79531, + 79537, 79549, 79559, 79561, 79579, 79589, 79601, 79609, 79613, 79621, + 79627, 79631, 79633, 79657, 79669, 79687, 79691, 79693, 79697, 79699, + 79757, 79769, 79777, 79801, 79811, 79813, 79817, 79823, 79829, 79841, + 79843, 79847, 79861, 79867, 79873, 79889, 79901, 79903, 
79907, 79939, + 79943, 79967, 79973, 79979, 79987, 79997, 79999, 80021, 80039, 80051, + 80071, 80077, 80107, 80111, 80141, 80147, 80149, 80153, 80167, 80173, + 80177, 80191, 80207, 80209, 80221, 80231, 80233, 80239, 80251, 80263, + 80273, 80279, 80287, 80309, 80317, 80329, 80341, 80347, 80363, 80369, + 80387, 80407, 80429, 80447, 80449, 80471, 80473, 80489, 80491, 80513, + 80527, 80537, 80557, 80567, 80599, 80603, 80611, 80621, 80627, 80629, + 80651, 80657, 80669, 80671, 80677, 80681, 80683, 80687, 80701, 80713, + 80737, 80747, 80749, 80761, 80777, 80779, 80783, 80789, 80803, 80809, + 80819, 80831, 80833, 80849, 80863, 80897, 80909, 80911, 80917, 80923, + 80929, 80933, 80953, 80963, 80989, 81001, 81013, 81017, 81019, 81023, + 81031, 81041, 81043, 81047, 81049, 81071, 81077, 81083, 81097, 81101, + 81119, 81131, 81157, 81163, 81173, 81181, 81197, 81199, 81203, 81223, + 81233, 81239, 81281, 81283, 81293, 81299, 81307, 81331, 81343, 81349, + 81353, 81359, 81371, 81373, 81401, 81409, 81421, 81439, 81457, 81463, + 81509, 81517, 81527, 81533, 81547, 81551, 81553, 81559, 81563, 81569, + 81611, 81619, 81629, 81637, 81647, 81649, 81667, 81671, 81677, 81689, + 81701, 81703, 81707, 81727, 81737, 81749, 81761, 81769, 81773, 81799, + 81817, 81839, 81847, 81853, 81869, 81883, 81899, 81901, 81919, 81929, + 81931, 81937, 81943, 81953, 81967, 81971, 81973, 82003, 82007, 82009, + 82013, 82021, 82031, 82037, 82039, 82051, 82067, 82073, 82129, 82139, + 82141, 82153, 82163, 82171, 82183, 82189, 82193, 82207, 82217, 82219, + 82223, 82231, 82237, 82241, 82261, 82267, 82279, 82301, 82307, 82339, + 82349, 82351, 82361, 82373, 82387, 82393, 82421, 82457, 82463, 82469, + 82471, 82483, 82487, 82493, 82499, 82507, 82529, 82531, 82549, 82559, + 82561, 82567, 82571, 82591, 82601, 82609, 82613, 82619, 82633, 82651, + 82657, 82699, 82721, 82723, 82727, 82729, 82757, 82759, 82763, 82781, + 82787, 82793, 82799, 82811, 82813, 82837, 82847, 82883, 82889, 82891, + 82903, 82913, 82939, 82963, 82981, 
82997, 83003, 83009, 83023, 83047, + 83059, 83063, 83071, 83077, 83089, 83093, 83101, 83117, 83137, 83177, + 83203, 83207, 83219, 83221, 83227, 83231, 83233, 83243, 83257, 83267, + 83269, 83273, 83299, 83311, 83339, 83341, 83357, 83383, 83389, 83399, + 83401, 83407, 83417, 83423, 83431, 83437, 83443, 83449, 83459, 83471, + 83477, 83497, 83537, 83557, 83561, 83563, 83579, 83591, 83597, 83609, + 83617, 83621, 83639, 83641, 83653, 83663, 83689, 83701, 83717, 83719, + 83737, 83761, 83773, 83777, 83791, 83813, 83833, 83843, 83857, 83869, + 83873, 83891, 83903, 83911, 83921, 83933, 83939, 83969, 83983, 83987, + 84011, 84017, 84047, 84053, 84059, 84061, 84067, 84089, 84121, 84127, + 84131, 84137, 84143, 84163, 84179, 84181, 84191, 84199, 84211, 84221, + 84223, 84229, 84239, 84247, 84263, 84299, 84307, 84313, 84317, 84319, + 84347, 84349, 84377, 84389, 84391, 84401, 84407, 84421, 84431, 84437, + 84443, 84449, 84457, 84463, 84467, 84481, 84499, 84503, 84509, 84521, + 84523, 84533, 84551, 84559, 84589, 84629, 84631, 84649, 84653, 84659, + 84673, 84691, 84697, 84701, 84713, 84719, 84731, 84737, 84751, 84761, + 84787, 84793, 84809, 84811, 84827, 84857, 84859, 84869, 84871, 84913, + 84919, 84947, 84961, 84967, 84977, 84979, 84991, 85009, 85021, 85027, + 85037, 85049, 85061, 85081, 85087, 85091, 85093, 85103, 85109, 85121, + 85133, 85147, 85159, 85193, 85199, 85201, 85213, 85223, 85229, 85237, + 85243, 85247, 85259, 85297, 85303, 85313, 85331, 85333, 85361, 85363, + 85369, 85381, 85411, 85427, 85429, 85439, 85447, 85451, 85453, 85469, + 85487, 85513, 85517, 85523, 85531, 85549, 85571, 85577, 85597, 85601, + 85607, 85619, 85621, 85627, 85639, 85643, 85661, 85667, 85669, 85691, + 85703, 85711, 85717, 85733, 85751, 85781, 85793, 85817, 85819, 85829, + 85831, 85837, 85843, 85847, 85853, 85889, 85903, 85909, 85931, 85933, + 85991, 85999, 86011, 86017, 86027, 86029, 86069, 86077, 86083, 86111, + 86113, 86117, 86131, 86137, 86143, 86161, 86171, 86179, 86183, 86197, + 86201, 86209, 
86239, 86243, 86249, 86257, 86263, 86269, 86287, 86291, + 86293, 86297, 86311, 86323, 86341, 86351, 86353, 86357, 86369, 86371, + 86381, 86389, 86399, 86413, 86423, 86441, 86453, 86461, 86467, 86477, + 86491, 86501, 86509, 86531, 86533, 86539, 86561, 86573, 86579, 86587, + 86599, 86627, 86629, 86677, 86689, 86693, 86711, 86719, 86729, 86743, + 86753, 86767, 86771, 86783, 86813, 86837, 86843, 86851, 86857, 86861, + 86869, 86923, 86927, 86929, 86939, 86951, 86959, 86969, 86981, 86993, + 87011, 87013, 87037, 87041, 87049, 87071, 87083, 87103, 87107, 87119, + 87121, 87133, 87149, 87151, 87179, 87181, 87187, 87211, 87221, 87223, + 87251, 87253, 87257, 87277, 87281, 87293, 87299, 87313, 87317, 87323, + 87337, 87359, 87383, 87403, 87407, 87421, 87427, 87433, 87443, 87473, + 87481, 87491, 87509, 87511, 87517, 87523, 87539, 87541, 87547, 87553, + 87557, 87559, 87583, 87587, 87589, 87613, 87623, 87629, 87631, 87641, + 87643, 87649, 87671, 87679, 87683, 87691, 87697, 87701, 87719, 87721, + 87739, 87743, 87751, 87767, 87793, 87797, 87803, 87811, 87833, 87853, + 87869, 87877, 87881, 87887, 87911, 87917, 87931, 87943, 87959, 87961, + 87973, 87977, 87991, 88001, 88003, 88007, 88019, 88037, 88069, 88079, + 88093, 88117, 88129, 88169, 88177, 88211, 88223, 88237, 88241, 88259, + 88261, 88289, 88301, 88321, 88327, 88337, 88339, 88379, 88397, 88411, + 88423, 88427, 88463, 88469, 88471, 88493, 88499, 88513, 88523, 88547, + 88589, 88591, 88607, 88609, 88643, 88651, 88657, 88661, 88663, 88667, + 88681, 88721, 88729, 88741, 88747, 88771, 88789, 88793, 88799, 88801, + 88807, 88811, 88813, 88817, 88819, 88843, 88853, 88861, 88867, 88873, + 88883, 88897, 88903, 88919, 88937, 88951, 88969, 88993, 88997, 89003, + 89009, 89017, 89021, 89041, 89051, 89057, 89069, 89071, 89083, 89087, + 89101, 89107, 89113, 89119, 89123, 89137, 89153, 89189, 89203, 89209, + 89213, 89227, 89231, 89237, 89261, 89269, 89273, 89293, 89303, 89317, + 89329, 89363, 89371, 89381, 89387, 89393, 89399, 89413, 89417, 89431, 
+ 89443, 89449, 89459, 89477, 89491, 89501, 89513, 89519, 89521, 89527, + 89533, 89561, 89563, 89567, 89591, 89597, 89599, 89603, 89611, 89627, + 89633, 89653, 89657, 89659, 89669, 89671, 89681, 89689, 89753, 89759, + 89767, 89779, 89783, 89797, 89809, 89819, 89821, 89833, 89839, 89849, + 89867, 89891, 89897, 89899, 89909, 89917, 89923, 89939, 89959, 89963, + 89977, 89983, 89989, 90001, 90007, 90011, 90017, 90019, 90023, 90031, + 90053, 90059, 90067, 90071, 90073, 90089, 90107, 90121, 90127, 90149, + 90163, 90173, 90187, 90191, 90197, 90199, 90203, 90217, 90227, 90239, + 90247, 90263, 90271, 90281, 90289, 90313, 90353, 90359, 90371, 90373, + 90379, 90397, 90401, 90403, 90407, 90437, 90439, 90469, 90473, 90481, + 90499, 90511, 90523, 90527, 90529, 90533, 90547, 90583, 90599, 90617, + 90619, 90631, 90641, 90647, 90659, 90677, 90679, 90697, 90703, 90709, + 90731, 90749, 90787, 90793, 90803, 90821, 90823, 90833, 90841, 90847, + 90863, 90887, 90901, 90907, 90911, 90917, 90931, 90947, 90971, 90977, + 90989, 90997, 91009, 91019, 91033, 91079, 91081, 91097, 91099, 91121, + 91127, 91129, 91139, 91141, 91151, 91153, 91159, 91163, 91183, 91193, + 91199, 91229, 91237, 91243, 91249, 91253, 91283, 91291, 91297, 91303, + 91309, 91331, 91367, 91369, 91373, 91381, 91387, 91393, 91397, 91411, + 91423, 91433, 91453, 91457, 91459, 91463, 91493, 91499, 91513, 91529, + 91541, 91571, 91573, 91577, 91583, 91591, 91621, 91631, 91639, 91673, + 91691, 91703, 91711, 91733, 91753, 91757, 91771, 91781, 91801, 91807, + 91811, 91813, 91823, 91837, 91841, 91867, 91873, 91909, 91921, 91939, + 91943, 91951, 91957, 91961, 91967, 91969, 91997, 92003, 92009, 92033, + 92041, 92051, 92077, 92083, 92107, 92111, 92119, 92143, 92153, 92173, + 92177, 92179, 92189, 92203, 92219, 92221, 92227, 92233, 92237, 92243, + 92251, 92269, 92297, 92311, 92317, 92333, 92347, 92353, 92357, 92363, + 92369, 92377, 92381, 92383, 92387, 92399, 92401, 92413, 92419, 92431, + 92459, 92461, 92467, 92479, 92489, 92503, 92507, 
92551, 92557, 92567, + 92569, 92581, 92593, 92623, 92627, 92639, 92641, 92647, 92657, 92669, + 92671, 92681, 92683, 92693, 92699, 92707, 92717, 92723, 92737, 92753, + 92761, 92767, 92779, 92789, 92791, 92801, 92809, 92821, 92831, 92849, + 92857, 92861, 92863, 92867, 92893, 92899, 92921, 92927, 92941, 92951, + 92957, 92959, 92987, 92993, 93001, 93047, 93053, 93059, 93077, 93083, + 93089, 93097, 93103, 93113, 93131, 93133, 93139, 93151, 93169, 93179, + 93187, 93199, 93229, 93239, 93241, 93251, 93253, 93257, 93263, 93281, + 93283, 93287, 93307, 93319, 93323, 93329, 93337, 93371, 93377, 93383, + 93407, 93419, 93427, 93463, 93479, 93481, 93487, 93491, 93493, 93497, + 93503, 93523, 93529, 93553, 93557, 93559, 93563, 93581, 93601, 93607, + 93629, 93637, 93683, 93701, 93703, 93719, 93739, 93761, 93763, 93787, + 93809, 93811, 93827, 93851, 93871, 93887, 93889, 93893, 93901, 93911, + 93913, 93923, 93937, 93941, 93949, 93967, 93971, 93979, 93983, 93997, + 94007, 94009, 94033, 94049, 94057, 94063, 94079, 94099, 94109, 94111, + 94117, 94121, 94151, 94153, 94169, 94201, 94207, 94219, 94229, 94253, + 94261, 94273, 94291, 94307, 94309, 94321, 94327, 94331, 94343, 94349, + 94351, 94379, 94397, 94399, 94421, 94427, 94433, 94439, 94441, 94447, + 94463, 94477, 94483, 94513, 94529, 94531, 94541, 94543, 94547, 94559, + 94561, 94573, 94583, 94597, 94603, 94613, 94621, 94649, 94651, 94687, + 94693, 94709, 94723, 94727, 94747, 94771, 94777, 94781, 94789, 94793, + 94811, 94819, 94823, 94837, 94841, 94847, 94849, 94873, 94889, 94903, + 94907, 94933, 94949, 94951, 94961, 94993, 94999, 95003, 95009, 95021, + 95027, 95063, 95071, 95083, 95087, 95089, 95093, 95101, 95107, 95111, + 95131, 95143, 95153, 95177, 95189, 95191, 95203, 95213, 95219, 95231, + 95233, 95239, 95257, 95261, 95267, 95273, 95279, 95287, 95311, 95317, + 95327, 95339, 95369, 95383, 95393, 95401, 95413, 95419, 95429, 95441, + 95443, 95461, 95467, 95471, 95479, 95483, 95507, 95527, 95531, 95539, + 95549, 95561, 95569, 95581, 
95597, 95603, 95617, 95621, 95629, 95633, + 95651, 95701, 95707, 95713, 95717, 95723, 95731, 95737, 95747, 95773, + 95783, 95789, 95791, 95801, 95803, 95813, 95819, 95857, 95869, 95873, + 95881, 95891, 95911, 95917, 95923, 95929, 95947, 95957, 95959, 95971, + 95987, 95989, 96001, 96013, 96017, 96043, 96053, 96059, 96079, 96097, + 96137, 96149, 96157, 96167, 96179, 96181, 96199, 96211, 96221, 96223, + 96233, 96259, 96263, 96269, 96281, 96289, 96293, 96323, 96329, 96331, + 96337, 96353, 96377, 96401, 96419, 96431, 96443, 96451, 96457, 96461, + 96469, 96479, 96487, 96493, 96497, 96517, 96527, 96553, 96557, 96581, + 96587, 96589, 96601, 96643, 96661, 96667, 96671, 96697, 96703, 96731, + 96737, 96739, 96749, 96757, 96763, 96769, 96779, 96787, 96797, 96799, + 96821, 96823, 96827, 96847, 96851, 96857, 96893, 96907, 96911, 96931, + 96953, 96959, 96973, 96979, 96989, 96997, 97001, 97003, 97007, 97021, + 97039, 97073, 97081, 97103, 97117, 97127, 97151, 97157, 97159, 97169, + 97171, 97177, 97187, 97213, 97231, 97241, 97259, 97283, 97301, 97303, + 97327, 97367, 97369, 97373, 97379, 97381, 97387, 97397, 97423, 97429, + 97441, 97453, 97459, 97463, 97499, 97501, 97511, 97523, 97547, 97549, + 97553, 97561, 97571, 97577, 97579, 97583, 97607, 97609, 97613, 97649, + 97651, 97673, 97687, 97711, 97729, 97771, 97777, 97787, 97789, 97813, + 97829, 97841, 97843, 97847, 97849, 97859, 97861, 97871, 97879, 97883, + 97919, 97927, 97931, 97943, 97961, 97967, 97973, 97987, 98009, 98011, + 98017, 98041, 98047, 98057, 98081, 98101, 98123, 98129, 98143, 98179, + 98207, 98213, 98221, 98227, 98251, 98257, 98269, 98297, 98299, 98317, + 98321, 98323, 98327, 98347, 98369, 98377, 98387, 98389, 98407, 98411, + 98419, 98429, 98443, 98453, 98459, 98467, 98473, 98479, 98491, 98507, + 98519, 98533, 98543, 98561, 98563, 98573, 98597, 98621, 98627, 98639, + 98641, 98663, 98669, 98689, 98711, 98713, 98717, 98729, 98731, 98737, + 98773, 98779, 98801, 98807, 98809, 98837, 98849, 98867, 98869, 98873, + 98887, 
98893, 98897, 98899, 98909, 98911, 98927, 98929, 98939, 98947, + 98953, 98963, 98981, 98993, 98999, 99013, 99017, 99023, 99041, 99053, + 99079, 99083, 99089, 99103, 99109, 99119, 99131, 99133, 99137, 99139, + 99149, 99173, 99181, 99191, 99223, 99233, 99241, 99251, 99257, 99259, + 99277, 99289, 99317, 99347, 99349, 99367, 99371, 99377, 99391, 99397, + 99401, 99409, 99431, 99439, 99469, 99487, 99497, 99523, 99527, 99529, + 99551, 99559, 99563, 99571, 99577, 99581, 99607, 99611, 99623, 99643, + 99661, 99667, 99679, 99689, 99707, 99709, 99713, 99719, 99721, 99733, + 99761, 99767, 99787, 99793, 99809, 99817, 99823, 99829, 99833, 99839, + 99859, 99871, 99877, 99881, 99901, 99907, 99923, 99929, 99961, 99971, + 99989, 99991, 100003, 100019, 100043, 100049, 100057, 100069, 100103, 100109, +100129, 100151, 100153, 100169, 100183, 100189, 100193, 100207, 100213, 100237, +100267, 100271, 100279, 100291, 100297, 100313, 100333, 100343, 100357, 100361, +100363, 100379, 100391, 100393, 100403, 100411, 100417, 100447, 100459, 100469, +100483, 100493, 100501, 100511, 100517, 100519, 100523, 100537, 100547, 100549, +100559, 100591, 100609, 100613, 100621, 100649, 100669, 100673, 100693, 100699, +100703, 100733, 100741, 100747, 100769, 100787, 100799, 100801, 100811, 100823, +100829, 100847, 100853, 100907, 100913, 100927, 100931, 100937, 100943, 100957, +100981, 100987, 100999, 101009, 101021, 101027, 101051, 101063, 101081, 101089, +101107, 101111, 101113, 101117, 101119, 101141, 101149, 101159, 101161, 101173, +101183, 101197, 101203, 101207, 101209, 101221, 101267, 101273, 101279, 101281, +101287, 101293, 101323, 101333, 101341, 101347, 101359, 101363, 101377, 101383, +101399, 101411, 101419, 101429, 101449, 101467, 101477, 101483, 101489, 101501, +101503, 101513, 101527, 101531, 101533, 101537, 101561, 101573, 101581, 101599, +101603, 101611, 101627, 101641, 101653, 101663, 101681, 101693, 101701, 101719, +101723, 101737, 101741, 101747, 101749, 101771, 101789, 101797, 101807, 
101833, +101837, 101839, 101863, 101869, 101873, 101879, 101891, 101917, 101921, 101929, +101939, 101957, 101963, 101977, 101987, 101999, 102001, 102013, 102019, 102023, +102031, 102043, 102059, 102061, 102071, 102077, 102079, 102101, 102103, 102107, +102121, 102139, 102149, 102161, 102181, 102191, 102197, 102199, 102203, 102217, +102229, 102233, 102241, 102251, 102253, 102259, 102293, 102299, 102301, 102317, +102329, 102337, 102359, 102367, 102397, 102407, 102409, 102433, 102437, 102451, +102461, 102481, 102497, 102499, 102503, 102523, 102533, 102539, 102547, 102551, +102559, 102563, 102587, 102593, 102607, 102611, 102643, 102647, 102653, 102667, +102673, 102677, 102679, 102701, 102761, 102763, 102769, 102793, 102797, 102811, +102829, 102841, 102859, 102871, 102877, 102881, 102911, 102913, 102929, 102931, +102953, 102967, 102983, 103001, 103007, 103043, 103049, 103067, 103069, 103079, +103087, 103091, 103093, 103099, 103123, 103141, 103171, 103177, 103183, 103217, +103231, 103237, 103289, 103291, 103307, 103319, 103333, 103349, 103357, 103387, +103391, 103393, 103399, 103409, 103421, 103423, 103451, 103457, 103471, 103483, +103511, 103529, 103549, 103553, 103561, 103567, 103573, 103577, 103583, 103591, +103613, 103619, 103643, 103651, 103657, 103669, 103681, 103687, 103699, 103703, +103723, 103769, 103787, 103801, 103811, 103813, 103837, 103841, 103843, 103867, +103889, 103903, 103913, 103919, 103951, 103963, 103967, 103969, 103979, 103981, +103991, 103993, 103997, 104003, 104009, 104021, 104033, 104047, 104053, 104059, +104087, 104089, 104107, 104113, 104119, 104123, 104147, 104149, 104161, 104173, +104179, 104183, 104207, 104231, 104233, 104239, 104243, 104281, 104287, 104297, +104309, 104311, 104323, 104327, 104347, 104369, 104381, 104383, 104393, 104399, +104417, 104459, 104471, 104473, 104479, 104491, 104513, 104527, 104537, 104543, +104549, 104551, 104561, 104579, 104593, 104597, 104623, 104639, 104651, 104659, +104677, 104681, 104683, 104693, 104701, 
104707, 104711, 104717, 104723, 104729, +) diff --git a/env/Lib/site-packages/Crypto/Util/number.pyi b/env/Lib/site-packages/Crypto/Util/number.pyi new file mode 100644 index 0000000..f8680bf --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/number.pyi @@ -0,0 +1,19 @@ +from typing import List, Optional, Callable + + +def ceil_div(n: int, d: int) -> int: ... +def size (N: int) -> int: ... +def getRandomInteger(N: int, randfunc: Optional[Callable]=None) -> int: ... +def getRandomRange(a: int, b: int, randfunc: Optional[Callable]=None) -> int: ... +def getRandomNBitInteger(N: int, randfunc: Optional[Callable]=None) -> int: ... +def GCD(x: int,y: int) -> int: ... +def inverse(u: int, v: int) -> int: ... +def getPrime(N: int, randfunc: Optional[Callable]=None) -> int: ... +def getStrongPrime(N: int, e: Optional[int]=0, false_positive_prob: Optional[float]=1e-6, randfunc: Optional[Callable]=None) -> int: ... +def isPrime(N: int, false_positive_prob: Optional[float]=1e-6, randfunc: Optional[Callable]=None) -> bool: ... +def long_to_bytes(n: int, blocksize: Optional[int]=0) -> bytes: ... +def bytes_to_long(s: bytes) -> int: ... +def long2str(n: int, blocksize: Optional[int]=0) -> bytes: ... +def str2long(s: bytes) -> int: ... + +sieve_base: List[int] diff --git a/env/Lib/site-packages/Crypto/Util/py3compat.py b/env/Lib/site-packages/Crypto/Util/py3compat.py new file mode 100644 index 0000000..cf9bb92 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/py3compat.py @@ -0,0 +1,160 @@ +# -*- coding: utf-8 -*- +# +# Util/py3compat.py : Compatibility code for handling Py3k / Python 2.x +# +# Written in 2010 by Thorsten Behrens +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Compatibility code for handling string/bytes changes from Python 2.x to Py3k + +In Python 2.x, strings (of type ''str'') contain binary data, including encoded +Unicode text (e.g. UTF-8). The separate type ''unicode'' holds Unicode text. +Unicode literals are specified via the u'...' prefix. Indexing or slicing +either type always produces a string of the same type as the original. +Data read from a file is always of '''str'' type. + +In Python 3.x, strings (type ''str'') may only contain Unicode text. The u'...' +prefix and the ''unicode'' type are now redundant. A new type (called +''bytes'') has to be used for binary data (including any particular +''encoding'' of a string). The b'...' prefix allows one to specify a binary +literal. Indexing or slicing a string produces another string. Slicing a byte +string produces another byte string, but the indexing operation produces an +integer. Data read from a file is of '''str'' type if the file was opened in +text mode, or of ''bytes'' type otherwise. 
+ +Since PyCrypto aims at supporting both Python 2.x and 3.x, the following helper +functions are used to keep the rest of the library as independent as possible +from the actual Python version. + +In general, the code should always deal with binary strings, and use integers +instead of 1-byte character strings. + +b(s) + Take a text string literal (with no prefix or with u'...' prefix) and + make a byte string. +bchr(c) + Take an integer and make a 1-character byte string. +bord(c) + Take the result of indexing on a byte string and make an integer. +tobytes(s) + Take a text string, a byte string, or a sequence of character taken from + a byte string and make a byte string. +""" + +import sys +import abc + + +if sys.version_info[0] == 2: + def b(s): + return s + def bchr(s): + return chr(s) + def bstr(s): + return str(s) + def bord(s): + return ord(s) + def tobytes(s, encoding="latin-1"): + if isinstance(s, unicode): + return s.encode(encoding) + elif isinstance(s, str): + return s + elif isinstance(s, bytearray): + return bytes(s) + else: + return ''.join(s) + def tostr(bs): + return bs + def byte_string(s): + return isinstance(s, str) + + # In Pyton 2.x, StringIO is a stand-alone module + from StringIO import StringIO as BytesIO + + from sys import maxint + + iter_range = xrange + + def is_native_int(x): + return isinstance(x, (int, long)) + + def is_string(x): + return isinstance(x, basestring) + + ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()}) + + FileNotFoundError = IOError + +else: + def b(s): + return s.encode("latin-1") # utf-8 would cause some side-effects we don't want + def bchr(s): + return bytes([s]) + def bstr(s): + if isinstance(s,str): + return bytes(s,"latin-1") + else: + return bytes(s) + def bord(s): + return s + def tobytes(s, encoding="latin-1"): + if isinstance(s, bytes): + return s + elif isinstance(s, bytearray): + return bytes(s) + elif isinstance(s,str): + return s.encode(encoding) + else: + return bytes([s]) + def tostr(bs): + 
return bs.decode("latin-1") + def byte_string(s): + return isinstance(s, bytes) + + # In Python 3.x, StringIO is a sub-module of io + from io import BytesIO + from sys import maxsize as maxint + + iter_range = range + + def is_native_int(x): + return isinstance(x, int) + + def is_string(x): + return isinstance(x, str) + + from abc import ABC + + FileNotFoundError = FileNotFoundError + + +def _copy_bytes(start, end, seq): + """Return an immutable copy of a sequence (byte string, byte array, memoryview) + in a certain interval [start:seq]""" + + if isinstance(seq, memoryview): + return seq[start:end].tobytes() + elif isinstance(seq, bytearray): + return bytes(seq[start:end]) + else: + return seq[start:end] + +del sys +del abc diff --git a/env/Lib/site-packages/Crypto/Util/py3compat.pyi b/env/Lib/site-packages/Crypto/Util/py3compat.pyi new file mode 100644 index 0000000..52de77f --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/py3compat.pyi @@ -0,0 +1,31 @@ +from typing import Union, Any, Optional, IO + +Buffer = Union[bytes, bytearray, memoryview] + +import sys + +def b(s: str) -> bytes: ... +def bchr(s: int) -> bytes: ... +def bord(s: bytes) -> int: ... +def tobytes(s: Union[bytes, str]) -> bytes: ... +def tostr(b: bytes) -> str: ... +def bytestring(x: Any) -> bool: ... + +def is_native_int(s: Any) -> bool: ... +def is_string(x: Any) -> bool: ... + +def BytesIO(b: bytes) -> IO[bytes]: ... + +if sys.version_info[0] == 2: + from sys import maxint + iter_range = xrange + +else: + from sys import maxsize as maxint + iter_range = range + +class FileNotFoundError: + def __init__(self, err: int, msg: str, filename: str) -> None: + pass + +def _copy_bytes(start: Optional[int], end: Optional[int], seq: Buffer) -> bytes: ... 
diff --git a/env/Lib/site-packages/Crypto/Util/strxor.py b/env/Lib/site-packages/Crypto/Util/strxor.py new file mode 100644 index 0000000..2bff250 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/strxor.py @@ -0,0 +1,137 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, c_size_t, + create_string_buffer, get_raw_buffer, + c_uint8_ptr, is_writeable_buffer) + +_raw_strxor = load_pycryptodome_raw_lib("Crypto.Util._strxor", + """ + void strxor(const uint8_t *in1, + const uint8_t *in2, + uint8_t *out, size_t len); + void strxor_c(const uint8_t *in, + uint8_t c, + uint8_t *out, + size_t len); + """) + + +def strxor(term1, term2, output=None): + """XOR two byte strings. + + Args: + term1 (bytes/bytearray/memoryview): + The first term of the XOR operation. + term2 (bytes/bytearray/memoryview): + The second term of the XOR operation. + output (bytearray/memoryview): + The location where the result must be written to. + If ``None``, the result is returned. + :Return: + If ``output`` is ``None``, a new ``bytes`` string with the result. + Otherwise ``None``. + """ + + if len(term1) != len(term2): + raise ValueError("Only byte strings of equal length can be xored") + + if output is None: + result = create_string_buffer(len(term1)) + else: + # Note: output may overlap with either input + result = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(term1) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(term1)) + + _raw_strxor.strxor(c_uint8_ptr(term1), + c_uint8_ptr(term2), + c_uint8_ptr(result), + c_size_t(len(term1))) + + if output is None: + return get_raw_buffer(result) + else: + return None + + +def strxor_c(term, c, output=None): + """XOR a byte string with a repeated sequence of characters. + + Args: + term(bytes/bytearray/memoryview): + The first term of the XOR operation. + c (bytes): + The byte that makes up the second term of the XOR operation. + output (None or bytearray/memoryview): + If not ``None``, the location where the result is stored into. 
+ + Return: + If ``output`` is ``None``, a new ``bytes`` string with the result. + Otherwise ``None``. + """ + + if not 0 <= c < 256: + raise ValueError("c must be in range(256)") + + if output is None: + result = create_string_buffer(len(term)) + else: + # Note: output may overlap with either input + result = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(term) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(term)) + + _raw_strxor.strxor_c(c_uint8_ptr(term), + c, + c_uint8_ptr(result), + c_size_t(len(term)) + ) + + if output is None: + return get_raw_buffer(result) + else: + return None + + +def _strxor_direct(term1, term2, result): + """Very fast XOR - check conditions!""" + _raw_strxor.strxor(term1, term2, result, c_size_t(len(term1))) + diff --git a/env/Lib/site-packages/Crypto/Util/strxor.pyi b/env/Lib/site-packages/Crypto/Util/strxor.pyi new file mode 100644 index 0000000..ca896f3 --- /dev/null +++ b/env/Lib/site-packages/Crypto/Util/strxor.pyi @@ -0,0 +1,6 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +def strxor(term1: bytes, term2: bytes, output: Optional[Buffer]=...) -> bytes: ... +def strxor_c(term: bytes, c: int, output: Optional[Buffer]=...) -> bytes: ... 
diff --git a/env/Lib/site-packages/Crypto/__init__.py b/env/Lib/site-packages/Crypto/__init__.py new file mode 100644 index 0000000..e7713fe --- /dev/null +++ b/env/Lib/site-packages/Crypto/__init__.py @@ -0,0 +1,6 @@ +__all__ = ['Cipher', 'Hash', 'Protocol', 'PublicKey', 'Util', 'Signature', + 'IO', 'Math'] + +version_info = (3, 10, 1) + +__version__ = ".".join([str(x) for x in version_info]) diff --git a/env/Lib/site-packages/Crypto/__init__.pyi b/env/Lib/site-packages/Crypto/__init__.pyi new file mode 100644 index 0000000..bc73446 --- /dev/null +++ b/env/Lib/site-packages/Crypto/__init__.pyi @@ -0,0 +1,4 @@ +from typing import Tuple, Union + +version_info : Tuple[int, int, Union[int, str]] +__version__ : str diff --git a/env/Lib/site-packages/Crypto/py.typed b/env/Lib/site-packages/Crypto/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/env/Lib/site-packages/certifi-2021.5.30.dist-info/INSTALLER b/env/Lib/site-packages/certifi-2021.5.30.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/Lib/site-packages/certifi-2021.5.30.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/Lib/site-packages/certifi-2021.5.30.dist-info/LICENSE b/env/Lib/site-packages/certifi-2021.5.30.dist-info/LICENSE new file mode 100644 index 0000000..c2fda9a --- /dev/null +++ b/env/Lib/site-packages/certifi-2021.5.30.dist-info/LICENSE @@ -0,0 +1,21 @@ +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011# +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). 
This file can be found in the mozilla source tree: +http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1# +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/env/Lib/site-packages/certifi-2021.5.30.dist-info/METADATA b/env/Lib/site-packages/certifi-2021.5.30.dist-info/METADATA new file mode 100644 index 0000000..df1cc0e --- /dev/null +++ b/env/Lib/site-packages/certifi-2021.5.30.dist-info/METADATA @@ -0,0 +1,83 @@ +Metadata-Version: 2.1 +Name: certifi +Version: 2021.5.30 +Summary: Python package for providing Mozilla's CA Bundle. 
+Home-page: https://certifiio.readthedocs.io/en/latest/ +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: MPL-2.0 +Project-URL: Documentation, https://certifiio.readthedocs.io/en/latest/ +Project-URL: Source, https://github.com/certifi/python-certifi +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 + +Certifi: Python SSL Certificates +================================ + +`Certifi`_ provides Mozilla's carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python3.7/site-packages/certifi/cacert.pem + +Enjoy! + +1024-bit Root Certificates +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Browsers and certificate authorities have concluded that 1024-bit keys are +unacceptably weak for certificates, particularly root certificates. For this +reason, Mozilla has removed any weak (i.e. 
1024-bit key) certificate from its +bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) +certificate from the same CA. Because Mozilla removed these certificates from +its bundle, ``certifi`` removed them as well. + +In previous versions, ``certifi`` provided the ``certifi.old_where()`` function +to intentionally re-add the 1024-bit roots back into your bundle. This was not +recommended in production and therefore was removed at the end of 2018. + +.. _`Certifi`: https://certifiio.readthedocs.io/en/latest/ +.. _`Requests`: https://requests.readthedocs.io/en/master/ + +Addition/Removal of Certificates +-------------------------------- + +Certifi does not support any addition/removal or other modification of the +CA trust store content. This project is intended to provide a reliable and +highly portable root of trust to python deployments. Look to upstream projects +for methods to use alternate trust. + + diff --git a/env/Lib/site-packages/certifi-2021.5.30.dist-info/RECORD b/env/Lib/site-packages/certifi-2021.5.30.dist-info/RECORD new file mode 100644 index 0000000..fcdc940 --- /dev/null +++ b/env/Lib/site-packages/certifi-2021.5.30.dist-info/RECORD @@ -0,0 +1,13 @@ +certifi-2021.5.30.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi-2021.5.30.dist-info/LICENSE,sha256=vp2C82ES-Hp_HXTs1Ih-FGe7roh4qEAEoAEXseR1o-I,1049 +certifi-2021.5.30.dist-info/METADATA,sha256=RDzuah_IZxjVhKootR1Ha1BrDovPSA-xF-rcaD90PTo,2994 +certifi-2021.5.30.dist-info/RECORD,, +certifi-2021.5.30.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110 +certifi-2021.5.30.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi/__init__.py,sha256=-b78tXibbl0qtgCzv9tc9v6ozwcNX915lT9Tf4a9lds,62 +certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 +certifi/__pycache__/__init__.cpython-39.pyc,, +certifi/__pycache__/__main__.cpython-39.pyc,, 
+certifi/__pycache__/core.cpython-39.pyc,, +certifi/cacert.pem,sha256=3i-hfE2K5o3CBKG2tYt6ehJWk2fP64o6Th83fHPoPp4,259465 +certifi/core.py,sha256=V0uyxKOYdz6ulDSusclrLmjbPgOXsD0BnEf0SQ7OnoE,2303 diff --git a/env/Lib/site-packages/certifi-2021.5.30.dist-info/WHEEL b/env/Lib/site-packages/certifi-2021.5.30.dist-info/WHEEL new file mode 100644 index 0000000..6d38aa0 --- /dev/null +++ b/env/Lib/site-packages/certifi-2021.5.30.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.35.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/env/Lib/site-packages/certifi-2021.5.30.dist-info/top_level.txt b/env/Lib/site-packages/certifi-2021.5.30.dist-info/top_level.txt new file mode 100644 index 0000000..963eac5 --- /dev/null +++ b/env/Lib/site-packages/certifi-2021.5.30.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/env/Lib/site-packages/certifi/__init__.py b/env/Lib/site-packages/certifi/__init__.py new file mode 100644 index 0000000..eebdf88 --- /dev/null +++ b/env/Lib/site-packages/certifi/__init__.py @@ -0,0 +1,3 @@ +from .core import contents, where + +__version__ = "2021.05.30" diff --git a/env/Lib/site-packages/certifi/__main__.py b/env/Lib/site-packages/certifi/__main__.py new file mode 100644 index 0000000..8945b5d --- /dev/null +++ b/env/Lib/site-packages/certifi/__main__.py @@ -0,0 +1,12 @@ +import argparse + +from certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/env/Lib/site-packages/certifi/cacert.pem b/env/Lib/site-packages/certifi/cacert.pem new file mode 100644 index 0000000..96e2fc6 --- /dev/null +++ b/env/Lib/site-packages/certifi/cacert.pem @@ -0,0 +1,4257 @@ + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign 
Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Label: "GlobalSign Root CA - R2" +# Serial: 4835703278459682885658125 +# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 +# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe +# SHA256 Fingerprint: 
ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e +-----BEGIN CERTIFICATE----- +MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 +MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL +v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 +eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq +tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd +C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa +zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB +mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH +V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n +bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG +3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs +J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO +291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS +ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd +AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 +TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ +bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: 
CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz +ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
+# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END 
CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 +YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + 
+# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K 
+WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A 
+4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 +# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 +# Label: "Security Communication Root CA" +# Serial: 0 +# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a +# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 +# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c +-----BEGIN CERTIFICATE----- +MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY +MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t 
+dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 +WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD +VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 +9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ +DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 +Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N +QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ +xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G +A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG +kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr +Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 +Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU +JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot +RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx 
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. 
OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc 
OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm +NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 
Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 
5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. +# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. 
+# Label: "DST Root CA X3" +# Serial: 91299735575339953335919266965803778155 +# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 +# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 +# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 +-----BEGIN CERTIFICATE----- +MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ +MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT +DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow +PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD +Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB +AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O +rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq +OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b +xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw +7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD +aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG +SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 +ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr +AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz +R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 +JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo +Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 
62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp 
+Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- +MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c +wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB 
+AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ +OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT 
+BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg +JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 +6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw 
+KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ +DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW 
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Label: "Network Solutions Certificate Authority" +# Serial: 116697915152937497490437556386812487904 +# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e +# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce +# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c +-----BEGIN CERTIFICATE----- +MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi +MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu +MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp +dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV +UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO +ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz +c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP +OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl +mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF +BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 +qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw +gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB 
+BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu +bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp +dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 +6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ +h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH +/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv +wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN +pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END 
CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG +9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc +# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc +# Label: "Cybertrust Global Root" +# Serial: 4835703278459682877484360 +# MD5 Fingerprint: 
72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 +# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 +# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 +-----BEGIN CERTIFICATE----- +MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG +A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh +bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE +ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS +b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 +7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS +J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y +HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP +t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz +FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY +XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ +MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw +hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js +MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA +A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj +Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx +XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o +omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc +A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW +WL1WMRJOEcgh4LMRkWXbtKaIOM5V +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ 
+gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT +AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv +JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I 
+Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh 
+bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Label: "Hongkong Post Root CA 1" +# Serial: 1000 +# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca +# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 +# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 +-----BEGIN CERTIFICATE----- +MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx +FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg +Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG +A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr +b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ +jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn +PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh +ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 +nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h +q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED +MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC +mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 +7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB +oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs +EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO +fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi +AmvZWg== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign 
RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
+# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# 
Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 6047274297262753887 +# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 +# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa +# SHA256 Fingerprint: 
04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy +MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD +VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp +cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv +ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl +AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF +661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 +am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 +ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 +PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS +3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k +SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF 
+3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM +ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g +StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz +Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB +jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. +# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB 
+BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 
82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- 
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf 
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ +u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: 
bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: 
aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes +# Subject: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel 
(c)03/Jerarquia Entitats de Certificacio Catalanes +# Label: "EC-ACC" +# Serial: -23701579247955709139626555126524820479 +# MD5 Fingerprint: eb:f5:9d:29:0d:61:f9:42:1f:7c:c2:ba:6d:e3:15:09 +# SHA1 Fingerprint: 28:90:3a:63:5b:52:80:fa:e6:77:4c:0b:6d:a7:d6:ba:a6:4a:f2:e8 +# SHA256 Fingerprint: 88:49:7f:01:60:2f:31:54:24:6a:e2:8c:4d:5a:ef:10:f1:d8:7e:bb:76:62:6f:4a:e0:b7:f9:5b:a7:96:87:99 +-----BEGIN CERTIFICATE----- +MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB +8zELMAkGA1UEBhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2Vy +dGlmaWNhY2lvIChOSUYgUS0wODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1 +YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYDVQQLEyxWZWdldSBodHRwczovL3d3 +dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UECxMsSmVyYXJxdWlh +IEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMTBkVD +LUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQG +EwJFUzE7MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8g +KE5JRiBRLTA4MDExNzYtSSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBD +ZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZlZ2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQu +bmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJhcnF1aWEgRW50aXRhdHMg +ZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUNDMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R +85iKw5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm +4CgPukLjbo73FCeTae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaV +HMf5NLWUhdWZXqBIoH7nF2W4onW4HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNd +QlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0aE9jD2z3Il3rucO2n5nzbcc8t +lGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw0JDnJwIDAQAB +o4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4 +opvpXY0wfwYDVR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBo +dHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidW +ZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAwDQYJKoZIhvcN 
+AQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJlF7W2u++AVtd0x7Y +/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNaAl6k +SBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhy +Rp/7SNVel+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOS +Agu+TGbrIP65y7WZf+a2E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xl +nJ2lYJU6Un/10asIbvPuW/mIPX64b24D5EI= +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2011" +# Serial: 0 +# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 +# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d +# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 +-----BEGIN CERTIFICATE----- +MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix +RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p +YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw +NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK +EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl +cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz +dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ +fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns +bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD +75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP +FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV 
+HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp +5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu +b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA +A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p +6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 +TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 +dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys +Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI +l7WdmplNsDz4SgCbZN2fOUvRJ9e4 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F 
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 +jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 +jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB 
+BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: 
ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq 
+4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ +1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + 
+# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# 
Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw +ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 
+-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. +# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal 
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os +zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 +4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls 
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 
Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT +VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= 
+-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx +0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 
+Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Label: "E-Tugra Certification Authority" +# Serial: 7667447206703254355 +# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 +# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 +# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV +BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC +aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV +BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 +Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz +MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ +BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp +em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN +ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY +B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH +D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF +Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo 
+q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D +k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH +fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut +dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM +ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 +zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn +rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX +U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 +Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 +XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF +Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR +HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY +GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c +77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 ++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK +vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 +FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl +yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P +AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD +y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d +NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- 
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG 
+EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ +4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG 
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 
8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga +W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q +A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv 
+oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW 
+BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg +RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global 
Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 
7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO +Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG 
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 
Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL 
+S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT +Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE 
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo 
+I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 14367148294922964480859022125800977897474 +# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e +# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb +# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c +-----BEGIN CERTIFICATE----- +MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ +FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F +uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX +kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs +ewv4n4Q= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 
1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc +8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Label: "Staat der Nederlanden EV Root CA" +# Serial: 10000013 +# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba +# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb +# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a +-----BEGIN CERTIFICATE----- +MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y +MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg +TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS +b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS +M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC 
+UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d +Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p +rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l +pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb +j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC +KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS +/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X +cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH +1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP +px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 +MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI +eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u +2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS +v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC +wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy +CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e +vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 +Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa +Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL +eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 +FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc +7uzXLg== +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- 
+MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: 
CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN +MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj +t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW 
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW 
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD 
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV 
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy +P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh 
+bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 
+XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 
+AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw 
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc +oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# 
Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp 
+6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj +ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root 
CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl 
+urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- 
+MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU 
+QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr +N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. 
+# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg 
+IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-1" +# Serial: 15752444095811006489 +# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 +# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a +# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y +IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB +pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h +IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG +A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU +cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid +RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V +seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme +9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV +EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW +hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ 
+DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD +ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I +/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf +ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ +yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts +L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN +zl/HHk484IkzlQsPpTLWPFp5LBk= +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-2" +# Serial: 2711694510199101698 +# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 +# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 +# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 +-----BEGIN CERTIFICATE----- +MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig +Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk +MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg +Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD +VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy +dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ +QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq +1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp +2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK 
+DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape +az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF +3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 +oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM +g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 +mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh +8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd +BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U +nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw +DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX +dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ +MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL +/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX +CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa +ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW +2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 +N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 +Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB +As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp +5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu +1uwJ +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor ECA-1" +# Serial: 9548242946988625984 +# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c +# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd +# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y +IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig +RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb +3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA +BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 +3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou +owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ +wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF +ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf +BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ +MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv +civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 +AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F +hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 +soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI +WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi +tJ/X5g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification 
Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm ++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ 
+q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud +EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com 
EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq +M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 
+Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ 
+ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt 
+Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV 
+57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 146587175971765017618439757810265552097 +# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85 +# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8 +# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH +MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM +QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy +MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl +cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM +f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX +mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7 +zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P +fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc +vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4 +Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp +zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO +Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW +k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+ +DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF +lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW +Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1 +d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z 
+XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR +gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3 +d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv +J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg +DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM ++SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy +F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9 +SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws +E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 146587176055767053814479386953112547951 +# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b +# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d +# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH +MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM +QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy +MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl +cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv +CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg +GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu +XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd +re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu +PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1 +mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K +8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj 
+x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR +nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0 +kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok +twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp +8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT +vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT +z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA +pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb +pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB +R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R +RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk +0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC +5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF +izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn +yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 146587176140553309517047991083707763997 +# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25 +# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5 +# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5 +-----BEGIN CERTIFICATE----- +MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout 
+736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A +DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk +fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA +njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 146587176229350439916519468929765261721 +# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26 +# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb +# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd +-----BEGIN CERTIFICATE----- +MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu +hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l +xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0 +CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx +sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 
9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv +j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy +YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== 
+-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS +sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi +c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp 
+4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO +ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k 
+JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L +6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB +lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU 
+ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby +RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ 
+TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 +WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 
+/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: "Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 
Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c 
+JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa +LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G4" +# Serial: 289383649854506086828220374796556676440 +# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 +# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 +# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw +gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL +Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg +MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw +BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 +MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 +c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ +bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ +2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E +T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j +5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM +C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T 
+DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX +wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A +2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm +nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 +dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl +N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj +c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS +5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS +Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr +hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ +B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI +AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw +H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ +b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk +2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol +IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk +5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY +n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD 
+VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV +UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm 
+gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC +gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB +RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. 
+# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF 
+dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ +d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk +PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. 
+# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB 
+aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global 
ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe 
+lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- + +# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" +# Serial: 131542671362353147877283741781055151509 +# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb +# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a +# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb +-----BEGIN CERTIFICATE----- +MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw +CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw +FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S +Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 +MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL +DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS +QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH +sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK +Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu +SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC +MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy +v+c= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Label: "GlobalSign Root 
R46" +# Serial: 1552617688466950547958867513931858518042577 +# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef +# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 +# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA +MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD +VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy +MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt +c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ +OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG +vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud +316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo +0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE +y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF +zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE ++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN +I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs +x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa +ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC +4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 +7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg +JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti +2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk +pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF +FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt +rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk 
+ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 +u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP +4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 +N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 +vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Label: "GlobalSign Root E46" +# Serial: 1552617690338932563915843282459653771421763 +# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f +# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 +# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 +-----BEGIN CERTIFICATE----- +MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx +CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD +ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw +MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex +HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq +R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd +yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 ++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= +-----END CERTIFICATE----- + +# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Label: "GLOBALTRUST 2020" +# Serial: 109160994242082918454945253 +# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 +# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 +# SHA256 Fingerprint: 
9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a +-----BEGIN CERTIFICATE----- +MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG +A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw +FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx +MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u +aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq +hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b +RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z +YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 +QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw +yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ +BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ +SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH +r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 +4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me +dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw +q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 +nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu +H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA +VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC +XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd +6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf ++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi +kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 +wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB +TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C +MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn +4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I 
+aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy +qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== +-----END CERTIFICATE----- + +# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Label: "ANF Secure Server Root CA" +# Serial: 996390341000653745 +# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 +# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 +# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 +-----BEGIN CERTIFICATE----- +MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV +BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk +YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV +BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN +MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF +UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD +VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v +dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj +cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q +yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH +2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX +H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL +zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR +p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz +W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ +SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn +LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 +n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B +u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj +o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO 
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L +9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej +rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK +pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 +vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq +OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ +/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 +2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI ++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 +MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo +tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= +-----END CERTIFICATE----- + +# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum EC-384 CA" +# Serial: 160250656287871593594747141429395092468 +# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 +# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed +# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 +-----BEGIN CERTIFICATE----- +MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw +CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw +JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT +EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 +WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT +LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX +BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE +KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm +Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj 
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 +EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J +UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn +nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Root CA" +# Serial: 40870380103424195783807378461123655149 +# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 +# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 +# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd +-----BEGIN CERTIFICATE----- +MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 +MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu +MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV +BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw +MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg +U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ +n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q +p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq +NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF +8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 +HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa +mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi +7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF +ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P +qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ 
+v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 +Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 +vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD +ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 +WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo +zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR +5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ +GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf +5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq +0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D +P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM +qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP +0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf +E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb +-----END CERTIFICATE----- diff --git a/env/Lib/site-packages/certifi/core.py b/env/Lib/site-packages/certifi/core.py new file mode 100644 index 0000000..5d2b8cd --- /dev/null +++ b/env/Lib/site-packages/certifi/core.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- + +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem or its contents. +""" +import os + +try: + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where(): + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. 
+ global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + + return _CACERT_PATH + + +except ImportError: + # This fallback will work for Python versions prior to 3.7 that lack the + # importlib.resources module but relies on the existing `where` function + # so won't address issues with environments like PyOxidizer that don't set + # __file__ on modules. + def read_text(_module, _path, encoding="ascii"): + with open(where(), "r", encoding=encoding) as data: + return data.read() + + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. + def where(): + f = os.path.dirname(__file__) + + return os.path.join(f, "cacert.pem") + + +def contents(): + return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/env/Lib/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst b/env/Lib/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..c0f044d --- /dev/null +++ b/env/Lib/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst @@ -0,0 +1,70 @@ +Chardet: The Universal Character Encoding Detector +-------------------------------------------------- + +.. 
image:: https://img.shields.io/travis/chardet/chardet/stable.svg + :alt: Build status + :target: https://travis-ci.org/chardet/chardet + +.. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg + :target: https://coveralls.io/r/chardet/chardet + +.. image:: https://img.shields.io/pypi/v/chardet.svg + :target: https://warehouse.python.org/project/chardet/ + :alt: Latest version on PyPI + +.. image:: https://img.shields.io/pypi/l/chardet.svg + :alt: License + + +Detects + - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants) + - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese) + - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese) + - EUC-KR, ISO-2022-KR (Korean) + - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic) + - ISO-8859-5, windows-1251 (Bulgarian) + - ISO-8859-1, windows-1252 (Western European languages) + - ISO-8859-7, windows-1253 (Greek) + - ISO-8859-8, windows-1255 (Visual and Logical Hebrew) + - TIS-620 (Thai) + +.. note:: + Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily + disabled until we can retrain the models. + +Requires Python 2.6, 2.7, or 3.3+. + +Installation +------------ + +Install from `PyPI `_:: + + pip install chardet + +Documentation +------------- + +For users, docs are now available at https://chardet.readthedocs.io/. + +Command-line Tool +----------------- + +chardet comes with a command-line script which reports on the encodings of one +or more files:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +About +----- + +This is a continuation of Mark Pilgrim's excellent chardet. Previously, two +versions needed to be maintained: one that supported python 2.x and one that +supported python 3.x. We've recently merged with `Ian Cordasco `_'s +`charade `_ fork, so now we have one +coherent version that works for Python 2.6+. 
+ +:maintainer: Dan Blanchard + + diff --git a/env/Lib/site-packages/chardet-3.0.4.dist-info/INSTALLER b/env/Lib/site-packages/chardet-3.0.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/Lib/site-packages/chardet-3.0.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/Lib/site-packages/chardet-3.0.4.dist-info/METADATA b/env/Lib/site-packages/chardet-3.0.4.dist-info/METADATA new file mode 100644 index 0000000..1427867 --- /dev/null +++ b/env/Lib/site-packages/chardet-3.0.4.dist-info/METADATA @@ -0,0 +1,96 @@ +Metadata-Version: 2.0 +Name: chardet +Version: 3.0.4 +Summary: Universal encoding detector for Python 2 and 3 +Home-page: https://github.com/chardet/chardet +Author: Daniel Blanchard +Author-email: dan.blanchard@gmail.com +License: LGPL +Keywords: encoding,i18n,xml +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Linguistic + +Chardet: The Universal Character Encoding Detector +-------------------------------------------------- + +.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg + :alt: Build status + :target: https://travis-ci.org/chardet/chardet + +.. 
image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg + :target: https://coveralls.io/r/chardet/chardet + +.. image:: https://img.shields.io/pypi/v/chardet.svg + :target: https://warehouse.python.org/project/chardet/ + :alt: Latest version on PyPI + +.. image:: https://img.shields.io/pypi/l/chardet.svg + :alt: License + + +Detects + - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants) + - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese) + - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese) + - EUC-KR, ISO-2022-KR (Korean) + - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic) + - ISO-8859-5, windows-1251 (Bulgarian) + - ISO-8859-1, windows-1252 (Western European languages) + - ISO-8859-7, windows-1253 (Greek) + - ISO-8859-8, windows-1255 (Visual and Logical Hebrew) + - TIS-620 (Thai) + +.. note:: + Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily + disabled until we can retrain the models. + +Requires Python 2.6, 2.7, or 3.3+. + +Installation +------------ + +Install from `PyPI `_:: + + pip install chardet + +Documentation +------------- + +For users, docs are now available at https://chardet.readthedocs.io/. + +Command-line Tool +----------------- + +chardet comes with a command-line script which reports on the encodings of one +or more files:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +About +----- + +This is a continuation of Mark Pilgrim's excellent chardet. Previously, two +versions needed to be maintained: one that supported python 2.x and one that +supported python 3.x. We've recently merged with `Ian Cordasco `_'s +`charade `_ fork, so now we have one +coherent version that works for Python 2.6+. 
+ +:maintainer: Dan Blanchard + + diff --git a/env/Lib/site-packages/chardet-3.0.4.dist-info/RECORD b/env/Lib/site-packages/chardet-3.0.4.dist-info/RECORD new file mode 100644 index 0000000..bc818ca --- /dev/null +++ b/env/Lib/site-packages/chardet-3.0.4.dist-info/RECORD @@ -0,0 +1,91 @@ +../../Scripts/chardetect.exe,sha256=iwXSyeKzGyT2h9aky7y0DqctXC_-kjDkVrguAaRuSVU,106361 +chardet-3.0.4.dist-info/DESCRIPTION.rst,sha256=PQ4sBsMyKFZkjC6QpmbpLn0UtCNyeb-ZqvCGEgyZMGk,2174 +chardet-3.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +chardet-3.0.4.dist-info/METADATA,sha256=RV_2I4B1Z586DL8oVO5Kp7X5bUdQ5EuKAvNoAEF8wSw,3239 +chardet-3.0.4.dist-info/RECORD,, +chardet-3.0.4.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +chardet-3.0.4.dist-info/entry_points.txt,sha256=fAMmhu5eJ-zAJ-smfqQwRClQ3-nozOCmvJ6-E8lgGJo,60 +chardet-3.0.4.dist-info/metadata.json,sha256=0htbRM18ujyGZDdfowgAqj6Hq2eQtwzwyhaEveKntgo,1375 +chardet-3.0.4.dist-info/top_level.txt,sha256=AowzBbZy4x8EirABDdJSLJZMkJ_53iIag8xfKR6D7kI,8 +chardet/__init__.py,sha256=YsP5wQlsHJ2auF1RZJfypiSrCA7_bQiRm3ES_NI76-Y,1559 +chardet/__pycache__/__init__.cpython-39.pyc,, +chardet/__pycache__/big5freq.cpython-39.pyc,, +chardet/__pycache__/big5prober.cpython-39.pyc,, +chardet/__pycache__/chardistribution.cpython-39.pyc,, +chardet/__pycache__/charsetgroupprober.cpython-39.pyc,, +chardet/__pycache__/charsetprober.cpython-39.pyc,, +chardet/__pycache__/codingstatemachine.cpython-39.pyc,, +chardet/__pycache__/compat.cpython-39.pyc,, +chardet/__pycache__/cp949prober.cpython-39.pyc,, +chardet/__pycache__/enums.cpython-39.pyc,, +chardet/__pycache__/escprober.cpython-39.pyc,, +chardet/__pycache__/escsm.cpython-39.pyc,, +chardet/__pycache__/eucjpprober.cpython-39.pyc,, +chardet/__pycache__/euckrfreq.cpython-39.pyc,, +chardet/__pycache__/euckrprober.cpython-39.pyc,, +chardet/__pycache__/euctwfreq.cpython-39.pyc,, +chardet/__pycache__/euctwprober.cpython-39.pyc,, 
+chardet/__pycache__/gb2312freq.cpython-39.pyc,, +chardet/__pycache__/gb2312prober.cpython-39.pyc,, +chardet/__pycache__/hebrewprober.cpython-39.pyc,, +chardet/__pycache__/jisfreq.cpython-39.pyc,, +chardet/__pycache__/jpcntx.cpython-39.pyc,, +chardet/__pycache__/langbulgarianmodel.cpython-39.pyc,, +chardet/__pycache__/langcyrillicmodel.cpython-39.pyc,, +chardet/__pycache__/langgreekmodel.cpython-39.pyc,, +chardet/__pycache__/langhebrewmodel.cpython-39.pyc,, +chardet/__pycache__/langhungarianmodel.cpython-39.pyc,, +chardet/__pycache__/langthaimodel.cpython-39.pyc,, +chardet/__pycache__/langturkishmodel.cpython-39.pyc,, +chardet/__pycache__/latin1prober.cpython-39.pyc,, +chardet/__pycache__/mbcharsetprober.cpython-39.pyc,, +chardet/__pycache__/mbcsgroupprober.cpython-39.pyc,, +chardet/__pycache__/mbcssm.cpython-39.pyc,, +chardet/__pycache__/sbcharsetprober.cpython-39.pyc,, +chardet/__pycache__/sbcsgroupprober.cpython-39.pyc,, +chardet/__pycache__/sjisprober.cpython-39.pyc,, +chardet/__pycache__/universaldetector.cpython-39.pyc,, +chardet/__pycache__/utf8prober.cpython-39.pyc,, +chardet/__pycache__/version.cpython-39.pyc,, +chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254 +chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757 +chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411 +chardet/charsetgroupprober.py,sha256=6bDu8YIiRuScX4ca9Igb0U69TA2PGXXDej6Cc4_9kO4,3787 +chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110 +chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +chardet/cli/__pycache__/__init__.cpython-39.pyc,, +chardet/cli/__pycache__/chardetect.cpython-39.pyc,, +chardet/cli/chardetect.py,sha256=YBO8L4mXo0WR6_-Fjh_8QxPBoEBNqB9oNxNrdc54AQs,2738 +chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590 +chardet/compat.py,sha256=PKTzHkSbtbHDqS9PyujMbX74q1a8mMpeQTDVsQhZMRw,1134 
+chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855 +chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661 +chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950 +chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510 +chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749 +chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546 +chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748 +chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621 +chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747 +chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715 +chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754 +chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838 +chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777 +chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643 +chardet/langbulgarianmodel.py,sha256=1HqQS9Pbtnj1xQgxitJMvw8X6kKr5OockNCZWfEQrPE,12839 +chardet/langcyrillicmodel.py,sha256=LODajvsetH87yYDDQKA2CULXUH87tI223dhfjh9Zx9c,17948 +chardet/langgreekmodel.py,sha256=8YAW7bU8YwSJap0kIJSbPMw1BEqzGjWzqcqf0WgUKAA,12688 +chardet/langhebrewmodel.py,sha256=JSnqmE5E62tDLTPTvLpQsg5gOMO4PbdWRvV7Avkc0HA,11345 +chardet/langhungarianmodel.py,sha256=RhapYSG5l0ZaO-VV4Fan5sW0WRGQqhwBM61yx3yxyOA,12592 +chardet/langthaimodel.py,sha256=8l0173Gu_W6G8mxmQOTEF4ls2YdE7FxWf3QkSxEGXJQ,11290 +chardet/langturkishmodel.py,sha256=W22eRNJsqI6uWAfwXSKVWWnCerYqrI8dZQTm_M0lRFk,11102 +chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370 +chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413 +chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012 
+chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481 +chardet/sbcharsetprober.py,sha256=LDSpCldDCFlYwUkGkwD2oFxLlPWIWXT09akH_2PiY74,5657 +chardet/sbcsgroupprober.py,sha256=1IprcCB_k1qfmnxGC6MBbxELlKqD3scW6S8YIwdeyXA,3546 +chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774 +chardet/universaldetector.py,sha256=qL0174lSZE442eB21nnktT9_VcAye07laFWUeUrjttY,12485 +chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766 +chardet/version.py,sha256=sp3B08mrDXB-pf3K9fqJ_zeDHOCLC8RrngQyDFap_7g,242 diff --git a/env/Lib/site-packages/chardet-3.0.4.dist-info/WHEEL b/env/Lib/site-packages/chardet-3.0.4.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/env/Lib/site-packages/chardet-3.0.4.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/env/Lib/site-packages/chardet-3.0.4.dist-info/entry_points.txt b/env/Lib/site-packages/chardet-3.0.4.dist-info/entry_points.txt new file mode 100644 index 0000000..a884269 --- /dev/null +++ b/env/Lib/site-packages/chardet-3.0.4.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +chardetect = chardet.cli.chardetect:main + diff --git a/env/Lib/site-packages/chardet-3.0.4.dist-info/metadata.json b/env/Lib/site-packages/chardet-3.0.4.dist-info/metadata.json new file mode 100644 index 0000000..8cdf025 --- /dev/null +++ b/env/Lib/site-packages/chardet-3.0.4.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", 
"Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Text Processing :: Linguistic"], "extensions": {"python.commands": {"wrap_console": {"chardetect": "chardet.cli.chardetect:main"}}, "python.details": {"contacts": [{"email": "dan.blanchard@gmail.com", "name": "Daniel Blanchard", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/chardet/chardet"}}, "python.exports": {"console_scripts": {"chardetect": "chardet.cli.chardetect:main"}}}, "generator": "bdist_wheel (0.29.0)", "keywords": ["encoding", "i18n", "xml"], "license": "LGPL", "metadata_version": "2.0", "name": "chardet", "summary": "Universal encoding detector for Python 2 and 3", "test_requires": [{"requires": ["hypothesis", "pytest"]}], "version": "3.0.4"} \ No newline at end of file diff --git a/env/Lib/site-packages/chardet-3.0.4.dist-info/top_level.txt b/env/Lib/site-packages/chardet-3.0.4.dist-info/top_level.txt new file mode 100644 index 0000000..79236f2 --- /dev/null +++ b/env/Lib/site-packages/chardet-3.0.4.dist-info/top_level.txt @@ -0,0 +1 @@ +chardet diff --git a/env/Lib/site-packages/chardet/__init__.py b/env/Lib/site-packages/chardet/__init__.py new file mode 100644 index 0000000..0f9f820 --- /dev/null +++ b/env/Lib/site-packages/chardet/__init__.py @@ -0,0 +1,39 @@ +######################## BEGIN LICENSE BLOCK ######################## +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + + +from .compat import PY2, PY3 +from .universaldetector import UniversalDetector +from .version import __version__, VERSION + + +def detect(byte_str): + """ + Detect the encoding of the given byte string. + + :param byte_str: The byte sequence to examine. + :type byte_str: ``bytes`` or ``bytearray`` + """ + if not isinstance(byte_str, bytearray): + if not isinstance(byte_str, bytes): + raise TypeError('Expected object of type bytes or bytearray, got: ' + '{0}'.format(type(byte_str))) + else: + byte_str = bytearray(byte_str) + detector = UniversalDetector() + detector.feed(byte_str) + return detector.close() diff --git a/env/Lib/site-packages/chardet/big5freq.py b/env/Lib/site-packages/chardet/big5freq.py new file mode 100644 index 0000000..38f3251 --- /dev/null +++ b/env/Lib/site-packages/chardet/big5freq.py @@ -0,0 +1,386 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Big5 frequency table +# by Taiwan's Mandarin Promotion Council +# +# +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +#Char to FreqOrder table +BIG5_TABLE_SIZE = 5376 + +BIG5_CHAR_TO_FREQ_ORDER = ( + 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 +3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 +1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 + 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 +3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 +4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 +5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 + 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 + 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 + 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 +2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 +1376,4204,1311,1420,3206, 
25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 +3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 + 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 +3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 +2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 + 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 +3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 +1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 +5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 + 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 +5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 +1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 + 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 + 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 +3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 +3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 + 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 +2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 +2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 + 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 + 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 +3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 
+1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 +1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 +1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 +2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 + 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 +4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 +1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 +5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 +2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 + 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 + 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 + 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 + 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 +5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 + 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 +1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 + 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 + 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 +5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 +1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 + 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 +3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 +4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 
970,2264,1748,1966,2082,4225, # 912 +3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 + 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 + 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 +1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 +4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 +3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 +3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 +2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 +5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 +3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 +5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 +1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 +2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 +1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 + 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 +1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168 +4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 +3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 + 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 + 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 + 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 +2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 +5122, 611,1156, 
854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 +1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 +2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 +1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 +1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 +5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 +5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 +5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 +3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 +4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 +4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 +2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 +5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 +3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 + 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 +5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 +5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 +1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 +2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 +3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 +4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 +5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 +3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 
622,1300,1180,1388,1562, # 1632 +4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 +1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 +1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 +4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 +1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 + 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 +1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 +1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 +3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 + 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 +5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808 +2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 +1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 +1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 +5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 + 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 +4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 + 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 +2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 + 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 +1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 +1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 + 730,1515, 
184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 +4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 +4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 +1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 +3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 +5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 +5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 +1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 +2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 +1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 +3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 +2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 +3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 +2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 +4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 +4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 +3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 + 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 +3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 + 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 +3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 +4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 +3722,3604, 
815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 +1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 +5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 + 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 +5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 +1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 + 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 +4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 +4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 + 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 +2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 +2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 +3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 +1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 +4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 +2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 +1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 +1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 +2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 +3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 +1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 +5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 
+1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 +4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 +1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 + 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 +1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 +4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 +4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 +2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 +1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 +4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 + 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 +5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 +2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 +3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 +4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 + 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 +5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 +5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 +1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 +4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 +4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 +2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 +3874,5370,5371, 12,2668, 45, 
976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 +3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 +2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 +1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 +4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 +3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 +3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 +2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 +4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 +5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 +3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 +2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 +3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 +1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 +2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 +3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 +4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 +2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 +2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 +5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 +1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 +2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 +1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 
413,1714, # 3408 +3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 +4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 +2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 +3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 +3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 +2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 +4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 +2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 +3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 +4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 +5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 +3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 + 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 +1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 +4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 +1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 +4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 +5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 + 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 +5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 +5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 +2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 +3252,2308, 
271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 +2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 +2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 + 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 +1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 +4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 +3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 +3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 + 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 +2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 + 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 +2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 +4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 +1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 +4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 +1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 +3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 + 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 +3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 +5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 +5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 +3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 
+3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 +1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 +2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 +5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 +1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 +1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 +3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 + 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 +1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 +4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 +5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 +2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 +3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 + 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 +1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352 +2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 +2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 +5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 +5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 +5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 +2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 +2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 
+1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 +4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 +3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 +3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 +4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 +4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 +2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 +2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 +5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 +4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 +5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 +4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 + 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 + 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 +1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 +3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 +4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 +1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 +5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 +2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 +2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 +3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 
+5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 +1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 +3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 +5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 +1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 +5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 +2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 +3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 +2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 +3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 +3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 +3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 +4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 + 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 +2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 +4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 +3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 +5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 +1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 +5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 + 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 +1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 + 479,5740,5741, 
832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 +4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 +1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 +4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 +1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 + 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 +3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 +4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 +5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 + 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 +3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 + 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 +2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 +) + diff --git a/env/Lib/site-packages/chardet/big5prober.py b/env/Lib/site-packages/chardet/big5prober.py new file mode 100644 index 0000000..98f9970 --- /dev/null +++ b/env/Lib/site-packages/chardet/big5prober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import Big5DistributionAnalysis +from .mbcssm import BIG5_SM_MODEL + + +class Big5Prober(MultiByteCharSetProber): + def __init__(self): + super(Big5Prober, self).__init__() + self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) + self.distribution_analyzer = Big5DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "Big5" + + @property + def language(self): + return "Chinese" diff --git a/env/Lib/site-packages/chardet/chardistribution.py b/env/Lib/site-packages/chardet/chardistribution.py new file mode 100644 index 0000000..c0395f4 --- /dev/null +++ b/env/Lib/site-packages/chardet/chardistribution.py @@ -0,0 +1,233 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE, + EUCTW_TYPICAL_DISTRIBUTION_RATIO) +from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE, + EUCKR_TYPICAL_DISTRIBUTION_RATIO) +from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE, + GB2312_TYPICAL_DISTRIBUTION_RATIO) +from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, + BIG5_TYPICAL_DISTRIBUTION_RATIO) +from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE, + JIS_TYPICAL_DISTRIBUTION_RATIO) + + +class CharDistributionAnalysis(object): + ENOUGH_DATA_THRESHOLD = 1024 + SURE_YES = 0.99 + SURE_NO = 0.01 + MINIMUM_DATA_THRESHOLD = 3 + + def __init__(self): + # Mapping table to get frequency order from char order (get from + # GetOrder()) + self._char_to_freq_order = None + self._table_size = None # Size of above table + # This is a constant value which varies from language to language, + # used in calculating confidence. See + # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html + # for further detail. 
+ self.typical_distribution_ratio = None + self._done = None + self._total_chars = None + self._freq_chars = None + self.reset() + + def reset(self): + """reset analyser, clear any state""" + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + self._total_chars = 0 # Total characters encountered + # The number of characters whose frequency order is less than 512 + self._freq_chars = 0 + + def feed(self, char, char_len): + """feed a character with known length""" + if char_len == 2: + # we only care about 2-bytes character in our distribution analysis + order = self.get_order(char) + else: + order = -1 + if order >= 0: + self._total_chars += 1 + # order is valid + if order < self._table_size: + if 512 > self._char_to_freq_order[order]: + self._freq_chars += 1 + + def get_confidence(self): + """return confidence based on existing data""" + # if we didn't receive any character in our consideration range, + # return negative answer + if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD: + return self.SURE_NO + + if self._total_chars != self._freq_chars: + r = (self._freq_chars / ((self._total_chars - self._freq_chars) + * self.typical_distribution_ratio)) + if r < self.SURE_YES: + return r + + # normalize confidence (we don't want to be 100% sure) + return self.SURE_YES + + def got_enough_data(self): + # It is not necessary to receive all data to draw conclusion. + # For charset detection, certain amount of data is enough + return self._total_chars > self.ENOUGH_DATA_THRESHOLD + + def get_order(self, byte_str): + # We do not handle characters based on the original encoding string, + # but convert this encoding string to a number, here called order. + # This allows multiple encodings of a language to share one frequency + # table. 
+ return -1 + + +class EUCTWDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCTWDistributionAnalysis, self).__init__() + self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER + self._table_size = EUCTW_TABLE_SIZE + self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-TW encoding, we are interested + # first byte range: 0xc4 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char = byte_str[0] + if first_char >= 0xC4: + return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 + else: + return -1 + + +class EUCKRDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCKRDistributionAnalysis, self).__init__() + self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER + self._table_size = EUCKR_TABLE_SIZE + self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-KR encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char = byte_str[0] + if first_char >= 0xB0: + return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1 + else: + return -1 + + +class GB2312DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(GB2312DistributionAnalysis, self).__init__() + self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER + self._table_size = GB2312_TABLE_SIZE + self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for GB2312 encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. 
State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if (first_char >= 0xB0) and (second_char >= 0xA1): + return 94 * (first_char - 0xB0) + second_char - 0xA1 + else: + return -1 + + +class Big5DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(Big5DistributionAnalysis, self).__init__() + self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER + self._table_size = BIG5_TABLE_SIZE + self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for big5 encoding, we are interested + # first byte range: 0xa4 -- 0xfe + # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if first_char >= 0xA4: + if second_char >= 0xA1: + return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 + else: + return 157 * (first_char - 0xA4) + second_char - 0x40 + else: + return -1 + + +class SJISDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(SJISDistributionAnalysis, self).__init__() + self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER + self._table_size = JIS_TABLE_SIZE + self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for sjis encoding, we are interested + # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe + # second byte range: 0x40 -- 0x7e, 0x81 -- oxfe + # no validation needed here. 
State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if (first_char >= 0x81) and (first_char <= 0x9F): + order = 188 * (first_char - 0x81) + elif (first_char >= 0xE0) and (first_char <= 0xEF): + order = 188 * (first_char - 0xE0 + 31) + else: + return -1 + order = order + second_char - 0x40 + if second_char > 0x7F: + order = -1 + return order + + +class EUCJPDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCJPDistributionAnalysis, self).__init__() + self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER + self._table_size = JIS_TABLE_SIZE + self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-JP encoding, we are interested + # first byte range: 0xa0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + char = byte_str[0] + if char >= 0xA0: + return 94 * (char - 0xA1) + byte_str[1] - 0xa1 + else: + return -1 diff --git a/env/Lib/site-packages/chardet/charsetgroupprober.py b/env/Lib/site-packages/chardet/charsetgroupprober.py new file mode 100644 index 0000000..8b3738e --- /dev/null +++ b/env/Lib/site-packages/chardet/charsetgroupprober.py @@ -0,0 +1,106 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState +from .charsetprober import CharSetProber + + +class CharSetGroupProber(CharSetProber): + def __init__(self, lang_filter=None): + super(CharSetGroupProber, self).__init__(lang_filter=lang_filter) + self._active_num = 0 + self.probers = [] + self._best_guess_prober = None + + def reset(self): + super(CharSetGroupProber, self).reset() + self._active_num = 0 + for prober in self.probers: + if prober: + prober.reset() + prober.active = True + self._active_num += 1 + self._best_guess_prober = None + + @property + def charset_name(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.charset_name + + @property + def language(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.language + + def feed(self, byte_str): + for prober in self.probers: + if not prober: + continue + if not prober.active: + continue + state = prober.feed(byte_str) + if not state: + continue + if state == ProbingState.FOUND_IT: + self._best_guess_prober = prober + return self.state + elif state == ProbingState.NOT_ME: + prober.active = False + self._active_num -= 1 + if self._active_num <= 0: + self._state = ProbingState.NOT_ME + return self.state + return self.state + + def get_confidence(self): + state = self.state + if state == 
ProbingState.FOUND_IT: + return 0.99 + elif state == ProbingState.NOT_ME: + return 0.01 + best_conf = 0.0 + self._best_guess_prober = None + for prober in self.probers: + if not prober: + continue + if not prober.active: + self.logger.debug('%s not active', prober.charset_name) + continue + conf = prober.get_confidence() + self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf) + if best_conf < conf: + best_conf = conf + self._best_guess_prober = prober + if not self._best_guess_prober: + return 0.0 + return best_conf diff --git a/env/Lib/site-packages/chardet/charsetprober.py b/env/Lib/site-packages/chardet/charsetprober.py new file mode 100644 index 0000000..eac4e59 --- /dev/null +++ b/env/Lib/site-packages/chardet/charsetprober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging +import re + +from .enums import ProbingState + + +class CharSetProber(object): + + SHORTCUT_THRESHOLD = 0.95 + + def __init__(self, lang_filter=None): + self._state = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + + def reset(self): + self._state = ProbingState.DETECTING + + @property + def charset_name(self): + return None + + def feed(self, buf): + pass + + @property + def state(self): + return self._state + + def get_confidence(self): + return 0.0 + + @staticmethod + def filter_high_byte_only(buf): + buf = re.sub(b'([\x00-\x7F])+', b' ', buf) + return buf + + @staticmethod + def filter_international_words(buf): + """ + We define three types of bytes: + alphabet: english alphabets [a-zA-Z] + international: international characters [\x80-\xFF] + marker: everything else [^a-zA-Z\x80-\xFF] + + The input buffer can be thought to contain a series of words delimited + by markers. This function works to filter all words that contain at + least one international character. All contiguous sequences of markers + are replaced by a single space ascii character. + + This filter applies to all scripts which do not use English characters. + """ + filtered = bytearray() + + # This regex expression filters out only words that have at-least one + # international character. The word may include one marker character at + # the end. 
+ words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', + buf) + + for word in words: + filtered.extend(word[:-1]) + + # If the last character in the word is a marker, replace it with a + # space as markers shouldn't affect our analysis (they are used + # similarly across all languages and may thus have similar + # frequencies). + last_char = word[-1:] + if not last_char.isalpha() and last_char < b'\x80': + last_char = b' ' + filtered.extend(last_char) + + return filtered + + @staticmethod + def filter_with_english_letters(buf): + """ + Returns a copy of ``buf`` that retains only the sequences of English + alphabet and high byte characters that are not between <> characters. + Also retains English alphabet and high byte characters immediately + before occurrences of >. + + This filter can be applied to all scripts which contain both English + characters and extended ASCII characters, but is currently only used by + ``Latin1Prober``. + """ + filtered = bytearray() + in_tag = False + prev = 0 + + for curr in range(len(buf)): + # Slice here to get bytes instead of an int with Python 3 + buf_char = buf[curr:curr + 1] + # Check if we're coming out of or entering an HTML tag + if buf_char == b'>': + in_tag = False + elif buf_char == b'<': + in_tag = True + + # If current character is not extended-ASCII and not alphabetic... + if buf_char < b'\x80' and not buf_char.isalpha(): + # ...and we're not in a tag + if curr > prev and not in_tag: + # Keep everything after last non-extended-ASCII, + # non-alphabetic character + filtered.extend(buf[prev:curr]) + # Output a space to delimit stretch we kept + filtered.extend(b' ') + prev = curr + 1 + + # If we're not in a tag... 
+ if not in_tag: + # Keep everything after last non-extended-ASCII, non-alphabetic + # character + filtered.extend(buf[prev:]) + + return filtered diff --git a/env/Lib/site-packages/chardet/cli/__init__.py b/env/Lib/site-packages/chardet/cli/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/env/Lib/site-packages/chardet/cli/__init__.py @@ -0,0 +1 @@ + diff --git a/env/Lib/site-packages/chardet/cli/chardetect.py b/env/Lib/site-packages/chardet/cli/chardetect.py new file mode 100644 index 0000000..f0a4cc5 --- /dev/null +++ b/env/Lib/site-packages/chardet/cli/chardetect.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python +""" +Script which takes one or more file paths and reports on their detected +encodings + +Example:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +If no paths are provided, it takes its input from stdin. + +""" + +from __future__ import absolute_import, print_function, unicode_literals + +import argparse +import sys + +from chardet import __version__ +from chardet.compat import PY2 +from chardet.universaldetector import UniversalDetector + + +def description_of(lines, name='stdin'): + """ + Return a string describing the probable encoding of a file or + list of strings. + + :param lines: The lines to get the encoding of. + :type lines: Iterable of bytes + :param name: Name of file or collection of lines + :type name: str + """ + u = UniversalDetector() + for line in lines: + line = bytearray(line) + u.feed(line) + # shortcut out of the loop to save reading further - particularly useful if we read a BOM. 
+ if u.done: + break + u.close() + result = u.result + if PY2: + name = name.decode(sys.getfilesystemencoding(), 'ignore') + if result['encoding']: + return '{0}: {1} with confidence {2}'.format(name, result['encoding'], + result['confidence']) + else: + return '{0}: no result'.format(name) + + +def main(argv=None): + """ + Handles command line arguments and gets things started. + + :param argv: List of arguments, as if specified on the command-line. + If None, ``sys.argv[1:]`` is used instead. + :type argv: list of str + """ + # Get command line arguments + parser = argparse.ArgumentParser( + description="Takes one or more file paths and reports their detected \ + encodings") + parser.add_argument('input', + help='File whose encoding we would like to determine. \ + (default: stdin)', + type=argparse.FileType('rb'), nargs='*', + default=[sys.stdin if PY2 else sys.stdin.buffer]) + parser.add_argument('--version', action='version', + version='%(prog)s {0}'.format(__version__)) + args = parser.parse_args(argv) + + for f in args.input: + if f.isatty(): + print("You are running chardetect interactively. Press " + + "CTRL-D twice at the start of a blank line to signal the " + + "end of your input. If you want help, run chardetect " + + "--help\n", file=sys.stderr) + print(description_of(f, f.name)) + + +if __name__ == '__main__': + main() diff --git a/env/Lib/site-packages/chardet/codingstatemachine.py b/env/Lib/site-packages/chardet/codingstatemachine.py new file mode 100644 index 0000000..68fba44 --- /dev/null +++ b/env/Lib/site-packages/chardet/codingstatemachine.py @@ -0,0 +1,88 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging + +from .enums import MachineState + + +class CodingStateMachine(object): + """ + A state machine to verify a byte sequence for a particular encoding. For + each byte the detector receives, it will feed that byte to every active + state machine available, one byte at a time. The state machine changes its + state based on its previous state and the byte it receives. There are 3 + states in a state machine that are of interest to an auto-detector: + + START state: This is the state to start with, or a legal byte sequence + (i.e. a valid code point) for character has been identified. + + ME state: This indicates that the state machine identified a byte sequence + that is specific to the charset it is designed for and that + there is no other possible encoding which can contain this byte + sequence. This will to lead to an immediate positive answer for + the detector. + + ERROR state: This indicates the state machine identified an illegal byte + sequence for that encoding. This will lead to an immediate + negative answer for this encoding. 
Detector will exclude this + encoding from consideration from here on. + """ + def __init__(self, sm): + self._model = sm + self._curr_byte_pos = 0 + self._curr_char_len = 0 + self._curr_state = None + self.logger = logging.getLogger(__name__) + self.reset() + + def reset(self): + self._curr_state = MachineState.START + + def next_state(self, c): + # for each byte we get its class + # if it is first byte, we also get byte length + byte_class = self._model['class_table'][c] + if self._curr_state == MachineState.START: + self._curr_byte_pos = 0 + self._curr_char_len = self._model['char_len_table'][byte_class] + # from byte's class and state_table, we get its next state + curr_state = (self._curr_state * self._model['class_factor'] + + byte_class) + self._curr_state = self._model['state_table'][curr_state] + self._curr_byte_pos += 1 + return self._curr_state + + def get_current_charlen(self): + return self._curr_char_len + + def get_coding_state_machine(self): + return self._model['name'] + + @property + def language(self): + return self._model['language'] diff --git a/env/Lib/site-packages/chardet/compat.py b/env/Lib/site-packages/chardet/compat.py new file mode 100644 index 0000000..ddd7468 --- /dev/null +++ b/env/Lib/site-packages/chardet/compat.py @@ -0,0 +1,34 @@ +######################## BEGIN LICENSE BLOCK ######################## +# Contributor(s): +# Dan Blanchard +# Ian Cordasco +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import sys + + +if sys.version_info < (3, 0): + PY2 = True + PY3 = False + base_str = (str, unicode) + text_type = unicode +else: + PY2 = False + PY3 = True + base_str = (bytes, str) + text_type = str diff --git a/env/Lib/site-packages/chardet/cp949prober.py b/env/Lib/site-packages/chardet/cp949prober.py new file mode 100644 index 0000000..efd793a --- /dev/null +++ b/env/Lib/site-packages/chardet/cp949prober.py @@ -0,0 +1,49 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .chardistribution import EUCKRDistributionAnalysis +from .codingstatemachine import CodingStateMachine +from .mbcharsetprober import MultiByteCharSetProber +from .mbcssm import CP949_SM_MODEL + + +class CP949Prober(MultiByteCharSetProber): + def __init__(self): + super(CP949Prober, self).__init__() + self.coding_sm = CodingStateMachine(CP949_SM_MODEL) + # NOTE: CP949 is a superset of EUC-KR, so the distribution should be + # not different. + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "CP949" + + @property + def language(self): + return "Korean" diff --git a/env/Lib/site-packages/chardet/enums.py b/env/Lib/site-packages/chardet/enums.py new file mode 100644 index 0000000..0451207 --- /dev/null +++ b/env/Lib/site-packages/chardet/enums.py @@ -0,0 +1,76 @@ +""" +All of the Enums that are used throughout the chardet package. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + + +class InputState(object): + """ + This enum represents the different states a universal detector can be in. + """ + PURE_ASCII = 0 + ESC_ASCII = 1 + HIGH_BYTE = 2 + + +class LanguageFilter(object): + """ + This enum represents the different language filters we can apply to a + ``UniversalDetector``. + """ + CHINESE_SIMPLIFIED = 0x01 + CHINESE_TRADITIONAL = 0x02 + JAPANESE = 0x04 + KOREAN = 0x08 + NON_CJK = 0x10 + ALL = 0x1F + CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL + CJK = CHINESE | JAPANESE | KOREAN + + +class ProbingState(object): + """ + This enum represents the different states a prober can be in. 
+ """ + DETECTING = 0 + FOUND_IT = 1 + NOT_ME = 2 + + +class MachineState(object): + """ + This enum represents the different states a state machine can be in. + """ + START = 0 + ERROR = 1 + ITS_ME = 2 + + +class SequenceLikelihood(object): + """ + This enum represents the likelihood of a character following the previous one. + """ + NEGATIVE = 0 + UNLIKELY = 1 + LIKELY = 2 + POSITIVE = 3 + + @classmethod + def get_num_categories(cls): + """:returns: The number of likelihood categories in the enum.""" + return 4 + + +class CharacterCategory(object): + """ + This enum represents the different categories language models for + ``SingleByteCharsetProber`` put characters into. + + Anything less than CONTROL is considered a letter. + """ + UNDEFINED = 255 + LINE_BREAK = 254 + SYMBOL = 253 + DIGIT = 252 + CONTROL = 251 diff --git a/env/Lib/site-packages/chardet/escprober.py b/env/Lib/site-packages/chardet/escprober.py new file mode 100644 index 0000000..c70493f --- /dev/null +++ b/env/Lib/site-packages/chardet/escprober.py @@ -0,0 +1,101 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .codingstatemachine import CodingStateMachine +from .enums import LanguageFilter, ProbingState, MachineState +from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL, + ISO2022KR_SM_MODEL) + + +class EscCharSetProber(CharSetProber): + """ + This CharSetProber uses a "code scheme" approach for detecting encodings, + whereby easily recognizable escape or shift sequences are relied on to + identify these encodings. + """ + + def __init__(self, lang_filter=None): + super(EscCharSetProber, self).__init__(lang_filter=lang_filter) + self.coding_sm = [] + if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED: + self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL)) + self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL)) + if self.lang_filter & LanguageFilter.JAPANESE: + self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL)) + if self.lang_filter & LanguageFilter.KOREAN: + self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL)) + self.active_sm_count = None + self._detected_charset = None + self._detected_language = None + self._state = None + self.reset() + + def reset(self): + super(EscCharSetProber, self).reset() + for coding_sm in self.coding_sm: + if not coding_sm: + continue + coding_sm.active = True + coding_sm.reset() + self.active_sm_count = len(self.coding_sm) + self._detected_charset = None + self._detected_language = None + + @property + def charset_name(self): + return self._detected_charset + + @property + def language(self): + return self._detected_language + + def get_confidence(self): + if self._detected_charset: + return 0.99 + else: + return 0.00 + + def feed(self, byte_str): + for 
c in byte_str: + for coding_sm in self.coding_sm: + if not coding_sm or not coding_sm.active: + continue + coding_state = coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + coding_sm.active = False + self.active_sm_count -= 1 + if self.active_sm_count <= 0: + self._state = ProbingState.NOT_ME + return self.state + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + self._detected_charset = coding_sm.get_coding_state_machine() + self._detected_language = coding_sm.language + return self.state + + return self.state diff --git a/env/Lib/site-packages/chardet/escsm.py b/env/Lib/site-packages/chardet/escsm.py new file mode 100644 index 0000000..0069523 --- /dev/null +++ b/env/Lib/site-packages/chardet/escsm.py @@ -0,0 +1,246 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +HZ_CLS = ( +1,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,0,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,4,0,5,2,0, # 78 - 7f +1,1,1,1,1,1,1,1, # 80 - 87 +1,1,1,1,1,1,1,1, # 88 - 8f +1,1,1,1,1,1,1,1, # 90 - 97 +1,1,1,1,1,1,1,1, # 98 - 9f +1,1,1,1,1,1,1,1, # a0 - a7 +1,1,1,1,1,1,1,1, # a8 - af +1,1,1,1,1,1,1,1, # b0 - b7 +1,1,1,1,1,1,1,1, # b8 - bf +1,1,1,1,1,1,1,1, # c0 - c7 +1,1,1,1,1,1,1,1, # c8 - cf +1,1,1,1,1,1,1,1, # d0 - d7 +1,1,1,1,1,1,1,1, # d8 - df +1,1,1,1,1,1,1,1, # e0 - e7 +1,1,1,1,1,1,1,1, # e8 - ef +1,1,1,1,1,1,1,1, # f0 - f7 +1,1,1,1,1,1,1,1, # f8 - ff +) + +HZ_ST = ( +MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17 + 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f + 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27 + 
4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f +) + +HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +HZ_SM_MODEL = {'class_table': HZ_CLS, + 'class_factor': 6, + 'state_table': HZ_ST, + 'char_len_table': HZ_CHAR_LEN_TABLE, + 'name': "HZ-GB-2312", + 'language': 'Chinese'} + +ISO2022CN_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,3,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,4,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022CN_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 +MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f +MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f 
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27 + 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f +) + +ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS, + 'class_factor': 9, + 'state_table': ISO2022CN_ST, + 'char_len_table': ISO2022CN_CHAR_LEN_TABLE, + 'name': "ISO-2022-CN", + 'language': 'Chinese'} + +ISO2022JP_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,2,2, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,7,0,0,0, # 20 - 27 +3,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +6,0,4,0,8,0,0,0, # 40 - 47 +0,9,5,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022JP_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 
+MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47 +) + +ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS, + 'class_factor': 10, + 'state_table': ISO2022JP_ST, + 'char_len_table': ISO2022JP_CHAR_LEN_TABLE, + 'name': "ISO-2022-JP", + 'language': 'Japanese'} + +ISO2022KR_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,3,0,0,0, # 20 - 27 +0,4,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,5,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f 
+2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022KR_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27 +) + +ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS, + 'class_factor': 6, + 'state_table': ISO2022KR_ST, + 'char_len_table': ISO2022KR_CHAR_LEN_TABLE, + 'name': "ISO-2022-KR", + 'language': 'Korean'} + + diff --git a/env/Lib/site-packages/chardet/eucjpprober.py b/env/Lib/site-packages/chardet/eucjpprober.py new file mode 100644 index 0000000..20ce8f7 --- /dev/null +++ b/env/Lib/site-packages/chardet/eucjpprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. 
+# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState, MachineState +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCJPDistributionAnalysis +from .jpcntx import EUCJPContextAnalysis +from .mbcssm import EUCJP_SM_MODEL + + +class EUCJPProber(MultiByteCharSetProber): + def __init__(self): + super(EUCJPProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL) + self.distribution_analyzer = EUCJPDistributionAnalysis() + self.context_analyzer = EUCJPContextAnalysis() + self.reset() + + def reset(self): + super(EUCJPProber, self).reset() + self.context_analyzer.reset() + + @property + def charset_name(self): + return "EUC-JP" + + @property + def language(self): + return "Japanese" + + def feed(self, byte_str): + for i in range(len(byte_str)): + # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + 
self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.context_analyzer.feed(self._last_char, char_len) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.context_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/env/Lib/site-packages/chardet/euckrfreq.py b/env/Lib/site-packages/chardet/euckrfreq.py new file mode 100644 index 0000000..b68078c --- /dev/null +++ b/env/Lib/site-packages/chardet/euckrfreq.py @@ -0,0 +1,195 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Sampling from about 20M text materials include literature and computer technology + +# 128 --> 0.79 +# 256 --> 0.92 +# 512 --> 0.986 +# 1024 --> 0.99944 +# 2048 --> 0.99999 +# +# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 +# Random Distribution Ration = 512 / (2350-512) = 0.279. +# +# Typical Distribution Ratio + +EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 + +EUCKR_TABLE_SIZE = 2352 + +# Char to FreqOrder table , +EUCKR_CHAR_TO_FREQ_ORDER = ( + 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, +1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, +1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, + 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, + 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, + 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, +1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, + 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, + 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, +1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, +1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, +1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, +1403,1798,1799, 
533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, +1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, + 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, +1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, +1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, +1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, +1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, + 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, +1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, + 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, + 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, +1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, + 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, +1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, + 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, + 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893, +1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, +1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, +1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, +1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, + 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, +1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, + 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, + 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, +1935,1936,1937, 438,1164, 208, 
595,1938,1939,1940,1941,1219,1125,1942, 280, 888, +1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, +1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, +1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, +1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, +1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, + 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, + 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, + 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, +1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, + 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, +1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, + 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, + 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, +2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, + 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, + 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, +2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, +2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, +2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, + 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, + 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, +2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, + 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, +1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, 
+2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, +1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, +2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, +2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, +1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, + 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, +2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, +2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, + 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, + 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, +2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, +1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, +2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, +2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, +2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, +2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, +2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, +2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, +1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, +2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, +2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, +2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, +2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, +2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, +1301,1474, 469, 396,1016, 278, 712,2248, 321, 
442, 503, 767, 744, 941,1237,1178, +1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, +2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, +1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, +2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, +1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, + 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, +2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, + 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, +2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, + 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, +2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, +2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, + 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, +2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, +1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, + 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, +1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, +2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, +1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, +2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, + 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, +2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, +1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, +2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, 
+1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, +2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, +1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, + 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, +2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221, +2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, + 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, + 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, +1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, +1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, + 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, +2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, +2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, + 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, + 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, + 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, +2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, + 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, + 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, +2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, +2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, + 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, +2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, +1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, + 249,1075,2556,2557,2558, 466, 
743,2559,2560,2561, 92, 514, 426, 420, 526,2562, +2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, +2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, +2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, + 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, + 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, + 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, +2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, +2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, +2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, +1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, +2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, + 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 +) + diff --git a/env/Lib/site-packages/chardet/euckrprober.py b/env/Lib/site-packages/chardet/euckrprober.py new file mode 100644 index 0000000..345a060 --- /dev/null +++ b/env/Lib/site-packages/chardet/euckrprober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCKRDistributionAnalysis +from .mbcssm import EUCKR_SM_MODEL + + +class EUCKRProber(MultiByteCharSetProber): + def __init__(self): + super(EUCKRProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL) + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-KR" + + @property + def language(self): + return "Korean" diff --git a/env/Lib/site-packages/chardet/euctwfreq.py b/env/Lib/site-packages/chardet/euctwfreq.py new file mode 100644 index 0000000..ed7a995 --- /dev/null +++ b/env/Lib/site-packages/chardet/euctwfreq.py @@ -0,0 +1,387 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# EUCTW frequency table +# Converted from big5 work +# by Taiwan's Mandarin Promotion Council +# + +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +# Char to FreqOrder table , +EUCTW_TABLE_SIZE = 5376 + +EUCTW_CHAR_TO_FREQ_ORDER = ( + 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742 +3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758 +1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774 + 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790 +3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806 +4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822 +7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838 + 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854 + 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870 + 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886 +2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 
532,1716,7321, 732, # 2902 +1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918 +3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934 + 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966 +3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982 +2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998 + 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014 +3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030 +1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046 +7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062 + 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078 +7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094 +1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110 + 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126 + 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 +3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158 +3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 + 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 +2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206 +2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 + 314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 + 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 +3738,7349,2052,2356,1222,4310, 
765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 +1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 +1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 +1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 +2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 + 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 +4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 +1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 +7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 +2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 + 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 + 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 + 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 + 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 +7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 + 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 +1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 + 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 + 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 +7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 +1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 + 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 +3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, 
# 3622 +4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 +3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 + 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 + 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 +1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 +4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 +3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 +3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 +2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 +7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 +3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 +7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 +1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 +2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 +1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 + 78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 +1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 +4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 +3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 + 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 + 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 + 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 +2083,2903, 304,4097,7424, 
292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 +7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 +1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 +2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 +1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 +1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 +7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 +7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 +7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 +3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 +4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 +1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 +7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 +2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 +7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 +3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 +3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 +7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 +2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 +7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 + 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 +4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 +2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 
230,4364,2984,1846,3259,3428, # 4342 +7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 +3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 +2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 +2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 + 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 +2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 +1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 +1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 +2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 +1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 +7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 +7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 +2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 +4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 +1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 +7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 + 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 +4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 + 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 +2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 + 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 +1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 +1478, 
644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 + 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 +3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 +3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 +1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 +3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 +7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 +7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 +1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 +2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 +1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 +3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 +2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 +3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 +2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 +4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 +4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 +3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 + 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 +3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 + 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 +3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 +3929,3338,7575, 
513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 +3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 +1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 +7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 + 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 +7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 +1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 + 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 +4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 +3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 + 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 +2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 +2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 +3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 +1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 +4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 +2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 +1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 +1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 +2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 +3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 +1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 +7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 
95,1504,3946, 723,4159,3141, # 5414 +1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 +4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 +1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 + 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 +1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 +3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 +3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 +2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 +1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 +4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 + 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 +7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 +2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 +3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 +4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 + 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 +7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 +7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 +1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 +4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 +3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 +2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 
+3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 +3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 +2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 +1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 +4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 +3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 +3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 +2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 +4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 +7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 +3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 +2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 +3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 +1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 +2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 +3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 +4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 +2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 +2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 +7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 +1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 +2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 +1618,3357,2999,1886, 
944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 +3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 +4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 +2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 +3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 +3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 +2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 +4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 +2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 +3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 +4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 +7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 +3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 + 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 +1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 +4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 +1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 +4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 +7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 + 510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 +7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 +2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 +1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 
136,1468,1479, 672, # 6486 +1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 +3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 + 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 + 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 + 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 +3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 +2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 + 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 +7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 +1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 +3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 +7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 +1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 +7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 +4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 +1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 +2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 +2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 +4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 + 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 + 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 +3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 
+3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 +1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 +2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 +7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 +1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 +1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 +3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 + 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 +1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 +4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 +7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 +2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 +3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 + 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062 +1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 +2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 +2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 +7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 +7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 +7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 +2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 +2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 
+1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 +4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 +3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 +3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 +4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 +4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 +2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 +2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 +7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 +4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 +7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 +2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 +1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 +3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 +4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 +2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 + 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 +2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 +1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 +2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 +2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 +4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 
+7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 +1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 +3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 +7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 +1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 +8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 +2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 +8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 +2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 +2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 +8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 +8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 +8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 + 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 +8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 +4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 +3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 +8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 +1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 +8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 + 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 +1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 + 479,8058,8059, 
832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 +4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 +1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 +4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 +1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 + 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 +3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 +4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 +8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 + 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 +3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 + 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 +2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 +) + diff --git a/env/Lib/site-packages/chardet/euctwprober.py b/env/Lib/site-packages/chardet/euctwprober.py new file mode 100644 index 0000000..35669cc --- /dev/null +++ b/env/Lib/site-packages/chardet/euctwprober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCTWDistributionAnalysis +from .mbcssm import EUCTW_SM_MODEL + +class EUCTWProber(MultiByteCharSetProber): + def __init__(self): + super(EUCTWProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) + self.distribution_analyzer = EUCTWDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-TW" + + @property + def language(self): + return "Taiwan" diff --git a/env/Lib/site-packages/chardet/gb2312freq.py b/env/Lib/site-packages/chardet/gb2312freq.py new file mode 100644 index 0000000..697837b --- /dev/null +++ b/env/Lib/site-packages/chardet/gb2312freq.py @@ -0,0 +1,283 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# GB2312 most frequently used character table +# +# Char to FreqOrder table , from hz6763 + +# 512 --> 0.79 -- 0.79 +# 1024 --> 0.92 -- 0.13 +# 2048 --> 0.98 -- 0.06 +# 6768 --> 1.00 -- 0.02 +# +# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 +# Random Distribution Ration = 512 / (3755 - 512) = 0.157 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR + +GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 + +GB2312_TABLE_SIZE = 3760 + +GB2312_CHAR_TO_FREQ_ORDER = ( +1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, +2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, +2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, + 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, +1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, +1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, + 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, +1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 
70,3285,2729,3534,3575, +2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, +3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, + 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, +1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, + 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, +2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, + 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, +2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, +1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, +3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, + 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, +1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, + 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, +2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, +1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, +3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, +1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, +2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, +1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, + 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, +3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, +3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, + 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, +3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, + 836,1814, 
549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, +1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648, +3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, +2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, +1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, + 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, +1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, +4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, + 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, +3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, +3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, + 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, +1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, +2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, +1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, +1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, + 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, +3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, +3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, +4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, + 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, +3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, +1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, +1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, +4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 
758,1899,1371,1615, 879, 442, + 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, + 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, +3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, +1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, + 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, +1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, +2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, + 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, + 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, + 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, +3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, +4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, +3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, + 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, +2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, +2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, +2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, + 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, +2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, + 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, + 163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414, + 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, +3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, +2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, +2741, 
577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, +1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, + 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, +2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, + 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, + 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, +1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, +1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, + 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, + 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, +1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, +2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, +3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, +2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, +2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, +2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, +3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, +1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, +1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, +2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, +1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, +3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, +1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, +1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, +3665, 731, 594,2184, 319,1576, 621, 
658,2656,4299,2099,3864,1279,2071,2598,2739, + 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, +2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, +1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, +4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, +1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, +1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, +3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, +1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, + 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, + 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, +1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, + 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, +1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, +1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, + 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982, +3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, +4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, +3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, +2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, +2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, +1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, +3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, +2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, +1993,3129,3489,2689,1809,2815,1997, 
957,1855,3898,2550,3275,3057,1105,1319, 627, +1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, + 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, +2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, +2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, +3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, +4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, +3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, + 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, +3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, +2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, +1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, + 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, + 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, +3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, +4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, +2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, +1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, +1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, + 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, +1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, +3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, + 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, + 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, +1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, 
+ 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, +1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, + 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, +2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, + 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, +2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, +2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, +1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, +1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, +2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, + 819,1541, 142,2284, 44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, +1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, +1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, +2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, +2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, +3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, +1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, +4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, + 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, + 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, +3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, +1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, + 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, +3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, +1293,2631,1612, 646,3457, 839,2935, 111, 
496,2191,2847, 589,3186, 149,3994,2060, +4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, +1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, +2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, +1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, + 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, +1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, +3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, + 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, +2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, + 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, +1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, +1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, +1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, +3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, +2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, +3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, +3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, +3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, + 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, +2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, + 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, +2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, + 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, +1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, + 
475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, + 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, +1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, +3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, +3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 63,2076, 314,1881, +1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, +1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, +3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, +2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, +2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, +1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, +3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, + 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, +4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, +1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, +2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, +3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, +3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, +1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, + 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, + 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, +2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, + 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, +1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, + 386, 
645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, +1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, +1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, +1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, +1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, +1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, + 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, + 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 +) + diff --git a/env/Lib/site-packages/chardet/gb2312prober.py b/env/Lib/site-packages/chardet/gb2312prober.py new file mode 100644 index 0000000..8446d2d --- /dev/null +++ b/env/Lib/site-packages/chardet/gb2312prober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import GB2312DistributionAnalysis +from .mbcssm import GB2312_SM_MODEL + +class GB2312Prober(MultiByteCharSetProber): + def __init__(self): + super(GB2312Prober, self).__init__() + self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) + self.distribution_analyzer = GB2312DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "GB2312" + + @property + def language(self): + return "Chinese" diff --git a/env/Lib/site-packages/chardet/hebrewprober.py b/env/Lib/site-packages/chardet/hebrewprober.py new file mode 100644 index 0000000..b0e1bf4 --- /dev/null +++ b/env/Lib/site-packages/chardet/hebrewprober.py @@ -0,0 +1,292 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Shy Shalom +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +# This prober doesn't actually recognize a language or a charset. +# It is a helper prober for the use of the Hebrew model probers + +### General ideas of the Hebrew charset recognition ### +# +# Four main charsets exist in Hebrew: +# "ISO-8859-8" - Visual Hebrew +# "windows-1255" - Logical Hebrew +# "ISO-8859-8-I" - Logical Hebrew +# "x-mac-hebrew" - ?? Logical Hebrew ?? +# +# Both "ISO" charsets use a completely identical set of code points, whereas +# "windows-1255" and "x-mac-hebrew" are two different proper supersets of +# these code points. windows-1255 defines additional characters in the range +# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific +# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6. +# x-mac-hebrew defines similar additional code points but with a different +# mapping. +# +# As far as an average Hebrew text with no diacritics is concerned, all four +# charsets are identical with respect to code points. Meaning that for the +# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters +# (including final letters). +# +# The dominant difference between these charsets is their directionality. +# "Visual" directionality means that the text is ordered as if the renderer is +# not aware of a BIDI rendering algorithm. The renderer sees the text and +# draws it from left to right. The text itself when ordered naturally is read +# backwards. 
A buffer of Visual Hebrew generally looks like so: +# "[last word of first line spelled backwards] [whole line ordered backwards +# and spelled backwards] [first word of first line spelled backwards] +# [end of line] [last word of second line] ... etc' " +# adding punctuation marks, numbers and English text to visual text is +# naturally also "visual" and from left to right. +# +# "Logical" directionality means the text is ordered "naturally" according to +# the order it is read. It is the responsibility of the renderer to display +# the text from right to left. A BIDI algorithm is used to place general +# punctuation marks, numbers and English text in the text. +# +# Texts in x-mac-hebrew are almost impossible to find on the Internet. From +# what little evidence I could find, it seems that its general directionality +# is Logical. +# +# To sum up all of the above, the Hebrew probing mechanism knows about two +# charsets: +# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are +# backwards while line order is natural. For charset recognition purposes +# the line order is unimportant (In fact, for this implementation, even +# word order is unimportant). +# Logical Hebrew - "windows-1255" - normal, naturally ordered text. +# +# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be +# specifically identified. +# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew +# that contain special punctuation marks or diacritics is displayed with +# some unconverted characters showing as question marks. This problem might +# be corrected using another model prober for x-mac-hebrew. Due to the fact +# that x-mac-hebrew texts are so rare, writing another model prober isn't +# worth the effort and performance hit. +# +#### The Prober #### +# +# The prober is divided between two SBCharSetProbers and a HebrewProber, +# all of which are managed, created, fed data, inquired and deleted by the +# SBCSGroupProber. 
The two SBCharSetProbers identify that the text is in +# fact some kind of Hebrew, Logical or Visual. The final decision about which +# one is it is made by the HebrewProber by combining final-letter scores +# with the scores of the two SBCharSetProbers to produce a final answer. +# +# The SBCSGroupProber is responsible for stripping the original text of HTML +# tags, English characters, numbers, low-ASCII punctuation characters, spaces +# and new lines. It reduces any sequence of such characters to a single space. +# The buffer fed to each prober in the SBCS group prober is pure text in +# high-ASCII. +# The two SBCharSetProbers (model probers) share the same language model: +# Win1255Model. +# The first SBCharSetProber uses the model normally as any other +# SBCharSetProber does, to recognize windows-1255, upon which this model was +# built. The second SBCharSetProber is told to make the pair-of-letter +# lookup in the language model backwards. This in practice exactly simulates +# a visual Hebrew model using the windows-1255 logical Hebrew model. +# +# The HebrewProber is not using any language model. All it does is look for +# final-letter evidence suggesting the text is either logical Hebrew or visual +# Hebrew. Disjointed from the model probers, the results of the HebrewProber +# alone are meaningless. HebrewProber always returns 0.00 as confidence +# since it never identifies a charset by itself. Instead, the pointer to the +# HebrewProber is passed to the model probers as a helper "Name Prober". +# When the Group prober receives a positive identification from any prober, +# it asks for the name of the charset identified. If the prober queried is a +# Hebrew model prober, the model prober forwards the call to the +# HebrewProber to make the final decision. In the HebrewProber, the +# decision is made according to the final-letters scores maintained and Both +# model probers scores. 
The answer is returned in the form of the name of the +# charset identified, either "windows-1255" or "ISO-8859-8". + +class HebrewProber(CharSetProber): + # windows-1255 / ISO-8859-8 code points of interest + FINAL_KAF = 0xea + NORMAL_KAF = 0xeb + FINAL_MEM = 0xed + NORMAL_MEM = 0xee + FINAL_NUN = 0xef + NORMAL_NUN = 0xf0 + FINAL_PE = 0xf3 + NORMAL_PE = 0xf4 + FINAL_TSADI = 0xf5 + NORMAL_TSADI = 0xf6 + + # Minimum Visual vs Logical final letter score difference. + # If the difference is below this, don't rely solely on the final letter score + # distance. + MIN_FINAL_CHAR_DISTANCE = 5 + + # Minimum Visual vs Logical model score difference. + # If the difference is below this, don't rely at all on the model score + # distance. + MIN_MODEL_DISTANCE = 0.01 + + VISUAL_HEBREW_NAME = "ISO-8859-8" + LOGICAL_HEBREW_NAME = "windows-1255" + + def __init__(self): + super(HebrewProber, self).__init__() + self._final_char_logical_score = None + self._final_char_visual_score = None + self._prev = None + self._before_prev = None + self._logical_prober = None + self._visual_prober = None + self.reset() + + def reset(self): + self._final_char_logical_score = 0 + self._final_char_visual_score = 0 + # The two last characters seen in the previous buffer, + # mPrev and mBeforePrev are initialized to space in order to simulate + # a word delimiter at the beginning of the data + self._prev = ' ' + self._before_prev = ' ' + # These probers are owned by the group prober. + + def set_model_probers(self, logicalProber, visualProber): + self._logical_prober = logicalProber + self._visual_prober = visualProber + + def is_final(self, c): + return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN, + self.FINAL_PE, self.FINAL_TSADI] + + def is_non_final(self, c): + # The normal Tsadi is not a good Non-Final letter due to words like + # 'lechotet' (to chat) containing an apostrophe after the tsadi. 
This + # apostrophe is converted to a space in FilterWithoutEnglishLetters + # causing the Non-Final tsadi to appear at an end of a word even + # though this is not the case in the original text. + # The letters Pe and Kaf rarely display a related behavior of not being + # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak' + # for example legally end with a Non-Final Pe or Kaf. However, the + # benefit of these letters as Non-Final letters outweighs the damage + # since these words are quite rare. + return c in [self.NORMAL_KAF, self.NORMAL_MEM, + self.NORMAL_NUN, self.NORMAL_PE] + + def feed(self, byte_str): + # Final letter analysis for logical-visual decision. + # Look for evidence that the received buffer is either logical Hebrew + # or visual Hebrew. + # The following cases are checked: + # 1) A word longer than 1 letter, ending with a final letter. This is + # an indication that the text is laid out "naturally" since the + # final letter really appears at the end. +1 for logical score. + # 2) A word longer than 1 letter, ending with a Non-Final letter. In + # normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi, + # should not end with the Non-Final form of that letter. Exceptions + # to this rule are mentioned above in isNonFinal(). This is an + # indication that the text is laid out backwards. +1 for visual + # score + # 3) A word longer than 1 letter, starting with a final letter. Final + # letters should not appear at the beginning of a word. This is an + # indication that the text is laid out backwards. +1 for visual + # score. + # + # The visual score and logical score are accumulated throughout the + # text and are finally checked against each other in GetCharSetName(). + # No checking for final letters in the middle of words is done since + # that case is not an indication for either Logical or Visual text. 
+ # + # We automatically filter out all 7-bit characters (replace them with + # spaces) so the word boundary detection works properly. [MAP] + + if self.state == ProbingState.NOT_ME: + # Both model probers say it's not them. No reason to continue. + return ProbingState.NOT_ME + + byte_str = self.filter_high_byte_only(byte_str) + + for cur in byte_str: + if cur == ' ': + # We stand on a space - a word just ended + if self._before_prev != ' ': + # next-to-last char was not a space so self._prev is not a + # 1 letter word + if self.is_final(self._prev): + # case (1) [-2:not space][-1:final letter][cur:space] + self._final_char_logical_score += 1 + elif self.is_non_final(self._prev): + # case (2) [-2:not space][-1:Non-Final letter][ + # cur:space] + self._final_char_visual_score += 1 + else: + # Not standing on a space + if ((self._before_prev == ' ') and + (self.is_final(self._prev)) and (cur != ' ')): + # case (3) [-2:space][-1:final letter][cur:not space] + self._final_char_visual_score += 1 + self._before_prev = self._prev + self._prev = cur + + # Forever detecting, till the end or until both model probers return + # ProbingState.NOT_ME (handled above) + return ProbingState.DETECTING + + @property + def charset_name(self): + # Make the decision: is it Logical or Visual? + # If the final letter score distance is dominant enough, rely on it. + finalsub = self._final_char_logical_score - self._final_char_visual_score + if finalsub >= self.MIN_FINAL_CHAR_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # It's not dominant enough, try to rely on the model scores instead. + modelsub = (self._logical_prober.get_confidence() + - self._visual_prober.get_confidence()) + if modelsub > self.MIN_MODEL_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if modelsub < -self.MIN_MODEL_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # Still no good, back to final letter distance, maybe it'll save the + # day. 
+ if finalsub < 0.0: + return self.VISUAL_HEBREW_NAME + + # (finalsub > 0 - Logical) or (don't know what to do) default to + # Logical. + return self.LOGICAL_HEBREW_NAME + + @property + def language(self): + return 'Hebrew' + + @property + def state(self): + # Remain active as long as any of the model probers are active. + if (self._logical_prober.state == ProbingState.NOT_ME) and \ + (self._visual_prober.state == ProbingState.NOT_ME): + return ProbingState.NOT_ME + return ProbingState.DETECTING diff --git a/env/Lib/site-packages/chardet/jisfreq.py b/env/Lib/site-packages/chardet/jisfreq.py new file mode 100644 index 0000000..83fc082 --- /dev/null +++ b/env/Lib/site-packages/chardet/jisfreq.py @@ -0,0 +1,325 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Sampling from about 20M text materials include literature and computer technology +# +# Japanese frequency table, applied to both S-JIS and EUC-JP +# They are sorted in order. + +# 128 --> 0.77094 +# 256 --> 0.85710 +# 512 --> 0.92635 +# 1024 --> 0.97130 +# 2048 --> 0.99431 +# +# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 +# Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191 +# +# Typical Distribution Ratio, 25% of IDR + +JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 + +# Char to FreqOrder table , +JIS_TABLE_SIZE = 4368 + +JIS_CHAR_TO_FREQ_ORDER = ( + 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 +3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 +1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 +2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 +2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 +5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 +1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 +5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 +5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 +5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 +5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 +5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 +5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 
+1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 +1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 +1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 +2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 +3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 +3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 + 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 + 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 +1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 + 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 +5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 + 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 + 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 + 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 + 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 + 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 +5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 +5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 +5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 +4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 +5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 +5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 +5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 
+5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 +5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 +5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 +5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 +5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 +5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 +3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 +5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 +5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 +5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 +5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 +5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 +5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 +5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 +5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 +5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 +5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 +5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 +5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 +5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 +5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 +5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 
+5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 +5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 +5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 +5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 +5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 +5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 +5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 +5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 +5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 +5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 +5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 +5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 +5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 +5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 +5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 +5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 +5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 +5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 +5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 +5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 +5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 +5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 
+5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 +6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 +6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 +6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 +6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 +6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 +6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 +6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 +6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 +4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 + 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 + 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 +1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 +1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 + 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 +3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 +3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 + 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 +3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 +3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 + 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 +2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 + 277, 200,1476,1165,1068, 
224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 +3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 +1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 + 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 +1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 + 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 +2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 +2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 +2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 +2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 +1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 +1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 +1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 +1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 +2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872 +1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 +2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 +1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 +1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 +1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 +1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 +1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 +1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 
693,1051,1028,1207,3076, # 2000 + 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 + 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 +1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 +2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 +2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 +2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 +3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 +3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 + 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 +3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 +1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 + 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 +2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 +1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 + 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 +3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 +4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 +2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 +1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 +2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 +1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 + 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 + 178, 
729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 +1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 +2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 +2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 +2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 +3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 +1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 +2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 + 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 + 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512 + 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 +1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 +2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 + 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 +1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 +1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 + 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 +1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 +1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 +1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 + 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 +2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 + 278, 236,1417,3388,3314,3174, 757,1869, 
107,3530,6145,1194, 623,2262, 207,1253, # 2720 +2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 +3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 +2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 +1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 +6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 +1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 +2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 +1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 + 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 + 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 +3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 +3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 +1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 +1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 +1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 +1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 + 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 + 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 +2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 + 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 +3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 +2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 
3072 + 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 +1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 +2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 + 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 +1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152 + 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 +4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 +2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 +1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 + 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 +1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 +2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 + 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 +6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 +1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 +1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 +2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 +3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 + 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 +3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 +1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 + 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 +1399,1946,3006,1300,2360,3324, 588, 
736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 + 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 +3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 + 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 +2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 + 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 +4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 +2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 +1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 +1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 +1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 + 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 +1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 +3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 +1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 +3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 + 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 + 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 + 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 +2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 +1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 + 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 +1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 
904,3618,3537, # 3792 + 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 +1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 + 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 + 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 + 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 +1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 +1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 +2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 +4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 + 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 +1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 + 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 +1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 +3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 +1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 +2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 +2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 +1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 +1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 +2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 + 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 +2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 +1411,2135,1322,4357, 
240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 +1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 +1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 +1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 +3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 +2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 +2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 + 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 +3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 +3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 +1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 +2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 +1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 +2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 +) + + diff --git a/env/Lib/site-packages/chardet/jpcntx.py b/env/Lib/site-packages/chardet/jpcntx.py new file mode 100644 index 0000000..20044e4 --- /dev/null +++ b/env/Lib/site-packages/chardet/jpcntx.py @@ -0,0 +1,233 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + + +# This is hiragana 2-char sequence table, the number in each cell represents its frequency category +jp2CharContext = ( +(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), +(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), +(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), +(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), 
+(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), +(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), +(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), +(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), +(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), +(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), +(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), 
+(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), +(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), +(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), +(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), +(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), +(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), +(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), +(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), +(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), +(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), +(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), 
+(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), +(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), +(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), +(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), +(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), +(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), +(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), +(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), +(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), +(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), +(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), 
+(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), +(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), +(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), +(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), +(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), +(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), +(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), +(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), +(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), +(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), +(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), 
+(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), +(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), +(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), +(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), +(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), +(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), +(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), +(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), +(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), +(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), +(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), 
+(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), +(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), +(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), +(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), +(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), +(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), +(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), +(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), +(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), +(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), +(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), 
+(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), +(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), +(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), +(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), +(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), +(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1), +(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2), +(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3), 
+(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1), +) + +class JapaneseContextAnalysis(object): + NUM_OF_CATEGORY = 6 + DONT_KNOW = -1 + ENOUGH_REL_THRESHOLD = 100 + MAX_REL_THRESHOLD = 1000 + MINIMUM_DATA_THRESHOLD = 4 + + def __init__(self): + self._total_rel = None + self._rel_sample = None + self._need_to_skip_char_num = None + self._last_char_order = None + self._done = None + self.reset() + + def reset(self): + self._total_rel = 0 # total sequence received + # category counters, each integer counts sequence in its category + self._rel_sample = [0] * self.NUM_OF_CATEGORY + # if last byte in current buffer is not the last byte of a character, + # we need to know how many bytes to skip in next buffer + self._need_to_skip_char_num = 0 + self._last_char_order = -1 # The order of previous char + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + + def feed(self, byte_str, num_bytes): + if self._done: + return + + # The buffer we got is byte oriented, and a character may span in more than one + # buffers. In case the last one or two byte in last buffer is not + # complete, we record how many byte needed to complete that character + # and skip these bytes here. We can choose to record those bytes as + # well and analyse the character once it is complete, but since a + # character will not make much difference, by simply skipping + # this character will simply our logic and improve performance. 
+ i = self._need_to_skip_char_num + while i < num_bytes: + order, char_len = self.get_order(byte_str[i:i + 2]) + i += char_len + if i > num_bytes: + self._need_to_skip_char_num = i - num_bytes + self._last_char_order = -1 + else: + if (order != -1) and (self._last_char_order != -1): + self._total_rel += 1 + if self._total_rel > self.MAX_REL_THRESHOLD: + self._done = True + break + self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1 + self._last_char_order = order + + def got_enough_data(self): + return self._total_rel > self.ENOUGH_REL_THRESHOLD + + def get_confidence(self): + # This is just one way to calculate confidence. It works well for me. + if self._total_rel > self.MINIMUM_DATA_THRESHOLD: + return (self._total_rel - self._rel_sample[0]) / self._total_rel + else: + return self.DONT_KNOW + + def get_order(self, byte_str): + return -1, 1 + +class SJISContextAnalysis(JapaneseContextAnalysis): + def __init__(self): + super(SJISContextAnalysis, self).__init__() + self._charset_name = "SHIFT_JIS" + + @property + def charset_name(self): + return self._charset_name + + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC): + char_len = 2 + if (first_char == 0x87) or (0xFA <= first_char <= 0xFC): + self._charset_name = "CP932" + else: + char_len = 1 + + # return its order if it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 202) and (0x9F <= second_char <= 0xF1): + return second_char - 0x9F, char_len + + return -1, char_len + +class EUCJPContextAnalysis(JapaneseContextAnalysis): + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE): + char_len = 2 + elif first_char == 0x8F: + char_len = 3 + else: + char_len = 1 + + # return its order if 
it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3): + return second_char - 0xA1, char_len + + return -1, char_len + + diff --git a/env/Lib/site-packages/chardet/langbulgarianmodel.py b/env/Lib/site-packages/chardet/langbulgarianmodel.py new file mode 100644 index 0000000..2aa4fb2 --- /dev/null +++ b/env/Lib/site-packages/chardet/langbulgarianmodel.py @@ -0,0 +1,228 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +# this table is modified base on win1251BulgarianCharToOrderMap, so +# only number <64 is sure valid + +Latin5_BulgarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 +110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 +253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 +116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 +194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80 +210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90 + 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0 + 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0 + 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0 + 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0 + 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0 + 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0 +) + +win1251BulgarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 +110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 +253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 +116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 +206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80 +221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90 + 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0 + 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0 + 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0 + 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0 + 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0 + 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0 +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 96.9392% +# first 1024 sequences:3.0618% +# rest sequences: 0.2992% +# negative sequences: 0.0020% +BulgarianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2, +3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1, +0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0, +0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, 
+3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0, +0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0, +0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0, +0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 
+3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3, +2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1, +3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2, +1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0, +3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1, +1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0, +2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2, +2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0, +3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2, +1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0, +2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2, +2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0, +3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2, +1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0, 
+2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2, +2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0, +2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2, +1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0, +2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2, +1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0, +3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2, +1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0, +3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1, +1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0, +2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1, +1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0, +2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2, +1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0, +2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1, +1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, +1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2, +1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1, +2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2, +1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0, +2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2, +1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1, +0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2, +1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 
+2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1, +1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0, +1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1, +0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1, +0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, +0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0, +1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, +1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1, +1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, +1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +) + +Latin5BulgarianModel = { + 'char_to_order_map': Latin5_BulgarianCharToOrderMap, + 'precedence_matrix': BulgarianLangModel, + 'typical_positive_ratio': 0.969392, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-5", + 'language': 'Bulgairan', +} + 
+Win1251BulgarianModel = { + 'char_to_order_map': win1251BulgarianCharToOrderMap, + 'precedence_matrix': BulgarianLangModel, + 'typical_positive_ratio': 0.969392, + 'keep_english_letter': False, + 'charset_name': "windows-1251", + 'language': 'Bulgarian', +} diff --git a/env/Lib/site-packages/chardet/langcyrillicmodel.py b/env/Lib/site-packages/chardet/langcyrillicmodel.py new file mode 100644 index 0000000..e5f9a1f --- /dev/null +++ b/env/Lib/site-packages/chardet/langcyrillicmodel.py @@ -0,0 +1,333 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# KOI8-R language model +# Character Mapping Table: +KOI8R_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 +223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 +238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 + 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 + 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 + 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 + 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 +) + +win1251_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 
76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, +239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +) + +latin5_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, +) + +macCyrillic_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, +239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, +) + +IBM855_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, +206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, + 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, +220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, +230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, + 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, + 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 
61,249, +250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, +) + +IBM866_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 97.6601% +# first 1024 sequences: 2.3389% +# rest sequences: 0.1237% +# negative sequences: 0.0009% +RussianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, +3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, +0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, 
+0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, 
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, +1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, +1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, +2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, +1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, +3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, +1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, +2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, 
+1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, +1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, +1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, +2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, +1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, +3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, +1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, +2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, +1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, +2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, +1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, +1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, +1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, +3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, +2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, +3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, +1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, +1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, +0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, +1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, +1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, +0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, +1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, +2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, +1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, 
+1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, +2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, +1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, +0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, +2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, +1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, +1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, +0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, +0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, +0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, +0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, +0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, +0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, +2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, +0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, +) + +Koi8rModel = { + 'char_to_order_map': KOI8R_char_to_order_map, + 
'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "KOI8-R", + 'language': 'Russian', +} + +Win1251CyrillicModel = { + 'char_to_order_map': win1251_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "windows-1251", + 'language': 'Russian', +} + +Latin5CyrillicModel = { + 'char_to_order_map': latin5_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-5", + 'language': 'Russian', +} + +MacCyrillicModel = { + 'char_to_order_map': macCyrillic_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "MacCyrillic", + 'language': 'Russian', +} + +Ibm866Model = { + 'char_to_order_map': IBM866_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "IBM866", + 'language': 'Russian', +} + +Ibm855Model = { + 'char_to_order_map': IBM855_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "IBM855", + 'language': 'Russian', +} diff --git a/env/Lib/site-packages/chardet/langgreekmodel.py b/env/Lib/site-packages/chardet/langgreekmodel.py new file mode 100644 index 0000000..5332221 --- /dev/null +++ b/env/Lib/site-packages/chardet/langgreekmodel.py @@ -0,0 +1,225 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin7_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 + 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 +253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 + 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 +253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 +253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0 +110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 
49, 59, 39, # c0 + 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 +124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 + 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 +) + +win1253_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 + 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 +253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 + 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 +253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 +253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0 +110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 + 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 +124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 + 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 98.2851% +# first 1024 sequences:1.7001% +# rest sequences: 0.0359% +# negative sequences: 0.0148% +GreekLangModel = ( +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0, +3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, 
+0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0, +2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0, +0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0, +2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0, +2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0, +0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0, +2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0, +0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0, +3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0, +3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0, +2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0, +2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0, +0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0, +0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0, +0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2, +0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0, +0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2, +0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0, +0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2, +0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2, +0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0, +0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2, +0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0, +0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0, +0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0, +0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0, +0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2, +0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2, +0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0, 
+0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2, +0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2, +0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0, +0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1, +0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2, +0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2, +0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2, +0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0, +0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0, +0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1, +0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0, +0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0, +0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +Latin7GreekModel = { + 'char_to_order_map': Latin7_char_to_order_map, + 'precedence_matrix': GreekLangModel, + 'typical_positive_ratio': 0.982851, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-7", + 'language': 'Greek', +} + +Win1253GreekModel = { + 'char_to_order_map': win1253_char_to_order_map, + 'precedence_matrix': GreekLangModel, + 'typical_positive_ratio': 0.982851, + 'keep_english_letter': False, + 'charset_name': "windows-1253", + 'language': 'Greek', +} diff --git a/env/Lib/site-packages/chardet/langhebrewmodel.py b/env/Lib/site-packages/chardet/langhebrewmodel.py new file mode 100644 index 0000000..58f4c87 --- /dev/null +++ b/env/Lib/site-packages/chardet/langhebrewmodel.py @@ -0,0 +1,200 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Simon Montagu +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Shoshannah Forbes - original C code (?) +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Windows-1255 language model +# Character Mapping Table: +WIN1255_CHAR_TO_ORDER_MAP = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40 + 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50 +253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60 + 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70 +124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214, +215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221, + 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227, +106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234, + 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237, +238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250, + 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23, + 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 98.4004% +# first 1024 sequences: 1.5981% +# rest sequences: 0.087% +# negative sequences: 0.0015% +HEBREW_LANG_MODEL = ( +0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0, +3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1, 
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2, +1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2, +1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3, +1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2, +1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2, +1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2, +0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2, +0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2, +1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2, +0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1, +0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0, +0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2, +0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2, +0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2, +0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2, +0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1, +0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2, +0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2, +0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2, +0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2, +0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0, +1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2, +0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3, +0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0, +0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0, +0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, +0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0, +2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0, +0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0, +0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1, +1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1, +0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1, +2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1, +1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1, +2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1, +2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0, +0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1, 
+1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1, +0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0, +) + +Win1255HebrewModel = { + 'char_to_order_map': WIN1255_CHAR_TO_ORDER_MAP, + 'precedence_matrix': HEBREW_LANG_MODEL, + 'typical_positive_ratio': 0.984004, + 'keep_english_letter': False, + 'charset_name': "windows-1255", + 'language': 'Hebrew', +} diff --git a/env/Lib/site-packages/chardet/langhungarianmodel.py b/env/Lib/site-packages/chardet/langhungarianmodel.py new file mode 100644 index 0000000..bb7c095 --- /dev/null +++ b/env/Lib/site-packages/chardet/langhungarianmodel.py @@ -0,0 +1,225 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin2_HungarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, + 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, +253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, + 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, +159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174, +175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190, +191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205, + 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, +221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231, +232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241, + 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85, +245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253, +) + +win1250HungarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, + 46, 72, 43, 33, 
37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, +253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, + 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, +161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176, +177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190, +191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205, + 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, +221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231, +232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241, + 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87, +245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 94.7368% +# first 1024 sequences:5.2623% +# rest sequences: 0.8894% +# negative sequences: 0.0009% +HungarianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, +3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2, +3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0, +3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3, +0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2, +0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 
+3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 
+3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0, +1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0, +1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0, +1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1, +3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1, +2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1, +2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1, +2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1, +2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0, +2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, +3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1, +2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1, +2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1, +2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1, 
+1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1, +1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1, +3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0, +1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1, +1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1, +2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1, +2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0, +2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1, +3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1, +2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1, +1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0, +1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0, +2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1, +2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1, +1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0, +1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1, +2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0, +1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0, +1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0, +2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1, +2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1, +2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, +1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1, +1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1, +1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0, +0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0, +2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1, +2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1, 
+1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1, +2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1, +1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0, +1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0, +2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0, +2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1, +2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0, +1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0, +2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0, +0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0, +0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +) + +Latin2HungarianModel = { + 'char_to_order_map': Latin2_HungarianCharToOrderMap, + 'precedence_matrix': HungarianLangModel, + 'typical_positive_ratio': 0.947368, + 'keep_english_letter': True, + 'charset_name': "ISO-8859-2", + 'language': 'Hungarian', +} + +Win1250HungarianModel = { + 'char_to_order_map': win1250HungarianCharToOrderMap, + 'precedence_matrix': HungarianLangModel, + 'typical_positive_ratio': 0.947368, + 'keep_english_letter': True, + 'charset_name': "windows-1250", + 'language': 'Hungarian', +} diff --git 
a/env/Lib/site-packages/chardet/langthaimodel.py b/env/Lib/site-packages/chardet/langthaimodel.py new file mode 100644 index 0000000..15f94c2 --- /dev/null +++ b/env/Lib/site-packages/chardet/langthaimodel.py @@ -0,0 +1,199 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# The following result for thai was collected from a limited sample (1M). 
+ +# Character Mapping Table: +TIS620CharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40 +188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50 +253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60 + 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70 +209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222, +223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235, +236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57, + 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54, + 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63, + 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244, + 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247, + 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 92.6386% +# first 1024 sequences:7.3177% +# rest sequences: 1.0230% +# negative sequences: 0.0436% +ThaiLangModel = ( +0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3, +0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2, +3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3, +0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1, +3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2, +3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1, +3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2, +3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1, +3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1, +3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0, 
+3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1, +2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1, +3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1, +0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1, +0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2, +1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0, +3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3, +3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0, +1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2, +0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3, +0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0, +3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1, +2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0, +3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2, +0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2, +3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, +3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0, +2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2, +3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1, +2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1, +3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0, +3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1, +3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 
+3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1, +3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1, +1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2, +0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3, +0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1, +3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0, +3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1, +1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0, +3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1, +3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2, +0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0, +0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0, +1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1, +1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1, +3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1, +0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0, +3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0, +0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1, +0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0, +0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1, +0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1, +0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0, +0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1, +0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0, +3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0, +0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0, +0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0, +3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1, +2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1, +0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0, +3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0, +1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0, +1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0, +1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +TIS620ThaiModel = { + 'char_to_order_map': TIS620CharToOrderMap, + 'precedence_matrix': ThaiLangModel, + 'typical_positive_ratio': 0.926386, + 
'keep_english_letter': False, + 'charset_name': "TIS-620", + 'language': 'Thai', +} diff --git a/env/Lib/site-packages/chardet/langturkishmodel.py b/env/Lib/site-packages/chardet/langturkishmodel.py new file mode 100644 index 0000000..a427a45 --- /dev/null +++ b/env/Lib/site-packages/chardet/langturkishmodel.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Özgür Baskın - Turkish Language Model +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin5_TurkishCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255, 23, 37, 47, 39, 29, 52, 36, 45, 53, 60, 16, 49, 20, 46, 42, + 48, 69, 44, 35, 31, 51, 38, 62, 65, 43, 56,255,255,255,255,255, +255, 1, 21, 28, 12, 2, 18, 27, 25, 3, 24, 10, 5, 13, 4, 15, + 26, 64, 7, 8, 9, 14, 32, 57, 58, 11, 22,255,255,255,255,255, +180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165, +164,163,162,161,160,159,101,158,157,156,155,154,153,152,151,106, +150,149,148,147,146,145,144,100,143,142,141,140,139,138,137,136, + 94, 80, 93,135,105,134,133, 63,132,131,130,129,128,127,126,125, +124,104, 73, 99, 79, 85,123, 54,122, 98, 92,121,120, 91,103,119, + 68,118,117, 97,116,115, 50, 90,114,113,112,111, 55, 41, 40, 86, + 89, 70, 59, 78, 71, 82, 88, 33, 77, 66, 84, 83,110, 75, 61, 96, + 30, 67,109, 74, 87,102, 34, 95, 81,108, 76, 72, 17, 6, 19,107, +) + +TurkishLangModel = ( +3,2,3,3,3,1,3,3,3,3,3,3,3,3,2,1,1,3,3,1,3,3,0,3,3,3,3,3,0,3,1,3, +3,2,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, +3,2,2,3,3,0,3,3,3,3,3,3,3,2,3,1,0,3,3,1,3,3,0,3,3,3,3,3,0,3,0,3, +3,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,0,1,0,1, +3,3,2,3,3,0,3,3,3,3,3,3,3,2,3,1,1,3,3,0,3,3,1,2,3,3,3,3,0,3,0,3, +3,1,1,0,0,0,1,0,0,0,0,1,1,0,1,2,1,0,0,0,1,0,0,0,0,2,0,0,0,0,0,1, 
+3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,1,3,3,2,0,3,2,1,2,2,1,3,3,0,0,0,2, +2,2,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1, +3,3,3,2,3,3,1,2,3,3,3,3,3,3,3,1,3,2,1,0,3,2,0,1,2,3,3,2,1,0,0,2, +2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0, +1,0,1,3,3,1,3,3,3,3,3,3,3,1,2,0,0,2,3,0,2,3,0,0,2,2,2,3,0,3,0,1, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,0,3,2,0,2,3,2,3,3,1,0,0,2, +3,2,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,2,0,0,1, +3,3,3,2,3,3,2,3,3,3,3,2,3,3,3,0,3,3,0,0,2,1,0,0,2,3,2,2,0,0,0,2, +2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,2,0,0,1, +3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,0,1,3,2,1,1,3,2,3,2,1,0,0,2, +2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0, +3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,2,0,2,3,0,0,2,2,2,2,0,0,0,2, +3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, +3,3,3,3,3,3,3,2,2,2,2,3,2,3,3,0,3,3,1,1,2,2,0,0,2,2,3,2,0,0,1,3, +0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1, +3,3,3,2,3,3,3,2,1,2,2,3,2,3,3,0,3,2,0,0,1,1,0,1,1,2,1,2,0,0,0,1, +0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0, +3,3,3,2,3,3,2,3,2,2,2,3,3,3,3,1,3,1,1,0,3,2,1,1,3,3,2,3,1,0,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1, +3,2,2,3,3,0,3,3,3,3,3,3,3,2,2,1,0,3,3,1,3,3,0,1,3,3,2,3,0,3,0,3, +2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +2,2,2,3,3,0,3,3,3,3,3,3,3,3,3,0,0,3,2,0,3,3,0,3,2,3,3,3,0,3,1,3, +2,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, +3,3,3,1,2,3,3,1,0,0,1,0,0,3,3,2,3,0,0,2,0,0,2,0,2,0,0,0,2,0,2,0, +0,3,1,0,1,0,0,0,2,2,1,0,1,1,2,1,2,2,2,0,2,1,1,0,0,0,2,0,0,0,0,0, +1,2,1,3,3,0,3,3,3,3,3,2,3,0,0,0,0,2,3,0,2,3,1,0,2,3,1,3,0,3,0,2, +3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,3,3,2,2,3,2,2,0,1,2,3,0,1,2,1,0,1,0,0,0,1,0,2,2,0,0,0,1, +1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0, 
+3,3,3,1,3,3,1,1,3,3,1,1,3,3,1,0,2,1,2,0,2,1,0,0,1,1,2,1,0,0,0,2, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,0,2,1,3,0,0,2,0,0,3,3,0,3,0,0,1,0,1,2,0,0,1,1,2,2,0,1,0, +0,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,1,2,2,1,2,0,1,0,0,0,0,0,0,1,0,0, +3,3,3,2,3,2,3,3,0,2,2,2,3,3,3,0,3,0,0,0,2,2,0,1,2,1,1,1,0,0,0,1, +0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +3,3,3,3,3,3,2,1,2,2,3,3,3,3,2,0,2,0,0,0,2,2,0,0,2,1,3,3,0,0,1,1, +1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0, +1,1,2,3,3,0,3,3,3,3,3,3,2,2,0,2,0,2,3,2,3,2,2,2,2,2,2,2,1,3,2,3, +2,0,2,1,2,2,2,2,1,1,2,2,1,2,2,1,2,0,0,2,1,1,0,2,1,0,0,1,0,0,0,1, +2,3,3,1,1,1,0,1,1,1,2,3,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0, +0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,2,3,2,2,1,3,3,3,0,2,1,2,0,2,1,0,0,1,1,1,1,1,0,0,1, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, +3,3,3,2,3,3,3,3,3,2,3,1,2,3,3,1,2,0,0,0,0,0,0,0,3,2,1,1,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +3,3,3,2,2,3,3,2,1,1,1,1,1,3,3,0,3,1,0,0,1,1,0,0,3,1,2,1,0,0,0,0, +0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0, +3,3,3,2,2,3,2,2,2,3,2,1,1,3,3,0,3,0,0,0,0,1,0,0,3,1,1,2,0,0,0,1, +1,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,1,3,3,0,3,3,3,3,3,2,2,2,1,2,0,2,1,2,2,1,1,0,1,2,2,2,2,2,2,2, +0,0,2,1,2,1,2,1,0,1,1,3,1,2,1,1,2,0,0,2,0,1,0,1,0,1,0,0,0,1,0,1, +3,3,3,1,3,3,3,0,1,1,0,2,2,3,1,0,3,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,2,2,1,0,0,1,0,0,3,3,1,3,0,0,1,1,0,2,0,3,0,0,0,2,0,1,1, +0,1,2,0,1,2,2,0,2,2,2,2,1,0,2,1,1,0,2,0,2,1,2,0,0,0,0,0,0,0,0,0, +3,3,3,1,3,2,3,2,0,2,2,2,1,3,2,0,2,1,2,0,1,2,0,0,1,0,2,2,0,0,0,2, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0, +3,3,3,0,3,3,1,1,2,3,1,0,3,2,3,0,3,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0, +1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,0,0,3,3,0,3,3,2,3,3,2,2,0,0,0,0,1,2,0,1,3,0,0,0,3,1,1,0,3,0,2, +2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,2,2,1,0,3,1,1,1,1,3,3,2,3,0,0,1,0,1,2,0,2,2,0,2,2,0,2,1, +0,2,2,1,1,1,1,0,2,1,1,0,1,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,0,0,0, +3,3,3,0,1,1,3,0,0,1,1,0,0,2,2,0,3,0,0,1,1,0,1,0,0,0,0,0,2,0,0,0, +0,3,1,0,1,0,1,0,2,0,0,1,0,1,0,1,1,1,2,1,1,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,0,2,0,1,1,1,0,0,3,3,0,2,0,0,1,0,0,2,1,1,0,1,0,1,0,1,0, +0,2,0,1,2,0,2,0,2,1,1,0,1,0,2,1,1,0,2,1,1,0,1,0,0,0,1,1,0,0,0,0, +3,2,3,0,1,0,0,0,0,0,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0, +0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,2,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,0,0,2,3,0,0,1,0,1,0,2,3,2,3,0,0,1,3,0,2,1,0,0,0,0,2,0,1,0, +0,2,1,0,0,1,1,0,2,1,0,0,1,0,0,1,1,0,1,1,2,0,1,0,0,0,0,1,0,0,0,0, +3,2,2,0,0,1,1,0,0,0,0,0,0,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,2,0,1,0, +0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,3,3,0,2,3,2,2,1,2,2,1,1,2,0,1,3,2,2,2,0,0,2,2,0,0,0,1,2,1, +3,0,2,1,1,0,1,1,1,0,1,2,2,2,1,1,2,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0, +0,1,1,2,3,0,3,3,3,2,2,2,2,1,0,1,0,1,0,1,2,2,0,0,2,2,1,3,1,1,2,1, +0,0,1,1,2,0,1,1,0,0,1,2,0,2,1,1,2,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0, +3,3,2,0,0,3,1,0,0,0,0,0,0,3,2,1,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, +0,2,1,1,0,0,1,0,1,2,0,0,1,1,0,0,2,1,1,1,1,0,2,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,1,0,0,0,0,1,0,0,3,3,2,2,0,0,1,0,0,2,0,1,0,0,0,2,0,1,0, +0,0,1,1,0,0,2,0,2,1,0,0,1,1,2,1,2,0,2,1,2,1,1,1,0,0,1,1,0,0,0,0, +3,3,2,0,0,2,2,0,0,0,1,1,0,2,2,1,3,1,0,1,0,1,2,0,0,0,0,0,1,0,1,0, +0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,0,0,0,1,0,0,1,0,0,2,3,1,2,0,0,1,0,0,2,0,0,0,1,0,2,0,2,0, +0,1,1,2,2,1,2,0,2,1,1,0,0,1,1,0,1,1,1,1,2,1,1,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,1,2,1,0,0,1,1,0,3,3,1,2,0,0,1,0,0,2,0,2,0,1,1,2,0,0,0, +0,0,1,1,1,1,2,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0, +3,3,3,0,2,2,3,2,0,0,1,0,0,2,3,1,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0, +0,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0, 
+3,2,3,0,0,0,0,0,0,0,1,0,0,2,2,2,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, +0,0,2,1,1,0,1,0,2,1,1,0,0,1,1,2,1,0,2,0,2,0,1,0,0,0,2,0,0,0,0,0, +0,0,0,2,2,0,2,1,1,1,1,2,2,0,0,1,0,1,0,0,1,3,0,0,0,0,1,0,0,2,1,0, +0,0,1,0,1,0,0,0,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +2,0,0,2,3,0,2,3,1,2,2,0,2,0,0,2,0,2,1,1,1,2,1,0,0,1,2,1,1,2,1,0, +1,0,2,0,1,0,1,1,0,0,2,2,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,1,2,0,0,0,1,0,0,3,2,0,1,0,0,1,0,0,2,0,0,0,1,2,1,0,1,0, +0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,2,2,0,2,2,1,1,0,1,1,1,1,1,0,0,1,2,1,1,1,0,1,0,0,0,1,1,1,1, +0,0,2,1,0,1,1,1,0,1,1,2,1,2,1,1,2,0,1,1,2,1,0,2,0,0,0,0,0,0,0,0, +3,2,2,0,0,2,0,0,0,0,0,0,0,2,2,0,2,0,0,1,0,0,2,0,0,0,0,0,2,0,0,0, +0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,3,2,0,2,2,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0, +2,0,1,0,1,0,1,1,0,0,1,2,0,1,0,1,1,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0, +2,2,2,0,1,1,0,0,0,1,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,1,2,0,1,0, +0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,1,0,1,1,1,0,0,0,0,1,2,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +1,1,2,0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1, +0,0,1,2,2,0,2,1,2,1,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +2,2,2,0,0,0,1,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +Latin5TurkishModel = { + 'char_to_order_map': Latin5_TurkishCharToOrderMap, + 'precedence_matrix': TurkishLangModel, + 'typical_positive_ratio': 0.970290, + 'keep_english_letter': True, + 'charset_name': "ISO-8859-9", + 'language': 'Turkish', +} diff --git a/env/Lib/site-packages/chardet/latin1prober.py b/env/Lib/site-packages/chardet/latin1prober.py new file mode 100644 index 0000000..7d1e8c2 --- /dev/null +++ b/env/Lib/site-packages/chardet/latin1prober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +FREQ_CAT_NUM = 4 + +UDF = 0 # undefined +OTH = 1 # other +ASC = 2 # ascii capital letter +ASS = 3 # ascii small letter +ACV = 4 # accent capital vowel +ACO = 5 # accent capital other +ASV = 6 # accent small vowel +ASO = 7 # accent small other +CLASS_NUM = 8 # total classes + +Latin1_CharToClass = ( + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F + OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 + ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F + OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 + ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F + OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 + OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F + UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 + OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF + ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 + ACV, ACV, ACV, ACV, ACV, 
ACV, ACV, ACV, # C8 - CF + ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 + ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF + ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 + ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF + ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 + ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF +) + +# 0 : illegal +# 1 : very unlikely +# 2 : normal +# 3 : very likely +Latin1ClassModel = ( +# UDF OTH ASC ASS ACV ACO ASV ASO + 0, 0, 0, 0, 0, 0, 0, 0, # UDF + 0, 3, 3, 3, 3, 3, 3, 3, # OTH + 0, 3, 3, 3, 3, 3, 3, 3, # ASC + 0, 3, 3, 3, 1, 1, 3, 3, # ASS + 0, 3, 3, 3, 1, 2, 1, 2, # ACV + 0, 3, 3, 3, 3, 3, 3, 3, # ACO + 0, 3, 1, 3, 1, 1, 1, 3, # ASV + 0, 3, 1, 3, 1, 1, 3, 3, # ASO +) + + +class Latin1Prober(CharSetProber): + def __init__(self): + super(Latin1Prober, self).__init__() + self._last_char_class = None + self._freq_counter = None + self.reset() + + def reset(self): + self._last_char_class = OTH + self._freq_counter = [0] * FREQ_CAT_NUM + CharSetProber.reset(self) + + @property + def charset_name(self): + return "ISO-8859-1" + + @property + def language(self): + return "" + + def feed(self, byte_str): + byte_str = self.filter_with_english_letters(byte_str) + for c in byte_str: + char_class = Latin1_CharToClass[c] + freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM) + + char_class] + if freq == 0: + self._state = ProbingState.NOT_ME + break + self._freq_counter[freq] += 1 + self._last_char_class = char_class + + return self.state + + def get_confidence(self): + if self.state == ProbingState.NOT_ME: + return 0.01 + + total = sum(self._freq_counter) + if total < 0.01: + confidence = 0.0 + else: + confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0) + / total) + if confidence < 0.0: + confidence = 0.0 + # lower the confidence of latin1 so that other more accurate + # detector can take priority. 
+ confidence = confidence * 0.73 + return confidence diff --git a/env/Lib/site-packages/chardet/mbcharsetprober.py b/env/Lib/site-packages/chardet/mbcharsetprober.py new file mode 100644 index 0000000..6256ecf --- /dev/null +++ b/env/Lib/site-packages/chardet/mbcharsetprober.py @@ -0,0 +1,91 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState, MachineState + + +class MultiByteCharSetProber(CharSetProber): + """ + MultiByteCharSetProber + """ + + def __init__(self, lang_filter=None): + super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter) + self.distribution_analyzer = None + self.coding_sm = None + self._last_char = [0, 0] + + def reset(self): + super(MultiByteCharSetProber, self).reset() + if self.coding_sm: + self.coding_sm.reset() + if self.distribution_analyzer: + self.distribution_analyzer.reset() + self._last_char = [0, 0] + + @property + def charset_name(self): + raise NotImplementedError + + @property + def language(self): + raise NotImplementedError + + def feed(self, byte_str): + for i in range(len(byte_str)): + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.distribution_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + return 
self.distribution_analyzer.get_confidence() diff --git a/env/Lib/site-packages/chardet/mbcsgroupprober.py b/env/Lib/site-packages/chardet/mbcsgroupprober.py new file mode 100644 index 0000000..530abe7 --- /dev/null +++ b/env/Lib/site-packages/chardet/mbcsgroupprober.py @@ -0,0 +1,54 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .utf8prober import UTF8Prober +from .sjisprober import SJISProber +from .eucjpprober import EUCJPProber +from .gb2312prober import GB2312Prober +from .euckrprober import EUCKRProber +from .cp949prober import CP949Prober +from .big5prober import Big5Prober +from .euctwprober import EUCTWProber + + +class MBCSGroupProber(CharSetGroupProber): + def __init__(self, lang_filter=None): + super(MBCSGroupProber, self).__init__(lang_filter=lang_filter) + self.probers = [ + UTF8Prober(), + SJISProber(), + EUCJPProber(), + GB2312Prober(), + EUCKRProber(), + CP949Prober(), + Big5Prober(), + EUCTWProber() + ] + self.reset() diff --git a/env/Lib/site-packages/chardet/mbcssm.py b/env/Lib/site-packages/chardet/mbcssm.py new file mode 100644 index 0000000..8360d0f --- /dev/null +++ b/env/Lib/site-packages/chardet/mbcssm.py @@ -0,0 +1,572 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +# BIG5 + +BIG5_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 4,4,4,4,4,4,4,4, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 4,3,3,3,3,3,3,3, # a0 - a7 + 3,3,3,3,3,3,3,3, # a8 - af + 3,3,3,3,3,3,3,3, # b0 - b7 + 3,3,3,3,3,3,3,3, # b8 - bf + 3,3,3,3,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +BIG5_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f + 
MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 +) + +BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) + +BIG5_SM_MODEL = {'class_table': BIG5_CLS, + 'class_factor': 5, + 'state_table': BIG5_ST, + 'char_len_table': BIG5_CHAR_LEN_TABLE, + 'name': 'Big5'} + +# CP949 + +CP949_CLS = ( + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f + 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f + 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f + 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f + 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f + 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f + 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f + 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f + 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af + 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf + 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff +) + +CP949_ST = ( +#cls= 0 1 2 3 4 5 6 7 8 9 # previous state = + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 + 
MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 +) + +CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) + +CP949_SM_MODEL = {'class_table': CP949_CLS, + 'class_factor': 10, + 'state_table': CP949_ST, + 'char_len_table': CP949_CHAR_LEN_TABLE, + 'name': 'CP949'} + +# EUC-JP + +EUCJP_CLS = ( + 4,4,4,4,4,4,4,4, # 00 - 07 + 4,4,4,4,4,4,5,5, # 08 - 0f + 4,4,4,4,4,4,4,4, # 10 - 17 + 4,4,4,5,4,4,4,4, # 18 - 1f + 4,4,4,4,4,4,4,4, # 20 - 27 + 4,4,4,4,4,4,4,4, # 28 - 2f + 4,4,4,4,4,4,4,4, # 30 - 37 + 4,4,4,4,4,4,4,4, # 38 - 3f + 4,4,4,4,4,4,4,4, # 40 - 47 + 4,4,4,4,4,4,4,4, # 48 - 4f + 4,4,4,4,4,4,4,4, # 50 - 57 + 4,4,4,4,4,4,4,4, # 58 - 5f + 4,4,4,4,4,4,4,4, # 60 - 67 + 4,4,4,4,4,4,4,4, # 68 - 6f + 4,4,4,4,4,4,4,4, # 70 - 77 + 4,4,4,4,4,4,4,4, # 78 - 7f + 5,5,5,5,5,5,5,5, # 80 - 87 + 5,5,5,5,5,5,1,3, # 88 - 8f + 5,5,5,5,5,5,5,5, # 90 - 97 + 5,5,5,5,5,5,5,5, # 98 - 9f + 5,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,0,5 # f8 - ff +) + +EUCJP_ST = ( + 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + 
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f + 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 +) + +EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) + +EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, + 'class_factor': 6, + 'state_table': EUCJP_ST, + 'char_len_table': EUCJP_CHAR_LEN_TABLE, + 'name': 'EUC-JP'} + +# EUC-KR + +EUCKR_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,3,3,3, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,3,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 2,2,2,2,2,2,2,2, # e0 - e7 + 2,2,2,2,2,2,2,2, # e8 - ef + 2,2,2,2,2,2,2,2, # f0 - f7 + 2,2,2,2,2,2,2,0 # f8 - ff +) + +EUCKR_ST = ( + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + 
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f +) + +EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) + +EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, + 'class_factor': 4, + 'state_table': EUCKR_ST, + 'char_len_table': EUCKR_CHAR_LEN_TABLE, + 'name': 'EUC-KR'} + +# EUC-TW + +EUCTW_CLS = ( + 2,2,2,2,2,2,2,2, # 00 - 07 + 2,2,2,2,2,2,0,0, # 08 - 0f + 2,2,2,2,2,2,2,2, # 10 - 17 + 2,2,2,0,2,2,2,2, # 18 - 1f + 2,2,2,2,2,2,2,2, # 20 - 27 + 2,2,2,2,2,2,2,2, # 28 - 2f + 2,2,2,2,2,2,2,2, # 30 - 37 + 2,2,2,2,2,2,2,2, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,2, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,6,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,3,4,4,4,4,4,4, # a0 - a7 + 5,5,1,1,1,1,1,1, # a8 - af + 1,1,1,1,1,1,1,1, # b0 - b7 + 1,1,1,1,1,1,1,1, # b8 - bf + 1,1,3,1,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +EUCTW_ST = ( + MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 + MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + 
5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 + MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) + +EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, + 'class_factor': 7, + 'state_table': EUCTW_ST, + 'char_len_table': EUCTW_CHAR_LEN_TABLE, + 'name': 'x-euc-tw'} + +# GB2312 + +GB2312_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 3,3,3,3,3,3,3,3, # 30 - 37 + 3,3,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,4, # 78 - 7f + 5,6,6,6,6,6,6,6, # 80 - 87 + 6,6,6,6,6,6,6,6, # 88 - 8f + 6,6,6,6,6,6,6,6, # 90 - 97 + 6,6,6,6,6,6,6,6, # 98 - 9f + 6,6,6,6,6,6,6,6, # a0 - a7 + 6,6,6,6,6,6,6,6, # a8 - af + 6,6,6,6,6,6,6,6, # b0 - b7 + 6,6,6,6,6,6,6,6, # b8 - bf + 6,6,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 6,6,6,6,6,6,6,6, # e0 - e7 + 6,6,6,6,6,6,6,6, # e8 - ef + 6,6,6,6,6,6,6,6, # f0 - f7 + 6,6,6,6,6,6,6,0 # f8 - ff +) + +GB2312_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 + 
4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +# To be accurate, the length of class 6 can be either 2 or 4. +# But it is not necessary to discriminate between the two since +# it is used for frequency analysis only, and we are validating +# each code range there as well. So it is safe to set it to be +# 2 here. +GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) + +GB2312_SM_MODEL = {'class_table': GB2312_CLS, + 'class_factor': 7, + 'state_table': GB2312_ST, + 'char_len_table': GB2312_CHAR_LEN_TABLE, + 'name': 'GB2312'} + +# Shift_JIS + +SJIS_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 3,3,3,3,3,2,2,3, # 80 - 87 + 3,3,3,3,3,3,3,3, # 88 - 8f + 3,3,3,3,3,3,3,3, # 90 - 97 + 3,3,3,3,3,3,3,3, # 98 - 9f + #0xa0 is illegal in sjis encoding, but some pages does + #contain such byte. We need to be more error forgiven. 
+ 2,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,4,4,4, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,0,0,0) # f8 - ff + + +SJIS_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 +) + +SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) + +SJIS_SM_MODEL = {'class_table': SJIS_CLS, + 'class_factor': 6, + 'state_table': SJIS_ST, + 'char_len_table': SJIS_CHAR_LEN_TABLE, + 'name': 'Shift_JIS'} + +# UCS2-BE + +UCS2BE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff 
+) + +UCS2BE_ST = ( + 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 + 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f + 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 + 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f + 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) + +UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, + 'class_factor': 6, + 'state_table': UCS2BE_ST, + 'char_len_table': UCS2BE_CHAR_LEN_TABLE, + 'name': 'UTF-16BE'} + +# UCS2-LE + +UCS2LE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2LE_ST = ( + 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + 
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f + 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 + 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) + +UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, + 'class_factor': 6, + 'state_table': UCS2LE_ST, + 'char_len_table': UCS2LE_CHAR_LEN_TABLE, + 'name': 'UTF-16LE'} + +# UTF-8 + +UTF8_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 2,2,2,2,3,3,3,3, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 5,5,5,5,5,5,5,5, # a0 - a7 + 5,5,5,5,5,5,5,5, # a8 - af + 5,5,5,5,5,5,5,5, # b0 - b7 + 5,5,5,5,5,5,5,5, # b8 - bf + 0,0,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 7,8,8,8,8,8,8,8, # e0 - e7 + 8,8,8,8,8,9,8,8, # e8 - ef + 10,11,11,11,11,11,11,11, # f0 - f7 + 12,13,13,13,14,15,0,0 # f8 - ff +) + +UTF8_ST = ( + MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 + 9, 11, 8, 7, 6, 5, 4, 3,#08-0f + 
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f + MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f + MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 7,MachineState.ERROR,MachineState.ERROR,#50-57 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f + MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f + 
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f + MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af + MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf +) + +UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) + +UTF8_SM_MODEL = {'class_table': UTF8_CLS, + 'class_factor': 16, + 'state_table': UTF8_ST, + 'char_len_table': UTF8_CHAR_LEN_TABLE, + 'name': 'UTF-8'} diff --git a/env/Lib/site-packages/chardet/sbcharsetprober.py b/env/Lib/site-packages/chardet/sbcharsetprober.py new file mode 100644 index 0000000..0adb51d --- /dev/null +++ b/env/Lib/site-packages/chardet/sbcharsetprober.py @@ -0,0 +1,132 @@ +######################## BEGIN LICENSE BLOCK 
######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import CharacterCategory, ProbingState, SequenceLikelihood + + +class SingleByteCharSetProber(CharSetProber): + SAMPLE_SIZE = 64 + SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2 + POSITIVE_SHORTCUT_THRESHOLD = 0.95 + NEGATIVE_SHORTCUT_THRESHOLD = 0.05 + + def __init__(self, model, reversed=False, name_prober=None): + super(SingleByteCharSetProber, self).__init__() + self._model = model + # TRUE if we need to reverse every pair in the model lookup + self._reversed = reversed + # Optional auxiliary prober for name decision + self._name_prober = name_prober + self._last_order = None + self._seq_counters = None + self._total_seqs = None + self._total_char = None + self._freq_char = None + self.reset() + + def reset(self): + 
super(SingleByteCharSetProber, self).reset() + # char order of last character + self._last_order = 255 + self._seq_counters = [0] * SequenceLikelihood.get_num_categories() + self._total_seqs = 0 + self._total_char = 0 + # characters that fall in our sampling range + self._freq_char = 0 + + @property + def charset_name(self): + if self._name_prober: + return self._name_prober.charset_name + else: + return self._model['charset_name'] + + @property + def language(self): + if self._name_prober: + return self._name_prober.language + else: + return self._model.get('language') + + def feed(self, byte_str): + if not self._model['keep_english_letter']: + byte_str = self.filter_international_words(byte_str) + if not byte_str: + return self.state + char_to_order_map = self._model['char_to_order_map'] + for i, c in enumerate(byte_str): + # XXX: Order is in range 1-64, so one would think we want 0-63 here, + # but that leads to 27 more test failures than before. + order = char_to_order_map[c] + # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but + # CharacterCategory.SYMBOL is actually 253, so we use CONTROL + # to make it closer to the original intent. The only difference + # is whether or not we count digits and control characters for + # _total_char purposes. 
+ if order < CharacterCategory.CONTROL: + self._total_char += 1 + if order < self.SAMPLE_SIZE: + self._freq_char += 1 + if self._last_order < self.SAMPLE_SIZE: + self._total_seqs += 1 + if not self._reversed: + i = (self._last_order * self.SAMPLE_SIZE) + order + model = self._model['precedence_matrix'][i] + else: # reverse the order of the letters in the lookup + i = (order * self.SAMPLE_SIZE) + self._last_order + model = self._model['precedence_matrix'][i] + self._seq_counters[model] += 1 + self._last_order = order + + charset_name = self._model['charset_name'] + if self.state == ProbingState.DETECTING: + if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: + confidence = self.get_confidence() + if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, we have a winner', + charset_name, confidence) + self._state = ProbingState.FOUND_IT + elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, below negative ' + 'shortcut threshhold %s', charset_name, + confidence, + self.NEGATIVE_SHORTCUT_THRESHOLD) + self._state = ProbingState.NOT_ME + + return self.state + + def get_confidence(self): + r = 0.01 + if self._total_seqs > 0: + r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) / + self._total_seqs / self._model['typical_positive_ratio']) + r = r * self._freq_char / self._total_char + if r >= 1.0: + r = 0.99 + return r diff --git a/env/Lib/site-packages/chardet/sbcsgroupprober.py b/env/Lib/site-packages/chardet/sbcsgroupprober.py new file mode 100644 index 0000000..98e95dc --- /dev/null +++ b/env/Lib/site-packages/chardet/sbcsgroupprober.py @@ -0,0 +1,73 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. 
All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .sbcharsetprober import SingleByteCharSetProber +from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel, + Latin5CyrillicModel, MacCyrillicModel, + Ibm866Model, Ibm855Model) +from .langgreekmodel import Latin7GreekModel, Win1253GreekModel +from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel +# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel +from .langthaimodel import TIS620ThaiModel +from .langhebrewmodel import Win1255HebrewModel +from .hebrewprober import HebrewProber +from .langturkishmodel import Latin5TurkishModel + + +class SBCSGroupProber(CharSetGroupProber): + def __init__(self): + super(SBCSGroupProber, self).__init__() + self.probers = [ + SingleByteCharSetProber(Win1251CyrillicModel), + SingleByteCharSetProber(Koi8rModel), + SingleByteCharSetProber(Latin5CyrillicModel), + SingleByteCharSetProber(MacCyrillicModel), + SingleByteCharSetProber(Ibm866Model), + SingleByteCharSetProber(Ibm855Model), + 
SingleByteCharSetProber(Latin7GreekModel), + SingleByteCharSetProber(Win1253GreekModel), + SingleByteCharSetProber(Latin5BulgarianModel), + SingleByteCharSetProber(Win1251BulgarianModel), + # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250) + # after we retrain model. + # SingleByteCharSetProber(Latin2HungarianModel), + # SingleByteCharSetProber(Win1250HungarianModel), + SingleByteCharSetProber(TIS620ThaiModel), + SingleByteCharSetProber(Latin5TurkishModel), + ] + hebrew_prober = HebrewProber() + logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, + False, hebrew_prober) + visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True, + hebrew_prober) + hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober) + self.probers.extend([hebrew_prober, logical_hebrew_prober, + visual_hebrew_prober]) + + self.reset() diff --git a/env/Lib/site-packages/chardet/sjisprober.py b/env/Lib/site-packages/chardet/sjisprober.py new file mode 100644 index 0000000..9e29623 --- /dev/null +++ b/env/Lib/site-packages/chardet/sjisprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import SJISDistributionAnalysis +from .jpcntx import SJISContextAnalysis +from .mbcssm import SJIS_SM_MODEL +from .enums import ProbingState, MachineState + + +class SJISProber(MultiByteCharSetProber): + def __init__(self): + super(SJISProber, self).__init__() + self.coding_sm = CodingStateMachine(SJIS_SM_MODEL) + self.distribution_analyzer = SJISDistributionAnalysis() + self.context_analyzer = SJISContextAnalysis() + self.reset() + + def reset(self): + super(SJISProber, self).reset() + self.context_analyzer.reset() + + @property + def charset_name(self): + return self.context_analyzer.charset_name + + @property + def language(self): + return "Japanese" + + def feed(self, byte_str): + for i in range(len(byte_str)): + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.context_analyzer.feed(self._last_char[2 - char_len:], + char_len) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3 + - char_len], char_len) + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] 
= byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.context_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/env/Lib/site-packages/chardet/universaldetector.py b/env/Lib/site-packages/chardet/universaldetector.py new file mode 100644 index 0000000..7b4e92d --- /dev/null +++ b/env/Lib/site-packages/chardet/universaldetector.py @@ -0,0 +1,286 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### +""" +Module containing the UniversalDetector detector class, which is the primary +class a user of ``chardet`` should use. + +:author: Mark Pilgrim (initial port to Python) +:author: Shy Shalom (original C code) +:author: Dan Blanchard (major refactoring for 3.0) +:author: Ian Cordasco +""" + + +import codecs +import logging +import re + +from .charsetgroupprober import CharSetGroupProber +from .enums import InputState, LanguageFilter, ProbingState +from .escprober import EscCharSetProber +from .latin1prober import Latin1Prober +from .mbcsgroupprober import MBCSGroupProber +from .sbcsgroupprober import SBCSGroupProber + + +class UniversalDetector(object): + """ + The ``UniversalDetector`` class underlies the ``chardet.detect`` function + and coordinates all of the different charset probers. + + To get a ``dict`` containing an encoding and its confidence, you can simply + run: + + .. 
code:: + + u = UniversalDetector() + u.feed(some_bytes) + u.close() + detected = u.result + + """ + + MINIMUM_THRESHOLD = 0.20 + HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]') + ESC_DETECTOR = re.compile(b'(\033|~{)') + WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]') + ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252', + 'iso-8859-2': 'Windows-1250', + 'iso-8859-5': 'Windows-1251', + 'iso-8859-6': 'Windows-1256', + 'iso-8859-7': 'Windows-1253', + 'iso-8859-8': 'Windows-1255', + 'iso-8859-9': 'Windows-1254', + 'iso-8859-13': 'Windows-1257'} + + def __init__(self, lang_filter=LanguageFilter.ALL): + self._esc_charset_prober = None + self._charset_probers = [] + self.result = None + self.done = None + self._got_data = None + self._input_state = None + self._last_char = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + self._has_win_bytes = None + self.reset() + + def reset(self): + """ + Reset the UniversalDetector and all of its probers back to their + initial states. This is called by ``__init__``, so you only need to + call this directly in between analyses of different documents. + """ + self.result = {'encoding': None, 'confidence': 0.0, 'language': None} + self.done = False + self._got_data = False + self._has_win_bytes = False + self._input_state = InputState.PURE_ASCII + self._last_char = b'' + if self._esc_charset_prober: + self._esc_charset_prober.reset() + for prober in self._charset_probers: + prober.reset() + + def feed(self, byte_str): + """ + Takes a chunk of a document and feeds it through all of the relevant + charset probers. + + After calling ``feed``, you can check the value of the ``done`` + attribute to see if you need to continue feeding the + ``UniversalDetector`` more data, or if it has made a prediction + (in the ``result`` attribute). + + .. note:: + You should always call ``close`` when you're done feeding in your + document if ``done`` is not already ``True``. 
+ """ + if self.done: + return + + if not len(byte_str): + return + + if not isinstance(byte_str, bytearray): + byte_str = bytearray(byte_str) + + # First check for known BOMs, since these are guaranteed to be correct + if not self._got_data: + # If the data starts with BOM, we know it is UTF + if byte_str.startswith(codecs.BOM_UTF8): + # EF BB BF UTF-8 with BOM + self.result = {'encoding': "UTF-8-SIG", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith((codecs.BOM_UTF32_LE, + codecs.BOM_UTF32_BE)): + # FF FE 00 00 UTF-32, little-endian BOM + # 00 00 FE FF UTF-32, big-endian BOM + self.result = {'encoding': "UTF-32", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith(b'\xFE\xFF\x00\x00'): + # FE FF 00 00 UCS-4, unusual octet order BOM (3412) + self.result = {'encoding': "X-ISO-10646-UCS-4-3412", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith(b'\x00\x00\xFF\xFE'): + # 00 00 FF FE UCS-4, unusual octet order BOM (2143) + self.result = {'encoding': "X-ISO-10646-UCS-4-2143", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): + # FF FE UTF-16, little endian BOM + # FE FF UTF-16, big endian BOM + self.result = {'encoding': "UTF-16", + 'confidence': 1.0, + 'language': ''} + + self._got_data = True + if self.result['encoding'] is not None: + self.done = True + return + + # If none of those matched and we've only see ASCII so far, check + # for high bytes and escape sequences + if self._input_state == InputState.PURE_ASCII: + if self.HIGH_BYTE_DETECTOR.search(byte_str): + self._input_state = InputState.HIGH_BYTE + elif self._input_state == InputState.PURE_ASCII and \ + self.ESC_DETECTOR.search(self._last_char + byte_str): + self._input_state = InputState.ESC_ASCII + + self._last_char = byte_str[-1:] + + # If we've seen escape sequences, use the EscCharSetProber, which + # uses a simple state machine to check for known escape sequences in + # HZ and ISO-2022 encodings, since those 
are the only encodings that + # use such sequences. + if self._input_state == InputState.ESC_ASCII: + if not self._esc_charset_prober: + self._esc_charset_prober = EscCharSetProber(self.lang_filter) + if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding': + self._esc_charset_prober.charset_name, + 'confidence': + self._esc_charset_prober.get_confidence(), + 'language': + self._esc_charset_prober.language} + self.done = True + # If we've seen high bytes (i.e., those with values greater than 127), + # we need to do more complicated checks using all our multi-byte and + # single-byte probers that are left. The single-byte probers + # use character bigram distributions to determine the encoding, whereas + # the multi-byte probers use a combination of character unigram and + # bigram distributions. + elif self._input_state == InputState.HIGH_BYTE: + if not self._charset_probers: + self._charset_probers = [MBCSGroupProber(self.lang_filter)] + # If we're checking non-CJK encodings, use single-byte prober + if self.lang_filter & LanguageFilter.NON_CJK: + self._charset_probers.append(SBCSGroupProber()) + self._charset_probers.append(Latin1Prober()) + for prober in self._charset_probers: + if prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding': prober.charset_name, + 'confidence': prober.get_confidence(), + 'language': prober.language} + self.done = True + break + if self.WIN_BYTE_DETECTOR.search(byte_str): + self._has_win_bytes = True + + def close(self): + """ + Stop analyzing the current document and come up with a final + prediction. + + :returns: The ``result`` attribute, a ``dict`` with the keys + `encoding`, `confidence`, and `language`. 
+ """ + # Don't bother with checks if we're already done + if self.done: + return self.result + self.done = True + + if not self._got_data: + self.logger.debug('no data received!') + + # Default to ASCII if it is all we've seen so far + elif self._input_state == InputState.PURE_ASCII: + self.result = {'encoding': 'ascii', + 'confidence': 1.0, + 'language': ''} + + # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD + elif self._input_state == InputState.HIGH_BYTE: + prober_confidence = None + max_prober_confidence = 0.0 + max_prober = None + for prober in self._charset_probers: + if not prober: + continue + prober_confidence = prober.get_confidence() + if prober_confidence > max_prober_confidence: + max_prober_confidence = prober_confidence + max_prober = prober + if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): + charset_name = max_prober.charset_name + lower_charset_name = max_prober.charset_name.lower() + confidence = max_prober.get_confidence() + # Use Windows encoding name instead of ISO-8859 if we saw any + # extra Windows-specific bytes + if lower_charset_name.startswith('iso-8859'): + if self._has_win_bytes: + charset_name = self.ISO_WIN_MAP.get(lower_charset_name, + charset_name) + self.result = {'encoding': charset_name, + 'confidence': confidence, + 'language': max_prober.language} + + # Log all prober confidences if none met MINIMUM_THRESHOLD + if self.logger.getEffectiveLevel() == logging.DEBUG: + if self.result['encoding'] is None: + self.logger.debug('no probers hit minimum threshold') + for group_prober in self._charset_probers: + if not group_prober: + continue + if isinstance(group_prober, CharSetGroupProber): + for prober in group_prober.probers: + self.logger.debug('%s %s confidence = %s', + prober.charset_name, + prober.language, + prober.get_confidence()) + else: + self.logger.debug('%s %s confidence = %s', + prober.charset_name, + prober.language, + prober.get_confidence()) + return self.result diff 
--git a/env/Lib/site-packages/chardet/utf8prober.py b/env/Lib/site-packages/chardet/utf8prober.py new file mode 100644 index 0000000..6c3196c --- /dev/null +++ b/env/Lib/site-packages/chardet/utf8prober.py @@ -0,0 +1,82 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState, MachineState +from .codingstatemachine import CodingStateMachine +from .mbcssm import UTF8_SM_MODEL + + + +class UTF8Prober(CharSetProber): + ONE_CHAR_PROB = 0.5 + + def __init__(self): + super(UTF8Prober, self).__init__() + self.coding_sm = CodingStateMachine(UTF8_SM_MODEL) + self._num_mb_chars = None + self.reset() + + def reset(self): + super(UTF8Prober, self).reset() + self.coding_sm.reset() + self._num_mb_chars = 0 + + @property + def charset_name(self): + return "utf-8" + + @property + def language(self): + return "" + + def feed(self, byte_str): + for c in byte_str: + coding_state = self.coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + if self.coding_sm.get_current_charlen() >= 2: + self._num_mb_chars += 1 + + if self.state == ProbingState.DETECTING: + if self.get_confidence() > self.SHORTCUT_THRESHOLD: + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + unlike = 0.99 + if self._num_mb_chars < 6: + unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars + return 1.0 - unlike + else: + return unlike diff --git a/env/Lib/site-packages/chardet/version.py b/env/Lib/site-packages/chardet/version.py new file mode 100644 index 0000000..bb2a34a --- /dev/null +++ b/env/Lib/site-packages/chardet/version.py @@ -0,0 +1,9 @@ +""" +This module exists only to simplify retrieving the version number of chardet +from within setup.py and from chardet subpackages. 
+ +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + +__version__ = "3.0.4" +VERSION = __version__.split('.') diff --git a/env/Lib/site-packages/easy_install.py b/env/Lib/site-packages/easy_install.py new file mode 100644 index 0000000..d87e984 --- /dev/null +++ b/env/Lib/site-packages/easy_install.py @@ -0,0 +1,5 @@ +"""Run the EasyInstall command""" + +if __name__ == '__main__': + from setuptools.command.easy_install import main + main() diff --git a/env/Lib/site-packages/gcloud-0.17.0.dist-info/INSTALLER b/env/Lib/site-packages/gcloud-0.17.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/Lib/site-packages/gcloud-0.17.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/Lib/site-packages/gcloud-0.17.0.dist-info/METADATA b/env/Lib/site-packages/gcloud-0.17.0.dist-info/METADATA new file mode 100644 index 0000000..279971d --- /dev/null +++ b/env/Lib/site-packages/gcloud-0.17.0.dist-info/METADATA @@ -0,0 +1,327 @@ +Metadata-Version: 2.1 +Name: gcloud +Version: 0.17.0 +Summary: API Client library for Google Cloud +Home-page: https://github.com/GoogleCloudPlatform/gcloud-python +Author: Google Cloud Platform +Author-email: jjg+gcloud-python@google.com +License: Apache 2.0 +Platform: Posix; MacOS X; Windows +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Topic :: Internet +Requires-Dist: httplib2 (>=0.9.1) +Requires-Dist: googleapis-common-protos +Requires-Dist: oauth2client (>=2.0.1) +Requires-Dist: protobuf (!=3.0.0.b2.post1,>=3.0.0b2) +Requires-Dist: six 
+Provides-Extra: grpc +Requires-Dist: grpcio (>=0.14.0) ; extra == 'grpc' +Requires-Dist: google-gax (>=0.12.1) ; extra == 'grpc' +Requires-Dist: gax-google-pubsub-v1 (>=0.7.10) ; extra == 'grpc' +Requires-Dist: gax-google-logging-v2 (>=0.7.10) ; extra == 'grpc' + +Google Cloud Python Client +========================== + + Python idiomatic client for `Google Cloud Platform`_ services. + +.. _Google Cloud Platform: https://cloud.google.com/ + +|pypi| |build| |coverage| |versions| + +- `Homepage`_ +- `API Documentation`_ + +.. _Homepage: https://googlecloudplatform.github.io/gcloud-python/ +.. _API Documentation: http://googlecloudplatform.github.io/gcloud-python/stable/ + +This client supports the following Google Cloud Platform services: + +- `Google Cloud Datastore`_ +- `Google Cloud Storage`_ +- `Google Cloud Pub/Sub`_ +- `Google BigQuery`_ +- `Google Cloud Resource Manager`_ +- `Google Cloud Logging`_ + +.. _Google Cloud Datastore: https://github.com/GoogleCloudPlatform/gcloud-python#google-cloud-datastore +.. _Google Cloud Storage: https://github.com/GoogleCloudPlatform/gcloud-python#google-cloud-storage +.. _Google Cloud Pub/Sub: https://github.com/GoogleCloudPlatform/gcloud-python#google-cloud-pubsub +.. _Google BigQuery: https://github.com/GoogleCloudPlatform/gcloud-python#google-bigquery +.. _Google Cloud Resource Manager: https://github.com/GoogleCloudPlatform/gcloud-python#google-cloud-resource-manager +.. _Google Cloud Logging: https://github.com/GoogleCloudPlatform/gcloud-python#google-cloud-logging + +If you need support for other Google APIs, check out the +`Google APIs Python Client library`_. + +.. 
_Google APIs Python Client library: https://github.com/google/google-api-python-client + +Quick Start +----------- + +:: + + $ pip install --upgrade gcloud + +Example Applications +-------------------- + +- `getting-started-python`_ - A sample and `tutorial`_ that demonstrates how to build a complete web application using Cloud Datastore, Cloud Storage, and Cloud Pub/Sub and deploy it to Google App Engine or Google Compute Engine. +- `gcloud-python-expenses-demo`_ - A sample expenses demo using Cloud Datastore and Cloud Storage + +.. _getting-started-python: https://github.com/GoogleCloudPlatform/getting-started-python +.. _tutorial: https://cloud.google.com/python +.. _gcloud-python-expenses-demo: https://github.com/GoogleCloudPlatform/gcloud-python-expenses-demo + +Authentication +-------------- + +With ``gcloud-python`` we try to make authentication as painless as possible. +Check out the `Authentication section`_ in our documentation to learn more. +You may also find the `authentication document`_ shared by all the ``gcloud-*`` +libraries to be helpful. + +.. _Authentication section: http://gcloud-python.readthedocs.org/en/latest/gcloud-auth.html +.. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication + +Google Cloud Datastore +---------------------- + +Google `Cloud Datastore`_ (`Datastore API docs`_) is a fully managed, schemaless +database for storing non-relational data. Cloud Datastore automatically scales +with your users and supports ACID transactions, high availability of reads and +writes, strong consistency for reads and ancestor queries, and eventual +consistency for all other queries. + +.. _Cloud Datastore: https://cloud.google.com/datastore/docs +.. _Datastore API docs: https://cloud.google.com/datastore/docs/apis/v1beta3/ + +See the ``gcloud-python`` API `datastore documentation`_ to learn how to +interact with the Cloud Datastore using this Client Library. + +.. 
_datastore documentation: https://googlecloudplatform.github.io/gcloud-python/stable/datastore-client.html + +See the `official Google Cloud Datastore documentation`_ for more details on how +to activate Cloud Datastore for your project. + +.. _official Google Cloud Datastore documentation: https://cloud.google.com/datastore/docs/activate + +.. code:: python + + from gcloud import datastore + # Create, populate and persist an entity + entity = datastore.Entity(key=datastore.Key('EntityKind')) + entity.update({ + 'foo': u'bar', + 'baz': 1337, + 'qux': False, + }) + # Then query for entities + query = datastore.Query(kind='EntityKind') + for result in query.fetch(): + print result + +Google Cloud Storage +-------------------- + +Google `Cloud Storage`_ (`Storage API docs`_) allows you to store data on Google +infrastructure with very high reliability, performance and availability, and can +be used to distribute large data objects to users via direct download. + +.. _Cloud Storage: https://cloud.google.com/storage/docs +.. _Storage API docs: https://cloud.google.com/storage/docs/json_api/v1 + +See the ``gcloud-python`` API `storage documentation`_ to learn how to connect +to Cloud Storage using this Client Library. + +.. _storage documentation: https://googlecloudplatform.github.io/gcloud-python/stable/storage-client.html + +You need to create a Google Cloud Storage bucket to use this client library. +Follow along with the `official Google Cloud Storage documentation`_ to learn +how to create a bucket. + +.. _official Google Cloud Storage documentation: https://cloud.google.com/storage/docs/cloud-console#_creatingbuckets + +.. code:: python + + from gcloud import storage + client = storage.Client() + bucket = client.get_bucket('bucket-id-here') + # Then do other things... 
+ blob = bucket.get_blob('remote/path/to/file.txt') + print blob.download_as_string() + blob.upload_from_string('New contents!') + blob2 = bucket.blob('remote/path/storage.txt') + blob2.upload_from_filename(filename='/local/path.txt') + +Google Cloud Pub/Sub +-------------------- + +Google `Cloud Pub/Sub`_ (`Pub/Sub API docs`_) is designed to provide reliable, +many-to-many, asynchronous messaging between applications. Publisher +applications can send messages to a ``topic`` and other applications can +subscribe to that topic to receive the messages. By decoupling senders and +receivers, Google Cloud Pub/Sub allows developers to communicate between +independently written applications. + +.. _Cloud Pub/Sub: https://cloud.google.com/pubsub/docs +.. _Pub/Sub API docs: https://cloud.google.com/pubsub/reference/rest/ + +See the ``gcloud-python`` API `Pub/Sub documentation`_ to learn how to connect +to Cloud Pub/Sub using this Client Library. + +.. _Pub/Sub documentation: https://googlecloudplatform.github.io/gcloud-python/stable/pubsub-usage.html + +To get started with this API, you'll need to create + +.. code:: python + + from gcloud import pubsub + + client = pubsub.Client() + topic = client.topic('topic_name') + topic.create() + + topic.publish('this is the message_payload', + attr1='value1', attr2='value2') + +Google BigQuery +--------------- + +Querying massive datasets can be time consuming and expensive without the +right hardware and infrastructure. Google `BigQuery`_ (`BigQuery API docs`_) +solves this problem by enabling super-fast, SQL-like queries against +append-only tables, using the processing power of Google's infrastructure. + +.. _BigQuery: https://cloud.google.com/bigquery/what-is-bigquery +.. _BigQuery API docs: https://cloud.google.com/bigquery/docs/reference/v2/ + +This package is still being implemented, but it is almost complete! + +Load data from CSV +~~~~~~~~~~~~~~~~~~ + +.. 
code:: python + + import csv + + from gcloud import bigquery + from gcloud.bigquery import SchemaField + + client = bigquery.Client() + + dataset = client.dataset('dataset_name') + dataset.create() # API request + + SCHEMA = [ + SchemaField('full_name', 'STRING', mode='required'), + SchemaField('age', 'INTEGER', mode='required'), + ] + table = dataset.table('table_name', SCHEMA) + table.create() + + with open('csv_file', 'rb') as readable: + table.upload_from_file( + readable, source_format='CSV', skip_leading_rows=1) + +Perform a synchronous query +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. code:: python + + # Perform a synchronous query. + QUERY = ( + 'SELECT name FROM [bigquery-public-data:usa_names.usa_1910_2013] ' + 'WHERE state = "TX"') + query = client.run_sync_query('%s LIMIT 100' % QUERY) + query.timeout_ms = TIMEOUT_MS + query.run() + + for row in query.rows: + print row + + +See the ``gcloud-python`` API `BigQuery documentation`_ to learn how to connect +to BigQuery using this Client Library. + +.. _BigQuery documentation: https://googlecloudplatform.github.io/gcloud-python/stable/bigquery-usage.html + +Google Cloud Resource Manager +----------------------------- + +The Cloud `Resource Manager`_ API (`Resource Manager API docs`_) provides +methods that you can use to programmatically manage your projects in the +Google Cloud Platform. + +.. _Resource Manager: https://cloud.google.com/resource-manager/ +.. _Resource Manager API docs: https://cloud.google.com/resource-manager/reference/rest/ + +See the ``gcloud-python`` API `Resource Manager documentation`_ to learn how to +manage projects using this Client Library. + +.. _Resource Manager documentation: https://googlecloudplatform.github.io/gcloud-python/stable/resource-manager-api.html + +Google Cloud Logging +-------------------- + +`Stackdriver Logging`_ API (`Logging API docs`_) allows you to store, search, +analyze, monitor, and alert on log data and events from Google Cloud Platform. + +.. 
_Stackdriver Logging: https://cloud.google.com/logging/ +.. _Logging API docs: https://cloud.google.com/logging/docs/ + +.. code:: python + + from gcloud import logging + client = logging.Client() + logger = client.logger('log_name') + logger.log_text("A simple entry") # API call + +Example of fetching entries: + +.. code:: python + + entries, token = logger.list_entries() + for entry in entries: + print entry.payload + +See the ``gcloud-python`` API `logging documentation`_ to learn how to connect +to Cloud logging using this Client Library. + +.. _logging documentation: https://googlecloudplatform.github.io/gcloud-python/stable/logging-usage.html + +Contributing +------------ + +Contributions to this library are always welcome and highly encouraged. + +See `CONTRIBUTING`_ for more information on how to get started. + +.. _CONTRIBUTING: https://github.com/GoogleCloudPlatform/gcloud-python/blob/master/CONTRIBUTING.rst + +License +------- + +Apache 2.0 - See `LICENSE`_ for more information. + +.. _LICENSE: https://github.com/GoogleCloudPlatform/gcloud-python/blob/master/LICENSE + +.. |build| image:: https://travis-ci.org/GoogleCloudPlatform/gcloud-python.svg?branch=master + :target: https://travis-ci.org/GoogleCloudPlatform/gcloud-python +.. |coverage| image:: https://coveralls.io/repos/GoogleCloudPlatform/gcloud-python/badge.png?branch=master + :target: https://coveralls.io/r/GoogleCloudPlatform/gcloud-python?branch=master +.. |pypi| image:: https://img.shields.io/pypi/v/gcloud.svg + :target: https://pypi.python.org/pypi/gcloud +.. 
|versions| image:: https://img.shields.io/pypi/pyversions/gcloud.svg + :target: https://pypi.python.org/pypi/gcloud + + diff --git a/env/Lib/site-packages/gcloud-0.17.0.dist-info/RECORD b/env/Lib/site-packages/gcloud-0.17.0.dist-info/RECORD new file mode 100644 index 0000000..7f9a8f0 --- /dev/null +++ b/env/Lib/site-packages/gcloud-0.17.0.dist-info/RECORD @@ -0,0 +1,425 @@ +gcloud-0.17.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +gcloud-0.17.0.dist-info/METADATA,sha256=_XOiD0QLDi1YuJSDDGf75uTQftPIyQVmG1Jfwl1i1Yw,11958 +gcloud-0.17.0.dist-info/RECORD,, +gcloud-0.17.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +gcloud-0.17.0.dist-info/WHEEL,sha256=S6zePDbUAjzMmpYOg2cHDxuYFWw7WiOXt6ogM6hIB5Q,92 +gcloud-0.17.0.dist-info/top_level.txt,sha256=9c97nNeK4FgiT4lthMrjFbOCwQK5jHQT-GvZ2xCLqxo,7 +gcloud/__init__.py,sha256=asXjSYzy1Q3TYaIws0MI9pYyiDT7ms-J62CF-RUJN7A,736 +gcloud/__pycache__/__init__.cpython-39.pyc,, +gcloud/__pycache__/_helpers.cpython-39.pyc,, +gcloud/__pycache__/_testing.cpython-39.pyc,, +gcloud/__pycache__/client.cpython-39.pyc,, +gcloud/__pycache__/connection.cpython-39.pyc,, +gcloud/__pycache__/credentials.cpython-39.pyc,, +gcloud/__pycache__/environment_vars.cpython-39.pyc,, +gcloud/__pycache__/exceptions.cpython-39.pyc,, +gcloud/__pycache__/iterator.cpython-39.pyc,, +gcloud/__pycache__/test__helpers.cpython-39.pyc,, +gcloud/__pycache__/test_client.cpython-39.pyc,, +gcloud/__pycache__/test_connection.cpython-39.pyc,, +gcloud/__pycache__/test_credentials.cpython-39.pyc,, +gcloud/__pycache__/test_exceptions.cpython-39.pyc,, +gcloud/__pycache__/test_iterator.cpython-39.pyc,, +gcloud/_helpers.py,sha256=2Hu21FRPCbKFSreXmzJNIHxLBbV6V29Xb6ZSHf8FWLg,17401 +gcloud/_testing.py,sha256=QWg58bp7ZRF4Hqmfa6_pntVEr_eJ92zorbTaioZHRxg,1851 +gcloud/bigquery/__init__.py,sha256=CG_rJkmWfyH0fMUF1mTQCL3h5WMv_gTxS1y4cP1G81c,1127 +gcloud/bigquery/__pycache__/__init__.cpython-39.pyc,, 
+gcloud/bigquery/__pycache__/_helpers.cpython-39.pyc,, +gcloud/bigquery/__pycache__/client.cpython-39.pyc,, +gcloud/bigquery/__pycache__/connection.cpython-39.pyc,, +gcloud/bigquery/__pycache__/dataset.cpython-39.pyc,, +gcloud/bigquery/__pycache__/job.cpython-39.pyc,, +gcloud/bigquery/__pycache__/query.cpython-39.pyc,, +gcloud/bigquery/__pycache__/table.cpython-39.pyc,, +gcloud/bigquery/__pycache__/test__helpers.cpython-39.pyc,, +gcloud/bigquery/__pycache__/test_client.cpython-39.pyc,, +gcloud/bigquery/__pycache__/test_connection.cpython-39.pyc,, +gcloud/bigquery/__pycache__/test_dataset.cpython-39.pyc,, +gcloud/bigquery/__pycache__/test_job.cpython-39.pyc,, +gcloud/bigquery/__pycache__/test_query.cpython-39.pyc,, +gcloud/bigquery/__pycache__/test_table.cpython-39.pyc,, +gcloud/bigquery/_helpers.py,sha256=ATyxqTmB0LW6b6elzu46euh8z5OVDeQ3uNmYWYR-ft8,5275 +gcloud/bigquery/client.py,sha256=yKW7tSPrfj1_qwWvbtVgVOe-YFHfrjVToFQ_zePNE9M,10779 +gcloud/bigquery/connection.py,sha256=wqaGaVnPEwge5JzpvF26nzX0vWScVQKoOR2CA_-kmXA,1341 +gcloud/bigquery/dataset.py,sha256=L8L5w3DP-cEoaja4Q_AfAndsCChaHoJ3i8S4IGK9c9A,20672 +gcloud/bigquery/job.py,sha256=mdwGAZIyYe153mZWiXN2y6Yeu4nL8nXhAhyjK_vAcww,36226 +gcloud/bigquery/query.py,sha256=rHX7RgRJBSd153BR5PI-Enfj3S8Pgi-IVty4eGHroxA,11966 +gcloud/bigquery/table.py,sha256=5pwRVfvaB9JfjVeMrouU-W_eDKKTyMVRC-6teWTWnyc,37063 +gcloud/bigquery/test__helpers.py,sha256=B2U4YwYDP9W8zTVQQ0paKh___2xnIqe_ulJjobHZ07o,3318 +gcloud/bigquery/test_client.py,sha256=MTKJc5UFb-GQnz6mASnrIL55IzQ5hd1gp1K-6n7Aw_Q,15788 +gcloud/bigquery/test_connection.py,sha256=8YlpCC5NUKNScUJ1Y_QfEfI6FVDZwPXkECSYu5zvAhg,1715 +gcloud/bigquery/test_dataset.py,sha256=F41EczrNh0V3hQkC0J2_ENhgWwh2VD4KGGOPGc48XU0,31243 +gcloud/bigquery/test_job.py,sha256=vS4tDaaGZfDIDC5gkeHIyex1j10XwAKUB9S5VNwEtN0,59767 +gcloud/bigquery/test_query.py,sha256=ZgbDl2GTZos--szNrO_r80IMzUGQjvsnutTfas0gAvc,12294 +gcloud/bigquery/test_table.py,sha256=opvTLXbcbS8AdYIhdA_ltZosnSU3MjZ9nr7QfiJhWSg,69265 
+gcloud/bigtable/__init__.py,sha256=L_bCkZ_iu553aJziLG3kC9IC8GHGhGfwKYhLpYXYxvo,1329 +gcloud/bigtable/__pycache__/__init__.cpython-39.pyc,, +gcloud/bigtable/__pycache__/_testing.cpython-39.pyc,, +gcloud/bigtable/__pycache__/client.cpython-39.pyc,, +gcloud/bigtable/__pycache__/cluster.cpython-39.pyc,, +gcloud/bigtable/__pycache__/column_family.cpython-39.pyc,, +gcloud/bigtable/__pycache__/instance.cpython-39.pyc,, +gcloud/bigtable/__pycache__/row.cpython-39.pyc,, +gcloud/bigtable/__pycache__/row_data.cpython-39.pyc,, +gcloud/bigtable/__pycache__/row_filters.cpython-39.pyc,, +gcloud/bigtable/__pycache__/table.cpython-39.pyc,, +gcloud/bigtable/__pycache__/test_client.cpython-39.pyc,, +gcloud/bigtable/__pycache__/test_cluster.cpython-39.pyc,, +gcloud/bigtable/__pycache__/test_column_family.cpython-39.pyc,, +gcloud/bigtable/__pycache__/test_instance.cpython-39.pyc,, +gcloud/bigtable/__pycache__/test_row.cpython-39.pyc,, +gcloud/bigtable/__pycache__/test_row_data.cpython-39.pyc,, +gcloud/bigtable/__pycache__/test_row_filters.cpython-39.pyc,, +gcloud/bigtable/__pycache__/test_table.cpython-39.pyc,, +gcloud/bigtable/_generated/__init__.py,sha256=-pTFZY3xUo1rqHrkP5Z0wkBz0UhemJfwdPC_hd_jBb0,661 +gcloud/bigtable/_generated/__pycache__/__init__.cpython-39.pyc,, +gcloud/bigtable/_generated/__pycache__/bigtable_cluster_data_pb2.cpython-39.pyc,, +gcloud/bigtable/_generated/__pycache__/bigtable_cluster_service_messages_pb2.cpython-39.pyc,, +gcloud/bigtable/_generated/__pycache__/bigtable_cluster_service_pb2.cpython-39.pyc,, +gcloud/bigtable/_generated/__pycache__/bigtable_data_pb2.cpython-39.pyc,, +gcloud/bigtable/_generated/__pycache__/bigtable_service_messages_pb2.cpython-39.pyc,, +gcloud/bigtable/_generated/__pycache__/bigtable_service_pb2.cpython-39.pyc,, +gcloud/bigtable/_generated/__pycache__/bigtable_table_data_pb2.cpython-39.pyc,, +gcloud/bigtable/_generated/__pycache__/bigtable_table_service_messages_pb2.cpython-39.pyc,, 
+gcloud/bigtable/_generated/__pycache__/bigtable_table_service_pb2.cpython-39.pyc,, +gcloud/bigtable/_generated/__pycache__/operations_grpc_pb2.cpython-39.pyc,, +gcloud/bigtable/_generated/_bigtable_cluster_data.proto,sha256=GwWKg5T02i77Mr2q24DkQNdMCyRgbL01r9YdSzBTBbE,3009 +gcloud/bigtable/_generated/_bigtable_cluster_service.proto,sha256=KC8zbnc66MACUeeVgThFXjx32pn75E0g6elXY4aPCBQ,6795 +gcloud/bigtable/_generated/_bigtable_cluster_service_messages.proto,sha256=tExP0UIwR2dGo0Tptky-vjn9R6hQOkNjXzA_G-vGxWM,4639 +gcloud/bigtable/_generated/_bigtable_data.proto,sha256=lQKLttVuYwQ5cAW_-LEoSYcNi7Qb4Lo1Y5v_yLw0Qnw,22045 +gcloud/bigtable/_generated/_bigtable_service.proto,sha256=izWpYCUiXVWe6ZowueFIgAAQs3vBrKJt6WzffNAGCKs,3473 +gcloud/bigtable/_generated/_bigtable_service_messages.proto,sha256=NS3eFndbGr4nECfogwhvqEUHBF64PWRIApMKB3ZPRck,8324 +gcloud/bigtable/_generated/_bigtable_table_data.proto,sha256=20YFQC2JAno5WfsaRzmsAE_xhxdIuVvH7Sr9sJajecM,4769 +gcloud/bigtable/_generated/_bigtable_table_service.proto,sha256=cnFE1ipltXNoF0ELfl48ShgQZrzKTj-nVsr8cK2api4,3343 +gcloud/bigtable/_generated/_bigtable_table_service_messages.proto,sha256=4Bvk_dy0al4c-BwiEiJ8GEt09Wag6Ss0c5epznFooe8,3500 +gcloud/bigtable/_generated/_operations.proto,sha256=NRDg8XEMtPg77utZdRx72gPxsw_W94I6DE6J1y4YhaQ,5933 +gcloud/bigtable/_generated/bigtable_cluster_data_pb2.py,sha256=2hkDDoPWYamKeb7XB3pUTtu_JFPmrOrYeyGh_a4F3Y8,8921 +gcloud/bigtable/_generated/bigtable_cluster_service_messages_pb2.py,sha256=8_SBh9A4jfGgBU3Z4ELC1n9914aHJ_tWaXETupiitLY,22481 +gcloud/bigtable/_generated/bigtable_cluster_service_pb2.py,sha256=Uksy1YkH70ZcLbJ47PKGdk1ktSt_5wLCEcZ8FXSLhNU,14903 +gcloud/bigtable/_generated/bigtable_data_pb2.py,sha256=TGCoJKud02QGIzAnIZDRdnL7mbgGJIGSbY64gOUig08,52839 +gcloud/bigtable/_generated/bigtable_service_messages_pb2.py,sha256=RrNuV4v1AANrVvWolYRAeTHw9s-Et_wSOKU3wjoq4kA,28610 +gcloud/bigtable/_generated/bigtable_service_pb2.py,sha256=DfQ0pDnfK-t8yv2kPc4SUz0FO39KeWVOVoscQDNlw6U,12199 
+gcloud/bigtable/_generated/bigtable_table_data_pb2.py,sha256=tSWQp8bL0J5Ot0THmC4VMwLHvWXps7Dw4v6U1bQs-BA,15493 +gcloud/bigtable/_generated/bigtable_table_service_messages_pb2.py,sha256=o9PbSOSicEflo0vmr14CZI4tKio4Lf4RxrLw4BoMhX4,15201 +gcloud/bigtable/_generated/bigtable_table_service_pb2.py,sha256=GikjtQnyh-y2BkGzoleAPbGXiE_A8lOpVsuW4OK5Qr0,15984 +gcloud/bigtable/_generated/operations_grpc_pb2.py,sha256=BwU02P4-Yh6WuHSYGN1Q2Fjx_yur0Od3MIWeocGMVSQ,5734 +gcloud/bigtable/_generated_v2/__init__.py,sha256=-pTFZY3xUo1rqHrkP5Z0wkBz0UhemJfwdPC_hd_jBb0,661 +gcloud/bigtable/_generated_v2/__pycache__/__init__.cpython-39.pyc,, +gcloud/bigtable/_generated_v2/__pycache__/bigtable_instance_admin_pb2.cpython-39.pyc,, +gcloud/bigtable/_generated_v2/__pycache__/bigtable_pb2.cpython-39.pyc,, +gcloud/bigtable/_generated_v2/__pycache__/bigtable_table_admin_pb2.cpython-39.pyc,, +gcloud/bigtable/_generated_v2/__pycache__/common_pb2.cpython-39.pyc,, +gcloud/bigtable/_generated_v2/__pycache__/data_pb2.cpython-39.pyc,, +gcloud/bigtable/_generated_v2/__pycache__/instance_pb2.cpython-39.pyc,, +gcloud/bigtable/_generated_v2/__pycache__/operations_grpc_pb2.cpython-39.pyc,, +gcloud/bigtable/_generated_v2/__pycache__/table_pb2.cpython-39.pyc,, +gcloud/bigtable/_generated_v2/_bigtable.proto,sha256=uxYh-FUJSj9FMMJMxo7u9qQEF1COHHVM29x0ZEH61gk,13593 +gcloud/bigtable/_generated_v2/_bigtable_instance_admin.proto,sha256=H-MEJKUFp_OXbI5SRKKcNadaQPhcslCpUbu5PIxicUI,8894 +gcloud/bigtable/_generated_v2/_bigtable_table_admin.proto,sha256=Tz2jiiiJs6fNYH4LMhf_-X2iBFccMTCpsKBIbprsTzE,7954 +gcloud/bigtable/_generated_v2/_common.proto,sha256=4vlju1S0HDW3uRVUxpWqCBhNJC1F43448A57K8kdK8g,1129 +gcloud/bigtable/_generated_v2/_data.proto,sha256=16run4bOVgCUNIoyMAyear3sqe4yHrVQRYQtlxlTJNk,22637 +gcloud/bigtable/_generated_v2/_instance.proto,sha256=Vby1sSXoPW4dmg-3wHc1sD9eLxolr45CI71YgsyqSo8,3912 +gcloud/bigtable/_generated_v2/_operations.proto,sha256=NRDg8XEMtPg77utZdRx72gPxsw_W94I6DE6J1y4YhaQ,5933 
+gcloud/bigtable/_generated_v2/_table.proto,sha256=9O-IlzMdDevGgq9NKnmqDdfJcUgwZ0ibokYh_sEC_Rk,3890 +gcloud/bigtable/_generated_v2/bigtable_instance_admin_pb2.py,sha256=3hmwt-Rn7KELP_qcEMXHpYUN1CQ8DxKK8Wkd6lSkWvo,51814 +gcloud/bigtable/_generated_v2/bigtable_pb2.py,sha256=ZECIpIgnafUcIzHROztvYtr5O_X8ZD2fnUC-rf3XJGo,49225 +gcloud/bigtable/_generated_v2/bigtable_table_admin_pb2.py,sha256=fZdwzk_KNjTQjVadb4gb8HbILmNAP4MW6xflHoueImY,37797 +gcloud/bigtable/_generated_v2/common_pb2.py,sha256=6aCWL6xi1IYQOVBx0t7Yvk3AbkhoMxw-Q1_wWBAPVBo,2470 +gcloud/bigtable/_generated_v2/data_pb2.py,sha256=iZTrOKss12-qwisys-781E5as4M07uV722FnnV4OStY,54670 +gcloud/bigtable/_generated_v2/instance_pb2.py,sha256=six-M5zaZgPPTtRyyKhMGtqP_uJmYgAcP-olPAYAQfc,8554 +gcloud/bigtable/_generated_v2/operations_grpc_pb2.py,sha256=Q_SiPLdPP2rvx3lp1y2-sO62v3rOTNOxE0zCujYnb28,13771 +gcloud/bigtable/_generated_v2/table_pb2.py,sha256=Cqf6N4NvUGN_CoEhtn-ydjWhv54o0imKZF-vWvbjNGo,14893 +gcloud/bigtable/_testing.py,sha256=biD0HYEnp5koCQoaJr5ssJYyiDc99-qqTTtJ1TcJYFU,1905 +gcloud/bigtable/client.py,sha256=Ki_7Xu-GCG7LVsOXCFgjxqYuEOOTwL2ElImVRX6o4YM,17937 +gcloud/bigtable/cluster.py,sha256=HBQVBNiZr6lkFbi94Es4VR7-b8gRMAtbjGunl-eugnk,13850 +gcloud/bigtable/column_family.py,sha256=3SOCSQ4sRxVPgATb61CZelzIsSlOdJRO_GJK97wvdRU,11430 +gcloud/bigtable/happybase/__init__.py,sha256=iERFXZ7pedIvpM88ETRTCr-qe4SRT66MX9gTGFxeLdE,6628 +gcloud/bigtable/happybase/__pycache__/__init__.cpython-39.pyc,, +gcloud/bigtable/happybase/__pycache__/batch.cpython-39.pyc,, +gcloud/bigtable/happybase/__pycache__/connection.cpython-39.pyc,, +gcloud/bigtable/happybase/__pycache__/pool.cpython-39.pyc,, +gcloud/bigtable/happybase/__pycache__/table.cpython-39.pyc,, +gcloud/bigtable/happybase/__pycache__/test_batch.cpython-39.pyc,, +gcloud/bigtable/happybase/__pycache__/test_connection.cpython-39.pyc,, +gcloud/bigtable/happybase/__pycache__/test_pool.cpython-39.pyc,, +gcloud/bigtable/happybase/__pycache__/test_table.cpython-39.pyc,, 
+gcloud/bigtable/happybase/batch.py,sha256=11RisE6f2TmYgL5zud0U__qootm5sGlop-V6dmQzEZA,13154 +gcloud/bigtable/happybase/connection.py,sha256=IQfZ9anO4rkDtpDHovOGdYTOBMZqbUGHaOBMIE8WHzY,18352 +gcloud/bigtable/happybase/pool.py,sha256=fEF-9ykwQXcJsczXM1Oqyn6cQ9xZymI8eMzPGyPzKKw,5918 +gcloud/bigtable/happybase/table.py,sha256=RyZ0UfDNc7o9AWysEE_09amgTyTncUyexNyhRnCK72g,38661 +gcloud/bigtable/happybase/test_batch.py,sha256=C1hYEcBKEHNLM_VRnN-AlzDWk8fEvUkrc5pnFr7raBU,17700 +gcloud/bigtable/happybase/test_connection.py,sha256=34if7QqUYmZnCbkTSO7eTUpGVByD2IJivIuiBtsKYOs,24528 +gcloud/bigtable/happybase/test_pool.py,sha256=qjEekMADI0eLl4FuZjoQE7J8_XEosW1vSxZ0vOge2RU,9402 +gcloud/bigtable/happybase/test_table.py,sha256=0ScsIeeT-LC-Cc3C71-3DWBMvQBMYTpzUc4ShdDBR4w,51404 +gcloud/bigtable/instance.py,sha256=KCbAa8IfGkuKyjcCON71iaIy80J1ITp86b_khjlBi40,18454 +gcloud/bigtable/read-rows-acceptance-test.json,sha256=zUGgwfUBcgWO-sCWnozXUabuwJsYgo9oTlpbqYGj3JQ,36170 +gcloud/bigtable/row.py,sha256=sU7dc9TNY7maHc5Vrg-5_uI-YWZfPMxJhVAgMMmGAn4,34306 +gcloud/bigtable/row_data.py,sha256=9PZRnKro92VzuGlMGJDet6d87NuCr86lmVre_5glkZI,15592 +gcloud/bigtable/row_filters.py,sha256=bIVRm5FL00DWP12kpsTR3Q3uy8mvdX1UIBnHwSsmLm0,26718 +gcloud/bigtable/table.py,sha256=IpGgUKNOnKqPzMcRLIn1IgKa1X8S23wDJcwpx6ayXRc,15661 +gcloud/bigtable/test_client.py,sha256=0AjUr5T7SsC6lnp98gqaXuSBM5v83VW8OsKrPf712Nc,28815 +gcloud/bigtable/test_cluster.py,sha256=GqZ1GoJNT4TQ4XIHMG2hA1ifRGhYW6uJytl8V4i_sLw,22089 +gcloud/bigtable/test_column_family.py,sha256=mRHstdvOqONTsGoWpqnFk-nBAMjnwLDK7pfh7_4RmSo,23208 +gcloud/bigtable/test_instance.py,sha256=CVa6VLbugUgnq_4chJEKKpohkRMobaP8OoyKlsfChSw,31982 +gcloud/bigtable/test_row.py,sha256=4OgOtBVVIYlRRMnllLNP4JnsFOeuZEHM9vV0oPUae3c,30605 +gcloud/bigtable/test_row_data.py,sha256=JszVN1_q73s0kz-aSm_tLntXtqJmtMaegGcYh5iU_sg,25373 +gcloud/bigtable/test_row_filters.py,sha256=0SfKcKRUotzyMt8oosSXaHG1TcflC7wEfY0a54_c4l0,34722 
+gcloud/bigtable/test_table.py,sha256=Mjkk_em9HY0MKy7CpunXpGt7H7zEIbIoeoRkMr3Li3g,19496 +gcloud/client.py,sha256=VamrSK4SVNYOq9jjUCtZclQT30-2o-5J1fE72xZoHxg,7713 +gcloud/connection.py,sha256=fIFgty6iIura7wzw7EJkoy3FhsvxDeu4KTPW4KQsIAE,13125 +gcloud/credentials.py,sha256=JXi3qsJg5RqMMPVjxkzqb_gCQYi67g4OBcMximcWxHw,8721 +gcloud/datastore/__init__.py,sha256=W-Xfybog50c6VrGu4IjdgSBafC-72ItWipA-Pu_vLa0,2223 +gcloud/datastore/__pycache__/__init__.cpython-39.pyc,, +gcloud/datastore/__pycache__/batch.cpython-39.pyc,, +gcloud/datastore/__pycache__/client.cpython-39.pyc,, +gcloud/datastore/__pycache__/connection.cpython-39.pyc,, +gcloud/datastore/__pycache__/entity.cpython-39.pyc,, +gcloud/datastore/__pycache__/helpers.cpython-39.pyc,, +gcloud/datastore/__pycache__/key.cpython-39.pyc,, +gcloud/datastore/__pycache__/query.cpython-39.pyc,, +gcloud/datastore/__pycache__/test_batch.cpython-39.pyc,, +gcloud/datastore/__pycache__/test_client.cpython-39.pyc,, +gcloud/datastore/__pycache__/test_connection.cpython-39.pyc,, +gcloud/datastore/__pycache__/test_entity.cpython-39.pyc,, +gcloud/datastore/__pycache__/test_helpers.cpython-39.pyc,, +gcloud/datastore/__pycache__/test_key.cpython-39.pyc,, +gcloud/datastore/__pycache__/test_query.cpython-39.pyc,, +gcloud/datastore/__pycache__/test_transaction.cpython-39.pyc,, +gcloud/datastore/__pycache__/transaction.cpython-39.pyc,, +gcloud/datastore/_generated/__init__.py,sha256=Sz7qsw1-V4FADUtX4XcsCkigdHEbqa50ukhVOnHd3qs,662 +gcloud/datastore/_generated/__pycache__/__init__.cpython-39.pyc,, +gcloud/datastore/_generated/__pycache__/datastore_pb2.cpython-39.pyc,, +gcloud/datastore/_generated/__pycache__/entity_pb2.cpython-39.pyc,, +gcloud/datastore/_generated/__pycache__/query_pb2.cpython-39.pyc,, +gcloud/datastore/_generated/_datastore.proto,sha256=ldObgkig-mx86vNDuW8D0rNONjij4Kn92tkDfFYb23M,9903 +gcloud/datastore/_generated/_entity.proto,sha256=BIzchnzcxxH_SJYalwdmuKUvgD8krQUBxM_S9i0PLEk,6879 
+gcloud/datastore/_generated/_query.proto,sha256=GXlGwmp29hBTklYRDC4a_GC-Gd1JprQeZiRpXONMqSs,7821 +gcloud/datastore/_generated/datastore_grpc_pb2.py,sha256=11EE_UdNaNzVPy6i1qs1LGuMyz3lZsRrSJPYCFcpNno,15139 +gcloud/datastore/_generated/datastore_pb2.py,sha256=5dFzOr9KT8aFKBv_atKvZ2C4aI3VWYdLowtrpsLQyMY,36282 +gcloud/datastore/_generated/entity_pb2.py,sha256=6AyP5ceQmAWeJ98YI-5VG4sqckdpnxOlA6klr0QWeiU,21396 +gcloud/datastore/_generated/query_pb2.py,sha256=CuWsfUBYh2mUP5WbXMoPfEnEQ8dZtirt8-2oV3aJp10,36990 +gcloud/datastore/batch.py,sha256=teiiTH8V3fP9ZqvLq_mRgdsLecydR0v9NO5hj1sAQLg,10523 +gcloud/datastore/client.py,sha256=gLMwKc-RoeVGm2S2hauDRDzujzD-4l9JdYaYLJDnd4U,16369 +gcloud/datastore/connection.py,sha256=o4CMbr4GEOVqrTxU8bl7DeHFIBhoPx3xDFHCEVc-QsE,16936 +gcloud/datastore/entity.py,sha256=ebZGcXyuYRuufhn2jmrvzsJHJLRu8hU4NrDaTvEaDjA,5111 +gcloud/datastore/helpers.py,sha256=yk62U092xCBCXrP0LUVAUtSKUKbDhMoNyWqZGFIu744,16169 +gcloud/datastore/key.py,sha256=vN1_7f053vWfWr0bM4TELpUhNEQov_8z_R49LmKqSls,13622 +gcloud/datastore/query.py,sha256=iQwEDgikM-Dwy2VxkVGQsAS_Z4HiqhahkTE-F8f1sF0,17896 +gcloud/datastore/test_batch.py,sha256=kGBeQryhGEaTVHta-ivsiNrGW0nlA5JT49Ckcnf32CM,13180 +gcloud/datastore/test_client.py,sha256=lNuig8wibVMrsD31VGnFs3oOYP8TRhqXSGd505eckVk,34078 +gcloud/datastore/test_connection.py,sha256=a4--0Rn1xw8Y93VhS8U9P7xcdWWkBGxu_-nlcPR4ero,31745 +gcloud/datastore/test_entity.py,sha256=kbLpWQvJFoof9p3r6pfrjY_9vdyr2lagLooMGHTXSCY,7223 +gcloud/datastore/test_helpers.py,sha256=8ZP8XRAZSszbrYILUYQACxm6l7jWqWDopO17E2A4Lhs,31407 +gcloud/datastore/test_key.py,sha256=xOJmW7NPmHuK3YRp76MtcfQbep7sMufx1hfVu6vxbSc,16555 +gcloud/datastore/test_query.py,sha256=qxtaaP1HMKK4Y03q4s4uTKD7_KC4h8qRKl-N2srIRrs,29017 +gcloud/datastore/test_transaction.py,sha256=caWgXGKAxjnmnnhRC5uy5TMoMEkevFsr1Z6ndAD5IHM,7632 +gcloud/datastore/transaction.py,sha256=FBIznONQ0n4POV0a7sZfeEGHUXjN_0ggc1OgT_by444,5314 +gcloud/dns/__init__.py,sha256=WocIjdQCrokmNdTjP7OZ7wELELE-q6WGPwxu4KqrOT0,1231 
+gcloud/dns/__pycache__/__init__.cpython-39.pyc,, +gcloud/dns/__pycache__/changes.cpython-39.pyc,, +gcloud/dns/__pycache__/client.cpython-39.pyc,, +gcloud/dns/__pycache__/connection.cpython-39.pyc,, +gcloud/dns/__pycache__/resource_record_set.cpython-39.pyc,, +gcloud/dns/__pycache__/test_changes.cpython-39.pyc,, +gcloud/dns/__pycache__/test_client.cpython-39.pyc,, +gcloud/dns/__pycache__/test_connection.cpython-39.pyc,, +gcloud/dns/__pycache__/test_resource_record_set.cpython-39.pyc,, +gcloud/dns/__pycache__/test_zone.cpython-39.pyc,, +gcloud/dns/__pycache__/zone.cpython-39.pyc,, +gcloud/dns/changes.py,sha256=xAdH4Dw9CaCBe1tkLiDh_UCz8VJ-2bZmICUpWUO_Z2s,8788 +gcloud/dns/client.py,sha256=6pevdgKkWdofo_rT4YGnXFwdUe1JZ-P4HHLjZ6BRKOw,4644 +gcloud/dns/connection.py,sha256=QQ5CvHlVZkwstot_cxZEq3-ht34A42ZQFIYwx6cQLxU,1274 +gcloud/dns/resource_record_set.py,sha256=56z5dOMmn-86hTDWYM687tpl-iz50SRKf_6kovqyzaY,2214 +gcloud/dns/test_changes.py,sha256=-GtP2SMLqYBZMruSizv33rydg_Py4dcWNqP0BdW6jSA,12595 +gcloud/dns/test_client.py,sha256=pD3r4FKpwSOlj7WrJ7GyxxUttuYYp1u1e4Myo6DCbWo,9024 +gcloud/dns/test_connection.py,sha256=sIaKSFxd1hAvKbU6AxkJfEUmR6lTc8V9ybrIVZ6vyjs,1700 +gcloud/dns/test_resource_record_set.py,sha256=XrfoVF3MV_LeB5xDKB-s8Gm1Nzi9fEBmTjocO5npUoI,3284 +gcloud/dns/test_zone.py,sha256=tOfxLFM2xQMTKKkXSJwWov0av_SCmvgnpaXDUiNK_-E,26983 +gcloud/dns/zone.py,sha256=LLl5-1WSzr2B1WOq0AWWo3EZ-vVqWy3awUy5NBTGFuE,14040 +gcloud/environment_vars.py,sha256=9A-Fp3nfP3KXXeTUCf6254nqYoY0N3I1_SjSyjNAntk,1335 +gcloud/exceptions.py,sha256=R2vGA2c2buneZAnwvPPv73CzRKPKeciCaSdN2Ikd1UA,5995 +gcloud/iterator.py,sha256=IzsACtrg2oRju4NMM3u5C5TBUDiGkOD4J-pfP8u9pWs,6646 +gcloud/logging/__init__.py,sha256=o36-W-peIpGHcuPF20sAWu4A2GwDhLCxk1ZY_MkXf38,813 +gcloud/logging/__pycache__/__init__.cpython-39.pyc,, +gcloud/logging/__pycache__/_gax.cpython-39.pyc,, +gcloud/logging/__pycache__/client.cpython-39.pyc,, +gcloud/logging/__pycache__/connection.cpython-39.pyc,, 
+gcloud/logging/__pycache__/entries.cpython-39.pyc,, +gcloud/logging/__pycache__/logger.cpython-39.pyc,, +gcloud/logging/__pycache__/metric.cpython-39.pyc,, +gcloud/logging/__pycache__/sink.cpython-39.pyc,, +gcloud/logging/__pycache__/test__gax.cpython-39.pyc,, +gcloud/logging/__pycache__/test_client.cpython-39.pyc,, +gcloud/logging/__pycache__/test_connection.cpython-39.pyc,, +gcloud/logging/__pycache__/test_entries.cpython-39.pyc,, +gcloud/logging/__pycache__/test_logger.cpython-39.pyc,, +gcloud/logging/__pycache__/test_metric.cpython-39.pyc,, +gcloud/logging/__pycache__/test_sink.cpython-39.pyc,, +gcloud/logging/_gax.py,sha256=vgy5vEg1Nvqe_BcV5spSFswrj6m8IyrP5zs8X10bboI,20506 +gcloud/logging/client.py,sha256=n0vjUHeQmRRSDVSgU71_di1Y65D13p6iCNxmpfxi8As,12256 +gcloud/logging/connection.py,sha256=tk8WqSKPzWRiTYSyNptU6GUmn7HBkdNh5Ao1pAHz36Q,15836 +gcloud/logging/entries.py,sha256=DPDk_pxgGa5cV_3Z0Hcje_LJ0gtCGlZxx6oWlec9CDA,5437 +gcloud/logging/logger.py,sha256=XeSTO6hwUJA9al7-uQTxWBrcA1CctMj_xuC92oq3mco,16493 +gcloud/logging/metric.py,sha256=gBfT9SCUnWNJ6fuo8bhkYELL9PLuo3xRRtKjSpPFHY8,6524 +gcloud/logging/sink.py,sha256=u6IFBVdKOEjc0HN7xNg-kfokYOvBNlZtN_iKiT8A4Ng,6725 +gcloud/logging/test__gax.py,sha256=kKzt-iCNmufpFPduwWG8J1THKF3RGUEVScWnsKySYHY,36233 +gcloud/logging/test_client.py,sha256=Go5TgP8-GzJj5YGh2XTIAmwmnO1DVv5ukar7LrmiWnU,17803 +gcloud/logging/test_connection.py,sha256=1UIT7yJA3iQn4E7SvHx-8Cw0F87r-YOp6WHEdzIdrOo,20987 +gcloud/logging/test_entries.py,sha256=VrOTglAvOBMP1Y2P2VFdhVWfR1xh234IXI_rCLLaXk8,8528 +gcloud/logging/test_logger.py,sha256=rf0gEAnMI9S7L__FMp9uKzGg7XcoWguHN49YfPmU9NE,25885 +gcloud/logging/test_metric.py,sha256=HtHu6cTnn6n8hofKQYWTWeH5oI41WsqoTXZHnSbz-YI,9664 +gcloud/logging/test_sink.py,sha256=7gbEQsKbrDDev1t0tqdaGZ55Uu-38m3o_Qj0EbTs9Xo,10039 +gcloud/monitoring/__init__.py,sha256=K81m2dMrRwTvOXnolcjPeU1EV_iOPjMPmrLM49DGsj4,1651 +gcloud/monitoring/__pycache__/__init__.cpython-39.pyc,, 
+gcloud/monitoring/__pycache__/_dataframe.cpython-39.pyc,, +gcloud/monitoring/__pycache__/client.cpython-39.pyc,, +gcloud/monitoring/__pycache__/connection.cpython-39.pyc,, +gcloud/monitoring/__pycache__/label.cpython-39.pyc,, +gcloud/monitoring/__pycache__/metric.cpython-39.pyc,, +gcloud/monitoring/__pycache__/query.cpython-39.pyc,, +gcloud/monitoring/__pycache__/resource.cpython-39.pyc,, +gcloud/monitoring/__pycache__/test__dataframe.cpython-39.pyc,, +gcloud/monitoring/__pycache__/test_client.cpython-39.pyc,, +gcloud/monitoring/__pycache__/test_connection.cpython-39.pyc,, +gcloud/monitoring/__pycache__/test_label.cpython-39.pyc,, +gcloud/monitoring/__pycache__/test_metric.cpython-39.pyc,, +gcloud/monitoring/__pycache__/test_query.cpython-39.pyc,, +gcloud/monitoring/__pycache__/test_resource.cpython-39.pyc,, +gcloud/monitoring/__pycache__/test_timeseries.cpython-39.pyc,, +gcloud/monitoring/__pycache__/timeseries.cpython-39.pyc,, +gcloud/monitoring/_dataframe.py,sha256=cCo-Jb1TwACjqDcj48QpOWZcOGWU5gnX96SGmn0pWyg,4353 +gcloud/monitoring/client.py,sha256=3yN1lCnJdLaNyKy2fuxpMBTHndxvIIzVamcT1oRoCBw,11063 +gcloud/monitoring/connection.py,sha256=sarQT_hrmkVeR7Q0ESnmV9Y-m7jjPhRX_JI97QK1w6g,1886 +gcloud/monitoring/label.py,sha256=A5slgfWt8JPL4yiQvcUQcnPF_qhSFH90KEU-aBmeVVA,2932 +gcloud/monitoring/metric.py,sha256=BmQafxntN_aSo6RgUyiwKcWAuLQhYVZBNXyI3iEl1zs,11815 +gcloud/monitoring/query.py,sha256=7DsshVSP_tY5KeQ4I7ehmjCs7v_yNsSBlRLvNOPqWGQ,24004 +gcloud/monitoring/resource.py,sha256=wTCHkczfx2Cn4jWccJ-Om7nhXzWgRz2VhwhoDRH4LeM,6054 +gcloud/monitoring/test__dataframe.py,sha256=Aw2HZZCD6xY90a5VqMsMU6cidQ70BP2YjFkFwjMEbi4,8218 +gcloud/monitoring/test_client.py,sha256=2ecFzYnAUmJordM5xutlkoLaZvhMyV1nkxxrvQk5Qo0,13277 +gcloud/monitoring/test_connection.py,sha256=zhMyWN8hguRKJmGoplKTBqOuh98aEA2Zz39SvCCZCqQ,1308 +gcloud/monitoring/test_label.py,sha256=3UnKMF-s7g1gk6KN1qtzHshu4TWla-g-V42pGqHxITw,4111 
+gcloud/monitoring/test_metric.py,sha256=egCjM4MHyptdIJNWTeF-QhnMqRbAAdZpSNLhJE6zGdI,18995 +gcloud/monitoring/test_query.py,sha256=fzLSyHF29iRabktkMr3Vp3UTH5h4Ew1KB7-ztfFuamo,23503 +gcloud/monitoring/test_resource.py,sha256=yJzvXf-R0cmxsIXFYG8D1RfhJ-9Vf746UGefkbzuTwU,12160 +gcloud/monitoring/test_timeseries.py,sha256=cmupS6FC-AgeAV7hU0ZPE2UD2xE0o_-i-TWgTHZ9vBU,6799 +gcloud/monitoring/timeseries.py,sha256=UAWgdNvDsd7F7-UbvUqz0QcIR4PN8TJd47k1btgCLFo,5095 +gcloud/pubsub/__init__.py,sha256=YCDQMbCYGpgdZ7DmbHA6EvjjP8_O4IJPjZ31Poifi54,1135 +gcloud/pubsub/__pycache__/__init__.cpython-39.pyc,, +gcloud/pubsub/__pycache__/_gax.cpython-39.pyc,, +gcloud/pubsub/__pycache__/_helpers.cpython-39.pyc,, +gcloud/pubsub/__pycache__/client.cpython-39.pyc,, +gcloud/pubsub/__pycache__/connection.cpython-39.pyc,, +gcloud/pubsub/__pycache__/iam.cpython-39.pyc,, +gcloud/pubsub/__pycache__/message.cpython-39.pyc,, +gcloud/pubsub/__pycache__/subscription.cpython-39.pyc,, +gcloud/pubsub/__pycache__/test__gax.cpython-39.pyc,, +gcloud/pubsub/__pycache__/test__helpers.cpython-39.pyc,, +gcloud/pubsub/__pycache__/test_client.cpython-39.pyc,, +gcloud/pubsub/__pycache__/test_connection.cpython-39.pyc,, +gcloud/pubsub/__pycache__/test_iam.cpython-39.pyc,, +gcloud/pubsub/__pycache__/test_message.cpython-39.pyc,, +gcloud/pubsub/__pycache__/test_subscription.cpython-39.pyc,, +gcloud/pubsub/__pycache__/test_topic.cpython-39.pyc,, +gcloud/pubsub/__pycache__/topic.cpython-39.pyc,, +gcloud/pubsub/_gax.py,sha256=Gn_Vuu_uCXdPv3VM2ryYyYJIcmYEQK4vrDlerY_nGSg,19334 +gcloud/pubsub/_helpers.py,sha256=ivl95H1N7GAPbSKmJhdin87IYBr71YQ8C1roOX49cYY,2558 +gcloud/pubsub/client.py,sha256=vHX2jhbfbv6t_pnIm68Ti3HQVKunng0OAiQ8kivNIjU,7368 +gcloud/pubsub/connection.py,sha256=RzVfalvlKzaW-4_ZRjQPHOZJWBrS6XVG4VOnlBbTKyw,20680 +gcloud/pubsub/iam.py,sha256=SZRkzFzMOE0a4gGwJMF_cvbzDqpGvrKlBvb4nlKM40I,7889 +gcloud/pubsub/message.py,sha256=WQVD3mnFc97jQNdZe20oKKVeQ0hG9CWVF4s72sEvLXI,2952 
+gcloud/pubsub/subscription.py,sha256=VccwWuOVklRBGO7t1JBmNR-qmBPrsOtnyJ_QXqpPRjQ,16627 +gcloud/pubsub/test__gax.py,sha256=21dObg28WQRWGUZmwsq6_YY-bkTHzWQPvxvkdhUpLEY,35497 +gcloud/pubsub/test__helpers.py,sha256=X8yt5j7OcJ77LAF5fPHtl9lo9nORwzlOoKcXC0F6IaY,2277 +gcloud/pubsub/test_client.py,sha256=5KPUdJcA3TOhbZz4T0ScJhkKdLXzPhsDRqackrgFD-k,10873 +gcloud/pubsub/test_connection.py,sha256=s8vX_QbtyA-D_TX_8Wah5b-X8O6w2_2RlKBOLmXAtvY,28135 +gcloud/pubsub/test_iam.py,sha256=PmG2Se9ggm9lhPPCYVP1bngB3XetFzdhGy_Yxm1Q1X4,7163 +gcloud/pubsub/test_message.py,sha256=gnB8ul3KVZYagWVdD-JNpuKyPEJm2SaN4nfDLghKOaY,4693 +gcloud/pubsub/test_subscription.py,sha256=cdRukaIuzDICMIkaQlCJjq__CvRZpK8H7khdrpqm6F8,28779 +gcloud/pubsub/test_topic.py,sha256=QbiqGja0tqtfMCzJ0hKpoLw-hitQu3jWHPQQYvd-M0k,30458 +gcloud/pubsub/topic.py,sha256=GJE9on-N3ACq8Dmd_Bciouok48_snV1GrqbL-mltsdQ,16599 +gcloud/resource_manager/__init__.py,sha256=8NgAd6edBXOXwum5Vii_PDWs-PzWkjbSU0D-FrtBOws,834 +gcloud/resource_manager/__pycache__/__init__.cpython-39.pyc,, +gcloud/resource_manager/__pycache__/client.cpython-39.pyc,, +gcloud/resource_manager/__pycache__/connection.cpython-39.pyc,, +gcloud/resource_manager/__pycache__/project.cpython-39.pyc,, +gcloud/resource_manager/__pycache__/test_client.cpython-39.pyc,, +gcloud/resource_manager/__pycache__/test_connection.cpython-39.pyc,, +gcloud/resource_manager/__pycache__/test_project.cpython-39.pyc,, +gcloud/resource_manager/client.py,sha256=xm54bK8Y_uxnru12IALWCGc7U_fcZNpAb8rl_51PgTM,6993 +gcloud/resource_manager/connection.py,sha256=NMKVod8MSCj5ihJp_C1z82l2c5Yy2NNUAZm9u54CC9g,1637 +gcloud/resource_manager/project.py,sha256=FsvYQNMTCJuSgUV2YAAkCl2xr800bzyRKvVcT-UFXVM,10463 +gcloud/resource_manager/test_client.py,sha256=S-Sy20BHrjz_pTcPPLQG8otaSAdayEaUFRi91G1Ojmw,10152 +gcloud/resource_manager/test_connection.py,sha256=rwR5_5OCw3X1rO0MHon9NTqYlM28s-4AYdVkz1mZYZc,1687 +gcloud/resource_manager/test_project.py,sha256=RU1aD1VgnDKjElEeUf6h5rVyoByAi0CouRHib1K6VnU,12207 
+gcloud/storage/__init__.py,sha256=UxnJrTi37ByQy7wOwGmwAK7B7b7jbMbSZnrVV-7oSjE,1785 +gcloud/storage/__pycache__/__init__.cpython-39.pyc,, +gcloud/storage/__pycache__/_helpers.cpython-39.pyc,, +gcloud/storage/__pycache__/acl.cpython-39.pyc,, +gcloud/storage/__pycache__/batch.cpython-39.pyc,, +gcloud/storage/__pycache__/blob.cpython-39.pyc,, +gcloud/storage/__pycache__/bucket.cpython-39.pyc,, +gcloud/storage/__pycache__/client.cpython-39.pyc,, +gcloud/storage/__pycache__/connection.cpython-39.pyc,, +gcloud/storage/__pycache__/test__helpers.cpython-39.pyc,, +gcloud/storage/__pycache__/test_acl.cpython-39.pyc,, +gcloud/storage/__pycache__/test_batch.cpython-39.pyc,, +gcloud/storage/__pycache__/test_blob.cpython-39.pyc,, +gcloud/storage/__pycache__/test_bucket.cpython-39.pyc,, +gcloud/storage/__pycache__/test_client.cpython-39.pyc,, +gcloud/storage/__pycache__/test_connection.cpython-39.pyc,, +gcloud/storage/_helpers.py,sha256=4ToN-byeaa0uEgbENdA7P0Qb03b3WWlEOo7-63d6YKk,5939 +gcloud/storage/acl.py,sha256=lRWUHdOXSx2g-oCUkdgogsOvkFZw-bUVkT1bD53fWt4,18600 +gcloud/storage/batch.py,sha256=KQHBkFn6JsFD3xbtP7QKF0fWD4DoK2YmmXGm1xr_Lh8,11217 +gcloud/storage/blob.py,sha256=6ZMNxabVgs8vjUs8u9JSU71nrbjGUuHqo6YnyNAqh2A,35912 +gcloud/storage/bucket.py,sha256=0MdoSFS3LuJItYiicOErgR-w4F8rAdIIqOrQwwun2rg,31283 +gcloud/storage/client.py,sha256=EExdc6Ci-T-Nd59uLGb6etrWizpP4pkcdxyt6gl9KMU,10772 +gcloud/storage/connection.py,sha256=aMLPzfTO6trbLyNawFUSu7OUJTsy76FJuRhb8zHZCuw,1749 +gcloud/storage/test__helpers.py,sha256=wwN0gGze7-ZDUU1F3ZDt1NyhBPz2MlxlLXSCs3A0Gjc,6915 +gcloud/storage/test_acl.py,sha256=KZ3_ONRH6I50XZY9U5kpvmcoJs6gl4LyvsNiKsIHEng,26625 +gcloud/storage/test_batch.py,sha256=3YRSQU07POVExQD_7uJo56BN4AgfhGaJIhLIf55rMOU,22872 +gcloud/storage/test_blob.py,sha256=iVX60woY6h2HwBS9vLO-b2TwG7ClWG-gON_30diApGQ,55510 +gcloud/storage/test_bucket.py,sha256=PiNfqbSOuuGHpt-St24SxAmXNx_1jWzALtm6fbQfWO0,38881 
+gcloud/storage/test_client.py,sha256=Lso-ZtHoZMSbcWSA0fBTe4zoGBzDVxK-J5oCQ0yYLzU,15598 +gcloud/storage/test_connection.py,sha256=2aChxndyd3cEEYPMEZlmmbL88QjJgEc8XyP_SsBl1rc,1712 +gcloud/streaming/__init__.py,sha256=4PIykaGSXeDZJfWNN9DdVakg-KO5l6eFYJLkhymaDP4,46 +gcloud/streaming/__pycache__/__init__.cpython-39.pyc,, +gcloud/streaming/__pycache__/buffered_stream.cpython-39.pyc,, +gcloud/streaming/__pycache__/exceptions.cpython-39.pyc,, +gcloud/streaming/__pycache__/http_wrapper.cpython-39.pyc,, +gcloud/streaming/__pycache__/stream_slice.cpython-39.pyc,, +gcloud/streaming/__pycache__/test_buffered_stream.cpython-39.pyc,, +gcloud/streaming/__pycache__/test_exceptions.cpython-39.pyc,, +gcloud/streaming/__pycache__/test_http_wrapper.cpython-39.pyc,, +gcloud/streaming/__pycache__/test_stream_slice.cpython-39.pyc,, +gcloud/streaming/__pycache__/test_transfer.cpython-39.pyc,, +gcloud/streaming/__pycache__/test_util.cpython-39.pyc,, +gcloud/streaming/__pycache__/transfer.cpython-39.pyc,, +gcloud/streaming/__pycache__/util.cpython-39.pyc,, +gcloud/streaming/buffered_stream.py,sha256=0a-B_p763Rju0RXVWKH327gBfx0dQ5pHFYDGxRT5YWY,2444 +gcloud/streaming/exceptions.py,sha256=-Iq21BXWwKSkOltiY7CuGDz-RQJ49c_frvEKRjRSF3Y,3124 +gcloud/streaming/http_wrapper.py,sha256=UREfVK8i_TH1N4ZbH39a1atw-_L01zaa-AODHXdVnvI,14654 +gcloud/streaming/stream_slice.py,sha256=G30no1AciX14RCQ6akNeqsnmqskQSC6uGC8ENzfUbqc,2260 +gcloud/streaming/test_buffered_stream.py,sha256=iBgLMVUbFn9ZGw1UqRg9HgURbMxKY1VXsSc8kL8YpKM,3822 +gcloud/streaming/test_exceptions.py,sha256=UAUMdVL5pSmnFykSMvGJ-HPF-I3Xsj7i37zKnuLR-sU,2973 +gcloud/streaming/test_http_wrapper.py,sha256=cErLeujVvBevSW7AeBGl3FlT7iaXDMYBCKcq2OU5ISY,19463 +gcloud/streaming/test_stream_slice.py,sha256=KSiwmNa4ckDjYLICCCvhKNNECPHoSKT3dOMEG-U-6rA,2308 +gcloud/streaming/test_transfer.py,sha256=3Fsio2zfPSplnymeigOPCU5BU-fh0As_FwHr3QRnjTg,76107 +gcloud/streaming/test_util.py,sha256=WFRahsFZszaUHS025NugqvL6gPeVP3vy0GB0czv5fvY,1660 
+gcloud/streaming/transfer.py,sha256=Yztn_BSoJWY3a0eDLIOxBqSk_g-yRQCqEj4TOWwLyZw,43281 +gcloud/streaming/util.py,sha256=Q5VG6hXQGCfRaltu3EuCsbYoI4uqMeGjtTt_LypUrmQ,2112 +gcloud/test__helpers.py,sha256=J1T59jKfl_1EGaijY4uFUtNzn5tKIrpu430i_LF81mM,26553 +gcloud/test_client.py,sha256=pCejBh4yEvNSJ3u8szPgwOgYH50Od2gbNPEAS1BMhlI,7833 +gcloud/test_connection.py,sha256=YRDX6F1TwRS0LRj6ZB4UmHJDTBAFZ6bDksgO98oqqKI,13522 +gcloud/test_credentials.py,sha256=9hWoFf2NCBcfCEkTjE8ggyjdWvunfHSCW6sCV5FsrSA,8629 +gcloud/test_exceptions.py,sha256=xXY43ajLOd8sXUrk75vBZJrn-6XzZKm5OQjYG2Q_HWg,3405 +gcloud/test_iterator.py,sha256=vCPhz9or1H4aY1aIad9-8j_j9x_WMvfJrdvnYTjQf9Y,9979 +gcloud/translate/__init__.py,sha256=fTHlScBG1hMljHkGPUZFfuxrWY-Fg2M9UD3CNEcOuFQ,734 +gcloud/translate/__pycache__/__init__.cpython-39.pyc,, +gcloud/translate/__pycache__/client.cpython-39.pyc,, +gcloud/translate/__pycache__/connection.cpython-39.pyc,, +gcloud/translate/__pycache__/test_client.cpython-39.pyc,, +gcloud/translate/__pycache__/test_connection.cpython-39.pyc,, +gcloud/translate/client.py,sha256=NcE4wVcPShM14oNwRrx58T1MW5FPY9Fu7f6tDozczhg,9199 +gcloud/translate/connection.py,sha256=zzN8aXNSn4zg6406BafCqlxCc5ZNMUJQFpyuP7PTcVI,1159 +gcloud/translate/test_client.py,sha256=QXlNByhij_qiulozNOFoyIPLpuUEQUzPWeq3I47sbx4,12125 +gcloud/translate/test_connection.py,sha256=rgWzRVyYCndh2R6SKEwbQIRzct9xzRYPD6Yr-yIK-ns,1866 diff --git a/env/Lib/site-packages/gcloud-0.17.0.dist-info/REQUESTED b/env/Lib/site-packages/gcloud-0.17.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/env/Lib/site-packages/gcloud-0.17.0.dist-info/WHEEL b/env/Lib/site-packages/gcloud-0.17.0.dist-info/WHEEL new file mode 100644 index 0000000..3e5d84c --- /dev/null +++ b/env/Lib/site-packages/gcloud-0.17.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/env/Lib/site-packages/gcloud-0.17.0.dist-info/top_level.txt 
b/env/Lib/site-packages/gcloud-0.17.0.dist-info/top_level.txt new file mode 100644 index 0000000..d87916c --- /dev/null +++ b/env/Lib/site-packages/gcloud-0.17.0.dist-info/top_level.txt @@ -0,0 +1 @@ +gcloud diff --git a/env/Lib/site-packages/gcloud/__init__.py b/env/Lib/site-packages/gcloud/__init__.py new file mode 100644 index 0000000..8c1c7b4 --- /dev/null +++ b/env/Lib/site-packages/gcloud/__init__.py @@ -0,0 +1,19 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""GCloud API access in idiomatic Python.""" + +from pkg_resources import get_distribution + +__version__ = get_distribution('gcloud').version diff --git a/env/Lib/site-packages/gcloud/_helpers.py b/env/Lib/site-packages/gcloud/_helpers.py new file mode 100644 index 0000000..834197a --- /dev/null +++ b/env/Lib/site-packages/gcloud/_helpers.py @@ -0,0 +1,553 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +"""Thread-local resource stack. + +This module is not part of the public API surface of `gcloud`. +""" + +import calendar +import datetime +import json +import os +import re +import socket +import sys +from threading import local as Local + +from google.protobuf import timestamp_pb2 +import six +from six.moves.http_client import HTTPConnection +from six.moves import configparser + +from gcloud.environment_vars import PROJECT +from gcloud.environment_vars import CREDENTIALS + +try: + from google.appengine.api import app_identity +except ImportError: + app_identity = None + + +_NOW = datetime.datetime.utcnow # To be replaced by tests. +_RFC3339_MICROS = '%Y-%m-%dT%H:%M:%S.%fZ' +_RFC3339_NO_FRACTION = '%Y-%m-%dT%H:%M:%S' +# datetime.strptime cannot handle nanosecond precision: parse w/ regex +_RFC3339_NANOS = re.compile(r""" + (?P + \d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2} # YYYY-MM-DDTHH:MM:SS + ) + \. # decimal point + (?P\d{1,9}) # nanoseconds, maybe truncated + Z # Zulu +""", re.VERBOSE) +DEFAULT_CONFIGURATION_PATH = '~/.config/gcloud/configurations/config_default' + + +class _LocalStack(Local): + """Manage a thread-local LIFO stack of resources. + + Intended for use in :class:`gcloud.datastore.batch.Batch.__enter__`, + :class:`gcloud.storage.batch.Batch.__enter__`, etc. + """ + def __init__(self): + super(_LocalStack, self).__init__() + self._stack = [] + + def __iter__(self): + """Iterate the stack in LIFO order. + """ + return iter(reversed(self._stack)) + + def push(self, resource): + """Push a resource onto our stack. + """ + self._stack.append(resource) + + def pop(self): + """Pop a resource from our stack. + + :raises: IndexError if the stack is empty. + :returns: the top-most resource, after removing it. 
+ """ + return self._stack.pop() + + @property + def top(self): + """Get the top-most resource + + :returns: the top-most item, or None if the stack is empty. + """ + if len(self._stack) > 0: + return self._stack[-1] + + +class _UTC(datetime.tzinfo): + """Basic UTC implementation. + + Implementing a small surface area to avoid depending on ``pytz``. + """ + + _dst = datetime.timedelta(0) + _tzname = 'UTC' + _utcoffset = _dst + + def dst(self, dt): # pylint: disable=unused-argument + """Daylight savings time offset.""" + return self._dst + + def fromutc(self, dt): + """Convert a timestamp from (naive) UTC to this timezone.""" + if dt.tzinfo is None: + return dt.replace(tzinfo=self) + return super(_UTC, self).fromutc(dt) + + def tzname(self, dt): # pylint: disable=unused-argument + """Get the name of this timezone.""" + return self._tzname + + def utcoffset(self, dt): # pylint: disable=unused-argument + """UTC offset of this timezone.""" + return self._utcoffset + + def __repr__(self): + return '<%s>' % (self._tzname,) + + def __str__(self): + return self._tzname + + +def _ensure_tuple_or_list(arg_name, tuple_or_list): + """Ensures an input is a tuple or list. + + This effectively reduces the iterable types allowed to a very short + whitelist: list and tuple. + + :type arg_name: str + :param arg_name: Name of argument to use in error message. + + :type tuple_or_list: sequence of str + :param tuple_or_list: Sequence to be verified. + + :rtype: list of str + :returns: The ``tuple_or_list`` passed in cast to a ``list``. + :raises: class:`TypeError` if the ``tuple_or_list`` is not a tuple or + list. + """ + if not isinstance(tuple_or_list, (tuple, list)): + raise TypeError('Expected %s to be a tuple or list. ' + 'Received %r' % (arg_name, tuple_or_list)) + return list(tuple_or_list) + + +def _app_engine_id(): + """Gets the App Engine application ID if it can be inferred. 
+ + :rtype: str or ``NoneType`` + :returns: App Engine application ID if running in App Engine, + else ``None``. + """ + if app_identity is None: + return None + + return app_identity.get_application_id() + + +def _file_project_id(): + """Gets the project id from the credentials file if one is available. + + :rtype: str or ``NoneType`` + :returns: Project-ID from JSON credentials file if value exists, + else ``None``. + """ + credentials_file_path = os.getenv(CREDENTIALS) + if credentials_file_path: + with open(credentials_file_path, 'rb') as credentials_file: + credentials_json = credentials_file.read() + credentials = json.loads(credentials_json.decode('utf-8')) + return credentials.get('project_id') + + +def _default_service_project_id(): + """Retrieves the project ID from the gcloud command line tool. + + Files that cannot be opened with configparser are silently ignored; this is + designed so that you can specify a list of potential configuration file + locations. + + :rtype: str or ``NoneType`` + :returns: Project-ID from default configuration file else ``None`` + """ + search_paths = [] + # Workaround for GAE not supporting pwd which is used by expanduser. + try: + search_paths.append(os.path.expanduser(DEFAULT_CONFIGURATION_PATH)) + except ImportError: + pass + win32_config_path = os.path.join(os.getenv('APPDATA', ''), + 'gcloud', 'configurations', + 'config_default') + search_paths.append(win32_config_path) + config = configparser.RawConfigParser() + config.read(search_paths) + + if config.has_section('core'): + return config.get('core', 'project') + + +def _compute_engine_id(): + """Gets the Compute Engine project ID if it can be inferred. + + Uses 169.254.169.254 for the metadata server to avoid request + latency from DNS lookup. + + See https://cloud.google.com/compute/docs/metadata#metadataserver + for information about this IP address. (This IP is also used for + Amazon EC2 instances, so the metadata flavor is crucial.) 
+ + See https://github.com/google/oauth2client/issues/93 for context about + DNS latency. + + :rtype: str or ``NoneType`` + :returns: Compute Engine project ID if the metadata service is available, + else ``None``. + """ + host = '169.254.169.254' + uri_path = '/computeMetadata/v1/project/project-id' + headers = {'Metadata-Flavor': 'Google'} + connection = HTTPConnection(host, timeout=0.1) + + try: + connection.request('GET', uri_path, headers=headers) + response = connection.getresponse() + if response.status == 200: + return response.read() + except socket.error: # socket.timeout or socket.error(64, 'Host is down') + pass + finally: + connection.close() + + +def _get_production_project(): + """Gets the production project if it can be inferred.""" + return os.getenv(PROJECT) + + +def _determine_default_project(project=None): + """Determine default project ID explicitly or implicitly as fall-back. + + In implicit case, supports three environments. In order of precedence, the + implicit environments are: + + * GCLOUD_PROJECT environment variable + * GOOGLE_APPLICATION_CREDENTIALS JSON file + * Get default service project from + ``$ gcloud beta auth application-default login`` + * Google App Engine application ID + * Google Compute Engine project ID (from metadata server) + + :type project: str + :param project: Optional. The project name to use as default. + + :rtype: str or ``NoneType`` + :returns: Default project if it can be determined. + """ + if project is None: + project = _get_production_project() + + if project is None: + project = _file_project_id() + + if project is None: + project = _default_service_project_id() + + if project is None: + project = _app_engine_id() + + if project is None: + project = _compute_engine_id() + + return project + + +def _millis(when): + """Convert a zone-aware datetime to integer milliseconds. 
+ + :type when: :class:`datetime.datetime` + :param when: the datetime to convert + + :rtype: int + :returns: milliseconds since epoch for ``when`` + """ + micros = _microseconds_from_datetime(when) + return micros // 1000 + + +def _datetime_from_microseconds(value): + """Convert timestamp to datetime, assuming UTC. + + :type value: float + :param value: The timestamp to convert + + :rtype: :class:`datetime.datetime` + :returns: The datetime object created from the value. + """ + return _EPOCH + datetime.timedelta(microseconds=value) + + +def _microseconds_from_datetime(value): + """Convert non-none datetime to microseconds. + + :type value: :class:`datetime.datetime` + :param value: The timestamp to convert. + + :rtype: int + :returns: The timestamp, in microseconds. + """ + if not value.tzinfo: + value = value.replace(tzinfo=UTC) + # Regardless of what timezone is on the value, convert it to UTC. + value = value.astimezone(UTC) + # Convert the datetime to a microsecond timestamp. + return int(calendar.timegm(value.timetuple()) * 1e6) + value.microsecond + + +def _millis_from_datetime(value): + """Convert non-none datetime to timestamp, assuming UTC. + + :type value: :class:`datetime.datetime`, or None + :param value: the timestamp + + :rtype: int, or ``NoneType`` + :returns: the timestamp, in milliseconds, or None + """ + if value is not None: + return _millis(value) + + +def _total_seconds_backport(offset): + """Backport of timedelta.total_seconds() from python 2.7+. + + :type offset: :class:`datetime.timedelta` + :param offset: A timedelta object. + + :rtype: int + :returns: The total seconds (including microseconds) in the + duration. + """ + seconds = offset.days * 24 * 60 * 60 + offset.seconds + return seconds + offset.microseconds * 1e-6 + + +def _total_seconds(offset): + """Version independent total seconds for a time delta. + + :type offset: :class:`datetime.timedelta` + :param offset: A timedelta object. 
+ + :rtype: int + :returns: The total seconds (including microseconds) in the + duration. + """ + if sys.version_info[:2] < (2, 7): # pragma: NO COVER Python 2.6 + return _total_seconds_backport(offset) + else: + return offset.total_seconds() + + +def _rfc3339_to_datetime(dt_str): + """Convert a microsecond-precision timetamp to a native datetime. + + :type dt_str: str + :param dt_str: The string to convert. + + :rtype: :class:`datetime.datetime` + :returns: The datetime object created from the string. + """ + return datetime.datetime.strptime( + dt_str, _RFC3339_MICROS).replace(tzinfo=UTC) + + +def _rfc3339_nanos_to_datetime(dt_str): + """Convert a nanosecond-precision timestamp to a native datetime. + + .. note:: + + Python datetimes do not support nanosecond precision; this function + therefore truncates such values to microseconds. + + :type dt_str: str + :param dt_str: The string to convert. + + :rtype: :class:`datetime.datetime` + :returns: The datetime object created from the string. + """ + with_nanos = _RFC3339_NANOS.match(dt_str) + if with_nanos is None: + raise ValueError( + 'Timestamp: %r, does not match pattern: %r' % ( + dt_str, _RFC3339_NANOS.pattern)) + bare_seconds = datetime.datetime.strptime( + with_nanos.group('no_fraction'), _RFC3339_NO_FRACTION) + fraction = with_nanos.group('nanos') + scale = 9 - len(fraction) + nanos = int(fraction) * (10 ** scale) + micros = nanos // 1000 + return bare_seconds.replace(microsecond=micros, tzinfo=UTC) + + +def _datetime_to_rfc3339(value): + """Convert a native timestamp to a string. + + :type value: :class:`datetime.datetime` + :param value: The datetime object to be converted to a string. + + :rtype: str + :returns: The string representing the datetime stamp. + """ + return value.strftime(_RFC3339_MICROS) + + +def _to_bytes(value, encoding='ascii'): + """Converts a string value to bytes, if necessary. 
+ + Unfortunately, ``six.b`` is insufficient for this task since in + Python2 it does not modify ``unicode`` objects. + + :type value: str / bytes or unicode + :param value: The string/bytes value to be converted. + + :type encoding: str + :param encoding: The encoding to use to convert unicode to bytes. Defaults + to "ascii", which will not allow any characters from + ordinals larger than 127. Other useful values are + "latin-1", which which will only allows byte ordinals + (up to 255) and "utf-8", which will encode any unicode + that needs to be. + + :rtype: str / bytes + :returns: The original value converted to bytes (if unicode) or as passed + in if it started out as bytes. + :raises: :class:`TypeError ` if the value + could not be converted to bytes. + """ + result = (value.encode(encoding) + if isinstance(value, six.text_type) else value) + if isinstance(result, six.binary_type): + return result + else: + raise TypeError('%r could not be converted to bytes' % (value,)) + + +def _bytes_to_unicode(value): + """Converts bytes to a unicode value, if necessary. + + :type value: bytes + :param value: bytes value to attempt string conversion on. + + :rtype: str + :returns: The original value converted to unicode (if bytes) or as passed + in if it started out as unicode. + + :raises: :class:`ValueError` if the value could not be converted to + unicode. + """ + result = (value.decode('utf-8') + if isinstance(value, six.binary_type) else value) + if isinstance(result, six.text_type): + return result + else: + raise ValueError('%r could not be converted to unicode' % (value,)) + + +def _pb_timestamp_to_datetime(timestamp): + """Convert a Timestamp protobuf to a datetime object. + + :type timestamp: :class:`google.protobuf.timestamp_pb2.Timestamp` + :param timestamp: A Google returned timestamp protobuf. + + :rtype: :class:`datetime.datetime` + :returns: A UTC datetime object converted from a protobuf timestamp. 
+ """ + return ( + _EPOCH + + datetime.timedelta( + seconds=timestamp.seconds, + microseconds=(timestamp.nanos / 1000.0), + ) + ) + + +def _datetime_to_pb_timestamp(when): + """Convert a datetime object to a Timestamp protobuf. + + :type when: :class:`datetime.datetime` + :param when: the datetime to convert + + :rtype: :class:`google.protobuf.timestamp_pb2.Timestamp` + :returns: A timestamp protobuf corresponding to the object. + """ + ms_value = _microseconds_from_datetime(when) + seconds, micros = divmod(ms_value, 10**6) + nanos = micros * 10**3 + return timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos) + + +def _name_from_project_path(path, project, template): + """Validate a URI path and get the leaf object's name. + + :type path: str + :param path: URI path containing the name. + + :type project: str or NoneType + :param project: The project associated with the request. It is + included for validation purposes. If passed as None, + disables validation. + + :type template: str + :param template: Template regex describing the expected form of the path. + The regex must have two named groups, 'project' and + 'name'. + + :rtype: str + :returns: Name parsed from ``path``. + :raises: :class:`ValueError` if the ``path`` is ill-formed or if + the project from the ``path`` does not agree with the + ``project`` passed in. + """ + if isinstance(template, str): + template = re.compile(template) + + match = template.match(path) + + if not match: + raise ValueError('path "%s" did not match expected pattern "%s"' % ( + path, template.pattern,)) + + if project is not None: + found_project = match.group('project') + if found_project != project: + raise ValueError( + 'Project from client (%s) should agree with ' + 'project from resource(%s).' % (project, found_project)) + + return match.group('name') + + +try: + from pytz import UTC # pylint: disable=unused-import,wrong-import-order +except ImportError: + UTC = _UTC() # Singleton instance to be used throughout. 
+ +# Need to define _EPOCH at the end of module since it relies on UTC. +_EPOCH = datetime.datetime.utcfromtimestamp(0).replace(tzinfo=UTC) diff --git a/env/Lib/site-packages/gcloud/_testing.py b/env/Lib/site-packages/gcloud/_testing.py new file mode 100644 index 0000000..0a440e8 --- /dev/null +++ b/env/Lib/site-packages/gcloud/_testing.py @@ -0,0 +1,61 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Shared testing utilities.""" + + +class _Monkey(object): + # context-manager for replacing module names in the scope of a test. 
+ + def __init__(self, module, **kw): + self.module = module + if len(kw) == 0: # pragma: NO COVER + raise ValueError('_Monkey was used with nothing to monkey-patch') + self.to_restore = dict([(key, getattr(module, key)) for key in kw]) + for key, value in kw.items(): + setattr(module, key, value) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + for key, value in self.to_restore.items(): + setattr(self.module, key, value) + + +class _NamedTemporaryFile(object): + + def __init__(self, suffix=''): + import os + import tempfile + filehandle, self.name = tempfile.mkstemp(suffix=suffix) + os.close(filehandle) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + import os + os.remove(self.name) + + +class _GAXPageIterator(object): + + def __init__(self, items, page_token): + self._items = items + self.page_token = page_token + + def next(self): + items, self._items = self._items, None + return items diff --git a/env/Lib/site-packages/gcloud/bigquery/__init__.py b/env/Lib/site-packages/gcloud/bigquery/__init__.py new file mode 100644 index 0000000..9007b9f --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigquery/__init__.py @@ -0,0 +1,32 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud BigQuery API wrapper. 
+ +The main concepts with this API are: + +- :class:`gcloud.bigquery.dataset.Dataset` represents an collection of tables. + +- :class:`gcloud.bigquery.table.Table` represents a single "relation". +""" + +from gcloud.bigquery.client import Client +from gcloud.bigquery.connection import Connection +from gcloud.bigquery.dataset import AccessGrant +from gcloud.bigquery.dataset import Dataset +from gcloud.bigquery.table import SchemaField +from gcloud.bigquery.table import Table + + +SCOPE = Connection.SCOPE diff --git a/env/Lib/site-packages/gcloud/bigquery/_helpers.py b/env/Lib/site-packages/gcloud/bigquery/_helpers.py new file mode 100644 index 0000000..afb4d94 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigquery/_helpers.py @@ -0,0 +1,166 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Shared elper functions for BigQuery API classes.""" + +from gcloud._helpers import _datetime_from_microseconds + + +def _not_null(value, field): + """Check whether 'value' should be coerced to 'field' type.""" + return value is not None or field.mode != 'NULLABLE' + + +def _int_from_json(value, field): + """Coerce 'value' to an int, if set or not nullable.""" + if _not_null(value, field): + return int(value) + + +def _float_from_json(value, field): + """Coerce 'value' to a float, if set or not nullable.""" + if _not_null(value, field): + return float(value) + + +def _bool_from_json(value, field): + """Coerce 'value' to a bool, if set or not nullable.""" + if _not_null(value, field): + return value.lower() in ['t', 'true', '1'] + + +def _datetime_from_json(value, field): + """Coerce 'value' to a datetime, if set or not nullable.""" + if _not_null(value, field): + # value will be a float in seconds, to microsecond precision, in UTC. + return _datetime_from_microseconds(1e6 * float(value)) + + +def _record_from_json(value, field): + """Coerce 'value' to a mapping, if set or not nullable.""" + if _not_null(value, field): + record = {} + for subfield, cell in zip(field.fields, value['f']): + converter = _CELLDATA_FROM_JSON[subfield.field_type] + if field.mode == 'REPEATED': + value = [converter(item, field) for item in cell['v']] + else: + value = converter(cell['v'], field) + record[subfield.name] = value + return record + + +def _string_from_json(value, _): + """NOOP string -> string coercion""" + return value + + +_CELLDATA_FROM_JSON = { + 'INTEGER': _int_from_json, + 'FLOAT': _float_from_json, + 'BOOLEAN': _bool_from_json, + 'TIMESTAMP': _datetime_from_json, + 'RECORD': _record_from_json, + 'STRING': _string_from_json, +} + + +def _rows_from_json(rows, schema): + """Convert JSON row data to rows w/ appropriate types.""" + rows_data = [] + for row in rows: + row_data = [] + for field, cell in zip(schema, row['f']): + converter = 
_CELLDATA_FROM_JSON[field.field_type] + if field.mode == 'REPEATED': + row_data.append([converter(item, field) + for item in cell['v']]) + else: + row_data.append(converter(cell['v'], field)) + rows_data.append(tuple(row_data)) + return rows_data + + +class _ConfigurationProperty(object): + """Base property implementation. + + Values will be stored on a `_configuration` helper attribute of the + property's job instance. + + :type name: string + :param name: name of the property + """ + + def __init__(self, name): + self.name = name + self._backing_name = '_%s' % (self.name,) + + def __get__(self, instance, owner): + """Descriptor protocal: accesstor""" + if instance is None: + return self + return getattr(instance._configuration, self._backing_name) + + def _validate(self, value): + """Subclasses override to impose validation policy.""" + pass + + def __set__(self, instance, value): + """Descriptor protocal: mutator""" + self._validate(value) + setattr(instance._configuration, self._backing_name, value) + + def __delete__(self, instance): + """Descriptor protocal: deleter""" + delattr(instance._configuration, self._backing_name) + + +class _TypedProperty(_ConfigurationProperty): + """Property implementation: validates based on value type. + + :type name: string + :param name: name of the property + + :type property_type: type or sequence of types + :param property_type: type to be validated + """ + def __init__(self, name, property_type): + super(_TypedProperty, self).__init__(name) + self.property_type = property_type + + def _validate(self, value): + """Ensure that 'value' is of the appropriate type. + + :raises: ValueError on a type mismatch. + """ + if not isinstance(value, self.property_type): + raise ValueError('Required type: %s' % (self.property_type,)) + + +class _EnumProperty(_ConfigurationProperty): + """Psedo-enumeration class. + + Subclasses must define ``ALLOWED`` as a class-level constant: it must + be a sequence of strings. 
+ + :type name: string + :param name: name of the property + """ + def _validate(self, value): + """Check that ``value`` is one of the allowed values. + + :raises: ValueError if value is not allowed. + """ + if value not in self.ALLOWED: + raise ValueError('Pass one of: %s' ', '.join(self.ALLOWED)) diff --git a/env/Lib/site-packages/gcloud/bigquery/client.py b/env/Lib/site-packages/gcloud/bigquery/client.py new file mode 100644 index 0000000..0d429ec --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigquery/client.py @@ -0,0 +1,275 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Client for interacting with the Google BigQuery API.""" + + +from gcloud.client import JSONClient +from gcloud.bigquery.connection import Connection +from gcloud.bigquery.dataset import Dataset +from gcloud.bigquery.job import CopyJob +from gcloud.bigquery.job import ExtractTableToStorageJob +from gcloud.bigquery.job import LoadTableFromStorageJob +from gcloud.bigquery.job import QueryJob +from gcloud.bigquery.query import QueryResults + + +class Client(JSONClient): + """Client to bundle configuration needed for API requests. + + :type project: str + :param project: the project which the client acts on behalf of. Will be + passed when creating a dataset / job. If not passed, + falls back to the default inferred from the environment. 
+ + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :param credentials: The OAuth2 Credentials to use for the connection + owned by this client. If not passed (and if no ``http`` + object is passed), falls back to the default inferred + from the environment. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: An optional HTTP object to make requests. If not passed, an + ``http`` object is created that is bound to the + ``credentials`` for the current object. + """ + + _connection_class = Connection + + def list_datasets(self, include_all=False, max_results=None, + page_token=None): + """List datasets for the project associated with this client. + + See: + https://cloud.google.com/bigquery/docs/reference/v2/datasets/list + + :type include_all: boolean + :param include_all: True if results include hidden datasets. + + :type max_results: int + :param max_results: maximum number of datasets to return, If not + passed, defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of datasets. If + not passed, the API will return the first page of + datasets. + + :rtype: tuple, (list, str) + :returns: list of :class:`gcloud.bigquery.dataset.Dataset`, plus a + "next page token" string: if the token is not None, + indicates that more datasets can be retrieved with another + call (pass that value as ``page_token``). 
+ """ + params = {} + + if include_all: + params['all'] = True + + if max_results is not None: + params['maxResults'] = max_results + + if page_token is not None: + params['pageToken'] = page_token + + path = '/projects/%s/datasets' % (self.project,) + resp = self.connection.api_request(method='GET', path=path, + query_params=params) + datasets = [Dataset.from_api_repr(resource, self) + for resource in resp.get('datasets', ())] + return datasets, resp.get('nextPageToken') + + def dataset(self, dataset_name): + """Construct a dataset bound to this client. + + :type dataset_name: str + :param dataset_name: Name of the dataset. + + :rtype: :class:`gcloud.bigquery.dataset.Dataset` + :returns: a new ``Dataset`` instance + """ + return Dataset(dataset_name, client=self) + + def job_from_resource(self, resource): + """Detect correct job type from resource and instantiate. + + :type resource: dict + :param resource: one job resource from API response + + :rtype: One of: + :class:`gcloud.bigquery.job.LoadTableFromStorageJob`, + :class:`gcloud.bigquery.job.CopyJob`, + :class:`gcloud.bigquery.job.ExtractTableToStorageJob`, + :class:`gcloud.bigquery.job.QueryJob`, + :class:`gcloud.bigquery.job.RunSyncQueryJob` + :returns: the job instance, constructed via the resource + """ + config = resource['configuration'] + if 'load' in config: + return LoadTableFromStorageJob.from_api_repr(resource, self) + elif 'copy' in config: + return CopyJob.from_api_repr(resource, self) + elif 'extract' in config: + return ExtractTableToStorageJob.from_api_repr(resource, self) + elif 'query' in config: + return QueryJob.from_api_repr(resource, self) + raise ValueError('Cannot parse job resource') + + def list_jobs(self, max_results=None, page_token=None, all_users=None, + state_filter=None): + """List jobs for the project associated with this client. 
+ + See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/list + + :type max_results: int + :param max_results: maximum number of jobs to return, If not + passed, defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of jobs. If + not passed, the API will return the first page of + jobs. + + :type all_users: boolean + :param all_users: if true, include jobs owned by all users in the + project. + + :type state_filter: str + :param state_filter: if passed, include only jobs matching the given + state. One of + + * ``"done"`` + * ``"pending"`` + * ``"running"`` + + :rtype: tuple, (list, str) + :returns: list of job instances, plus a "next page token" string: + if the token is not ``None``, indicates that more jobs can be + retrieved with another call, passing that value as + ``page_token``). + """ + params = {'projection': 'full'} + + if max_results is not None: + params['maxResults'] = max_results + + if page_token is not None: + params['pageToken'] = page_token + + if all_users is not None: + params['allUsers'] = all_users + + if state_filter is not None: + params['stateFilter'] = state_filter + + path = '/projects/%s/jobs' % (self.project,) + resp = self.connection.api_request(method='GET', path=path, + query_params=params) + jobs = [self.job_from_resource(resource) for resource in resp['jobs']] + return jobs, resp.get('nextPageToken') + + def load_table_from_storage(self, job_name, destination, *source_uris): + """Construct a job for loading data into a table from CloudStorage. + + See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load + + :type job_name: str + :param job_name: Name of the job. + + :type destination: :class:`gcloud.bigquery.table.Table` + :param destination: Table into which data is to be loaded. + + :type source_uris: sequence of string + :param source_uris: URIs of data files to be loaded; in format + ``gs://<bucket_name>/<object_name_or_glob>``. 
+ + :rtype: :class:`gcloud.bigquery.job.LoadTableFromStorageJob` + :returns: a new ``LoadTableFromStorageJob`` instance + """ + return LoadTableFromStorageJob(job_name, destination, source_uris, + client=self) + + def copy_table(self, job_name, destination, *sources): + """Construct a job for copying one or more tables into another table. + + See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy + + :type job_name: str + :param job_name: Name of the job. + + :type destination: :class:`gcloud.bigquery.table.Table` + :param destination: Table into which data is to be copied. + + :type sources: sequence of :class:`gcloud.bigquery.table.Table` + :param sources: tables to be copied. + + :rtype: :class:`gcloud.bigquery.job.CopyJob` + :returns: a new ``CopyJob`` instance + """ + return CopyJob(job_name, destination, sources, client=self) + + def extract_table_to_storage(self, job_name, source, *destination_uris): + """Construct a job for extracting a table into Cloud Storage files. + + See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extract + + :type job_name: str + :param job_name: Name of the job. + + :type source: :class:`gcloud.bigquery.table.Table` + :param source: table to be extracted. + + :type destination_uris: sequence of string + :param destination_uris: URIs of CloudStorage file(s) into which + table data is to be extracted; in format + ``gs://<bucket_name>/<object_name_or_glob>``. + + :rtype: :class:`gcloud.bigquery.job.ExtractTableToStorageJob` + :returns: a new ``ExtractTableToStorageJob`` instance + """ + return ExtractTableToStorageJob(job_name, source, destination_uris, + client=self) + + def run_async_query(self, job_name, query): + """Construct a job for running a SQL query asynchronously. + + See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query + + :type job_name: str + :param job_name: Name of the job. 
+ + :type query: str + :param query: SQL query to be executed + + :rtype: :class:`gcloud.bigquery.job.QueryJob` + :returns: a new ``QueryJob`` instance + """ + return QueryJob(job_name, query, client=self) + + def run_sync_query(self, query): + """Run a SQL query synchronously. + + :type query: str + :param query: SQL query to be executed + + :rtype: :class:`gcloud.bigquery.query.QueryResults` + :returns: a new ``QueryResults`` instance + """ + return QueryResults(query, client=self) diff --git a/env/Lib/site-packages/gcloud/bigquery/connection.py b/env/Lib/site-packages/gcloud/bigquery/connection.py new file mode 100644 index 0000000..6195c1a --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigquery/connection.py @@ -0,0 +1,34 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Create / interact with gcloud bigquery connections.""" + +from gcloud import connection as base_connection + + +class Connection(base_connection.JSONConnection): + """A connection to Google Cloud BigQuery via the JSON REST API.""" + + API_BASE_URL = 'https://www.googleapis.com' + """The base of the API call URL.""" + + API_VERSION = 'v2' + """The version of the API, used in building the API call's URL.""" + + API_URL_TEMPLATE = '{api_base_url}/bigquery/{api_version}{path}' + """A template for the URL of a particular API call.""" + + SCOPE = ('https://www.googleapis.com/auth/bigquery', + 'https://www.googleapis.com/auth/cloud-platform') + """The scopes required for authenticating as a Cloud BigQuery consumer.""" diff --git a/env/Lib/site-packages/gcloud/bigquery/dataset.py b/env/Lib/site-packages/gcloud/bigquery/dataset.py new file mode 100644 index 0000000..397dbe2 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigquery/dataset.py @@ -0,0 +1,577 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define API Datasets.""" +import six + +from gcloud._helpers import _datetime_from_microseconds +from gcloud.exceptions import NotFound +from gcloud.bigquery.table import Table + + +class AccessGrant(object): + """Represent grant of an access role to an entity. + + Every entry in the access list will have exactly one of + ``userByEmail``, ``groupByEmail``, ``domain``, ``specialGroup`` or + ``view`` set. 
And if anything but ``view`` is set, it'll also have a + ``role`` specified. ``role`` is omitted for a ``view``, since + ``view`` s are always read-only. + + See https://cloud.google.com/bigquery/docs/reference/v2/datasets. + + :type role: string + :param role: Role granted to the entity. One of + + * ``'OWNER'`` + * ``'WRITER'`` + * ``'READER'`` + + May also be ``None`` if the ``entity_type`` is ``view``. + + :type entity_type: string + :param entity_type: Type of entity being granted the role. One of + :attr:`ENTITY_TYPES`. + + :type entity_id: string + :param entity_id: ID of entity being granted the role. + + :raises: :class:`ValueError` if the ``entity_type`` is not among + :attr:`ENTITY_TYPES`, or if a ``view`` has ``role`` set or + a non ``view`` **does not** have a ``role`` set. + """ + + ENTITY_TYPES = frozenset(['userByEmail', 'groupByEmail', 'domain', + 'specialGroup', 'view']) + """Allowed entity types.""" + + def __init__(self, role, entity_type, entity_id): + if entity_type not in self.ENTITY_TYPES: + message = 'Entity type %r not among: %s' % ( + entity_type, ', '.join(self.ENTITY_TYPES)) + raise ValueError(message) + if entity_type == 'view': + if role is not None: + raise ValueError('Role must be None for a view. Received ' + 'role: %r' % (role,)) + else: + if role is None: + raise ValueError('Role must be set for entity ' + 'type %r' % (entity_type,)) + + self.role = role + self.entity_type = entity_type + self.entity_id = entity_id + + def __eq__(self, other): + return ( + self.role == other.role and + self.entity_type == other.entity_type and + self.entity_id == other.entity_id) + + def __repr__(self): + return '<AccessGrant (role=%s, entity=%s:%s)>' % ( + self.role, self.entity_type, self.entity_id) + + +class Dataset(object): + """Datasets are containers for tables. 
+ + See: + https://cloud.google.com/bigquery/docs/reference/v2/datasets + + :type name: string + :param name: the name of the dataset + + :type client: :class:`gcloud.bigquery.client.Client` + :param client: A client which holds credentials and project configuration + for the dataset (which requires a project). + + :type access_grants: list of :class:`AccessGrant` + :param access_grants: roles granted to entities for this dataset + """ + + _access_grants = None + + def __init__(self, name, client, access_grants=()): + self.name = name + self._client = client + self._properties = {} + # Let the @property do validation. + self.access_grants = access_grants + + @property + def project(self): + """Project bound to the dataset. + + :rtype: string + :returns: the project (derived from the client). + """ + return self._client.project + + @property + def path(self): + """URL path for the dataset's APIs. + + :rtype: string + :returns: the path based on project and dataset name. + """ + return '/projects/%s/datasets/%s' % (self.project, self.name) + + @property + def access_grants(self): + """Dataset's access grants. + + :rtype: list of :class:`AccessGrant` + :returns: roles granted to entities for this dataset + """ + return list(self._access_grants) + + @access_grants.setter + def access_grants(self, value): + """Update dataset's access grants + + :type value: list of :class:`AccessGrant` + :param value: roles granted to entities for this dataset + + :raises: TypeError if 'value' is not a sequence, or ValueError if + any item in the sequence is not an AccessGrant + """ + if not all(isinstance(field, AccessGrant) for field in value): + raise ValueError('Values must be AccessGrant instances') + self._access_grants = tuple(value) + + @property + def created(self): + """Datetime at which the dataset was created. + + :rtype: ``datetime.datetime``, or ``NoneType`` + :returns: the creation time (None until set from the server). 
+ """ + creation_time = self._properties.get('creationTime') + if creation_time is not None: + # creation_time will be in milliseconds. + return _datetime_from_microseconds(1000.0 * creation_time) + + @property + def dataset_id(self): + """ID for the dataset resource. + + :rtype: string, or ``NoneType`` + :returns: the ID (None until set from the server). + """ + return self._properties.get('id') + + @property + def etag(self): + """ETag for the dataset resource. + + :rtype: string, or ``NoneType`` + :returns: the ETag (None until set from the server). + """ + return self._properties.get('etag') + + @property + def modified(self): + """Datetime at which the dataset was last modified. + + :rtype: ``datetime.datetime``, or ``NoneType`` + :returns: the modification time (None until set from the server). + """ + modified_time = self._properties.get('lastModifiedTime') + if modified_time is not None: + # modified_time will be in milliseconds. + return _datetime_from_microseconds(1000.0 * modified_time) + + @property + def self_link(self): + """URL for the dataset resource. + + :rtype: string, or ``NoneType`` + :returns: the URL (None until set from the server). + """ + return self._properties.get('selfLink') + + @property + def default_table_expiration_ms(self): + """Default expiration time for tables in the dataset. + + :rtype: integer, or ``NoneType`` + :returns: The time in milliseconds, or None (the default). + """ + return self._properties.get('defaultTableExpirationMs') + + @default_table_expiration_ms.setter + def default_table_expiration_ms(self, value): + """Update default expiration time for tables in the dataset. + + :type value: integer, or ``NoneType`` + :param value: new default time, in milliseconds + + :raises: ValueError for invalid value types. 
+ """ + if not isinstance(value, six.integer_types) and value is not None: + raise ValueError("Pass an integer, or None") + self._properties['defaultTableExpirationMs'] = value + + @property + def description(self): + """Description of the dataset. + + :rtype: string, or ``NoneType`` + :returns: The description as set by the user, or None (the default). + """ + return self._properties.get('description') + + @description.setter + def description(self, value): + """Update description of the dataset. + + :type value: string, or ``NoneType`` + :param value: new description + + :raises: ValueError for invalid value types. + """ + if not isinstance(value, six.string_types) and value is not None: + raise ValueError("Pass a string, or None") + self._properties['description'] = value + + @property + def friendly_name(self): + """Title of the dataset. + + :rtype: string, or ``NoneType`` + :returns: The name as set by the user, or None (the default). + """ + return self._properties.get('friendlyName') + + @friendly_name.setter + def friendly_name(self, value): + """Update title of the dataset. + + :type value: string, or ``NoneType`` + :param value: new title + + :raises: ValueError for invalid value types. + """ + if not isinstance(value, six.string_types) and value is not None: + raise ValueError("Pass a string, or None") + self._properties['friendlyName'] = value + + @property + def location(self): + """Location in which the dataset is hosted. + + :rtype: string, or ``NoneType`` + :returns: The location as set by the user, or None (the default). + """ + return self._properties.get('location') + + @location.setter + def location(self, value): + """Update location in which the dataset is hosted. + + :type value: string, or ``NoneType`` + :param value: new location + + :raises: ValueError for invalid value types. 
+ """ + if not isinstance(value, six.string_types) and value is not None: + raise ValueError("Pass a string, or None") + self._properties['location'] = value + + @classmethod + def from_api_repr(cls, resource, client): + """Factory: construct a dataset given its API representation + + :type resource: dict + :param resource: dataset resource representation returned from the API + + :type client: :class:`gcloud.bigquery.client.Client` + :param client: Client which holds credentials and project + configuration for the dataset. + + :rtype: :class:`gcloud.bigquery.dataset.Dataset` + :returns: Dataset parsed from ``resource``. + """ + if ('datasetReference' not in resource or + 'datasetId' not in resource['datasetReference']): + raise KeyError('Resource lacks required identity information:' + '["datasetReference"]["datasetId"]') + name = resource['datasetReference']['datasetId'] + dataset = cls(name, client=client) + dataset._set_properties(resource) + return dataset + + def _require_client(self, client): + """Check client or verify over-ride. + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. + + :rtype: :class:`gcloud.bigquery.client.Client` + :returns: The client passed in or the currently bound client. + """ + if client is None: + client = self._client + return client + + @staticmethod + def _parse_access_grants(access): + """Parse a resource fragment into a set of access grants. + + ``role`` augments the entity type and present **unless** the entity + type is ``view``. + + :type access: list of mappings + :param access: each mapping represents a single access grant + + :rtype: list of :class:`AccessGrant` + :returns: a list of parsed grants + :raises: :class:`ValueError` if a grant in ``access`` has more keys + than ``role`` and one additional key. 
+ """ + result = [] + for grant in access: + grant = grant.copy() + role = grant.pop('role', None) + entity_type, entity_id = grant.popitem() + if len(grant) != 0: + raise ValueError('Grant has unexpected keys remaining.', grant) + result.append( + AccessGrant(role, entity_type, entity_id)) + return result + + def _set_properties(self, api_response): + """Update properties from resource in body of ``api_response`` + + :type api_response: httplib2.Response + :param api_response: response returned from an API call + """ + self._properties.clear() + cleaned = api_response.copy() + access = cleaned.pop('access', ()) + self.access_grants = self._parse_access_grants(access) + if 'creationTime' in cleaned: + cleaned['creationTime'] = float(cleaned['creationTime']) + if 'lastModifiedTime' in cleaned: + cleaned['lastModifiedTime'] = float(cleaned['lastModifiedTime']) + if 'defaultTableExpirationMs' in cleaned: + cleaned['defaultTableExpirationMs'] = int( + cleaned['defaultTableExpirationMs']) + self._properties.update(cleaned) + + def _build_access_resource(self): + """Generate a resource fragment for dataset's access grants.""" + result = [] + for grant in self.access_grants: + info = {grant.entity_type: grant.entity_id} + if grant.role is not None: + info['role'] = grant.role + result.append(info) + return result + + def _build_resource(self): + """Generate a resource for ``create`` or ``update``.""" + resource = { + 'datasetReference': { + 'projectId': self.project, 'datasetId': self.name}, + } + if self.default_table_expiration_ms is not None: + value = self.default_table_expiration_ms + resource['defaultTableExpirationMs'] = value + + if self.description is not None: + resource['description'] = self.description + + if self.friendly_name is not None: + resource['friendlyName'] = self.friendly_name + + if self.location is not None: + resource['location'] = self.location + + if len(self.access_grants) > 0: + resource['access'] = self._build_access_resource() + + return 
resource + + def create(self, client=None): + """API call: create the dataset via a PUT request + + See: + https://cloud.google.com/bigquery/docs/reference/v2/tables/insert + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. + """ + client = self._require_client(client) + path = '/projects/%s/datasets' % (self.project,) + api_response = client.connection.api_request( + method='POST', path=path, data=self._build_resource()) + self._set_properties(api_response) + + def exists(self, client=None): + """API call: test for the existence of the dataset via a GET request + + See + https://cloud.google.com/bigquery/docs/reference/v2/datasets/get + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. + """ + client = self._require_client(client) + + try: + client.connection.api_request(method='GET', path=self.path, + query_params={'fields': 'id'}) + except NotFound: + return False + else: + return True + + def reload(self, client=None): + """API call: refresh dataset properties via a GET request + + See + https://cloud.google.com/bigquery/docs/reference/v2/datasets/get + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. + """ + client = self._require_client(client) + + api_response = client.connection.api_request( + method='GET', path=self.path) + self._set_properties(api_response) + + def patch(self, client=None, **kw): + """API call: update individual dataset properties via a PATCH request + + See + https://cloud.google.com/bigquery/docs/reference/v2/datasets/patch + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. 
If not passed, falls back to the + ``client`` stored on the current dataset. + + :type kw: ``dict`` + :param kw: properties to be patched. + + :raises: ValueError for invalid value types. + """ + client = self._require_client(client) + + partial = {} + + if 'default_table_expiration_ms' in kw: + value = kw['default_table_expiration_ms'] + if not isinstance(value, six.integer_types) and value is not None: + raise ValueError("Pass an integer, or None") + partial['defaultTableExpirationMs'] = value + + if 'description' in kw: + partial['description'] = kw['description'] + + if 'friendly_name' in kw: + partial['friendlyName'] = kw['friendly_name'] + + if 'location' in kw: + partial['location'] = kw['location'] + + api_response = client.connection.api_request( + method='PATCH', path=self.path, data=partial) + self._set_properties(api_response) + + def update(self, client=None): + """API call: update dataset properties via a PUT request + + See + https://cloud.google.com/bigquery/docs/reference/v2/datasets/update + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. + """ + client = self._require_client(client) + api_response = client.connection.api_request( + method='PUT', path=self.path, data=self._build_resource()) + self._set_properties(api_response) + + def delete(self, client=None): + """API call: delete the dataset via a DELETE request + + See: + https://cloud.google.com/bigquery/docs/reference/v2/tables/delete + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. + """ + client = self._require_client(client) + client.connection.api_request(method='DELETE', path=self.path) + + def list_tables(self, max_results=None, page_token=None): + """List tables for the project associated with this client. 
+ + See: + https://cloud.google.com/bigquery/docs/reference/v2/tables/list + + :type max_results: int + :param max_results: maximum number of tables to return, If not + passed, defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of datasets. If + not passed, the API will return the first page of + datasets. + + :rtype: tuple, (list, str) + :returns: list of :class:`gcloud.bigquery.table.Table`, plus a + "next page token" string: if not ``None``, indicates that + more tables can be retrieved with another call (pass that + value as ``page_token``). + """ + params = {} + + if max_results is not None: + params['maxResults'] = max_results + + if page_token is not None: + params['pageToken'] = page_token + + path = '/projects/%s/datasets/%s/tables' % (self.project, self.name) + connection = self._client.connection + resp = connection.api_request(method='GET', path=path, + query_params=params) + tables = [Table.from_api_repr(resource, self) + for resource in resp.get('tables', ())] + return tables, resp.get('nextPageToken') + + def table(self, name, schema=()): + """Construct a table bound to this dataset. + + :type name: string + :param name: Name of the table. + + :type schema: list of :class:`gcloud.bigquery.table.SchemaField` + :param schema: The table's schema + + :rtype: :class:`gcloud.bigquery.table.Table` + :returns: a new ``Table`` instance + """ + return Table(name, dataset=self, schema=schema) diff --git a/env/Lib/site-packages/gcloud/bigquery/job.py b/env/Lib/site-packages/gcloud/bigquery/job.py new file mode 100644 index 0000000..db62a7a --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigquery/job.py @@ -0,0 +1,1028 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define API Jobs.""" + +import six + +from gcloud.exceptions import NotFound +from gcloud._helpers import _datetime_from_microseconds +from gcloud.bigquery.dataset import Dataset +from gcloud.bigquery.table import SchemaField +from gcloud.bigquery.table import Table +from gcloud.bigquery.table import _build_schema_resource +from gcloud.bigquery.table import _parse_schema_resource +from gcloud.bigquery._helpers import _EnumProperty +from gcloud.bigquery._helpers import _TypedProperty + + +class Compression(_EnumProperty): + """Pseudo-enum for ``compression`` properties.""" + GZIP = 'GZIP' + NONE = 'NONE' + ALLOWED = (GZIP, NONE) + + +class CreateDisposition(_EnumProperty): + """Pseudo-enum for ``create_disposition`` properties.""" + CREATE_IF_NEEDED = 'CREATE_IF_NEEDED' + CREATE_NEVER = 'CREATE_NEVER' + ALLOWED = (CREATE_IF_NEEDED, CREATE_NEVER) + + +class DestinationFormat(_EnumProperty): + """Pseudo-enum for ``destination_format`` properties.""" + CSV = 'CSV' + NEWLINE_DELIMITED_JSON = 'NEWLINE_DELIMITED_JSON' + AVRO = 'AVRO' + ALLOWED = (CSV, NEWLINE_DELIMITED_JSON, AVRO) + + +class Encoding(_EnumProperty): + """Pseudo-enum for ``encoding`` properties.""" + UTF_8 = 'UTF-8' + ISO_8559_1 = 'ISO-8559-1' + ALLOWED = (UTF_8, ISO_8559_1) + + +class QueryPriority(_EnumProperty): + """Pseudo-enum for ``QueryJob.priority`` property.""" + INTERACTIVE = 'INTERACTIVE' + BATCH = 'BATCH' + ALLOWED = (INTERACTIVE, BATCH) + + +class SourceFormat(_EnumProperty): + """Pseudo-enum for ``source_format`` properties.""" + CSV = 'CSV' + DATASTORE_BACKUP = 
'DATASTORE_BACKUP' + NEWLINE_DELIMITED_JSON = 'NEWLINE_DELIMITED_JSON' + ALLOWED = (CSV, DATASTORE_BACKUP, NEWLINE_DELIMITED_JSON) + + +class WriteDisposition(_EnumProperty): + """Pseudo-enum for ``write_disposition`` properties.""" + WRITE_APPEND = 'WRITE_APPEND' + WRITE_TRUNCATE = 'WRITE_TRUNCATE' + WRITE_EMPTY = 'WRITE_EMPTY' + ALLOWED = (WRITE_APPEND, WRITE_TRUNCATE, WRITE_EMPTY) + + +class _BaseJob(object): + """Base class for jobs. + + :type client: :class:`gcloud.bigquery.client.Client` + :param client: A client which holds credentials and project configuration + for the dataset (which requires a project). + """ + def __init__(self, client): + self._client = client + self._properties = {} + + @property + def project(self): + """Project bound to the job. + + :rtype: string + :returns: the project (derived from the client). + """ + return self._client.project + + def _require_client(self, client): + """Check client or verify over-ride. + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. + + :rtype: :class:`gcloud.bigquery.client.Client` + :returns: The client passed in or the currently bound client. + """ + if client is None: + client = self._client + return client + + +class _AsyncJob(_BaseJob): + """Base class for asynchronous jobs. + + :type name: string + :param name: the name of the job + + :type client: :class:`gcloud.bigquery.client.Client` + :param client: A client which holds credentials and project configuration + for the dataset (which requires a project). + """ + def __init__(self, name, client): + super(_AsyncJob, self).__init__(client) + self.name = name + + @property + def job_type(self): + """Type of job + + :rtype: string + :returns: one of 'load', 'copy', 'extract', 'query' + """ + return self._JOB_TYPE + + @property + def path(self): + """URL path for the job's APIs. 
+ + :rtype: string + :returns: the path based on project and job name. + """ + return '/projects/%s/jobs/%s' % (self.project, self.name) + + @property + def etag(self): + """ETag for the job resource. + + :rtype: string, or ``NoneType`` + :returns: the ETag (None until set from the server). + """ + return self._properties.get('etag') + + @property + def self_link(self): + """URL for the job resource. + + :rtype: string, or ``NoneType`` + :returns: the URL (None until set from the server). + """ + return self._properties.get('selfLink') + + @property + def user_email(self): + """E-mail address of user who submitted the job. + + :rtype: string, or ``NoneType`` + :returns: the URL (None until set from the server). + """ + return self._properties.get('user_email') + + @property + def created(self): + """Datetime at which the job was created. + + :rtype: ``datetime.datetime``, or ``NoneType`` + :returns: the creation time (None until set from the server). + """ + statistics = self._properties.get('statistics') + if statistics is not None: + millis = statistics.get('creationTime') + if millis is not None: + return _datetime_from_microseconds(millis * 1000.0) + + @property + def started(self): + """Datetime at which the job was started. + + :rtype: ``datetime.datetime``, or ``NoneType`` + :returns: the start time (None until set from the server). + """ + statistics = self._properties.get('statistics') + if statistics is not None: + millis = statistics.get('startTime') + if millis is not None: + return _datetime_from_microseconds(millis * 1000.0) + + @property + def ended(self): + """Datetime at which the job finished. + + :rtype: ``datetime.datetime``, or ``NoneType`` + :returns: the end time (None until set from the server). 
+ """ + statistics = self._properties.get('statistics') + if statistics is not None: + millis = statistics.get('endTime') + if millis is not None: + return _datetime_from_microseconds(millis * 1000.0) + + @property + def error_result(self): + """Error information about the job as a whole. + + :rtype: mapping, or ``NoneType`` + :returns: the error information (None until set from the server). + """ + status = self._properties.get('status') + if status is not None: + return status.get('errorResult') + + @property + def errors(self): + """Information about individual errors generated by the job. + + :rtype: list of mappings, or ``NoneType`` + :returns: the error information (None until set from the server). + """ + status = self._properties.get('status') + if status is not None: + return status.get('errors') + + @property + def state(self): + """Status of the job. + + :rtype: string, or ``NoneType`` + :returns: the state (None until set from the server). + """ + status = self._properties.get('status') + if status is not None: + return status.get('state') + + def _scrub_local_properties(self, cleaned): + """Helper: handle subclass properties in cleaned.""" + pass + + def _set_properties(self, api_response): + """Update properties from resource in body of ``api_response`` + + :type api_response: httplib2.Response + :param api_response: response returned from an API call + """ + cleaned = api_response.copy() + self._scrub_local_properties(cleaned) + + statistics = cleaned.get('statistics', {}) + if 'creationTime' in statistics: + statistics['creationTime'] = float(statistics['creationTime']) + if 'startTime' in statistics: + statistics['startTime'] = float(statistics['startTime']) + if 'endTime' in statistics: + statistics['endTime'] = float(statistics['endTime']) + + self._properties.clear() + self._properties.update(cleaned) + + @classmethod + def _get_resource_config(cls, resource): + """Helper for :meth:`from_api_repr` + + :type resource: dict + :param resource: 
resource for the job + + :rtype: dict + :returns: tuple (string, dict), where the first element is the + job name and the second contains job-specific configuration. + :raises: :class:`KeyError` if the resource has no identifier, or + is missing the appropriate configuration. + """ + if ('jobReference' not in resource or + 'jobId' not in resource['jobReference']): + raise KeyError('Resource lacks required identity information: ' + '["jobReference"]["jobId"]') + name = resource['jobReference']['jobId'] + if ('configuration' not in resource or + cls._JOB_TYPE not in resource['configuration']): + raise KeyError('Resource lacks required configuration: ' + '["configuration"]["%s"]' % cls._JOB_TYPE) + config = resource['configuration'][cls._JOB_TYPE] + return name, config + + def begin(self, client=None): + """API call: begin the job via a POST request + + See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. + """ + client = self._require_client(client) + path = '/projects/%s/jobs' % (self.project,) + api_response = client.connection.api_request( + method='POST', path=path, data=self._build_resource()) + self._set_properties(api_response) + + def exists(self, client=None): + """API call: test for the existence of the job via a GET request + + See + https://cloud.google.com/bigquery/docs/reference/v2/jobs/get + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. 
+ """ + client = self._require_client(client) + + try: + client.connection.api_request(method='GET', path=self.path, + query_params={'fields': 'id'}) + except NotFound: + return False + else: + return True + + def reload(self, client=None): + """API call: refresh job properties via a GET request + + See + https://cloud.google.com/bigquery/docs/reference/v2/jobs/get + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. + """ + client = self._require_client(client) + + api_response = client.connection.api_request( + method='GET', path=self.path) + self._set_properties(api_response) + + def cancel(self, client=None): + """API call: cancel job via a POST request + + See + https://cloud.google.com/bigquery/docs/reference/v2/jobs/cancel + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. + """ + client = self._require_client(client) + + api_response = client.connection.api_request( + method='POST', path='%s/cancel' % (self.path,)) + self._set_properties(api_response) + + +class _LoadConfiguration(object): + """User-settable configuration options for load jobs. + + Values which are ``None`` -> server defaults. + """ + _allow_jagged_rows = None + _allow_quoted_newlines = None + _create_disposition = None + _encoding = None + _field_delimiter = None + _ignore_unknown_values = None + _max_bad_records = None + _quote_character = None + _skip_leading_rows = None + _source_format = None + _write_disposition = None + + +class LoadTableFromStorageJob(_AsyncJob): + """Asynchronous job for loading data into a table from CloudStorage. + + :type name: string + :param name: the name of the job + + :type destination: :class:`gcloud.bigquery.table.Table` + :param destination: Table into which data is to be loaded. 
+ + :type source_uris: sequence of string + :param source_uris: URIs of one or more data files to be loaded, in + format ``gs:///``. + + :type client: :class:`gcloud.bigquery.client.Client` + :param client: A client which holds credentials and project configuration + for the dataset (which requires a project). + + :type schema: list of :class:`gcloud.bigquery.table.SchemaField` + :param schema: The job's schema + """ + + _schema = None + _JOB_TYPE = 'load' + + def __init__(self, name, destination, source_uris, client, schema=()): + super(LoadTableFromStorageJob, self).__init__(name, client) + self.destination = destination + self.source_uris = source_uris + # Let the @property do validation. + self.schema = schema + self._configuration = _LoadConfiguration() + + @property + def schema(self): + """Table's schema. + + :rtype: list of :class:`SchemaField` + :returns: fields describing the schema + """ + return list(self._schema) + + @schema.setter + def schema(self, value): + """Update table's schema + + :type value: list of :class:`SchemaField` + :param value: fields describing the schema + + :raises: TypeError if 'value' is not a sequence, or ValueError if + any item in the sequence is not a SchemaField + """ + if not all(isinstance(field, SchemaField) for field in value): + raise ValueError('Schema items must be fields') + self._schema = tuple(value) + + @property + def input_file_bytes(self): + """Count of bytes loaded from source files. + + :rtype: integer, or ``NoneType`` + :returns: the count (None until set from the server). + """ + statistics = self._properties.get('statistics') + if statistics is not None: + return int(statistics['load']['inputFileBytes']) + + @property + def input_files(self): + """Count of source files. + + :rtype: integer, or ``NoneType`` + :returns: the count (None until set from the server). 
+ """ + statistics = self._properties.get('statistics') + if statistics is not None: + return int(statistics['load']['inputFiles']) + + @property + def output_bytes(self): + """Count of bytes saved to destination table. + + :rtype: integer, or ``NoneType`` + :returns: the count (None until set from the server). + """ + statistics = self._properties.get('statistics') + if statistics is not None: + return int(statistics['load']['outputBytes']) + + @property + def output_rows(self): + """Count of rows saved to destination table. + + :rtype: integer, or ``NoneType`` + :returns: the count (None until set from the server). + """ + statistics = self._properties.get('statistics') + if statistics is not None: + return int(statistics['load']['outputRows']) + + allow_jagged_rows = _TypedProperty('allow_jagged_rows', bool) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.allowJaggedRows + """ + + allow_quoted_newlines = _TypedProperty('allow_quoted_newlines', bool) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.allowQuotedNewlines + """ + + create_disposition = CreateDisposition('create_disposition') + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.createDisposition + """ + + encoding = Encoding('encoding') + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.encoding + """ + + field_delimiter = _TypedProperty('field_delimiter', six.string_types) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.fieldDelimiter + """ + + ignore_unknown_values = _TypedProperty('ignore_unknown_values', bool) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.ignoreUnknownValues + """ + + max_bad_records = _TypedProperty('max_bad_records', six.integer_types) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.maxBadRecords + """ + + 
quote_character = _TypedProperty('quote_character', six.string_types) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.quote + """ + + skip_leading_rows = _TypedProperty('skip_leading_rows', six.integer_types) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.skipLeadingRows + """ + + source_format = SourceFormat('source_format') + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.sourceFormat + """ + + write_disposition = WriteDisposition('write_disposition') + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.writeDisposition + """ + + def _populate_config_resource(self, configuration): + """Helper for _build_resource: copy config properties to resource""" + if self.allow_jagged_rows is not None: + configuration['allowJaggedRows'] = self.allow_jagged_rows + if self.allow_quoted_newlines is not None: + configuration['allowQuotedNewlines'] = self.allow_quoted_newlines + if self.create_disposition is not None: + configuration['createDisposition'] = self.create_disposition + if self.encoding is not None: + configuration['encoding'] = self.encoding + if self.field_delimiter is not None: + configuration['fieldDelimiter'] = self.field_delimiter + if self.ignore_unknown_values is not None: + configuration['ignoreUnknownValues'] = self.ignore_unknown_values + if self.max_bad_records is not None: + configuration['maxBadRecords'] = self.max_bad_records + if self.quote_character is not None: + configuration['quote'] = self.quote_character + if self.skip_leading_rows is not None: + configuration['skipLeadingRows'] = self.skip_leading_rows + if self.source_format is not None: + configuration['sourceFormat'] = self.source_format + if self.write_disposition is not None: + configuration['writeDisposition'] = self.write_disposition + + def _build_resource(self): + """Generate a resource for :meth:`begin`.""" + resource = { + 
'jobReference': { + 'projectId': self.project, + 'jobId': self.name, + }, + 'configuration': { + self._JOB_TYPE: { + 'sourceUris': self.source_uris, + 'destinationTable': { + 'projectId': self.destination.project, + 'datasetId': self.destination.dataset_name, + 'tableId': self.destination.name, + }, + }, + }, + } + configuration = resource['configuration'][self._JOB_TYPE] + self._populate_config_resource(configuration) + + if len(self.schema) > 0: + configuration['schema'] = { + 'fields': _build_schema_resource(self.schema)} + + return resource + + def _scrub_local_properties(self, cleaned): + """Helper: handle subclass properties in cleaned.""" + schema = cleaned.pop('schema', {'fields': ()}) + self.schema = _parse_schema_resource(schema) + + @classmethod + def from_api_repr(cls, resource, client): + """Factory: construct a job given its API representation + + .. note: + + This method assumes that the project found in the resource matches + the client's project. + + :type resource: dict + :param resource: dataset job representation returned from the API + + :type client: :class:`gcloud.bigquery.client.Client` + :param client: Client which holds credentials and project + configuration for the dataset. + + :rtype: :class:`gcloud.bigquery.job.LoadTableFromStorageJob` + :returns: Job parsed from ``resource``. + """ + name, config = cls._get_resource_config(resource) + dest_config = config['destinationTable'] + dataset = Dataset(dest_config['datasetId'], client) + destination = Table(dest_config['tableId'], dataset) + source_urls = config.get('sourceUris', ()) + job = cls(name, destination, source_urls, client=client) + job._set_properties(resource) + return job + + +class _CopyConfiguration(object): + """User-settable configuration options for copy jobs. + + Values which are ``None`` -> server defaults. + """ + _create_disposition = None + _write_disposition = None + + +class CopyJob(_AsyncJob): + """Asynchronous job: copy data into a table from other tables. 
+ + :type name: string + :param name: the name of the job + + :type destination: :class:`gcloud.bigquery.table.Table` + :param destination: Table into which data is to be loaded. + + :type sources: list of :class:`gcloud.bigquery.table.Table` + :param sources: Tables from which data is to be copied. + + :type client: :class:`gcloud.bigquery.client.Client` + :param client: A client which holds credentials and project configuration + for the dataset (which requires a project). + """ + + _JOB_TYPE = 'copy' + + def __init__(self, name, destination, sources, client): + super(CopyJob, self).__init__(name, client) + self.destination = destination + self.sources = sources + self._configuration = _CopyConfiguration() + + create_disposition = CreateDisposition('create_disposition') + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy.createDisposition + """ + + write_disposition = WriteDisposition('write_disposition') + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy.writeDisposition + """ + + def _populate_config_resource(self, configuration): + """Helper for _build_resource: copy config properties to resource""" + if self.create_disposition is not None: + configuration['createDisposition'] = self.create_disposition + if self.write_disposition is not None: + configuration['writeDisposition'] = self.write_disposition + + def _build_resource(self): + """Generate a resource for :meth:`begin`.""" + + source_refs = [{ + 'projectId': table.project, + 'datasetId': table.dataset_name, + 'tableId': table.name, + } for table in self.sources] + + resource = { + 'jobReference': { + 'projectId': self.project, + 'jobId': self.name, + }, + 'configuration': { + self._JOB_TYPE: { + 'sourceTables': source_refs, + 'destinationTable': { + 'projectId': self.destination.project, + 'datasetId': self.destination.dataset_name, + 'tableId': self.destination.name, + }, + }, + }, + } + configuration = 
resource['configuration'][self._JOB_TYPE] + self._populate_config_resource(configuration) + + return resource + + @classmethod + def from_api_repr(cls, resource, client): + """Factory: construct a job given its API representation + + .. note: + + This method assumes that the project found in the resource matches + the client's project. + + :type resource: dict + :param resource: dataset job representation returned from the API + + :type client: :class:`gcloud.bigquery.client.Client` + :param client: Client which holds credentials and project + configuration for the dataset. + + :rtype: :class:`gcloud.bigquery.job.CopyJob` + :returns: Job parsed from ``resource``. + """ + name, config = cls._get_resource_config(resource) + dest_config = config['destinationTable'] + dataset = Dataset(dest_config['datasetId'], client) + destination = Table(dest_config['tableId'], dataset) + sources = [] + for source_config in config['sourceTables']: + dataset = Dataset(source_config['datasetId'], client) + sources.append(Table(source_config['tableId'], dataset)) + job = cls(name, destination, sources, client=client) + job._set_properties(resource) + return job + + +class _ExtractConfiguration(object): + """User-settable configuration options for extract jobs. + + Values which are ``None`` -> server defaults. + """ + _compression = None + _destination_format = None + _field_delimiter = None + _print_header = None + + +class ExtractTableToStorageJob(_AsyncJob): + """Asynchronous job: extract data from a table into Cloud Storage. + + :type name: string + :param name: the name of the job + + :type source: :class:`gcloud.bigquery.table.Table` + :param source: Table into which data is to be loaded. + + :type destination_uris: list of string + :param destination_uris: URIs describing Cloud Storage blobs into which + extracted data will be written, in format + ``gs:///``. 
+ + :type client: :class:`gcloud.bigquery.client.Client` + :param client: A client which holds credentials and project configuration + for the dataset (which requires a project). + """ + _JOB_TYPE = 'extract' + + def __init__(self, name, source, destination_uris, client): + super(ExtractTableToStorageJob, self).__init__(name, client) + self.source = source + self.destination_uris = destination_uris + self._configuration = _ExtractConfiguration() + + compression = Compression('compression') + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extracted.compression + """ + + destination_format = DestinationFormat('destination_format') + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extracted.destinationFormat + """ + + field_delimiter = _TypedProperty('field_delimiter', six.string_types) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extracted.fieldDelimiter + """ + + print_header = _TypedProperty('print_header', bool) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extracted.printHeader + """ + + def _populate_config_resource(self, configuration): + """Helper for _build_resource: copy config properties to resource""" + if self.compression is not None: + configuration['compression'] = self.compression + if self.destination_format is not None: + configuration['destinationFormat'] = self.destination_format + if self.field_delimiter is not None: + configuration['fieldDelimiter'] = self.field_delimiter + if self.print_header is not None: + configuration['printHeader'] = self.print_header + + def _build_resource(self): + """Generate a resource for :meth:`begin`.""" + + source_ref = { + 'projectId': self.source.project, + 'datasetId': self.source.dataset_name, + 'tableId': self.source.name, + } + + resource = { + 'jobReference': { + 'projectId': self.project, + 'jobId': self.name, + }, + 'configuration': { + self._JOB_TYPE: { + 
'sourceTable': source_ref, + 'destinationUris': self.destination_uris, + }, + }, + } + configuration = resource['configuration'][self._JOB_TYPE] + self._populate_config_resource(configuration) + + return resource + + @classmethod + def from_api_repr(cls, resource, client): + """Factory: construct a job given its API representation + + .. note: + + This method assumes that the project found in the resource matches + the client's project. + + :type resource: dict + :param resource: dataset job representation returned from the API + + :type client: :class:`gcloud.bigquery.client.Client` + :param client: Client which holds credentials and project + configuration for the dataset. + + :rtype: :class:`gcloud.bigquery.job.ExtractTableToStorageJob` + :returns: Job parsed from ``resource``. + """ + name, config = cls._get_resource_config(resource) + source_config = config['sourceTable'] + dataset = Dataset(source_config['datasetId'], client) + source = Table(source_config['tableId'], dataset) + destination_uris = config['destinationUris'] + job = cls(name, source, destination_uris, client=client) + job._set_properties(resource) + return job + + +class _AsyncQueryConfiguration(object): + """User-settable configuration options for asynchronous query jobs. + + Values which are ``None`` -> server defaults. + """ + _allow_large_results = None + _create_disposition = None + _default_dataset = None + _destination = None + _flatten_results = None + _priority = None + _use_query_cache = None + _write_disposition = None + + +class QueryJob(_AsyncJob): + """Asynchronous job: query tables. + + :type name: string + :param name: the name of the job + + :type query: string + :param query: SQL query string + + :type client: :class:`gcloud.bigquery.client.Client` + :param client: A client which holds credentials and project configuration + for the dataset (which requires a project). 
+ """ + _JOB_TYPE = 'query' + + def __init__(self, name, query, client): + super(QueryJob, self).__init__(name, client) + self.query = query + self._configuration = _AsyncQueryConfiguration() + + allow_large_results = _TypedProperty('allow_large_results', bool) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.allowLargeResults + """ + + create_disposition = CreateDisposition('create_disposition') + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.createDisposition + """ + + default_dataset = _TypedProperty('default_dataset', Dataset) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.defaultDataset + """ + + destination = _TypedProperty('destination', Table) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.destinationTable + """ + + flatten_results = _TypedProperty('flatten_results', bool) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.flattenResults + """ + + priority = QueryPriority('priority') + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.priority + """ + + use_query_cache = _TypedProperty('use_query_cache', bool) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.useQueryCache + """ + + write_disposition = WriteDisposition('write_disposition') + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.writeDisposition + """ + + def _destination_table_resource(self): + """Create a JSON resource for the destination table. 
+ + Helper for :meth:`_populate_config_resource` and + :meth:`_scrub_local_properties` + """ + if self.destination is not None: + return { + 'projectId': self.destination.project, + 'datasetId': self.destination.dataset_name, + 'tableId': self.destination.name, + } + + def _populate_config_resource(self, configuration): + """Helper for _build_resource: copy config properties to resource""" + if self.allow_large_results is not None: + configuration['allowLargeResults'] = self.allow_large_results + if self.create_disposition is not None: + configuration['createDisposition'] = self.create_disposition + if self.default_dataset is not None: + configuration['defaultDataset'] = { + 'projectId': self.default_dataset.project, + 'datasetId': self.default_dataset.name, + } + if self.destination is not None: + table_res = self._destination_table_resource() + configuration['destinationTable'] = table_res + if self.flatten_results is not None: + configuration['flattenResults'] = self.flatten_results + if self.priority is not None: + configuration['priority'] = self.priority + if self.use_query_cache is not None: + configuration['useQueryCache'] = self.use_query_cache + if self.write_disposition is not None: + configuration['writeDisposition'] = self.write_disposition + + def _build_resource(self): + """Generate a resource for :meth:`begin`.""" + + resource = { + 'jobReference': { + 'projectId': self.project, + 'jobId': self.name, + }, + 'configuration': { + self._JOB_TYPE: { + 'query': self.query, + }, + }, + } + configuration = resource['configuration'][self._JOB_TYPE] + self._populate_config_resource(configuration) + + return resource + + def _scrub_local_properties(self, cleaned): + """Helper: handle subclass properties in cleaned. + + .. note: + + This method assumes that the project found in the resource matches + the client's project. 
+ """ + configuration = cleaned['configuration']['query'] + dest_remote = configuration.get('destinationTable') + + if dest_remote is None: + if self.destination is not None: + del self.destination + else: + dest_local = self._destination_table_resource() + if dest_remote != dest_local: + dataset = self._client.dataset(dest_remote['datasetId']) + self.destination = dataset.table(dest_remote['tableId']) + + @classmethod + def from_api_repr(cls, resource, client): + """Factory: construct a job given its API representation + + :type resource: dict + :param resource: dataset job representation returned from the API + + :type client: :class:`gcloud.bigquery.client.Client` + :param client: Client which holds credentials and project + configuration for the dataset. + + :rtype: :class:`gcloud.bigquery.job.RunAsyncQueryJob` + :returns: Job parsed from ``resource``. + """ + name, config = cls._get_resource_config(resource) + query = config['query'] + job = cls(name, query, client=client) + job._set_properties(resource) + return job diff --git a/env/Lib/site-packages/gcloud/bigquery/query.py b/env/Lib/site-packages/gcloud/bigquery/query.py new file mode 100644 index 0000000..b54d7fc --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigquery/query.py @@ -0,0 +1,349 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Define API Queries.""" + +import six + +from gcloud.bigquery._helpers import _TypedProperty +from gcloud.bigquery._helpers import _rows_from_json +from gcloud.bigquery.dataset import Dataset +from gcloud.bigquery.job import QueryJob +from gcloud.bigquery.table import _parse_schema_resource + + +class _SyncQueryConfiguration(object): + """User-settable configuration options for synchronous query jobs. + + Values which are ``None`` -> server defaults. + """ + _default_dataset = None + _dry_run = None + _max_results = None + _timeout_ms = None + _preserve_nulls = None + _use_query_cache = None + + +class QueryResults(object): + """Synchronous job: query tables. + + :type query: string + :param query: SQL query string + + :type client: :class:`gcloud.bigquery.client.Client` + :param client: A client which holds credentials and project configuration + for the dataset (which requires a project). + """ + def __init__(self, query, client): + self._client = client + self._properties = {} + self.query = query + self._configuration = _SyncQueryConfiguration() + self._job = None + + @property + def project(self): + """Project bound to the job. + + :rtype: string + :returns: the project (derived from the client). + """ + return self._client.project + + def _require_client(self, client): + """Check client or verify over-ride. + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. + + :rtype: :class:`gcloud.bigquery.client.Client` + :returns: The client passed in or the currently bound client. + """ + if client is None: + client = self._client + return client + + @property + def cache_hit(self): + """Query results served from cache. + + See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#cacheHit + + :rtype: boolean or ``NoneType`` + :returns: True if the query results were served from cache (None + until set by the server). 
+ """ + return self._properties.get('cacheHit') + + @property + def complete(self): + """Server completed query. + + See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#jobComplete + + :rtype: boolean or ``NoneType`` + :returns: True if the query completed on the server (None + until set by the server). + """ + return self._properties.get('jobComplete') + + @property + def errors(self): + """Errors generated by the query. + + See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#errors + + :rtype: list of mapping, or ``NoneType`` + :returns: Mappings describing errors generated on the server (None + until set by the server). + """ + return self._properties.get('errors') + + @property + def name(self): + """Job name, generated by the back-end. + + See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#jobReference + + :rtype: list of mapping, or ``NoneType`` + :returns: Mappings describing errors generated on the server (None + until set by the server). + """ + return self._properties.get('jobReference', {}).get('jobId') + + @property + def job(self): + """Job instance used to run the query. + + :rtype: :class:`gcloud.bigquery.job.QueryJob`, or ``NoneType`` + :returns: Job instance used to run the query (None until + ``jobReference`` property is set by the server). + """ + if self._job is None: + job_ref = self._properties.get('jobReference') + if job_ref is not None: + self._job = QueryJob(job_ref['jobId'], self.query, + self._client) + return self._job + + @property + def page_token(self): + """Token for fetching next bach of results. + + See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#pageToken + + :rtype: string, or ``NoneType`` + :returns: Token generated on the server (None until set by the server). 
+ """ + return self._properties.get('pageToken') + + @property + def total_rows(self): + """Total number of rows returned by the query + + See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#totalRows + + :rtype: integer, or ``NoneType`` + :returns: Count generated on the server (None until set by the server). + """ + return self._properties.get('totalRows') + + @property + def total_bytes_processed(self): + """Total number of bytes processed by the query + + See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#totalBytesProcessed + + :rtype: integer, or ``NoneType`` + :returns: Count generated on the server (None until set by the server). + """ + return self._properties.get('totalBytesProcessed') + + @property + def rows(self): + """Query results. + + See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#rows + + :rtype: list of tuples of row values, or ``NoneType`` + :returns: fields describing the schema (None until set by the server). + """ + return _rows_from_json(self._properties.get('rows', ()), self.schema) + + @property + def schema(self): + """Schema for query results. + + See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#schema + + :rtype: list of :class:`SchemaField`, or ``NoneType`` + :returns: fields describing the schema (None until set by the server). 
+ """ + return _parse_schema_resource(self._properties.get('schema', {})) + + default_dataset = _TypedProperty('default_dataset', Dataset) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#defaultDataset + """ + + dry_run = _TypedProperty('dry_run', bool) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#dryRun + """ + + max_results = _TypedProperty('max_results', six.integer_types) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#maxResults + """ + + preserve_nulls = _TypedProperty('preserve_nulls', bool) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#preserveNulls + """ + + timeout_ms = _TypedProperty('timeout_ms', six.integer_types) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#timeoutMs + """ + + use_query_cache = _TypedProperty('use_query_cache', bool) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#useQueryCache + """ + + def _set_properties(self, api_response): + """Update properties from resource in body of ``api_response`` + + :type api_response: httplib2.Response + :param api_response: response returned from an API call + """ + self._properties.clear() + self._properties.update(api_response) + + def _build_resource(self): + """Generate a resource for :meth:`begin`.""" + resource = {'query': self.query} + + if self.default_dataset is not None: + resource['defaultDataset'] = { + 'projectId': self.project, + 'datasetId': self.default_dataset.name, + } + + if self.max_results is not None: + resource['maxResults'] = self.max_results + + if self.preserve_nulls is not None: + resource['preserveNulls'] = self.preserve_nulls + + if self.timeout_ms is not None: + resource['timeoutMs'] = self.timeout_ms + + if self.use_query_cache is not None: + resource['useQueryCache'] = self.use_query_cache + + if self.dry_run is not None: + resource['dryRun'] = self.dry_run + + return resource + + def run(self, 
client=None): + """API call: run the query via a POST request + + See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/query + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. + """ + client = self._require_client(client) + path = '/projects/%s/queries' % (self.project,) + api_response = client.connection.api_request( + method='POST', path=path, data=self._build_resource()) + self._set_properties(api_response) + + def fetch_data(self, max_results=None, page_token=None, start_index=None, + timeout_ms=None, client=None): + """API call: fetch a page of query result data via a GET request + + See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs/getQueryResults + + :type max_results: integer or ``NoneType`` + :param max_results: maximum number of rows to return. + + :type page_token: string or ``NoneType`` + :param page_token: token representing a cursor into the table's rows. + + :type start_index: integer or ``NoneType`` + :param start_index: zero-based index of starting row + + :type timeout_ms: integer or ``NoneType`` + :param timeout_ms: timeout, in milliseconds, to wait for query to + complete + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. + + :rtype: tuple + :returns: ``(row_data, total_rows, page_token)``, where ``row_data`` + is a list of tuples, one per result row, containing only + the values; ``total_rows`` is a count of the total number + of rows in the table; and ``page_token`` is an opaque + string which can be used to fetch the next batch of rows + (``None`` if no further batches can be fetched). + :raises: ValueError if the query has not yet been executed. 
+ """ + if self.name is None: + raise ValueError("Query not yet executed: call 'run()'") + + client = self._require_client(client) + params = {} + + if max_results is not None: + params['maxResults'] = max_results + + if page_token is not None: + params['pageToken'] = page_token + + if start_index is not None: + params['startIndex'] = start_index + + if timeout_ms is not None: + params['timeoutMs'] = timeout_ms + + path = '/projects/%s/queries/%s' % (self.project, self.name) + response = client.connection.api_request(method='GET', + path=path, + query_params=params) + self._set_properties(response) + + total_rows = response.get('totalRows') + page_token = response.get('pageToken') + rows_data = _rows_from_json(response.get('rows', ()), self.schema) + + return rows_data, total_rows, page_token diff --git a/env/Lib/site-packages/gcloud/bigquery/table.py b/env/Lib/site-packages/gcloud/bigquery/table.py new file mode 100644 index 0000000..7bd7f81 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigquery/table.py @@ -0,0 +1,1020 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Define API Datasets.""" + +import datetime +import json +import os + +import six + +from gcloud._helpers import _datetime_from_microseconds +from gcloud._helpers import _microseconds_from_datetime +from gcloud._helpers import _millis_from_datetime +from gcloud.exceptions import NotFound +from gcloud.streaming.http_wrapper import Request +from gcloud.streaming.http_wrapper import make_api_request +from gcloud.streaming.transfer import RESUMABLE_UPLOAD +from gcloud.streaming.transfer import Upload +from gcloud.bigquery._helpers import _rows_from_json + + +_MARKER = object() + + +class SchemaField(object): + """Describe a single field within a table schema. + + :type name: str + :param name: the name of the field + + :type field_type: str + :param field_type: the type of the field (one of 'STRING', 'INTEGER', + 'FLOAT', 'BOOLEAN', 'TIMESTAMP' or 'RECORD') + + :type mode: str + :param mode: the type of the field (one of 'NULLABLE', 'REQUIRED', + or 'REPEATED') + + :type description: str + :param description: optional description for the field + + :type fields: list of :class:`SchemaField`, or None + :param fields: subfields (requires ``field_type`` of 'RECORD'). + """ + def __init__(self, name, field_type, mode='NULLABLE', description=None, + fields=None): + self.name = name + self.field_type = field_type + self.mode = mode + self.description = description + self.fields = fields + + def __eq__(self, other): + return ( + self.name == other.name and + self.field_type.lower() == other.field_type.lower() and + self.mode == other.mode and + self.description == other.description and + self.fields == other.fields) + + +class Table(object): + """Tables represent a set of rows whose values correspond to a schema. + + See: + https://cloud.google.com/bigquery/docs/reference/v2/tables + + :type name: str + :param name: the name of the table + + :type dataset: :class:`gcloud.bigquery.dataset.Dataset` + :param dataset: The dataset which contains the table. 
+
+ :type schema: list of :class:`SchemaField`
+ :param schema: The table's schema
+ """
+
+ _schema = None
+
+ def __init__(self, name, dataset, schema=()):
+ self.name = name
+ self._dataset = dataset
+ self._properties = {}
+ # Let the @property do validation.
+ self.schema = schema
+
+ @property
+ def project(self):
+ """Project bound to the table.
+
+ :rtype: str
+ :returns: the project (derived from the dataset).
+ """
+ return self._dataset.project
+
+ @property
+ def dataset_name(self):
+ """Name of dataset containing the table.
+
+ :rtype: str
+ :returns: the ID (derived from the dataset).
+ """
+ return self._dataset.name
+
+ @property
+ def path(self):
+ """URL path for the table's APIs.
+
+ :rtype: str
+ :returns: the path based on project and dataset name.
+ """
+ return '%s/tables/%s' % (self._dataset.path, self.name)
+
+ @property
+ def schema(self):
+ """Table's schema.
+
+ :rtype: list of :class:`SchemaField`
+ :returns: fields describing the schema
+ """
+ return list(self._schema)
+
+ @schema.setter
+ def schema(self, value):
+ """Update table's schema
+
+ :type value: list of :class:`SchemaField`
+ :param value: fields describing the schema
+
+ :raises: TypeError if 'value' is not a sequence, or ValueError if
+ any item in the sequence is not a SchemaField
+ """
+ if not all(isinstance(field, SchemaField) for field in value):
+ raise ValueError('Schema items must be fields')
+ self._schema = tuple(value)
+
+ @property
+ def created(self):
+ """Datetime at which the table was created.
+
+ :rtype: ``datetime.datetime``, or ``NoneType``
+ :returns: the creation time (None until set from the server).
+ """
+ creation_time = self._properties.get('creationTime')
+ if creation_time is not None:
+ # creation_time will be in milliseconds.
+ return _datetime_from_microseconds(1000.0 * creation_time)
+
+ @property
+ def etag(self):
+ """ETag for the table resource.
+
+ :rtype: str, or ``NoneType``
+ :returns: the ETag (None until set from the server).
+ """ + return self._properties.get('etag') + + @property + def modified(self): + """Datetime at which the table was last modified. + + :rtype: ``datetime.datetime``, or ``NoneType`` + :returns: the modification time (None until set from the server). + """ + modified_time = self._properties.get('lastModifiedTime') + if modified_time is not None: + # modified_time will be in milliseconds. + return _datetime_from_microseconds(1000.0 * modified_time) + + @property + def num_bytes(self): + """The size of the table in bytes. + + :rtype: integer, or ``NoneType`` + :returns: the byte count (None until set from the server). + """ + num_bytes_as_str = self._properties.get('numBytes') + if num_bytes_as_str is not None: + return int(num_bytes_as_str) + + @property + def num_rows(self): + """The number of rows in the table. + + :rtype: integer, or ``NoneType`` + :returns: the row count (None until set from the server). + """ + num_rows_as_str = self._properties.get('numRows') + if num_rows_as_str is not None: + return int(num_rows_as_str) + + @property + def self_link(self): + """URL for the table resource. + + :rtype: str, or ``NoneType`` + :returns: the URL (None until set from the server). + """ + return self._properties.get('selfLink') + + @property + def table_id(self): + """ID for the table resource. + + :rtype: str, or ``NoneType`` + :returns: the ID (None until set from the server). + """ + return self._properties.get('id') + + @property + def table_type(self): + """The type of the table. + + Possible values are "TABLE" or "VIEW". + + :rtype: str, or ``NoneType`` + :returns: the URL (None until set from the server). + """ + return self._properties.get('type') + + @property + def description(self): + """Description of the table. + + :rtype: str, or ``NoneType`` + :returns: The description as set by the user, or None (the default). 
+ """ + return self._properties.get('description') + + @description.setter + def description(self, value): + """Update description of the table. + + :type value: str, or ``NoneType`` + :param value: new description + + :raises: ValueError for invalid value types. + """ + if not isinstance(value, six.string_types) and value is not None: + raise ValueError("Pass a string, or None") + self._properties['description'] = value + + @property + def expires(self): + """Datetime at which the table will be removed. + + :rtype: ``datetime.datetime``, or ``NoneType`` + :returns: the expiration time, or None + """ + expiration_time = self._properties.get('expirationTime') + if expiration_time is not None: + # expiration_time will be in milliseconds. + return _datetime_from_microseconds(1000.0 * expiration_time) + + @expires.setter + def expires(self, value): + """Update datetime at which the table will be removed. + + :type value: ``datetime.datetime``, or ``NoneType`` + :param value: the new expiration time, or None + """ + if not isinstance(value, datetime.datetime) and value is not None: + raise ValueError("Pass a datetime, or None") + self._properties['expirationTime'] = _millis_from_datetime(value) + + @property + def friendly_name(self): + """Title of the table. + + :rtype: str, or ``NoneType`` + :returns: The name as set by the user, or None (the default). + """ + return self._properties.get('friendlyName') + + @friendly_name.setter + def friendly_name(self, value): + """Update title of the table. + + :type value: str, or ``NoneType`` + :param value: new title + + :raises: ValueError for invalid value types. + """ + if not isinstance(value, six.string_types) and value is not None: + raise ValueError("Pass a string, or None") + self._properties['friendlyName'] = value + + @property + def location(self): + """Location in which the table is hosted. + + :rtype: str, or ``NoneType`` + :returns: The location as set by the user, or None (the default). 
+ """ + return self._properties.get('location') + + @location.setter + def location(self, value): + """Update location in which the table is hosted. + + :type value: str, or ``NoneType`` + :param value: new location + + :raises: ValueError for invalid value types. + """ + if not isinstance(value, six.string_types) and value is not None: + raise ValueError("Pass a string, or None") + self._properties['location'] = value + + @property + def view_query(self): + """SQL query defining the table as a view. + + :rtype: str, or ``NoneType`` + :returns: The query as set by the user, or None (the default). + """ + view = self._properties.get('view') + if view is not None: + return view.get('query') + + @view_query.setter + def view_query(self, value): + """Update SQL query defining the table as a view. + + :type value: str + :param value: new query + + :raises: ValueError for invalid value types. + """ + if not isinstance(value, six.string_types): + raise ValueError("Pass a string") + self._properties['view'] = {'query': value} + + @view_query.deleter + def view_query(self): + """Delete SQL query defining the table as a view.""" + self._properties.pop('view', None) + + @classmethod + def from_api_repr(cls, resource, dataset): + """Factory: construct a table given its API representation + + :type resource: dict + :param resource: table resource representation returned from the API + + :type dataset: :class:`gcloud.bigquery.dataset.Dataset` + :param dataset: The dataset containing the table. + + :rtype: :class:`gcloud.bigquery.table.Table` + :returns: Table parsed from ``resource``. 
+ """ + if ('tableReference' not in resource or + 'tableId' not in resource['tableReference']): + raise KeyError('Resource lacks required identity information:' + '["tableReference"]["tableId"]') + table_name = resource['tableReference']['tableId'] + table = cls(table_name, dataset=dataset) + table._set_properties(resource) + return table + + def _require_client(self, client): + """Check client or verify over-ride. + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. + + :rtype: :class:`gcloud.bigquery.client.Client` + :returns: The client passed in or the currently bound client. + """ + if client is None: + client = self._dataset._client + return client + + def _set_properties(self, api_response): + """Update properties from resource in body of ``api_response`` + + :type api_response: httplib2.Response + :param api_response: response returned from an API call + """ + self._properties.clear() + cleaned = api_response.copy() + schema = cleaned.pop('schema', {'fields': ()}) + self.schema = _parse_schema_resource(schema) + if 'creationTime' in cleaned: + cleaned['creationTime'] = float(cleaned['creationTime']) + if 'lastModifiedTime' in cleaned: + cleaned['lastModifiedTime'] = float(cleaned['lastModifiedTime']) + if 'expirationTime' in cleaned: + cleaned['expirationTime'] = float(cleaned['expirationTime']) + self._properties.update(cleaned) + + def _build_resource(self): + """Generate a resource for ``create`` or ``update``.""" + resource = { + 'tableReference': { + 'projectId': self._dataset.project, + 'datasetId': self._dataset.name, + 'tableId': self.name}, + } + if self.description is not None: + resource['description'] = self.description + + if self.expires is not None: + value = _millis_from_datetime(self.expires) + resource['expirationTime'] = value + + if self.friendly_name is not None: + resource['friendlyName'] = 
self.friendly_name
+
+ if self.location is not None:
+ resource['location'] = self.location
+
+ if self.view_query is not None:
+ view = resource['view'] = {}
+ view['query'] = self.view_query
+ elif self._schema:
+ resource['schema'] = {
+ 'fields': _build_schema_resource(self._schema)
+ }
+ else:
+ raise ValueError("Set either 'view_query' or 'schema'.")
+
+ return resource
+
+ def create(self, client=None):
+ """API call: create the table via a POST request
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/v2/tables/insert
+
+ :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current dataset.
+ """
+ client = self._require_client(client)
+ path = '/projects/%s/datasets/%s/tables' % (
+ self._dataset.project, self._dataset.name)
+ api_response = client.connection.api_request(
+ method='POST', path=path, data=self._build_resource())
+ self._set_properties(api_response)
+
+ def exists(self, client=None):
+ """API call: test for the existence of the table via a GET request
+
+ See
+ https://cloud.google.com/bigquery/docs/reference/v2/tables/get
+
+ :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current dataset.
+ """
+ client = self._require_client(client)
+
+ try:
+ client.connection.api_request(method='GET', path=self.path,
+ query_params={'fields': 'id'})
+ except NotFound:
+ return False
+ else:
+ return True
+
+ def reload(self, client=None):
+ """API call: refresh table properties via a GET request
+
+ See
+ https://cloud.google.com/bigquery/docs/reference/v2/tables/get
+
+ :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current dataset.
+ """
+ client = self._require_client(client)
+
+ api_response = client.connection.api_request(
+ method='GET', path=self.path)
+ self._set_properties(api_response)
+
+ def patch(self,
+ client=None,
+ friendly_name=_MARKER,
+ description=_MARKER,
+ location=_MARKER,
+ expires=_MARKER,
+ view_query=_MARKER,
+ schema=_MARKER):
+ """API call: update individual table properties via a PATCH request
+
+ See
+ https://cloud.google.com/bigquery/docs/reference/v2/tables/patch
+
+ :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current dataset.
+
+ :type friendly_name: str or ``NoneType``
+ :param friendly_name: new user-friendly title of the table, or None.
+
+ :type description: str or ``NoneType``
+ :param description: new description of the table, or None.
+
+ :type location: str or ``NoneType``
+ :param location: new geographic location of the table, or None.
+
+ :type expires: :class:`datetime.datetime` or ``NoneType``
+ :param expires: point in time at which the table expires.
+
+ :type view_query: str
+ :param view_query: SQL query defining the table as a view
+
+ :type schema: list of :class:`SchemaField`
+ :param schema: fields describing the schema
+
+ :raises: ValueError for invalid value types.
+ """ + client = self._require_client(client) + + partial = {} + + if expires is not _MARKER: + if (not isinstance(expires, datetime.datetime) and + expires is not None): + raise ValueError("Pass a datetime, or None") + partial['expirationTime'] = _millis_from_datetime(expires) + + if description is not _MARKER: + partial['description'] = description + + if friendly_name is not _MARKER: + partial['friendlyName'] = friendly_name + + if location is not _MARKER: + partial['location'] = location + + if view_query is not _MARKER: + if view_query is None: + partial['view'] = None + else: + partial['view'] = {'query': view_query} + + if schema is not _MARKER: + if schema is None: + partial['schema'] = None + else: + partial['schema'] = { + 'fields': _build_schema_resource(schema)} + + api_response = client.connection.api_request( + method='PATCH', path=self.path, data=partial) + self._set_properties(api_response) + + def update(self, client=None): + """API call: update table properties via a PUT request + + See + https://cloud.google.com/bigquery/docs/reference/v2/tables/update + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. + """ + client = self._require_client(client) + api_response = client.connection.api_request( + method='PUT', path=self.path, data=self._build_resource()) + self._set_properties(api_response) + + def delete(self, client=None): + """API call: delete the table via a DELETE request + + See: + https://cloud.google.com/bigquery/docs/reference/v2/tables/delete + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. 
+ """ + client = self._require_client(client) + client.connection.api_request(method='DELETE', path=self.path) + + def fetch_data(self, max_results=None, page_token=None, client=None): + """API call: fetch the table data via a GET request + + See: + https://cloud.google.com/bigquery/docs/reference/v2/tabledata/list + + .. note:: + + This method assumes that its instance's ``schema`` attribute is + up-to-date with the schema as defined on the back-end: if the + two schemas are not identical, the values returned may be + incomplete. To ensure that the local copy of the schema is + up-to-date, call the table's ``reload`` method. + + :type max_results: integer or ``NoneType`` + :param max_results: maximum number of rows to return. + + :type page_token: str or ``NoneType`` + :param page_token: token representing a cursor into the table's rows. + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. + + :rtype: tuple + :returns: ``(row_data, total_rows, page_token)``, where ``row_data`` + is a list of tuples, one per result row, containing only + the values; ``total_rows`` is a count of the total number + of rows in the table; and ``page_token`` is an opaque + string which can be used to fetch the next batch of rows + (``None`` if no further batches can be fetched). 
+ """ + client = self._require_client(client) + params = {} + + if max_results is not None: + params['maxResults'] = max_results + + if page_token is not None: + params['pageToken'] = page_token + + response = client.connection.api_request(method='GET', + path='%s/data' % self.path, + query_params=params) + total_rows = response.get('totalRows') + if total_rows is not None: + total_rows = int(total_rows) + page_token = response.get('pageToken') + rows_data = _rows_from_json(response.get('rows', ()), self._schema) + + return rows_data, total_rows, page_token + + def insert_data(self, + rows, + row_ids=None, + skip_invalid_rows=None, + ignore_unknown_values=None, + template_suffix=None, + client=None): + """API call: insert table data via a POST request + + See: + https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll + + :type rows: list of tuples + :param rows: Row data to be inserted. Each tuple should contain data + for each schema field on the current table and in the + same order as the schema fields. + + :type row_ids: list of string + :param row_ids: Unique ids, one per row being inserted. If not + passed, no de-duplication occurs. + + :type skip_invalid_rows: boolean or ``NoneType`` + :param skip_invalid_rows: skip rows w/ invalid data? + + :type ignore_unknown_values: boolean or ``NoneType`` + :param ignore_unknown_values: ignore columns beyond schema? + + :type template_suffix: str or ``NoneType`` + :param template_suffix: treat ``name`` as a template table and provide + a suffix. BigQuery will create the table + `` + `` based on the + schema of the template table. See: + https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tables + + :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. 
+ + :rtype: list of mappings + :returns: One mapping per row with insert errors: the "index" key + identifies the row, and the "errors" key contains a list + of the mappings describing one or more problems with the + row. + """ + client = self._require_client(client) + rows_info = [] + data = {'rows': rows_info} + + for index, row in enumerate(rows): + row_info = {} + + for field, value in zip(self._schema, row): + if field.field_type == 'TIMESTAMP' and value is not None: + # BigQuery stores TIMESTAMP data internally as a + # UNIX timestamp with microsecond precision. + # Specifies the number of seconds since the epoch. + value = _microseconds_from_datetime(value) * 1e-6 + row_info[field.name] = value + + info = {'json': row_info} + if row_ids is not None: + info['insertId'] = row_ids[index] + + rows_info.append(info) + + if skip_invalid_rows is not None: + data['skipInvalidRows'] = skip_invalid_rows + + if ignore_unknown_values is not None: + data['ignoreUnknownValues'] = ignore_unknown_values + + if template_suffix is not None: + data['templateSuffix'] = template_suffix + + response = client.connection.api_request( + method='POST', + path='%s/insertAll' % self.path, + data=data) + errors = [] + + for error in response.get('insertErrors', ()): + errors.append({'index': int(error['index']), + 'errors': error['errors']}) + + return errors + + # pylint: disable=too-many-arguments,too-many-locals + def upload_from_file(self, + file_obj, + source_format, + rewind=False, + size=None, + num_retries=6, + allow_jagged_rows=None, + allow_quoted_newlines=None, + create_disposition=None, + encoding=None, + field_delimiter=None, + ignore_unknown_values=None, + max_bad_records=None, + quote_character=None, + skip_leading_rows=None, + write_disposition=None, + client=None): + """Upload the contents of this table from a file-like object. + + The content type of the upload will either be + - The value passed in to the function (if any) + - ``text/csv``. 
+ + :type file_obj: file + :param file_obj: A file handle opened in binary mode for reading. + + :type source_format: str + :param source_format: one of 'CSV' or 'NEWLINE_DELIMITED_JSON'. + job configuration option; see + :meth:`gcloud.bigquery.job.LoadJob` + + :type rewind: boolean + :param rewind: If True, seek to the beginning of the file handle before + writing the file to Cloud Storage. + + :type size: int + :param size: The number of bytes to read from the file handle. + If not provided, we'll try to guess the size using + :func:`os.fstat`. (If the file handle is not from the + filesystem this won't be possible.) + + :type num_retries: integer + :param num_retries: Number of upload retries. Defaults to 6. + + :type allow_jagged_rows: boolean + :param allow_jagged_rows: job configuration option; see + :meth:`gcloud.bigquery.job.LoadJob` + + :type allow_quoted_newlines: boolean + :param allow_quoted_newlines: job configuration option; see + :meth:`gcloud.bigquery.job.LoadJob` + + :type create_disposition: str + :param create_disposition: job configuration option; see + :meth:`gcloud.bigquery.job.LoadJob` + + :type encoding: str + :param encoding: job configuration option; see + :meth:`gcloud.bigquery.job.LoadJob` + + :type field_delimiter: str + :param field_delimiter: job configuration option; see + :meth:`gcloud.bigquery.job.LoadJob` + + :type ignore_unknown_values: boolean + :param ignore_unknown_values: job configuration option; see + :meth:`gcloud.bigquery.job.LoadJob` + + :type max_bad_records: integer + :param max_bad_records: job configuration option; see + :meth:`gcloud.bigquery.job.LoadJob` + + :type quote_character: str + :param quote_character: job configuration option; see + :meth:`gcloud.bigquery.job.LoadJob` + + :type skip_leading_rows: integer + :param skip_leading_rows: job configuration option; see + :meth:`gcloud.bigquery.job.LoadJob` + + :type write_disposition: str + :param write_disposition: job configuration option; see + 
:meth:`gcloud.bigquery.job.LoadJob` + + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to the ``client`` stored on the current dataset. + + :rtype: :class:`gcloud.bigquery.jobs.LoadTableFromStorageJob` + :returns: the job instance used to load the data (e.g., for + querying status) + :raises: :class:`ValueError` if ``size`` is not passed in and can not + be determined, or if the ``file_obj`` can be detected to be + a file opened in text mode. + """ + client = self._require_client(client) + connection = client.connection + content_type = 'application/octet-stream' + + # Rewind the file if desired. + if rewind: + file_obj.seek(0, os.SEEK_SET) + + mode = getattr(file_obj, 'mode', None) + if mode is not None and mode != 'rb': + raise ValueError( + "Cannot upload files opened in text mode: use " + "open(filename, mode='rb')") + + # Get the basic stats about the file. + total_bytes = size + if total_bytes is None: + if hasattr(file_obj, 'fileno'): + total_bytes = os.fstat(file_obj.fileno()).st_size + else: + raise ValueError('total bytes could not be determined. 
Please ' + 'pass an explicit size.') + headers = { + 'Accept': 'application/json', + 'Accept-Encoding': 'gzip, deflate', + 'User-Agent': connection.USER_AGENT, + 'content-type': 'application/json', + } + + metadata = { + 'configuration': { + 'load': { + 'sourceFormat': source_format, + 'schema': { + 'fields': _build_schema_resource(self._schema), + }, + 'destinationTable': { + 'projectId': self._dataset.project, + 'datasetId': self._dataset.name, + 'tableId': self.name, + } + } + } + } + + _configure_job_metadata(metadata, allow_jagged_rows, + allow_quoted_newlines, create_disposition, + encoding, field_delimiter, + ignore_unknown_values, max_bad_records, + quote_character, skip_leading_rows, + write_disposition) + + upload = Upload(file_obj, content_type, total_bytes, + auto_transfer=False) + + url_builder = _UrlBuilder() + upload_config = _UploadConfig() + + # Base URL may change once we know simple vs. resumable. + base_url = connection.API_BASE_URL + '/upload' + path = '/projects/%s/jobs' % (self._dataset.project,) + upload_url = connection.build_api_url(api_base_url=base_url, path=path) + + # Use apitools 'Upload' facility. 
+ request = Request(upload_url, 'POST', headers, + body=json.dumps(metadata)) + + upload.configure_request(upload_config, request, url_builder) + query_params = url_builder.query_params + base_url = connection.API_BASE_URL + '/upload' + request.url = connection.build_api_url(api_base_url=base_url, + path=path, + query_params=query_params) + upload.initialize_upload(request, connection.http) + + if upload.strategy == RESUMABLE_UPLOAD: + http_response = upload.stream_file(use_chunks=True) + else: + http_response = make_api_request(connection.http, request, + retries=num_retries) + response_content = http_response.content + if not isinstance(response_content, + six.string_types): # pragma: NO COVER Python3 + response_content = response_content.decode('utf-8') + return client.job_from_resource(json.loads(response_content)) + # pylint: enable=too-many-arguments,too-many-locals + + +def _configure_job_metadata(metadata, # pylint: disable=too-many-arguments + allow_jagged_rows, + allow_quoted_newlines, + create_disposition, + encoding, + field_delimiter, + ignore_unknown_values, + max_bad_records, + quote_character, + skip_leading_rows, + write_disposition): + """Helper for :meth:`Table.upload_from_file`.""" + load_config = metadata['configuration']['load'] + + if allow_jagged_rows is not None: + load_config['allowJaggedRows'] = allow_jagged_rows + + if allow_quoted_newlines is not None: + load_config['allowQuotedNewlines'] = allow_quoted_newlines + + if create_disposition is not None: + load_config['createDisposition'] = create_disposition + + if encoding is not None: + load_config['encoding'] = encoding + + if field_delimiter is not None: + load_config['fieldDelimiter'] = field_delimiter + + if ignore_unknown_values is not None: + load_config['ignoreUnknownValues'] = ignore_unknown_values + + if max_bad_records is not None: + load_config['maxBadRecords'] = max_bad_records + + if quote_character is not None: + load_config['quote'] = quote_character + + if 
skip_leading_rows is not None: + load_config['skipLeadingRows'] = skip_leading_rows + + if write_disposition is not None: + load_config['writeDisposition'] = write_disposition + + +def _parse_schema_resource(info): + """Parse a resource fragment into a schema field. + + :type info: mapping + :param info: should contain a "fields" key to be parsed + + :rtype: list of :class:`SchemaField`, or ``NoneType`` + :returns: a list of parsed fields, or ``None`` if no "fields" key is + present in ``info``. + """ + if 'fields' not in info: + return None + + schema = [] + for r_field in info['fields']: + name = r_field['name'] + field_type = r_field['type'] + mode = r_field.get('mode', 'NULLABLE') + description = r_field.get('description') + sub_fields = _parse_schema_resource(r_field) + schema.append( + SchemaField(name, field_type, mode, description, sub_fields)) + return schema + + +def _build_schema_resource(fields): + """Generate a resource fragment for a schema. + + :type fields: sequence of :class:`SchemaField` + :param fields: schema to be dumped + + :rtype: mapping + :returns: a mapping describing the schema of the supplied fields. 
+ """ + infos = [] + for field in fields: + info = {'name': field.name, + 'type': field.field_type, + 'mode': field.mode} + if field.description is not None: + info['description'] = field.description + if field.fields is not None: + info['fields'] = _build_schema_resource(field.fields) + infos.append(info) + return infos + + +class _UploadConfig(object): + """Faux message FBO apitools' 'configure_request'.""" + accept = ['*/*'] + max_size = None + resumable_multipart = True + resumable_path = u'/upload/bigquery/v2/projects/{project}/jobs' + simple_multipart = True + simple_path = u'/upload/bigquery/v2/projects/{project}/jobs' + + +class _UrlBuilder(object): + """Faux builder FBO apitools' 'configure_request'""" + def __init__(self): + self.query_params = {} + self._relative_path = '' diff --git a/env/Lib/site-packages/gcloud/bigquery/test__helpers.py b/env/Lib/site-packages/gcloud/bigquery/test__helpers.py new file mode 100644 index 0000000..45f036e --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigquery/test__helpers.py @@ -0,0 +1,116 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest2 + + +class Test_ConfigurationProperty(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigquery._helpers import _ConfigurationProperty + return _ConfigurationProperty + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_it(self): + + class Configuration(object): + _attr = None + + class Wrapper(object): + attr = self._makeOne('attr') + + def __init__(self): + self._configuration = Configuration() + + self.assertEqual(Wrapper.attr.name, 'attr') + + wrapper = Wrapper() + self.assertEqual(wrapper.attr, None) + + value = object() + wrapper.attr = value + self.assertTrue(wrapper.attr is value) + self.assertTrue(wrapper._configuration._attr is value) + + del wrapper.attr + self.assertEqual(wrapper.attr, None) + self.assertEqual(wrapper._configuration._attr, None) + + +class Test_TypedProperty(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigquery._helpers import _TypedProperty + return _TypedProperty + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_it(self): + + class Configuration(object): + _attr = None + + class Wrapper(object): + attr = self._makeOne('attr', int) + + def __init__(self): + self._configuration = Configuration() + + wrapper = Wrapper() + with self.assertRaises(ValueError): + wrapper.attr = 'BOGUS' + + wrapper.attr = 42 + self.assertEqual(wrapper.attr, 42) + self.assertEqual(wrapper._configuration._attr, 42) + + del wrapper.attr + self.assertEqual(wrapper.attr, None) + self.assertEqual(wrapper._configuration._attr, None) + + +class Test_EnumProperty(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigquery._helpers import _EnumProperty + return _EnumProperty + + def test_it(self): + + class Sub(self._getTargetClass()): + ALLOWED = ('FOO', 'BAR', 'BAZ') + + class Configuration(object): + _attr = None + + class Wrapper(object): + attr = Sub('attr') + + def __init__(self): + self._configuration = 
Configuration() + + wrapper = Wrapper() + with self.assertRaises(ValueError): + wrapper.attr = 'BOGUS' + + wrapper.attr = 'FOO' + self.assertEqual(wrapper.attr, 'FOO') + self.assertEqual(wrapper._configuration._attr, 'FOO') + + del wrapper.attr + self.assertEqual(wrapper.attr, None) + self.assertEqual(wrapper._configuration._attr, None) diff --git a/env/Lib/site-packages/gcloud/bigquery/test_client.py b/env/Lib/site-packages/gcloud/bigquery/test_client.py new file mode 100644 index 0000000..aa72834 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigquery/test_client.py @@ -0,0 +1,431 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest2 + + +class TestClient(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigquery.client import Client + return Client + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + from gcloud.bigquery.connection import Connection + PROJECT = 'PROJECT' + creds = _Credentials() + http = object() + client = self._makeOne(project=PROJECT, credentials=creds, http=http) + self.assertTrue(isinstance(client.connection, Connection)) + self.assertTrue(client.connection.credentials is creds) + self.assertTrue(client.connection.http is http) + + def test_list_datasets_defaults(self): + from gcloud.bigquery.dataset import Dataset + PROJECT = 'PROJECT' + DATASET_1 = 'dataset_one' + DATASET_2 = 'dataset_two' + PATH = 'projects/%s/datasets' % PROJECT + TOKEN = 'TOKEN' + DATA = { + 'nextPageToken': TOKEN, + 'datasets': [ + {'kind': 'bigquery#dataset', + 'id': '%s:%s' % (PROJECT, DATASET_1), + 'datasetReference': {'datasetId': DATASET_1, + 'projectId': PROJECT}, + 'friendlyName': None}, + {'kind': 'bigquery#dataset', + 'id': '%s:%s' % (PROJECT, DATASET_2), + 'datasetReference': {'datasetId': DATASET_2, + 'projectId': PROJECT}, + 'friendlyName': 'Two'}, + ] + } + creds = _Credentials() + client = self._makeOne(PROJECT, creds) + conn = client.connection = _Connection(DATA) + + datasets, token = client.list_datasets() + + self.assertEqual(len(datasets), len(DATA['datasets'])) + for found, expected in zip(datasets, DATA['datasets']): + self.assertTrue(isinstance(found, Dataset)) + self.assertEqual(found.dataset_id, expected['id']) + self.assertEqual(found.friendly_name, expected['friendlyName']) + self.assertEqual(token, TOKEN) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + + def test_list_datasets_explicit_response_missing_datasets_key(self): + PROJECT = 'PROJECT' + PATH = 
'projects/%s/datasets' % PROJECT + TOKEN = 'TOKEN' + DATA = {} + creds = _Credentials() + client = self._makeOne(PROJECT, creds) + conn = client.connection = _Connection(DATA) + + datasets, token = client.list_datasets( + include_all=True, max_results=3, page_token=TOKEN) + + self.assertEqual(len(datasets), 0) + self.assertEqual(token, None) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], + {'all': True, 'maxResults': 3, 'pageToken': TOKEN}) + + def test_dataset(self): + from gcloud.bigquery.dataset import Dataset + PROJECT = 'PROJECT' + DATASET = 'dataset_name' + creds = _Credentials() + http = object() + client = self._makeOne(project=PROJECT, credentials=creds, http=http) + dataset = client.dataset(DATASET) + self.assertTrue(isinstance(dataset, Dataset)) + self.assertEqual(dataset.name, DATASET) + self.assertTrue(dataset._client is client) + + def test_job_from_resource_unknown_type(self): + PROJECT = 'PROJECT' + creds = _Credentials() + client = self._makeOne(PROJECT, creds) + with self.assertRaises(ValueError): + client.job_from_resource({'configuration': {'nonesuch': {}}}) + + def test_list_jobs_defaults(self): + from gcloud.bigquery.job import LoadTableFromStorageJob + from gcloud.bigquery.job import CopyJob + from gcloud.bigquery.job import ExtractTableToStorageJob + from gcloud.bigquery.job import QueryJob + PROJECT = 'PROJECT' + DATASET = 'test_dataset' + SOURCE_TABLE = 'source_table' + DESTINATION_TABLE = 'destination_table' + QUERY_DESTINATION_TABLE = 'query_destination_table' + SOURCE_URI = 'gs://test_bucket/src_object*' + DESTINATION_URI = 'gs://test_bucket/dst_object*' + JOB_TYPES = { + 'load_job': LoadTableFromStorageJob, + 'copy_job': CopyJob, + 'extract_job': ExtractTableToStorageJob, + 'query_job': QueryJob, + } + PATH = 'projects/%s/jobs' % PROJECT + TOKEN = 'TOKEN' + QUERY = 'SELECT * from 
test_dataset:test_table' + ASYNC_QUERY_DATA = { + 'id': '%s:%s' % (PROJECT, 'query_job'), + 'jobReference': { + 'projectId': PROJECT, + 'jobId': 'query_job', + }, + 'state': 'DONE', + 'configuration': { + 'query': { + 'query': QUERY, + 'destinationTable': { + 'projectId': PROJECT, + 'datasetId': DATASET, + 'tableId': QUERY_DESTINATION_TABLE, + }, + 'createDisposition': 'CREATE_IF_NEEDED', + 'writeDisposition': 'WRITE_TRUNCATE', + } + }, + } + EXTRACT_DATA = { + 'id': '%s:%s' % (PROJECT, 'extract_job'), + 'jobReference': { + 'projectId': PROJECT, + 'jobId': 'extract_job', + }, + 'state': 'DONE', + 'configuration': { + 'extract': { + 'sourceTable': { + 'projectId': PROJECT, + 'datasetId': DATASET, + 'tableId': SOURCE_TABLE, + }, + 'destinationUris': [DESTINATION_URI], + } + }, + } + COPY_DATA = { + 'id': '%s:%s' % (PROJECT, 'copy_job'), + 'jobReference': { + 'projectId': PROJECT, + 'jobId': 'copy_job', + }, + 'state': 'DONE', + 'configuration': { + 'copy': { + 'sourceTables': [{ + 'projectId': PROJECT, + 'datasetId': DATASET, + 'tableId': SOURCE_TABLE, + }], + 'destinationTable': { + 'projectId': PROJECT, + 'datasetId': DATASET, + 'tableId': DESTINATION_TABLE, + }, + } + }, + } + LOAD_DATA = { + 'id': '%s:%s' % (PROJECT, 'load_job'), + 'jobReference': { + 'projectId': PROJECT, + 'jobId': 'load_job', + }, + 'state': 'DONE', + 'configuration': { + 'load': { + 'destinationTable': { + 'projectId': PROJECT, + 'datasetId': DATASET, + 'tableId': SOURCE_TABLE, + }, + 'sourceUris': [SOURCE_URI], + } + }, + } + DATA = { + 'nextPageToken': TOKEN, + 'jobs': [ + ASYNC_QUERY_DATA, + EXTRACT_DATA, + COPY_DATA, + LOAD_DATA, + ] + } + creds = _Credentials() + client = self._makeOne(PROJECT, creds) + conn = client.connection = _Connection(DATA) + + jobs, token = client.list_jobs() + + self.assertEqual(len(jobs), len(DATA['jobs'])) + for found, expected in zip(jobs, DATA['jobs']): + name = expected['jobReference']['jobId'] + self.assertTrue(isinstance(found, JOB_TYPES[name])) + 
self.assertEqual(found.name, name) + self.assertEqual(token, TOKEN) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], {'projection': 'full'}) + + def test_list_jobs_load_job_wo_sourceUris(self): + from gcloud.bigquery.job import LoadTableFromStorageJob + PROJECT = 'PROJECT' + DATASET = 'test_dataset' + SOURCE_TABLE = 'source_table' + JOB_TYPES = { + 'load_job': LoadTableFromStorageJob, + } + PATH = 'projects/%s/jobs' % PROJECT + TOKEN = 'TOKEN' + LOAD_DATA = { + 'id': '%s:%s' % (PROJECT, 'load_job'), + 'jobReference': { + 'projectId': PROJECT, + 'jobId': 'load_job', + }, + 'state': 'DONE', + 'configuration': { + 'load': { + 'destinationTable': { + 'projectId': PROJECT, + 'datasetId': DATASET, + 'tableId': SOURCE_TABLE, + }, + } + }, + } + DATA = { + 'nextPageToken': TOKEN, + 'jobs': [ + LOAD_DATA, + ] + } + creds = _Credentials() + client = self._makeOne(PROJECT, creds) + conn = client.connection = _Connection(DATA) + + jobs, token = client.list_jobs() + + self.assertEqual(len(jobs), len(DATA['jobs'])) + for found, expected in zip(jobs, DATA['jobs']): + name = expected['jobReference']['jobId'] + self.assertTrue(isinstance(found, JOB_TYPES[name])) + self.assertEqual(found.name, name) + self.assertEqual(token, TOKEN) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], {'projection': 'full'}) + + def test_list_jobs_explicit_empty(self): + PROJECT = 'PROJECT' + PATH = 'projects/%s/jobs' % PROJECT + DATA = {'jobs': []} + TOKEN = 'TOKEN' + creds = _Credentials() + client = self._makeOne(PROJECT, creds) + conn = client.connection = _Connection(DATA) + + jobs, token = client.list_jobs(max_results=1000, page_token=TOKEN, + all_users=True, state_filter='done') + + 
self.assertEqual(len(jobs), 0) + self.assertEqual(token, None) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], + {'projection': 'full', + 'maxResults': 1000, + 'pageToken': TOKEN, + 'allUsers': True, + 'stateFilter': 'done'}) + + def test_load_table_from_storage(self): + from gcloud.bigquery.job import LoadTableFromStorageJob + PROJECT = 'PROJECT' + JOB = 'job_name' + DATASET = 'dataset_name' + DESTINATION = 'destination_table' + SOURCE_URI = 'http://example.com/source.csv' + creds = _Credentials() + http = object() + client = self._makeOne(project=PROJECT, credentials=creds, http=http) + dataset = client.dataset(DATASET) + destination = dataset.table(DESTINATION) + job = client.load_table_from_storage(JOB, destination, SOURCE_URI) + self.assertTrue(isinstance(job, LoadTableFromStorageJob)) + self.assertTrue(job._client is client) + self.assertEqual(job.name, JOB) + self.assertEqual(list(job.source_uris), [SOURCE_URI]) + self.assertTrue(job.destination is destination) + + def test_copy_table(self): + from gcloud.bigquery.job import CopyJob + PROJECT = 'PROJECT' + JOB = 'job_name' + DATASET = 'dataset_name' + SOURCE = 'source_table' + DESTINATION = 'destination_table' + creds = _Credentials() + http = object() + client = self._makeOne(project=PROJECT, credentials=creds, http=http) + dataset = client.dataset(DATASET) + source = dataset.table(SOURCE) + destination = dataset.table(DESTINATION) + job = client.copy_table(JOB, destination, source) + self.assertTrue(isinstance(job, CopyJob)) + self.assertTrue(job._client is client) + self.assertEqual(job.name, JOB) + self.assertEqual(list(job.sources), [source]) + self.assertTrue(job.destination is destination) + + def test_extract_table_to_storage(self): + from gcloud.bigquery.job import ExtractTableToStorageJob + PROJECT = 'PROJECT' + JOB = 'job_name' + DATASET = 
'dataset_name' + SOURCE = 'source_table' + DESTINATION = 'gs://bucket_name/object_name' + creds = _Credentials() + http = object() + client = self._makeOne(project=PROJECT, credentials=creds, http=http) + dataset = client.dataset(DATASET) + source = dataset.table(SOURCE) + job = client.extract_table_to_storage(JOB, source, DESTINATION) + self.assertTrue(isinstance(job, ExtractTableToStorageJob)) + self.assertTrue(job._client is client) + self.assertEqual(job.name, JOB) + self.assertEqual(job.source, source) + self.assertEqual(list(job.destination_uris), [DESTINATION]) + + def test_run_async_query(self): + from gcloud.bigquery.job import QueryJob + PROJECT = 'PROJECT' + JOB = 'job_name' + QUERY = 'select count(*) from persons' + creds = _Credentials() + http = object() + client = self._makeOne(project=PROJECT, credentials=creds, http=http) + job = client.run_async_query(JOB, QUERY) + self.assertTrue(isinstance(job, QueryJob)) + self.assertTrue(job._client is client) + self.assertEqual(job.name, JOB) + self.assertEqual(job.query, QUERY) + + def test_run_sync_query(self): + from gcloud.bigquery.query import QueryResults + PROJECT = 'PROJECT' + QUERY = 'select count(*) from persons' + creds = _Credentials() + http = object() + client = self._makeOne(project=PROJECT, credentials=creds, http=http) + job = client.run_sync_query(QUERY) + self.assertTrue(isinstance(job, QueryResults)) + self.assertTrue(job._client is client) + self.assertEqual(job.name, None) + self.assertEqual(job.query, QUERY) + + +class _Credentials(object): + + _scopes = None + + @staticmethod + def create_scoped_required(): + return True + + def create_scoped(self, scope): + self._scopes = scope + return self + + +class _Connection(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + def api_request(self, **kw): + self._requested.append(kw) + response, self._responses = self._responses[0], self._responses[1:] + return response diff --git 
a/env/Lib/site-packages/gcloud/bigquery/test_connection.py b/env/Lib/site-packages/gcloud/bigquery/test_connection.py new file mode 100644 index 0000000..7a3aa25 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigquery/test_connection.py @@ -0,0 +1,47 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class TestConnection(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigquery.connection import Connection + return Connection + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_build_api_url_no_extra_query_params(self): + conn = self._makeOne() + URI = '/'.join([ + conn.API_BASE_URL, + 'bigquery', + conn.API_VERSION, + 'foo', + ]) + self.assertEqual(conn.build_api_url('/foo'), URI) + + def test_build_api_url_w_extra_query_params(self): + from six.moves.urllib.parse import parse_qsl + from six.moves.urllib.parse import urlsplit + conn = self._makeOne() + uri = conn.build_api_url('/foo', {'bar': 'baz'}) + scheme, netloc, path, qs, _ = urlsplit(uri) + self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) + self.assertEqual(path, + '/'.join(['', 'bigquery', conn.API_VERSION, 'foo'])) + parms = dict(parse_qsl(qs)) + self.assertEqual(parms['bar'], 'baz') diff --git a/env/Lib/site-packages/gcloud/bigquery/test_dataset.py b/env/Lib/site-packages/gcloud/bigquery/test_dataset.py new file mode 100644 index 0000000..e1caa1f --- 
/dev/null +++ b/env/Lib/site-packages/gcloud/bigquery/test_dataset.py @@ -0,0 +1,788 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class TestAccessGrant(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigquery.dataset import AccessGrant + return AccessGrant + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + grant = self._makeOne('OWNER', 'userByEmail', 'phred@example.com') + self.assertEqual(grant.role, 'OWNER') + self.assertEqual(grant.entity_type, 'userByEmail') + self.assertEqual(grant.entity_id, 'phred@example.com') + + def test_ctor_bad_entity_type(self): + with self.assertRaises(ValueError): + self._makeOne(None, 'unknown', None) + + def test_ctor_view_with_role(self): + role = 'READER' + entity_type = 'view' + with self.assertRaises(ValueError): + self._makeOne(role, entity_type, None) + + def test_ctor_view_success(self): + role = None + entity_type = 'view' + entity_id = object() + grant = self._makeOne(role, entity_type, entity_id) + self.assertEqual(grant.role, role) + self.assertEqual(grant.entity_type, entity_type) + self.assertEqual(grant.entity_id, entity_id) + + def test_ctor_nonview_without_role(self): + role = None + entity_type = 'userByEmail' + with self.assertRaises(ValueError): + self._makeOne(role, entity_type, None) + + def test___eq___role_mismatch(self): + grant = 
self._makeOne('OWNER', 'userByEmail', 'phred@example.com') + other = self._makeOne('WRITER', 'userByEmail', 'phred@example.com') + self.assertNotEqual(grant, other) + + def test___eq___entity_type_mismatch(self): + grant = self._makeOne('OWNER', 'userByEmail', 'phred@example.com') + other = self._makeOne('OWNER', 'groupByEmail', 'phred@example.com') + self.assertNotEqual(grant, other) + + def test___eq___entity_id_mismatch(self): + grant = self._makeOne('OWNER', 'userByEmail', 'phred@example.com') + other = self._makeOne('OWNER', 'userByEmail', 'bharney@example.com') + self.assertNotEqual(grant, other) + + def test___eq___hit(self): + grant = self._makeOne('OWNER', 'userByEmail', 'phred@example.com') + other = self._makeOne('OWNER', 'userByEmail', 'phred@example.com') + self.assertEqual(grant, other) + + +class TestDataset(unittest2.TestCase): + PROJECT = 'project' + DS_NAME = 'dataset-name' + + def _getTargetClass(self): + from gcloud.bigquery.dataset import Dataset + return Dataset + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def _setUpConstants(self): + import datetime + from gcloud._helpers import UTC + + self.WHEN_TS = 1437767599.006 + self.WHEN = datetime.datetime.utcfromtimestamp(self.WHEN_TS).replace( + tzinfo=UTC) + self.ETAG = 'ETAG' + self.DS_ID = '%s:%s' % (self.PROJECT, self.DS_NAME) + self.RESOURCE_URL = 'http://example.com/path/to/resource' + + def _makeResource(self): + self._setUpConstants() + USER_EMAIL = 'phred@example.com' + GROUP_EMAIL = 'group-name@lists.example.com' + return { + 'creationTime': self.WHEN_TS * 1000, + 'datasetReference': + {'projectId': self.PROJECT, 'datasetId': self.DS_NAME}, + 'etag': self.ETAG, + 'id': self.DS_ID, + 'lastModifiedTime': self.WHEN_TS * 1000, + 'location': 'US', + 'selfLink': self.RESOURCE_URL, + 'access': [ + {'role': 'OWNER', 'userByEmail': USER_EMAIL}, + {'role': 'OWNER', 'groupByEmail': GROUP_EMAIL}, + {'role': 'WRITER', 'specialGroup': 'projectWriters'}, + 
{'role': 'READER', 'specialGroup': 'projectReaders'}], + } + + def _verifyAccessGrants(self, access_grants, resource): + r_grants = [] + for r_grant in resource['access']: + role = r_grant.pop('role') + for entity_type, entity_id in sorted(r_grant.items()): + r_grants.append({'role': role, + 'entity_type': entity_type, + 'entity_id': entity_id}) + + self.assertEqual(len(access_grants), len(r_grants)) + for a_grant, r_grant in zip(access_grants, r_grants): + self.assertEqual(a_grant.role, r_grant['role']) + self.assertEqual(a_grant.entity_type, r_grant['entity_type']) + self.assertEqual(a_grant.entity_id, r_grant['entity_id']) + + def _verifyReadonlyResourceProperties(self, dataset, resource): + + self.assertEqual(dataset.dataset_id, self.DS_ID) + + if 'creationTime' in resource: + self.assertEqual(dataset.created, self.WHEN) + else: + self.assertEqual(dataset.created, None) + if 'etag' in resource: + self.assertEqual(dataset.etag, self.ETAG) + else: + self.assertEqual(dataset.etag, None) + if 'lastModifiedTime' in resource: + self.assertEqual(dataset.modified, self.WHEN) + else: + self.assertEqual(dataset.modified, None) + if 'selfLink' in resource: + self.assertEqual(dataset.self_link, self.RESOURCE_URL) + else: + self.assertEqual(dataset.self_link, None) + + def _verifyResourceProperties(self, dataset, resource): + + self._verifyReadonlyResourceProperties(dataset, resource) + + if 'defaultTableExpirationMs' in resource: + self.assertEqual(dataset.default_table_expiration_ms, + int(resource.get('defaultTableExpirationMs'))) + else: + self.assertEqual(dataset.default_table_expiration_ms, None) + self.assertEqual(dataset.description, resource.get('description')) + self.assertEqual(dataset.friendly_name, resource.get('friendlyName')) + self.assertEqual(dataset.location, resource.get('location')) + + if 'access' in resource: + self._verifyAccessGrants(dataset.access_grants, resource) + else: + self.assertEqual(dataset.access_grants, []) + + def test_ctor(self): + 
client = _Client(self.PROJECT) + dataset = self._makeOne(self.DS_NAME, client) + self.assertEqual(dataset.name, self.DS_NAME) + self.assertTrue(dataset._client is client) + self.assertEqual(dataset.project, client.project) + self.assertEqual( + dataset.path, + '/projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)) + self.assertEqual(dataset.access_grants, []) + + self.assertEqual(dataset.created, None) + self.assertEqual(dataset.dataset_id, None) + self.assertEqual(dataset.etag, None) + self.assertEqual(dataset.modified, None) + self.assertEqual(dataset.self_link, None) + + self.assertEqual(dataset.default_table_expiration_ms, None) + self.assertEqual(dataset.description, None) + self.assertEqual(dataset.friendly_name, None) + self.assertEqual(dataset.location, None) + + def test_access_roles_setter_non_list(self): + client = _Client(self.PROJECT) + dataset = self._makeOne(self.DS_NAME, client) + with self.assertRaises(TypeError): + dataset.access_grants = object() + + def test_access_roles_setter_invalid_field(self): + from gcloud.bigquery.dataset import AccessGrant + client = _Client(self.PROJECT) + dataset = self._makeOne(self.DS_NAME, client) + phred = AccessGrant('OWNER', 'userByEmail', 'phred@example.com') + with self.assertRaises(ValueError): + dataset.access_grants = [phred, object()] + + def test_access_roles_setter(self): + from gcloud.bigquery.dataset import AccessGrant + client = _Client(self.PROJECT) + dataset = self._makeOne(self.DS_NAME, client) + phred = AccessGrant('OWNER', 'userByEmail', 'phred@example.com') + bharney = AccessGrant('OWNER', 'userByEmail', 'bharney@example.com') + dataset.access_grants = [phred, bharney] + self.assertEqual(dataset.access_grants, [phred, bharney]) + + def test_default_table_expiration_ms_setter_bad_value(self): + client = _Client(self.PROJECT) + dataset = self._makeOne(self.DS_NAME, client) + with self.assertRaises(ValueError): + dataset.default_table_expiration_ms = 'bogus' + + def 
test_default_table_expiration_ms_setter(self): + client = _Client(self.PROJECT) + dataset = self._makeOne(self.DS_NAME, client) + dataset.default_table_expiration_ms = 12345 + self.assertEqual(dataset.default_table_expiration_ms, 12345) + + def test_description_setter_bad_value(self): + client = _Client(self.PROJECT) + dataset = self._makeOne(self.DS_NAME, client) + with self.assertRaises(ValueError): + dataset.description = 12345 + + def test_description_setter(self): + client = _Client(self.PROJECT) + dataset = self._makeOne(self.DS_NAME, client) + dataset.description = 'DESCRIPTION' + self.assertEqual(dataset.description, 'DESCRIPTION') + + def test_friendly_name_setter_bad_value(self): + client = _Client(self.PROJECT) + dataset = self._makeOne(self.DS_NAME, client) + with self.assertRaises(ValueError): + dataset.friendly_name = 12345 + + def test_friendly_name_setter(self): + client = _Client(self.PROJECT) + dataset = self._makeOne(self.DS_NAME, client) + dataset.friendly_name = 'FRIENDLY' + self.assertEqual(dataset.friendly_name, 'FRIENDLY') + + def test_location_setter_bad_value(self): + client = _Client(self.PROJECT) + dataset = self._makeOne(self.DS_NAME, client) + with self.assertRaises(ValueError): + dataset.location = 12345 + + def test_location_setter(self): + client = _Client(self.PROJECT) + dataset = self._makeOne(self.DS_NAME, client) + dataset.location = 'LOCATION' + self.assertEqual(dataset.location, 'LOCATION') + + def test_from_api_repr_missing_identity(self): + self._setUpConstants() + client = _Client(self.PROJECT) + RESOURCE = {} + klass = self._getTargetClass() + with self.assertRaises(KeyError): + klass.from_api_repr(RESOURCE, client=client) + + def test_from_api_repr_bare(self): + self._setUpConstants() + client = _Client(self.PROJECT) + RESOURCE = { + 'id': '%s:%s' % (self.PROJECT, self.DS_NAME), + 'datasetReference': { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + } + } + klass = self._getTargetClass() + dataset = 
klass.from_api_repr(RESOURCE, client=client) + self.assertTrue(dataset._client is client) + self._verifyResourceProperties(dataset, RESOURCE) + + def test_from_api_repr_w_properties(self): + client = _Client(self.PROJECT) + RESOURCE = self._makeResource() + klass = self._getTargetClass() + dataset = klass.from_api_repr(RESOURCE, client=client) + self.assertTrue(dataset._client is client) + self._verifyResourceProperties(dataset, RESOURCE) + + def test__parse_access_grants_w_unknown_entity_type(self): + ACCESS = [ + {'role': 'READER', 'unknown': 'UNKNOWN'}, + ] + client = _Client(self.PROJECT) + dataset = self._makeOne(self.DS_NAME, client=client) + with self.assertRaises(ValueError): + dataset._parse_access_grants(ACCESS) + + def test__parse_access_grants_w_extra_keys(self): + USER_EMAIL = 'phred@example.com' + ACCESS = [ + { + 'role': 'READER', + 'specialGroup': 'projectReaders', + 'userByEmail': USER_EMAIL, + }, + ] + client = _Client(self.PROJECT) + dataset = self._makeOne(self.DS_NAME, client=client) + with self.assertRaises(ValueError): + dataset._parse_access_grants(ACCESS) + + def test_create_w_bound_client(self): + PATH = 'projects/%s/datasets' % self.PROJECT + RESOURCE = self._makeResource() + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + dataset = self._makeOne(self.DS_NAME, client=client) + + dataset.create() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'datasetReference': + {'projectId': self.PROJECT, 'datasetId': self.DS_NAME}, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(dataset, RESOURCE) + + def test_create_w_alternate_client(self): + from gcloud.bigquery.dataset import AccessGrant + PATH = 'projects/%s/datasets' % self.PROJECT + USER_EMAIL = 'phred@example.com' + GROUP_EMAIL = 'group-name@lists.example.com' + DESCRIPTION = 'DESCRIPTION' + TITLE = 
'TITLE' + RESOURCE = self._makeResource() + RESOURCE['description'] = DESCRIPTION + RESOURCE['friendlyName'] = TITLE + conn1 = _Connection() + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + dataset = self._makeOne(self.DS_NAME, client=CLIENT1) + dataset.friendly_name = TITLE + dataset.description = DESCRIPTION + VIEW = { + 'projectId': 'my-proj', + 'datasetId': 'starry-skies', + 'tableId': 'northern-hemisphere', + } + dataset.access_grants = [ + AccessGrant('OWNER', 'userByEmail', USER_EMAIL), + AccessGrant('OWNER', 'groupByEmail', GROUP_EMAIL), + AccessGrant('READER', 'domain', 'foo.com'), + AccessGrant('READER', 'specialGroup', 'projectReaders'), + AccessGrant('WRITER', 'specialGroup', 'projectWriters'), + AccessGrant(None, 'view', VIEW), + ] + + dataset.create(client=CLIENT2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'datasetReference': { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + }, + 'description': DESCRIPTION, + 'friendlyName': TITLE, + 'access': [ + {'role': 'OWNER', 'userByEmail': USER_EMAIL}, + {'role': 'OWNER', 'groupByEmail': GROUP_EMAIL}, + {'role': 'READER', 'domain': 'foo.com'}, + {'role': 'READER', 'specialGroup': 'projectReaders'}, + {'role': 'WRITER', 'specialGroup': 'projectWriters'}, + {'view': VIEW}, + ], + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(dataset, RESOURCE) + + def test_create_w_missing_output_properties(self): + # In the wild, the resource returned from 'dataset.create' sometimes + # lacks 'creationTime' / 'lastModifiedTime' + PATH = 'projects/%s/datasets' % (self.PROJECT,) + RESOURCE = self._makeResource() + del RESOURCE['creationTime'] + del RESOURCE['lastModifiedTime'] + self.WHEN = None + conn = 
_Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + dataset = self._makeOne(self.DS_NAME, client=client) + + dataset.create() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'datasetReference': + {'projectId': self.PROJECT, 'datasetId': self.DS_NAME}, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(dataset, RESOURCE) + + def test_exists_miss_w_bound_client(self): + PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME) + conn = _Connection() + client = _Client(project=self.PROJECT, connection=conn) + dataset = self._makeOne(self.DS_NAME, client=client) + + self.assertFalse(dataset.exists()) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], {'fields': 'id'}) + + def test_exists_hit_w_alternate_client(self): + PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME) + conn1 = _Connection() + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({}) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + dataset = self._makeOne(self.DS_NAME, client=CLIENT1) + + self.assertTrue(dataset.exists(client=CLIENT2)) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], {'fields': 'id'}) + + def test_reload_w_bound_client(self): + PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME) + RESOURCE = self._makeResource() + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + dataset = self._makeOne(self.DS_NAME, client=client) + + dataset.reload() + + 
self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(dataset, RESOURCE) + + def test_reload_w_alternate_client(self): + PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME) + RESOURCE = self._makeResource() + conn1 = _Connection() + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + dataset = self._makeOne(self.DS_NAME, client=CLIENT1) + + dataset.reload(client=CLIENT2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(dataset, RESOURCE) + + def test_patch_w_invalid_expiration(self): + RESOURCE = self._makeResource() + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + dataset = self._makeOne(self.DS_NAME, client=client) + + with self.assertRaises(ValueError): + dataset.patch(default_table_expiration_ms='BOGUS') + + def test_patch_w_bound_client(self): + PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME) + DESCRIPTION = 'DESCRIPTION' + TITLE = 'TITLE' + RESOURCE = self._makeResource() + RESOURCE['description'] = DESCRIPTION + RESOURCE['friendlyName'] = TITLE + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + dataset = self._makeOne(self.DS_NAME, client=client) + + dataset.patch(description=DESCRIPTION, friendly_name=TITLE) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'PATCH') + SENT = { + 'description': DESCRIPTION, + 'friendlyName': TITLE, + } + self.assertEqual(req['data'], SENT) + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(dataset, RESOURCE) + + def 
test_patch_w_alternate_client(self): + PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME) + DEF_TABLE_EXP = 12345 + LOCATION = 'EU' + RESOURCE = self._makeResource() + RESOURCE['defaultTableExpirationMs'] = str(DEF_TABLE_EXP) + RESOURCE['location'] = LOCATION + conn1 = _Connection() + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + dataset = self._makeOne(self.DS_NAME, client=CLIENT1) + + dataset.patch(client=CLIENT2, + default_table_expiration_ms=DEF_TABLE_EXP, + location=LOCATION) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'PATCH') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'defaultTableExpirationMs': DEF_TABLE_EXP, + 'location': LOCATION, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(dataset, RESOURCE) + + def test_update_w_bound_client(self): + PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME) + DESCRIPTION = 'DESCRIPTION' + TITLE = 'TITLE' + RESOURCE = self._makeResource() + RESOURCE['description'] = DESCRIPTION + RESOURCE['friendlyName'] = TITLE + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + dataset = self._makeOne(self.DS_NAME, client=client) + dataset.description = DESCRIPTION + dataset.friendly_name = TITLE + + dataset.update() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'PUT') + SENT = { + 'datasetReference': + {'projectId': self.PROJECT, 'datasetId': self.DS_NAME}, + 'description': DESCRIPTION, + 'friendlyName': TITLE, + } + self.assertEqual(req['data'], SENT) + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(dataset, RESOURCE) + + def test_update_w_alternate_client(self): + PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME) + 
DEF_TABLE_EXP = 12345 + LOCATION = 'EU' + RESOURCE = self._makeResource() + RESOURCE['defaultTableExpirationMs'] = 12345 + RESOURCE['location'] = LOCATION + conn1 = _Connection() + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + dataset = self._makeOne(self.DS_NAME, client=CLIENT1) + dataset.default_table_expiration_ms = DEF_TABLE_EXP + dataset.location = LOCATION + + dataset.update(client=CLIENT2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'PUT') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'datasetReference': + {'projectId': self.PROJECT, 'datasetId': self.DS_NAME}, + 'defaultTableExpirationMs': 12345, + 'location': 'EU', + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(dataset, RESOURCE) + + def test_delete_w_bound_client(self): + PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME) + conn = _Connection({}) + client = _Client(project=self.PROJECT, connection=conn) + dataset = self._makeOne(self.DS_NAME, client=client) + + dataset.delete() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'DELETE') + self.assertEqual(req['path'], '/%s' % PATH) + + def test_delete_w_alternate_client(self): + PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME) + conn1 = _Connection() + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({}) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + dataset = self._makeOne(self.DS_NAME, client=CLIENT1) + + dataset.delete(client=CLIENT2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'DELETE') + self.assertEqual(req['path'], '/%s' % PATH) + + def 
test_list_tables_empty(self): + from gcloud.bigquery.table import Table + + conn = _Connection({}) + client = _Client(project=self.PROJECT, connection=conn) + dataset = self._makeOne(self.DS_NAME, client=client) + tables, token = dataset.list_tables() + self.assertEqual(tables, []) + self.assertEqual(token, None) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME) + self.assertEqual(req['path'], '/%s' % PATH) + + def test_list_tables_defaults(self): + from gcloud.bigquery.table import Table + + TABLE_1 = 'table_one' + TABLE_2 = 'table_two' + PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME) + TOKEN = 'TOKEN' + DATA = { + 'nextPageToken': TOKEN, + 'tables': [ + {'kind': 'bigquery#table', + 'id': '%s:%s.%s' % (self.PROJECT, self.DS_NAME, TABLE_1), + 'tableReference': {'tableId': TABLE_1, + 'datasetId': self.DS_NAME, + 'projectId': self.PROJECT}, + 'type': 'TABLE'}, + {'kind': 'bigquery#table', + 'id': '%s:%s.%s' % (self.PROJECT, self.DS_NAME, TABLE_2), + 'tableReference': {'tableId': TABLE_2, + 'datasetId': self.DS_NAME, + 'projectId': self.PROJECT}, + 'type': 'TABLE'}, + ] + } + + conn = _Connection(DATA) + client = _Client(project=self.PROJECT, connection=conn) + dataset = self._makeOne(self.DS_NAME, client=client) + + tables, token = dataset.list_tables() + + self.assertEqual(len(tables), len(DATA['tables'])) + for found, expected in zip(tables, DATA['tables']): + self.assertTrue(isinstance(found, Table)) + self.assertEqual(found.table_id, expected['id']) + self.assertEqual(found.table_type, expected['type']) + self.assertEqual(token, TOKEN) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + + def test_list_tables_explicit(self): + from gcloud.bigquery.table import Table + + TABLE_1 = 'table_one' + 
TABLE_2 = 'table_two' + PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME) + TOKEN = 'TOKEN' + DATA = { + 'tables': [ + {'kind': 'bigquery#dataset', + 'id': '%s:%s.%s' % (self.PROJECT, self.DS_NAME, TABLE_1), + 'tableReference': {'tableId': TABLE_1, + 'datasetId': self.DS_NAME, + 'projectId': self.PROJECT}, + 'type': 'TABLE'}, + {'kind': 'bigquery#dataset', + 'id': '%s:%s.%s' % (self.PROJECT, self.DS_NAME, TABLE_2), + 'tableReference': {'tableId': TABLE_2, + 'datasetId': self.DS_NAME, + 'projectId': self.PROJECT}, + 'type': 'TABLE'}, + ] + } + + conn = _Connection(DATA) + client = _Client(project=self.PROJECT, connection=conn) + dataset = self._makeOne(self.DS_NAME, client=client) + + tables, token = dataset.list_tables(max_results=3, page_token=TOKEN) + + self.assertEqual(len(tables), len(DATA['tables'])) + for found, expected in zip(tables, DATA['tables']): + self.assertTrue(isinstance(found, Table)) + self.assertEqual(found.table_id, expected['id']) + self.assertEqual(found.table_type, expected['type']) + self.assertEqual(token, None) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], + {'maxResults': 3, 'pageToken': TOKEN}) + + def test_table_wo_schema(self): + from gcloud.bigquery.table import Table + conn = _Connection({}) + client = _Client(project=self.PROJECT, connection=conn) + dataset = self._makeOne(self.DS_NAME, client=client) + table = dataset.table('table_name') + self.assertTrue(isinstance(table, Table)) + self.assertEqual(table.name, 'table_name') + self.assertTrue(table._dataset is dataset) + self.assertEqual(table.schema, []) + + def test_table_w_schema(self): + from gcloud.bigquery.table import SchemaField + from gcloud.bigquery.table import Table + conn = _Connection({}) + client = _Client(project=self.PROJECT, connection=conn) + dataset = self._makeOne(self.DS_NAME, 
client=client) + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + age = SchemaField('age', 'INTEGER', mode='REQUIRED') + table = dataset.table('table_name', schema=[full_name, age]) + self.assertTrue(isinstance(table, Table)) + self.assertEqual(table.name, 'table_name') + self.assertTrue(table._dataset is dataset) + self.assertEqual(table.schema, [full_name, age]) + + +class _Client(object): + + def __init__(self, project='project', connection=None): + self.project = project + self.connection = connection + + +class _Connection(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + def api_request(self, **kw): + from gcloud.exceptions import NotFound + self._requested.append(kw) + + try: + response, self._responses = self._responses[0], self._responses[1:] + except: + raise NotFound('miss') + else: + return response diff --git a/env/Lib/site-packages/gcloud/bigquery/test_job.py b/env/Lib/site-packages/gcloud/bigquery/test_job.py new file mode 100644 index 0000000..6466070 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigquery/test_job.py @@ -0,0 +1,1592 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest2 + + +class _Base(object): + PROJECT = 'project' + SOURCE1 = 'http://example.com/source1.csv' + DS_NAME = 'datset_name' + TABLE_NAME = 'table_name' + JOB_NAME = 'job_name' + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def _setUpConstants(self): + import datetime + from gcloud._helpers import UTC + + self.WHEN_TS = 1437767599.006 + self.WHEN = datetime.datetime.utcfromtimestamp(self.WHEN_TS).replace( + tzinfo=UTC) + self.ETAG = 'ETAG' + self.JOB_ID = '%s:%s' % (self.PROJECT, self.JOB_NAME) + self.RESOURCE_URL = 'http://example.com/path/to/resource' + self.USER_EMAIL = 'phred@example.com' + + def _makeResource(self, started=False, ended=False): + self._setUpConstants() + resource = { + 'configuration': { + self.JOB_TYPE: { + }, + }, + 'statistics': { + 'creationTime': self.WHEN_TS * 1000, + self.JOB_TYPE: { + } + }, + 'etag': self.ETAG, + 'id': self.JOB_ID, + 'jobReference': { + 'projectId': self.PROJECT, + 'jobId': self.JOB_NAME, + }, + 'selfLink': self.RESOURCE_URL, + 'user_email': self.USER_EMAIL, + } + + if started or ended: + resource['statistics']['startTime'] = self.WHEN_TS * 1000 + + if ended: + resource['statistics']['endTime'] = (self.WHEN_TS + 1000) * 1000 + + return resource + + def _verifyInitialReadonlyProperties(self, job): + # root elements of resource + self.assertEqual(job.etag, None) + self.assertEqual(job.self_link, None) + self.assertEqual(job.user_email, None) + + # derived from resource['statistics'] + self.assertEqual(job.created, None) + self.assertEqual(job.started, None) + self.assertEqual(job.ended, None) + + # derived from resource['status'] + self.assertEqual(job.error_result, None) + self.assertEqual(job.errors, None) + self.assertEqual(job.state, None) + + def _verifyReadonlyResourceProperties(self, job, resource): + from datetime import timedelta + + statistics = resource.get('statistics', {}) + + if 'creationTime' in statistics: + self.assertEqual(job.created, self.WHEN) + 
else: + self.assertEqual(job.created, None) + + if 'startTime' in statistics: + self.assertEqual(job.started, self.WHEN) + else: + self.assertEqual(job.started, None) + + if 'endTime' in statistics: + self.assertEqual(job.ended, self.WHEN + timedelta(seconds=1000)) + else: + self.assertEqual(job.ended, None) + + if 'etag' in resource: + self.assertEqual(job.etag, self.ETAG) + else: + self.assertEqual(job.etag, None) + + if 'selfLink' in resource: + self.assertEqual(job.self_link, self.RESOURCE_URL) + else: + self.assertEqual(job.self_link, None) + + if 'user_email' in resource: + self.assertEqual(job.user_email, self.USER_EMAIL) + else: + self.assertEqual(job.user_email, None) + + +class TestLoadTableFromStorageJob(unittest2.TestCase, _Base): + JOB_TYPE = 'load' + + def _getTargetClass(self): + from gcloud.bigquery.job import LoadTableFromStorageJob + return LoadTableFromStorageJob + + def _setUpConstants(self): + super(TestLoadTableFromStorageJob, self)._setUpConstants() + self.INPUT_FILES = 2 + self.INPUT_BYTES = 12345 + self.OUTPUT_BYTES = 23456 + self.OUTPUT_ROWS = 345 + + def _makeResource(self, started=False, ended=False): + resource = super(TestLoadTableFromStorageJob, self)._makeResource( + started, ended) + config = resource['configuration']['load'] + config['sourceUris'] = [self.SOURCE1] + config['destinationTable'] = { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.TABLE_NAME, + } + + if ended: + resource['statistics']['load']['inputFiles'] = self.INPUT_FILES + resource['statistics']['load']['inputFileBytes'] = self.INPUT_BYTES + resource['statistics']['load']['outputBytes'] = self.OUTPUT_BYTES + resource['statistics']['load']['outputRows'] = self.OUTPUT_ROWS + + return resource + + def _verifyBooleanConfigProperties(self, job, config): + if 'allowJaggedRows' in config: + self.assertEqual(job.allow_jagged_rows, + config['allowJaggedRows']) + else: + self.assertTrue(job.allow_jagged_rows is None) + if 'allowQuotedNewlines' in 
config: + self.assertEqual(job.allow_quoted_newlines, + config['allowQuotedNewlines']) + else: + self.assertTrue(job.allow_quoted_newlines is None) + if 'ignoreUnknownValues' in config: + self.assertEqual(job.ignore_unknown_values, + config['ignoreUnknownValues']) + else: + self.assertTrue(job.ignore_unknown_values is None) + + def _verifyEnumConfigProperties(self, job, config): + if 'createDisposition' in config: + self.assertEqual(job.create_disposition, + config['createDisposition']) + else: + self.assertTrue(job.create_disposition is None) + if 'encoding' in config: + self.assertEqual(job.encoding, + config['encoding']) + else: + self.assertTrue(job.encoding is None) + if 'sourceFormat' in config: + self.assertEqual(job.source_format, + config['sourceFormat']) + else: + self.assertTrue(job.source_format is None) + if 'writeDisposition' in config: + self.assertEqual(job.write_disposition, + config['writeDisposition']) + else: + self.assertTrue(job.write_disposition is None) + + def _verifyResourceProperties(self, job, resource): + self._verifyReadonlyResourceProperties(job, resource) + + config = resource.get('configuration', {}).get('load') + + self._verifyBooleanConfigProperties(job, config) + self._verifyEnumConfigProperties(job, config) + + self.assertEqual(job.source_uris, config['sourceUris']) + + table_ref = config['destinationTable'] + self.assertEqual(job.destination.project, table_ref['projectId']) + self.assertEqual(job.destination.dataset_name, table_ref['datasetId']) + self.assertEqual(job.destination.name, table_ref['tableId']) + + if 'fieldDelimiter' in config: + self.assertEqual(job.field_delimiter, + config['fieldDelimiter']) + else: + self.assertTrue(job.field_delimiter is None) + if 'maxBadRecords' in config: + self.assertEqual(job.max_bad_records, + config['maxBadRecords']) + else: + self.assertTrue(job.max_bad_records is None) + if 'quote' in config: + self.assertEqual(job.quote_character, + config['quote']) + else: + 
self.assertTrue(job.quote_character is None) + if 'skipLeadingRows' in config: + self.assertEqual(job.skip_leading_rows, + config['skipLeadingRows']) + else: + self.assertTrue(job.skip_leading_rows is None) + + def test_ctor(self): + client = _Client(self.PROJECT) + table = _Table() + job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client) + self.assertTrue(job.destination is table) + self.assertEqual(list(job.source_uris), [self.SOURCE1]) + self.assertTrue(job._client is client) + self.assertEqual(job.job_type, self.JOB_TYPE) + self.assertEqual( + job.path, + '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME)) + self.assertEqual(job.schema, []) + + self._verifyInitialReadonlyProperties(job) + + # derived from resource['statistics']['load'] + self.assertEqual(job.input_file_bytes, None) + self.assertEqual(job.input_files, None) + self.assertEqual(job.output_bytes, None) + self.assertEqual(job.output_rows, None) + + # set/read from resource['configuration']['load'] + self.assertTrue(job.allow_jagged_rows is None) + self.assertTrue(job.allow_quoted_newlines is None) + self.assertTrue(job.create_disposition is None) + self.assertTrue(job.encoding is None) + self.assertTrue(job.field_delimiter is None) + self.assertTrue(job.ignore_unknown_values is None) + self.assertTrue(job.max_bad_records is None) + self.assertTrue(job.quote_character is None) + self.assertTrue(job.skip_leading_rows is None) + self.assertTrue(job.source_format is None) + self.assertTrue(job.write_disposition is None) + + def test_ctor_w_schema(self): + from gcloud.bigquery.table import SchemaField + client = _Client(self.PROJECT) + table = _Table() + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + age = SchemaField('age', 'INTEGER', mode='REQUIRED') + job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client, + schema=[full_name, age]) + self.assertEqual(job.schema, [full_name, age]) + + def test_schema_setter_non_list(self): + client = _Client(self.PROJECT) + 
table = _Table() + job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client) + with self.assertRaises(TypeError): + job.schema = object() + + def test_schema_setter_invalid_field(self): + from gcloud.bigquery.table import SchemaField + client = _Client(self.PROJECT) + table = _Table() + job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client) + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + with self.assertRaises(ValueError): + job.schema = [full_name, object()] + + def test_schema_setter(self): + from gcloud.bigquery.table import SchemaField + client = _Client(self.PROJECT) + table = _Table() + job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client) + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + age = SchemaField('age', 'INTEGER', mode='REQUIRED') + job.schema = [full_name, age] + self.assertEqual(job.schema, [full_name, age]) + + def test_props_set_by_server(self): + import datetime + from gcloud._helpers import UTC + from gcloud._helpers import _millis + + CREATED = datetime.datetime(2015, 8, 11, 12, 13, 22, tzinfo=UTC) + STARTED = datetime.datetime(2015, 8, 11, 13, 47, 15, tzinfo=UTC) + ENDED = datetime.datetime(2015, 8, 11, 14, 47, 15, tzinfo=UTC) + JOB_ID = '%s:%s' % (self.PROJECT, self.JOB_NAME) + URL = 'http://example.com/projects/%s/jobs/%s' % ( + self.PROJECT, self.JOB_NAME) + EMAIL = 'phred@example.com' + ERROR_RESULT = {'debugInfo': 'DEBUG', + 'location': 'LOCATION', + 'message': 'MESSAGE', + 'reason': 'REASON'} + + client = _Client(self.PROJECT) + table = _Table() + job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client) + job._properties['etag'] = 'ETAG' + job._properties['id'] = JOB_ID + job._properties['selfLink'] = URL + job._properties['user_email'] = EMAIL + + statistics = job._properties['statistics'] = {} + statistics['creationTime'] = _millis(CREATED) + statistics['startTime'] = _millis(STARTED) + statistics['endTime'] = _millis(ENDED) + load_stats = 
statistics['load'] = {} + load_stats['inputFileBytes'] = 12345 + load_stats['inputFiles'] = 1 + load_stats['outputBytes'] = 23456 + load_stats['outputRows'] = 345 + + self.assertEqual(job.etag, 'ETAG') + self.assertEqual(job.self_link, URL) + self.assertEqual(job.user_email, EMAIL) + + self.assertEqual(job.created, CREATED) + self.assertEqual(job.started, STARTED) + self.assertEqual(job.ended, ENDED) + + self.assertEqual(job.input_file_bytes, 12345) + self.assertEqual(job.input_files, 1) + self.assertEqual(job.output_bytes, 23456) + self.assertEqual(job.output_rows, 345) + + status = job._properties['status'] = {} + + self.assertEqual(job.error_result, None) + self.assertEqual(job.errors, None) + self.assertEqual(job.state, None) + + status['errorResult'] = ERROR_RESULT + status['errors'] = [ERROR_RESULT] + status['state'] = 'STATE' + + self.assertEqual(job.error_result, ERROR_RESULT) + self.assertEqual(job.errors, [ERROR_RESULT]) + self.assertEqual(job.state, 'STATE') + + def test_from_api_repr_missing_identity(self): + self._setUpConstants() + client = _Client(self.PROJECT) + RESOURCE = {} + klass = self._getTargetClass() + with self.assertRaises(KeyError): + klass.from_api_repr(RESOURCE, client=client) + + def test_from_api_repr_missing_config(self): + self._setUpConstants() + client = _Client(self.PROJECT) + RESOURCE = { + 'id': '%s:%s' % (self.PROJECT, self.DS_NAME), + 'jobReference': { + 'projectId': self.PROJECT, + 'jobId': self.JOB_NAME, + } + } + klass = self._getTargetClass() + with self.assertRaises(KeyError): + klass.from_api_repr(RESOURCE, client=client) + + def test_from_api_repr_bare(self): + self._setUpConstants() + client = _Client(self.PROJECT) + RESOURCE = { + 'id': self.JOB_ID, + 'jobReference': { + 'projectId': self.PROJECT, + 'jobId': self.JOB_NAME, + }, + 'configuration': { + 'load': { + 'sourceUris': [self.SOURCE1], + 'destinationTable': { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.TABLE_NAME, + }, + } + }, + 
} + klass = self._getTargetClass() + job = klass.from_api_repr(RESOURCE, client=client) + self.assertTrue(job._client is client) + self._verifyResourceProperties(job, RESOURCE) + + def test_from_api_repr_w_properties(self): + client = _Client(self.PROJECT) + RESOURCE = self._makeResource() + klass = self._getTargetClass() + dataset = klass.from_api_repr(RESOURCE, client=client) + self.assertTrue(dataset._client is client) + self._verifyResourceProperties(dataset, RESOURCE) + + def test_begin_w_bound_client(self): + PATH = 'projects/%s/jobs' % self.PROJECT + RESOURCE = self._makeResource() + # Ensure None for missing server-set props + del RESOURCE['statistics']['creationTime'] + del RESOURCE['etag'] + del RESOURCE['selfLink'] + del RESOURCE['user_email'] + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + table = _Table() + job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client) + + job.begin() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'jobReference': { + 'projectId': self.PROJECT, + 'jobId': self.JOB_NAME, + }, + 'configuration': { + 'load': { + 'sourceUris': [self.SOURCE1], + 'destinationTable': { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.TABLE_NAME, + }, + }, + }, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(job, RESOURCE) + + def test_begin_w_alternate_client(self): + from gcloud.bigquery.table import SchemaField + PATH = 'projects/%s/jobs' % self.PROJECT + RESOURCE = self._makeResource(ended=True) + LOAD_CONFIGURATION = { + 'sourceUris': [self.SOURCE1], + 'destinationTable': { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.TABLE_NAME, + }, + 'allowJaggedRows': True, + 'allowQuotedNewlines': True, + 'createDisposition': 'CREATE_NEVER', + 'encoding': 'ISO-8559-1', + 'fieldDelimiter': '|', + 
'ignoreUnknownValues': True, + 'maxBadRecords': 100, + 'quote': "'", + 'skipLeadingRows': 1, + 'sourceFormat': 'CSV', + 'writeDisposition': 'WRITE_TRUNCATE', + 'schema': {'fields': [ + {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'}, + {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}, + ]} + } + RESOURCE['configuration']['load'] = LOAD_CONFIGURATION + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + table = _Table() + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + age = SchemaField('age', 'INTEGER', mode='REQUIRED') + job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client1, + schema=[full_name, age]) + + job.allow_jagged_rows = True + job.allow_quoted_newlines = True + job.create_disposition = 'CREATE_NEVER' + job.encoding = 'ISO-8559-1' + job.field_delimiter = '|' + job.ignore_unknown_values = True + job.max_bad_records = 100 + job.quote_character = "'" + job.skip_leading_rows = 1 + job.source_format = 'CSV' + job.write_disposition = 'WRITE_TRUNCATE' + + job.begin(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'jobReference': { + 'projectId': self.PROJECT, + 'jobId': self.JOB_NAME, + }, + 'configuration': { + 'load': LOAD_CONFIGURATION, + }, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(job, RESOURCE) + + def test_exists_miss_w_bound_client(self): + PATH = 'projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME) + conn = _Connection() + client = _Client(project=self.PROJECT, connection=conn) + table = _Table() + job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client) + + self.assertFalse(job.exists()) + + self.assertEqual(len(conn._requested), 1) + req = 
conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], {'fields': 'id'}) + + def test_exists_hit_w_alternate_client(self): + PATH = 'projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME) + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({}) + client2 = _Client(project=self.PROJECT, connection=conn2) + table = _Table() + job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client1) + + self.assertTrue(job.exists(client=client2)) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], {'fields': 'id'}) + + def test_reload_w_bound_client(self): + PATH = 'projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME) + RESOURCE = self._makeResource() + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + table = _Table() + job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client) + + job.reload() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(job, RESOURCE) + + def test_reload_w_alternate_client(self): + PATH = 'projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME) + RESOURCE = self._makeResource() + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + table = _Table() + job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client1) + + job.reload(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + 
self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(job, RESOURCE) + + def test_cancel_w_bound_client(self): + PATH = 'projects/%s/jobs/%s/cancel' % (self.PROJECT, self.JOB_NAME) + RESOURCE = self._makeResource() + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + table = _Table() + job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client) + + job.cancel() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(job, RESOURCE) + + def test_cancel_w_alternate_client(self): + PATH = 'projects/%s/jobs/%s/cancel' % (self.PROJECT, self.JOB_NAME) + RESOURCE = self._makeResource() + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + table = _Table() + job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client1) + + job.cancel(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(job, RESOURCE) + + +class TestCopyJob(unittest2.TestCase, _Base): + JOB_TYPE = 'copy' + SOURCE_TABLE = 'source_table' + DESTINATION_TABLE = 'destination_table' + + def _getTargetClass(self): + from gcloud.bigquery.job import CopyJob + return CopyJob + + def _makeResource(self, started=False, ended=False): + resource = super(TestCopyJob, self)._makeResource( + started, ended) + config = resource['configuration']['copy'] + config['sourceTables'] = [{ + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.SOURCE_TABLE, + }] + config['destinationTable'] = { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': 
self.DESTINATION_TABLE, + } + + return resource + + def _verifyResourceProperties(self, job, resource): + self._verifyReadonlyResourceProperties(job, resource) + + config = resource.get('configuration', {}).get('copy') + + table_ref = config['destinationTable'] + self.assertEqual(job.destination.project, table_ref['projectId']) + self.assertEqual(job.destination.dataset_name, table_ref['datasetId']) + self.assertEqual(job.destination.name, table_ref['tableId']) + + sources = config['sourceTables'] + self.assertEqual(len(sources), len(job.sources)) + for table_ref, table in zip(sources, job.sources): + self.assertEqual(table.project, table_ref['projectId']) + self.assertEqual(table.dataset_name, table_ref['datasetId']) + self.assertEqual(table.name, table_ref['tableId']) + + if 'createDisposition' in config: + self.assertEqual(job.create_disposition, + config['createDisposition']) + else: + self.assertTrue(job.create_disposition is None) + + if 'writeDisposition' in config: + self.assertEqual(job.write_disposition, + config['writeDisposition']) + else: + self.assertTrue(job.write_disposition is None) + + def test_ctor(self): + client = _Client(self.PROJECT) + source = _Table(self.SOURCE_TABLE) + destination = _Table(self.DESTINATION_TABLE) + job = self._makeOne(self.JOB_NAME, destination, [source], client) + self.assertTrue(job.destination is destination) + self.assertEqual(job.sources, [source]) + self.assertTrue(job._client is client) + self.assertEqual(job.job_type, self.JOB_TYPE) + self.assertEqual( + job.path, + '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME)) + + self._verifyInitialReadonlyProperties(job) + + # set/read from resource['configuration']['copy'] + self.assertTrue(job.create_disposition is None) + self.assertTrue(job.write_disposition is None) + + def test_from_api_repr_missing_identity(self): + self._setUpConstants() + client = _Client(self.PROJECT) + RESOURCE = {} + klass = self._getTargetClass() + with self.assertRaises(KeyError): + 
klass.from_api_repr(RESOURCE, client=client) + + def test_from_api_repr_missing_config(self): + self._setUpConstants() + client = _Client(self.PROJECT) + RESOURCE = { + 'id': '%s:%s' % (self.PROJECT, self.DS_NAME), + 'jobReference': { + 'projectId': self.PROJECT, + 'jobId': self.JOB_NAME, + } + } + klass = self._getTargetClass() + with self.assertRaises(KeyError): + klass.from_api_repr(RESOURCE, client=client) + + def test_from_api_repr_bare(self): + self._setUpConstants() + client = _Client(self.PROJECT) + RESOURCE = { + 'id': self.JOB_ID, + 'jobReference': { + 'projectId': self.PROJECT, + 'jobId': self.JOB_NAME, + }, + 'configuration': { + 'copy': { + 'sourceTables': [{ + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.SOURCE_TABLE, + }], + 'destinationTable': { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.DESTINATION_TABLE, + }, + } + }, + } + klass = self._getTargetClass() + job = klass.from_api_repr(RESOURCE, client=client) + self.assertTrue(job._client is client) + self._verifyResourceProperties(job, RESOURCE) + + def test_from_api_repr_w_properties(self): + client = _Client(self.PROJECT) + RESOURCE = self._makeResource() + klass = self._getTargetClass() + dataset = klass.from_api_repr(RESOURCE, client=client) + self.assertTrue(dataset._client is client) + self._verifyResourceProperties(dataset, RESOURCE) + + def test_begin_w_bound_client(self): + PATH = 'projects/%s/jobs' % self.PROJECT + RESOURCE = self._makeResource() + # Ensure None for missing server-set props + del RESOURCE['statistics']['creationTime'] + del RESOURCE['etag'] + del RESOURCE['selfLink'] + del RESOURCE['user_email'] + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + source = _Table(self.SOURCE_TABLE) + destination = _Table(self.DESTINATION_TABLE) + job = self._makeOne(self.JOB_NAME, destination, [source], client) + + job.begin() + + self.assertEqual(len(conn._requested), 1) + req = 
conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'jobReference': { + 'projectId': self.PROJECT, + 'jobId': self.JOB_NAME, + }, + 'configuration': { + 'copy': { + 'sourceTables': [{ + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.SOURCE_TABLE + }], + 'destinationTable': { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.DESTINATION_TABLE, + }, + }, + }, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(job, RESOURCE) + + def test_begin_w_alternate_client(self): + PATH = 'projects/%s/jobs' % self.PROJECT + RESOURCE = self._makeResource(ended=True) + COPY_CONFIGURATION = { + 'sourceTables': [{ + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.SOURCE_TABLE, + }], + 'destinationTable': { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.DESTINATION_TABLE, + }, + 'createDisposition': 'CREATE_NEVER', + 'writeDisposition': 'WRITE_TRUNCATE', + } + RESOURCE['configuration']['copy'] = COPY_CONFIGURATION + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + source = _Table(self.SOURCE_TABLE) + destination = _Table(self.DESTINATION_TABLE) + job = self._makeOne(self.JOB_NAME, destination, [source], client1) + + job.create_disposition = 'CREATE_NEVER' + job.write_disposition = 'WRITE_TRUNCATE' + + job.begin(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'jobReference': { + 'projectId': self.PROJECT, + 'jobId': self.JOB_NAME, + }, + 'configuration': { + 'copy': COPY_CONFIGURATION, + }, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(job, RESOURCE) 
+ + def test_exists_miss_w_bound_client(self): + PATH = 'projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME) + conn = _Connection() + client = _Client(project=self.PROJECT, connection=conn) + source = _Table(self.SOURCE_TABLE) + destination = _Table(self.DESTINATION_TABLE) + job = self._makeOne(self.JOB_NAME, destination, [source], client) + + self.assertFalse(job.exists()) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], {'fields': 'id'}) + + def test_exists_hit_w_alternate_client(self): + PATH = 'projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME) + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({}) + client2 = _Client(project=self.PROJECT, connection=conn2) + source = _Table(self.SOURCE_TABLE) + destination = _Table(self.DESTINATION_TABLE) + job = self._makeOne(self.JOB_NAME, destination, [source], client1) + + self.assertTrue(job.exists(client=client2)) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], {'fields': 'id'}) + + def test_reload_w_bound_client(self): + PATH = 'projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME) + RESOURCE = self._makeResource() + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + source = _Table(self.SOURCE_TABLE) + destination = _Table(self.DESTINATION_TABLE) + job = self._makeOne(self.JOB_NAME, destination, [source], client) + + job.reload() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(job, RESOURCE) + + def test_reload_w_alternate_client(self): + 
PATH = 'projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME) + RESOURCE = self._makeResource() + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + source = _Table(self.SOURCE_TABLE) + destination = _Table(self.DESTINATION_TABLE) + job = self._makeOne(self.JOB_NAME, destination, [source], client1) + + job.reload(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(job, RESOURCE) + + +class TestExtractTableToStorageJob(unittest2.TestCase, _Base): + JOB_TYPE = 'extract' + SOURCE_TABLE = 'source_table' + DESTINATION_URI = 'gs://bucket_name/object_name' + + def _getTargetClass(self): + from gcloud.bigquery.job import ExtractTableToStorageJob + return ExtractTableToStorageJob + + def _makeResource(self, started=False, ended=False): + resource = super(TestExtractTableToStorageJob, self)._makeResource( + started, ended) + config = resource['configuration']['extract'] + config['sourceTable'] = { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.SOURCE_TABLE, + } + config['destinationUris'] = [self.DESTINATION_URI] + return resource + + def _verifyResourceProperties(self, job, resource): + self._verifyReadonlyResourceProperties(job, resource) + + config = resource.get('configuration', {}).get('extract') + + self.assertEqual(job.destination_uris, config['destinationUris']) + + table_ref = config['sourceTable'] + self.assertEqual(job.source.project, table_ref['projectId']) + self.assertEqual(job.source.dataset_name, table_ref['datasetId']) + self.assertEqual(job.source.name, table_ref['tableId']) + + if 'compression' in config: + self.assertEqual(job.compression, + config['compression']) + else: + self.assertTrue(job.compression 
is None) + + if 'destinationFormat' in config: + self.assertEqual(job.destination_format, + config['destinationFormat']) + else: + self.assertTrue(job.destination_format is None) + + if 'fieldDelimiter' in config: + self.assertEqual(job.field_delimiter, + config['fieldDelimiter']) + else: + self.assertTrue(job.field_delimiter is None) + + if 'printHeader' in config: + self.assertEqual(job.print_header, + config['printHeader']) + else: + self.assertTrue(job.print_header is None) + + def test_ctor(self): + client = _Client(self.PROJECT) + source = _Table(self.SOURCE_TABLE) + job = self._makeOne(self.JOB_NAME, source, [self.DESTINATION_URI], + client) + self.assertEqual(job.source, source) + self.assertEqual(job.destination_uris, [self.DESTINATION_URI]) + self.assertTrue(job._client is client) + self.assertEqual(job.job_type, self.JOB_TYPE) + self.assertEqual( + job.path, + '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME)) + + self._verifyInitialReadonlyProperties(job) + + # set/read from resource['configuration']['copy'] + self.assertTrue(job.compression is None) + self.assertTrue(job.destination_format is None) + self.assertTrue(job.field_delimiter is None) + self.assertTrue(job.print_header is None) + + def test_from_api_repr_missing_identity(self): + self._setUpConstants() + client = _Client(self.PROJECT) + RESOURCE = {} + klass = self._getTargetClass() + with self.assertRaises(KeyError): + klass.from_api_repr(RESOURCE, client=client) + + def test_from_api_repr_missing_config(self): + self._setUpConstants() + client = _Client(self.PROJECT) + RESOURCE = { + 'id': '%s:%s' % (self.PROJECT, self.DS_NAME), + 'jobReference': { + 'projectId': self.PROJECT, + 'jobId': self.JOB_NAME, + } + } + klass = self._getTargetClass() + with self.assertRaises(KeyError): + klass.from_api_repr(RESOURCE, client=client) + + def test_from_api_repr_bare(self): + self._setUpConstants() + client = _Client(self.PROJECT) + RESOURCE = { + 'id': self.JOB_ID, + 'jobReference': { + 
'projectId': self.PROJECT, + 'jobId': self.JOB_NAME, + }, + 'configuration': { + 'extract': { + 'sourceTable': { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.SOURCE_TABLE, + }, + 'destinationUris': [self.DESTINATION_URI], + } + }, + } + klass = self._getTargetClass() + job = klass.from_api_repr(RESOURCE, client=client) + self.assertTrue(job._client is client) + self._verifyResourceProperties(job, RESOURCE) + + def test_from_api_repr_w_properties(self): + client = _Client(self.PROJECT) + RESOURCE = self._makeResource() + klass = self._getTargetClass() + dataset = klass.from_api_repr(RESOURCE, client=client) + self.assertTrue(dataset._client is client) + self._verifyResourceProperties(dataset, RESOURCE) + + def test_begin_w_bound_client(self): + PATH = 'projects/%s/jobs' % self.PROJECT + RESOURCE = self._makeResource() + # Ensure None for missing server-set props + del RESOURCE['statistics']['creationTime'] + del RESOURCE['etag'] + del RESOURCE['selfLink'] + del RESOURCE['user_email'] + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + source = _Table(self.SOURCE_TABLE) + job = self._makeOne(self.JOB_NAME, source, [self.DESTINATION_URI], + client) + + job.begin() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'jobReference': { + 'projectId': self.PROJECT, + 'jobId': self.JOB_NAME, + }, + 'configuration': { + 'extract': { + 'sourceTable': { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.SOURCE_TABLE + }, + 'destinationUris': [self.DESTINATION_URI], + }, + }, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(job, RESOURCE) + + def test_begin_w_alternate_client(self): + PATH = 'projects/%s/jobs' % self.PROJECT + RESOURCE = self._makeResource(ended=True) + EXTRACT_CONFIGURATION = { + 'sourceTable': { + 'projectId': 
self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.SOURCE_TABLE, + }, + 'destinationUris': [self.DESTINATION_URI], + 'compression': 'GZIP', + 'destinationFormat': 'NEWLINE_DELIMITED_JSON', + 'fieldDelimiter': '|', + 'printHeader': False, + } + RESOURCE['configuration']['extract'] = EXTRACT_CONFIGURATION + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + source = _Table(self.SOURCE_TABLE) + job = self._makeOne(self.JOB_NAME, source, [self.DESTINATION_URI], + client1) + + job.compression = 'GZIP' + job.destination_format = 'NEWLINE_DELIMITED_JSON' + job.field_delimiter = '|' + job.print_header = False + + job.begin(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'jobReference': { + 'projectId': self.PROJECT, + 'jobId': self.JOB_NAME, + }, + 'configuration': { + 'extract': EXTRACT_CONFIGURATION, + }, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(job, RESOURCE) + + def test_exists_miss_w_bound_client(self): + PATH = 'projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME) + conn = _Connection() + client = _Client(project=self.PROJECT, connection=conn) + source = _Table(self.SOURCE_TABLE) + job = self._makeOne(self.JOB_NAME, source, [self.DESTINATION_URI], + client) + + self.assertFalse(job.exists()) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], {'fields': 'id'}) + + def test_exists_hit_w_alternate_client(self): + PATH = 'projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME) + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = 
_Connection({}) + client2 = _Client(project=self.PROJECT, connection=conn2) + source = _Table(self.SOURCE_TABLE) + job = self._makeOne(self.JOB_NAME, source, [self.DESTINATION_URI], + client1) + + self.assertTrue(job.exists(client=client2)) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], {'fields': 'id'}) + + def test_reload_w_bound_client(self): + PATH = 'projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME) + RESOURCE = self._makeResource() + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + source = _Table(self.SOURCE_TABLE) + job = self._makeOne(self.JOB_NAME, source, [self.DESTINATION_URI], + client) + + job.reload() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(job, RESOURCE) + + def test_reload_w_alternate_client(self): + PATH = 'projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME) + RESOURCE = self._makeResource() + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + source = _Table(self.SOURCE_TABLE) + job = self._makeOne(self.JOB_NAME, source, [self.DESTINATION_URI], + client1) + + job.reload(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(job, RESOURCE) + + +class TestQueryJob(unittest2.TestCase, _Base): + JOB_TYPE = 'query' + QUERY = 'select count(*) from persons' + DESTINATION_TABLE = 'destination_table' + + def _getTargetClass(self): + from 
gcloud.bigquery.job import QueryJob + return QueryJob + + def _makeResource(self, started=False, ended=False): + resource = super(TestQueryJob, self)._makeResource( + started, ended) + config = resource['configuration']['query'] + config['query'] = self.QUERY + return resource + + def _verifyBooleanResourceProperties(self, job, config): + + if 'allowLargeResults' in config: + self.assertEqual(job.allow_large_results, + config['allowLargeResults']) + else: + self.assertTrue(job.allow_large_results is None) + if 'flattenResults' in config: + self.assertEqual(job.flatten_results, + config['flattenResults']) + else: + self.assertTrue(job.flatten_results is None) + if 'useQueryCache' in config: + self.assertEqual(job.use_query_cache, + config['useQueryCache']) + else: + self.assertTrue(job.use_query_cache is None) + + def _verifyResourceProperties(self, job, resource): + self._verifyReadonlyResourceProperties(job, resource) + + config = resource.get('configuration', {}).get('query') + self._verifyBooleanResourceProperties(job, config) + + if 'createDisposition' in config: + self.assertEqual(job.create_disposition, + config['createDisposition']) + else: + self.assertTrue(job.create_disposition is None) + if 'defaultDataset' in config: + dataset = job.default_dataset + ds_ref = { + 'projectId': dataset.project, + 'datasetId': dataset.name, + } + self.assertEqual(ds_ref, config['defaultDataset']) + else: + self.assertTrue(job.default_dataset is None) + if 'destinationTable' in config: + table = job.destination + tb_ref = { + 'projectId': table.project, + 'datasetId': table.dataset_name, + 'tableId': table.name + } + self.assertEqual(tb_ref, config['destinationTable']) + else: + self.assertTrue(job.destination is None) + if 'priority' in config: + self.assertEqual(job.priority, + config['priority']) + else: + self.assertTrue(job.priority is None) + if 'writeDisposition' in config: + self.assertEqual(job.write_disposition, + config['writeDisposition']) + else: + 
self.assertTrue(job.write_disposition is None) + + def test_ctor(self): + client = _Client(self.PROJECT) + job = self._makeOne(self.JOB_NAME, self.QUERY, client) + self.assertEqual(job.query, self.QUERY) + self.assertTrue(job._client is client) + self.assertEqual(job.job_type, self.JOB_TYPE) + self.assertEqual( + job.path, + '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME)) + + self._verifyInitialReadonlyProperties(job) + + # set/read from resource['configuration']['copy'] + self.assertTrue(job.allow_large_results is None) + self.assertTrue(job.create_disposition is None) + self.assertTrue(job.default_dataset is None) + self.assertTrue(job.destination is None) + self.assertTrue(job.flatten_results is None) + self.assertTrue(job.priority is None) + self.assertTrue(job.use_query_cache is None) + self.assertTrue(job.write_disposition is None) + + def test_from_api_repr_missing_identity(self): + self._setUpConstants() + client = _Client(self.PROJECT) + RESOURCE = {} + klass = self._getTargetClass() + with self.assertRaises(KeyError): + klass.from_api_repr(RESOURCE, client=client) + + def test_from_api_repr_missing_config(self): + self._setUpConstants() + client = _Client(self.PROJECT) + RESOURCE = { + 'id': '%s:%s' % (self.PROJECT, self.DS_NAME), + 'jobReference': { + 'projectId': self.PROJECT, + 'jobId': self.JOB_NAME, + } + } + klass = self._getTargetClass() + with self.assertRaises(KeyError): + klass.from_api_repr(RESOURCE, client=client) + + def test_from_api_repr_bare(self): + self._setUpConstants() + client = _Client(self.PROJECT) + RESOURCE = { + 'id': self.JOB_ID, + 'jobReference': { + 'projectId': self.PROJECT, + 'jobId': self.JOB_NAME, + }, + 'configuration': { + 'query': {'query': self.QUERY} + }, + } + klass = self._getTargetClass() + job = klass.from_api_repr(RESOURCE, client=client) + self.assertTrue(job._client is client) + self._verifyResourceProperties(job, RESOURCE) + + def test_from_api_repr_w_properties(self): + client = _Client(self.PROJECT) 
+ RESOURCE = self._makeResource() + RESOURCE['configuration']['query']['destinationTable'] = { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.DESTINATION_TABLE, + } + klass = self._getTargetClass() + dataset = klass.from_api_repr(RESOURCE, client=client) + self.assertTrue(dataset._client is client) + self._verifyResourceProperties(dataset, RESOURCE) + + def test_begin_w_bound_client(self): + PATH = 'projects/%s/jobs' % self.PROJECT + RESOURCE = self._makeResource() + # Ensure None for missing server-set props + del RESOURCE['statistics']['creationTime'] + del RESOURCE['etag'] + del RESOURCE['selfLink'] + del RESOURCE['user_email'] + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + job = self._makeOne(self.JOB_NAME, self.QUERY, client) + + job.begin() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'jobReference': { + 'projectId': self.PROJECT, + 'jobId': self.JOB_NAME, + }, + 'configuration': { + 'query': { + 'query': self.QUERY, + }, + }, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(job, RESOURCE) + + def test_begin_w_alternate_client(self): + from gcloud.bigquery.dataset import Dataset + from gcloud.bigquery.dataset import Table + PATH = 'projects/%s/jobs' % self.PROJECT + TABLE = 'TABLE' + DS_NAME = 'DATASET' + RESOURCE = self._makeResource(ended=True) + QUERY_CONFIGURATION = { + 'query': self.QUERY, + 'allowLargeResults': True, + 'createDisposition': 'CREATE_NEVER', + 'defaultDataset': { + 'projectId': self.PROJECT, + 'datasetId': DS_NAME, + }, + 'destinationTable': { + 'projectId': self.PROJECT, + 'datasetId': DS_NAME, + 'tableId': TABLE, + }, + 'flattenResults': True, + 'priority': 'INTERACTIVE', + 'useQueryCache': True, + 'writeDisposition': 'WRITE_TRUNCATE', + } + RESOURCE['configuration']['query'] = QUERY_CONFIGURATION + conn1 = 
_Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + job = self._makeOne(self.JOB_NAME, self.QUERY, client1) + + dataset = Dataset(DS_NAME, client1) + table = Table(TABLE, dataset) + + job.allow_large_results = True + job.create_disposition = 'CREATE_NEVER' + job.default_dataset = dataset + job.destination = table + job.flatten_results = True + job.priority = 'INTERACTIVE' + job.use_query_cache = True + job.write_disposition = 'WRITE_TRUNCATE' + + job.begin(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'jobReference': { + 'projectId': self.PROJECT, + 'jobId': self.JOB_NAME, + }, + 'configuration': { + 'query': QUERY_CONFIGURATION, + }, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(job, RESOURCE) + + def test_exists_miss_w_bound_client(self): + PATH = 'projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME) + conn = _Connection() + client = _Client(project=self.PROJECT, connection=conn) + job = self._makeOne(self.JOB_NAME, self.QUERY, client) + + self.assertFalse(job.exists()) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], {'fields': 'id'}) + + def test_exists_hit_w_alternate_client(self): + PATH = 'projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME) + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({}) + client2 = _Client(project=self.PROJECT, connection=conn2) + job = self._makeOne(self.JOB_NAME, self.QUERY, client1) + + self.assertTrue(job.exists(client=client2)) + + self.assertEqual(len(conn1._requested), 0) + 
self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], {'fields': 'id'}) + + def test_reload_w_bound_client(self): + from gcloud.bigquery.dataset import Dataset + from gcloud.bigquery.dataset import Table + PATH = 'projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME) + DS_NAME = 'DATASET' + DEST_TABLE = 'dest_table' + RESOURCE = self._makeResource() + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + job = self._makeOne(self.JOB_NAME, self.QUERY, client) + + dataset = Dataset(DS_NAME, client) + table = Table(DEST_TABLE, dataset) + job.destination = table + + job.reload() + + self.assertEqual(job.destination, None) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(job, RESOURCE) + + def test_reload_w_alternate_client(self): + PATH = 'projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME) + DS_NAME = 'DATASET' + DEST_TABLE = 'dest_table' + RESOURCE = self._makeResource() + q_config = RESOURCE['configuration']['query'] + q_config['destinationTable'] = { + 'projectId': self.PROJECT, + 'datasetId': DS_NAME, + 'tableId': DEST_TABLE, + } + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + job = self._makeOne(self.JOB_NAME, self.QUERY, client1) + + job.reload(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(job, RESOURCE) + + +class _Client(object): + + def __init__(self, project='project', connection=None): + self.project = 
project + self.connection = connection + + def dataset(self, name): + from gcloud.bigquery.dataset import Dataset + return Dataset(name, client=self) + + +class _Table(object): + + def __init__(self, name=None): + self._name = name + + @property + def name(self): + if self._name is not None: + return self._name + return TestLoadTableFromStorageJob.TABLE_NAME + + @property + def project(self): + return TestLoadTableFromStorageJob.PROJECT + + @property + def dataset_name(self): + return TestLoadTableFromStorageJob.DS_NAME + + +class _Connection(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + def api_request(self, **kw): + from gcloud.exceptions import NotFound + self._requested.append(kw) + + try: + response, self._responses = self._responses[0], self._responses[1:] + except: + raise NotFound('miss') + else: + return response diff --git a/env/Lib/site-packages/gcloud/bigquery/test_query.py b/env/Lib/site-packages/gcloud/bigquery/test_query.py new file mode 100644 index 0000000..c0e2154 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigquery/test_query.py @@ -0,0 +1,330 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest2 + + +class TestQueryResults(unittest2.TestCase): + PROJECT = 'project' + JOB_NAME = 'job_name' + JOB_NAME = 'test-synchronous-query' + JOB_TYPE = 'query' + QUERY = 'select count(*) from persons' + TOKEN = 'TOKEN' + + def _getTargetClass(self): + from gcloud.bigquery.query import QueryResults + return QueryResults + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def _makeResource(self, complete=False): + resource = { + 'jobReference': { + 'projectId': self.PROJECT, + 'jobId': self.JOB_NAME, + }, + 'jobComplete': complete, + 'errors': [], + 'schema': { + 'fields': [ + {'name': 'full_name', 'type': 'STRING', 'mode': 'REQURED'}, + {'name': 'age', 'type': 'INTEGER', 'mode': 'REQURED'}, + ], + }, + } + + if complete: + resource['totalRows'] = 1000 + resource['rows'] = [ + {'f': [ + {'v': 'Phred Phlyntstone'}, + {'v': 32}, + ]}, + {'f': [ + {'v': 'Bharney Rhubble'}, + {'v': 33}, + ]}, + {'f': [ + {'v': 'Wylma Phlyntstone'}, + {'v': 29}, + ]}, + {'f': [ + {'v': 'Bhettye Rhubble'}, + {'v': 27}, + ]}, + ] + resource['pageToken'] = self.TOKEN + resource['totalBytesProcessed'] = 100000 + resource['cacheHit'] = False + + return resource + + def _verifySchema(self, query, resource): + from gcloud.bigquery.table import SchemaField + if 'schema' in resource: + fields = resource['schema']['fields'] + self.assertEqual(len(query.schema), len(fields)) + for found, expected in zip(query.schema, fields): + self.assertTrue(isinstance(found, SchemaField)) + self.assertEqual(found.name, expected['name']) + self.assertEqual(found.field_type, expected['type']) + self.assertEqual(found.mode, expected['mode']) + self.assertEqual(found.description, + expected.get('description')) + self.assertEqual(found.fields, expected.get('fields')) + else: + self.assertTrue(query.schema is None) + + def _verifyRows(self, query, resource): + expected = resource.get('rows') + if expected is None: + self.assertEqual(query.rows, []) + else: + found = 
query.rows + self.assertEqual(len(found), len(expected)) + for f_row, e_row in zip(found, expected): + self.assertEqual(f_row, + tuple([cell['v'] for cell in e_row['f']])) + + def _verifyResourceProperties(self, query, resource): + self.assertEqual(query.cache_hit, resource.get('cacheHit')) + self.assertEqual(query.complete, resource.get('jobComplete')) + self.assertEqual(query.errors, resource.get('errors')) + self.assertEqual(query.page_token, resource.get('pageToken')) + self.assertEqual(query.total_rows, resource.get('totalRows')) + self.assertEqual(query.total_bytes_processed, + resource.get('totalBytesProcessed')) + + if 'jobReference' in resource: + self.assertEqual(query.name, resource['jobReference']['jobId']) + else: + self.assertTrue(query.name is None) + + self._verifySchema(query, resource) + self._verifyRows(query, resource) + + def test_ctor(self): + client = _Client(self.PROJECT) + query = self._makeOne(self.QUERY, client) + self.assertEqual(query.query, self.QUERY) + self.assertTrue(query._client is client) + + self.assertTrue(query.cache_hit is None) + self.assertTrue(query.complete is None) + self.assertTrue(query.errors is None) + self.assertTrue(query.name is None) + self.assertTrue(query.page_token is None) + self.assertEqual(query.rows, []) + self.assertTrue(query.schema is None) + self.assertTrue(query.total_rows is None) + self.assertTrue(query.total_bytes_processed is None) + + self.assertTrue(query.default_dataset is None) + self.assertTrue(query.max_results is None) + self.assertTrue(query.preserve_nulls is None) + self.assertTrue(query.use_query_cache is None) + + def test_job_wo_jobid(self): + client = _Client(self.PROJECT) + query = self._makeOne(self.QUERY, client) + self.assertTrue(query.job is None) + + def test_job_w_jobid(self): + from gcloud.bigquery.job import QueryJob + SERVER_GENERATED = 'SERVER_GENERATED' + client = _Client(self.PROJECT) + query = self._makeOne(self.QUERY, client) + query._properties['jobReference'] = { + 
'projectId': self.PROJECT, + 'jobId': SERVER_GENERATED, + } + job = query.job + self.assertTrue(isinstance(job, QueryJob)) + self.assertEqual(job.query, self.QUERY) + self.assertTrue(job._client is client) + self.assertEqual(job.name, SERVER_GENERATED) + fetched_later = query.job + self.assertTrue(fetched_later is job) + + def test_schema(self): + client = _Client(self.PROJECT) + query = self._makeOne(self.QUERY, client) + self._verifyResourceProperties(query, {}) + resource = { + 'schema': { + 'fields': [ + {'name': 'full_name', 'type': 'STRING', 'mode': 'REQURED'}, + {'name': 'age', 'type': 'INTEGER', 'mode': 'REQURED'}, + ], + }, + } + query._set_properties(resource) + self._verifyResourceProperties(query, resource) + + def test_run_w_bound_client(self): + PATH = 'projects/%s/queries' % self.PROJECT + RESOURCE = self._makeResource(complete=False) + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + query = self._makeOne(self.QUERY, client) + + query.run() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = {'query': self.QUERY} + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(query, RESOURCE) + + def test_run_w_alternate_client(self): + PATH = 'projects/%s/queries' % self.PROJECT + RESOURCE = self._makeResource(complete=True) + DATASET = 'test_dataset' + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + query = self._makeOne(self.QUERY, client1) + + query.default_dataset = client2.dataset(DATASET) + query.max_results = 100 + query.preserve_nulls = True + query.timeout_ms = 20000 + query.use_query_cache = False + query.dry_run = True + + query.run(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = 
conn2._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'query': self.QUERY, + 'defaultDataset': { + 'projectId': self.PROJECT, + 'datasetId': DATASET, + }, + 'dryRun': True, + 'maxResults': 100, + 'preserveNulls': True, + 'timeoutMs': 20000, + 'useQueryCache': False, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(query, RESOURCE) + + def test_fetch_data_query_not_yet_run(self): + conn = _Connection() + client = _Client(project=self.PROJECT, connection=conn) + query = self._makeOne(self.QUERY, client) + self.assertRaises(ValueError, query.fetch_data) + + def test_fetch_data_w_bound_client(self): + PATH = 'projects/%s/queries/%s' % (self.PROJECT, self.JOB_NAME) + BEFORE = self._makeResource(complete=False) + AFTER = self._makeResource(complete=True) + + conn = _Connection(AFTER) + client = _Client(project=self.PROJECT, connection=conn) + query = self._makeOne(self.QUERY, client) + query._set_properties(BEFORE) + self.assertFalse(query.complete) + + rows, total_rows, page_token = query.fetch_data() + + self.assertTrue(query.complete) + self.assertEqual(len(rows), 4) + self.assertEqual(rows[0], ('Phred Phlyntstone', 32)) + self.assertEqual(rows[1], ('Bharney Rhubble', 33)) + self.assertEqual(rows[2], ('Wylma Phlyntstone', 29)) + self.assertEqual(rows[3], ('Bhettye Rhubble', 27)) + self.assertEqual(total_rows, AFTER['totalRows']) + self.assertEqual(page_token, AFTER['pageToken']) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + + def test_fetch_data_w_alternate_client(self): + PATH = 'projects/%s/queries/%s' % (self.PROJECT, self.JOB_NAME) + MAX = 10 + TOKEN = 'TOKEN' + START = 2257 + TIMEOUT = 20000 + BEFORE = self._makeResource(complete=False) + AFTER = self._makeResource(complete=True) + + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, 
connection=conn1) + conn2 = _Connection(AFTER) + client2 = _Client(project=self.PROJECT, connection=conn2) + query = self._makeOne(self.QUERY, client1) + query._set_properties(BEFORE) + self.assertFalse(query.complete) + + rows, total_rows, page_token = query.fetch_data( + client=client2, max_results=MAX, page_token=TOKEN, + start_index=START, timeout_ms=TIMEOUT) + + self.assertTrue(query.complete) + self.assertEqual(len(rows), 4) + self.assertEqual(rows[0], ('Phred Phlyntstone', 32)) + self.assertEqual(rows[1], ('Bharney Rhubble', 33)) + self.assertEqual(rows[2], ('Wylma Phlyntstone', 29)) + self.assertEqual(rows[3], ('Bhettye Rhubble', 27)) + self.assertEqual(total_rows, AFTER['totalRows']) + self.assertEqual(page_token, AFTER['pageToken']) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], + {'maxResults': MAX, + 'pageToken': TOKEN, + 'startIndex': START, + 'timeoutMs': TIMEOUT}) + + +class _Client(object): + + def __init__(self, project='project', connection=None): + self.project = project + self.connection = connection + + def dataset(self, name): + from gcloud.bigquery.dataset import Dataset + return Dataset(name, client=self) + + +class _Connection(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + def api_request(self, **kw): + from gcloud.exceptions import NotFound + self._requested.append(kw) + + response, self._responses = self._responses[0], self._responses[1:] + return response diff --git a/env/Lib/site-packages/gcloud/bigquery/test_table.py b/env/Lib/site-packages/gcloud/bigquery/test_table.py new file mode 100644 index 0000000..a0edf92 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigquery/test_table.py @@ -0,0 +1,1741 @@ +# Copyright 2015 Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class TestSchemaField(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigquery.table import SchemaField + return SchemaField + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + field = self._makeOne('test', 'STRING') + self.assertEqual(field.name, 'test') + self.assertEqual(field.field_type, 'STRING') + self.assertEqual(field.mode, 'NULLABLE') + self.assertEqual(field.description, None) + self.assertEqual(field.fields, None) + + def test_ctor_explicit(self): + field = self._makeOne('test', 'STRING', mode='REQUIRED', + description='Testing') + self.assertEqual(field.name, 'test') + self.assertEqual(field.field_type, 'STRING') + self.assertEqual(field.mode, 'REQUIRED') + self.assertEqual(field.description, 'Testing') + self.assertEqual(field.fields, None) + + def test_ctor_subfields(self): + field = self._makeOne('phone_number', 'RECORD', + fields=[self._makeOne('area_code', 'STRING'), + self._makeOne('local_number', 'STRING')]) + self.assertEqual(field.name, 'phone_number') + self.assertEqual(field.field_type, 'RECORD') + self.assertEqual(field.mode, 'NULLABLE') + self.assertEqual(field.description, None) + self.assertEqual(len(field.fields), 2) + self.assertEqual(field.fields[0].name, 'area_code') + self.assertEqual(field.fields[0].field_type, 'STRING') + self.assertEqual(field.fields[0].mode, 'NULLABLE') + 
self.assertEqual(field.fields[0].description, None) + self.assertEqual(field.fields[0].fields, None) + self.assertEqual(field.fields[1].name, 'local_number') + self.assertEqual(field.fields[1].field_type, 'STRING') + self.assertEqual(field.fields[1].mode, 'NULLABLE') + self.assertEqual(field.fields[1].description, None) + self.assertEqual(field.fields[1].fields, None) + + def test___eq___name_mismatch(self): + field = self._makeOne('test', 'STRING') + other = self._makeOne('other', 'STRING') + self.assertNotEqual(field, other) + + def test___eq___field_type_mismatch(self): + field = self._makeOne('test', 'STRING') + other = self._makeOne('test', 'INTEGER') + self.assertNotEqual(field, other) + + def test___eq___mode_mismatch(self): + field = self._makeOne('test', 'STRING', mode='REQUIRED') + other = self._makeOne('test', 'STRING', mode='NULLABLE') + self.assertNotEqual(field, other) + + def test___eq___description_mismatch(self): + field = self._makeOne('test', 'STRING', description='Testing') + other = self._makeOne('test', 'STRING', description='Other') + self.assertNotEqual(field, other) + + def test___eq___fields_mismatch(self): + sub1 = self._makeOne('sub1', 'STRING') + sub2 = self._makeOne('sub2', 'STRING') + field = self._makeOne('test', 'RECORD', fields=[sub1]) + other = self._makeOne('test', 'RECORD', fields=[sub2]) + self.assertNotEqual(field, other) + + def test___eq___hit(self): + field = self._makeOne('test', 'STRING', mode='REQUIRED', + description='Testing') + other = self._makeOne('test', 'STRING', mode='REQUIRED', + description='Testing') + self.assertEqual(field, other) + + def test___eq___hit_case_diff_on_type(self): + field = self._makeOne('test', 'STRING', mode='REQUIRED', + description='Testing') + other = self._makeOne('test', 'string', mode='REQUIRED', + description='Testing') + self.assertEqual(field, other) + + def test___eq___hit_w_fields(self): + sub1 = self._makeOne('sub1', 'STRING') + sub2 = self._makeOne('sub2', 'STRING') + field = 
self._makeOne('test', 'RECORD', fields=[sub1, sub2]) + other = self._makeOne('test', 'RECORD', fields=[sub1, sub2]) + self.assertEqual(field, other) + + +class _SchemaBase(object): + + def _verify_field(self, field, r_field): + self.assertEqual(field.name, r_field['name']) + self.assertEqual(field.field_type, r_field['type']) + self.assertEqual(field.mode, r_field.get('mode', 'NULLABLE')) + + def _verifySchema(self, schema, resource): + r_fields = resource['schema']['fields'] + self.assertEqual(len(schema), len(r_fields)) + + for field, r_field in zip(schema, r_fields): + self._verify_field(field, r_field) + + +class TestTable(unittest2.TestCase, _SchemaBase): + PROJECT = 'project' + DS_NAME = 'dataset-name' + TABLE_NAME = 'table-name' + + def _getTargetClass(self): + from gcloud.bigquery.table import Table + return Table + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def _setUpConstants(self): + import datetime + from gcloud._helpers import UTC + + self.WHEN_TS = 1437767599.006 + self.WHEN = datetime.datetime.utcfromtimestamp(self.WHEN_TS).replace( + tzinfo=UTC) + self.ETAG = 'ETAG' + self.TABLE_ID = '%s:%s:%s' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + self.RESOURCE_URL = 'http://example.com/path/to/resource' + self.NUM_BYTES = 12345 + self.NUM_ROWS = 67 + + def _makeResource(self): + self._setUpConstants() + return { + 'creationTime': self.WHEN_TS * 1000, + 'tableReference': + {'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.TABLE_NAME}, + 'schema': {'fields': [ + {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'}, + {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}]}, + 'etag': 'ETAG', + 'id': self.TABLE_ID, + 'lastModifiedTime': self.WHEN_TS * 1000, + 'location': 'US', + 'selfLink': self.RESOURCE_URL, + 'numRows': self.NUM_ROWS, + 'numBytes': self.NUM_BYTES, + 'type': 'TABLE', + } + + def _verifyReadonlyResourceProperties(self, table, resource): + if 'creationTime' in 
resource: + self.assertEqual(table.created, self.WHEN) + else: + self.assertEqual(table.created, None) + + if 'etag' in resource: + self.assertEqual(table.etag, self.ETAG) + else: + self.assertEqual(table.etag, None) + + if 'numRows' in resource: + self.assertEqual(table.num_rows, self.NUM_ROWS) + else: + self.assertEqual(table.num_rows, None) + + if 'numBytes' in resource: + self.assertEqual(table.num_bytes, self.NUM_BYTES) + else: + self.assertEqual(table.num_bytes, None) + + if 'selfLink' in resource: + self.assertEqual(table.self_link, self.RESOURCE_URL) + else: + self.assertEqual(table.self_link, None) + + self.assertEqual(table.table_id, self.TABLE_ID) + self.assertEqual(table.table_type, + 'TABLE' if 'view' not in resource else 'VIEW') + + def _verifyResourceProperties(self, table, resource): + + self._verifyReadonlyResourceProperties(table, resource) + + if 'expirationTime' in resource: + self.assertEqual(table.expires, self.EXP_TIME) + else: + self.assertEqual(table.expires, None) + + self.assertEqual(table.description, resource.get('description')) + self.assertEqual(table.friendly_name, resource.get('friendlyName')) + self.assertEqual(table.location, resource.get('location')) + + if 'view' in resource: + self.assertEqual(table.view_query, resource['view']['query']) + else: + self.assertEqual(table.view_query, None) + + if 'schema' in resource: + self._verifySchema(table.schema, resource) + else: + self.assertEqual(table.schema, []) + + def test_ctor(self): + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + self.assertEqual(table.name, self.TABLE_NAME) + self.assertTrue(table._dataset is dataset) + self.assertEqual(table.project, self.PROJECT) + self.assertEqual(table.dataset_name, self.DS_NAME) + self.assertEqual( + table.path, + '/projects/%s/datasets/%s/tables/%s' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME)) + self.assertEqual(table.schema, []) + + self.assertEqual(table.created, 
None) + self.assertEqual(table.etag, None) + self.assertEqual(table.modified, None) + self.assertEqual(table.num_bytes, None) + self.assertEqual(table.num_rows, None) + self.assertEqual(table.self_link, None) + self.assertEqual(table.table_id, None) + self.assertEqual(table.table_type, None) + + self.assertEqual(table.description, None) + self.assertEqual(table.expires, None) + self.assertEqual(table.friendly_name, None) + self.assertEqual(table.location, None) + self.assertEqual(table.view_query, None) + + def test_ctor_w_schema(self): + from gcloud.bigquery.table import SchemaField + client = _Client(self.PROJECT) + dataset = _Dataset(client) + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + age = SchemaField('age', 'INTEGER', mode='REQUIRED') + table = self._makeOne(self.TABLE_NAME, dataset, + schema=[full_name, age]) + self.assertEqual(table.schema, [full_name, age]) + + def test_num_bytes_getter(self): + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + + # Check with no value set. + self.assertEqual(table.num_bytes, None) + + num_bytes = 1337 + # Check with integer value set. + table._properties = {'numBytes': num_bytes} + self.assertEqual(table.num_bytes, num_bytes) + + # Check with a string value set. + table._properties = {'numBytes': str(num_bytes)} + self.assertEqual(table.num_bytes, num_bytes) + + # Check with invalid int value. + table._properties = {'numBytes': 'x'} + with self.assertRaises(ValueError): + getattr(table, 'num_bytes') + + def test_num_rows_getter(self): + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + + # Check with no value set. + self.assertEqual(table.num_rows, None) + + num_rows = 42 + # Check with integer value set. + table._properties = {'numRows': num_rows} + self.assertEqual(table.num_rows, num_rows) + + # Check with a string value set. 
+ table._properties = {'numRows': str(num_rows)} + self.assertEqual(table.num_rows, num_rows) + + # Check with invalid int value. + table._properties = {'numRows': 'x'} + with self.assertRaises(ValueError): + getattr(table, 'num_rows') + + def test_schema_setter_non_list(self): + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + with self.assertRaises(TypeError): + table.schema = object() + + def test_schema_setter_invalid_field(self): + from gcloud.bigquery.table import SchemaField + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + with self.assertRaises(ValueError): + table.schema = [full_name, object()] + + def test_schema_setter(self): + from gcloud.bigquery.table import SchemaField + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + age = SchemaField('age', 'INTEGER', mode='REQUIRED') + table.schema = [full_name, age] + self.assertEqual(table.schema, [full_name, age]) + + def test_props_set_by_server(self): + import datetime + from gcloud._helpers import UTC + from gcloud._helpers import _millis + + CREATED = datetime.datetime(2015, 7, 29, 12, 13, 22, tzinfo=UTC) + MODIFIED = datetime.datetime(2015, 7, 29, 14, 47, 15, tzinfo=UTC) + TABLE_ID = '%s:%s:%s' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + URL = 'http://example.com/projects/%s/datasets/%s/tables/%s' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + table._properties['creationTime'] = _millis(CREATED) + table._properties['etag'] = 'ETAG' + table._properties['lastModifiedTime'] = _millis(MODIFIED) + table._properties['numBytes'] = 12345 + table._properties['numRows'] = 
66 + table._properties['selfLink'] = URL + table._properties['id'] = TABLE_ID + table._properties['type'] = 'TABLE' + + self.assertEqual(table.created, CREATED) + self.assertEqual(table.etag, 'ETAG') + self.assertEqual(table.modified, MODIFIED) + self.assertEqual(table.num_bytes, 12345) + self.assertEqual(table.num_rows, 66) + self.assertEqual(table.self_link, URL) + self.assertEqual(table.table_id, TABLE_ID) + self.assertEqual(table.table_type, 'TABLE') + + def test_description_setter_bad_value(self): + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + with self.assertRaises(ValueError): + table.description = 12345 + + def test_description_setter(self): + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + table.description = 'DESCRIPTION' + self.assertEqual(table.description, 'DESCRIPTION') + + def test_expires_setter_bad_value(self): + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + with self.assertRaises(ValueError): + table.expires = object() + + def test_expires_setter(self): + import datetime + from gcloud._helpers import UTC + + WHEN = datetime.datetime(2015, 7, 28, 16, 39, tzinfo=UTC) + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + table.expires = WHEN + self.assertEqual(table.expires, WHEN) + + def test_friendly_name_setter_bad_value(self): + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + with self.assertRaises(ValueError): + table.friendly_name = 12345 + + def test_friendly_name_setter(self): + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + table.friendly_name = 'FRIENDLY' + self.assertEqual(table.friendly_name, 'FRIENDLY') + + def test_location_setter_bad_value(self): + client = 
_Client(self.PROJECT) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + with self.assertRaises(ValueError): + table.location = 12345 + + def test_location_setter(self): + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + table.location = 'LOCATION' + self.assertEqual(table.location, 'LOCATION') + + def test_view_query_setter_bad_value(self): + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + with self.assertRaises(ValueError): + table.view_query = 12345 + + def test_view_query_setter(self): + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + table.view_query = 'select * from foo' + self.assertEqual(table.view_query, 'select * from foo') + + def test_view_query_deleter(self): + client = _Client(self.PROJECT) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + table.view_query = 'select * from foo' + del table.view_query + self.assertEqual(table.view_query, None) + + def test_from_api_repr_missing_identity(self): + self._setUpConstants() + client = _Client(self.PROJECT) + dataset = _Dataset(client) + RESOURCE = {} + klass = self._getTargetClass() + with self.assertRaises(KeyError): + klass.from_api_repr(RESOURCE, dataset) + + def test_from_api_repr_bare(self): + self._setUpConstants() + client = _Client(self.PROJECT) + dataset = _Dataset(client) + RESOURCE = { + 'id': '%s:%s:%s' % (self.PROJECT, self.DS_NAME, self.TABLE_NAME), + 'tableReference': { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.TABLE_NAME, + }, + 'type': 'TABLE', + } + klass = self._getTargetClass() + table = klass.from_api_repr(RESOURCE, dataset) + self.assertEqual(table.name, self.TABLE_NAME) + self.assertTrue(table._dataset is dataset) + self._verifyResourceProperties(table, RESOURCE) + + def 
test_from_api_repr_w_properties(self): + client = _Client(self.PROJECT) + dataset = _Dataset(client) + RESOURCE = self._makeResource() + klass = self._getTargetClass() + table = klass.from_api_repr(RESOURCE, dataset) + self.assertTrue(table._dataset._client is client) + self._verifyResourceProperties(table, RESOURCE) + + def test_create_no_view_query_no_schema(self): + conn = _Connection() + client = _Client(project=self.PROJECT, connection=conn) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset) + + with self.assertRaises(ValueError): + table.create() + + def test_create_w_bound_client(self): + from gcloud.bigquery.table import SchemaField + PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME) + RESOURCE = self._makeResource() + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + dataset = _Dataset(client) + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + age = SchemaField('age', 'INTEGER', mode='REQUIRED') + table = self._makeOne(self.TABLE_NAME, dataset, + schema=[full_name, age]) + + table.create() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'tableReference': { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.TABLE_NAME}, + 'schema': {'fields': [ + {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'}, + {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}]}, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(table, RESOURCE) + + def test_create_w_alternate_client(self): + import datetime + from gcloud._helpers import UTC + from gcloud._helpers import _millis + from gcloud.bigquery.table import SchemaField + + PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME) + DESCRIPTION = 'DESCRIPTION' + TITLE = 'TITLE' + QUERY = 'select fullname, age from person_ages' 
+ RESOURCE = self._makeResource() + RESOURCE['description'] = DESCRIPTION + RESOURCE['friendlyName'] = TITLE + self.EXP_TIME = datetime.datetime(2015, 8, 1, 23, 59, 59, + tzinfo=UTC) + RESOURCE['expirationTime'] = _millis(self.EXP_TIME) + RESOURCE['view'] = {} + RESOURCE['view']['query'] = QUERY + RESOURCE['type'] = 'VIEW' + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + dataset = _Dataset(client=client1) + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + age = SchemaField('age', 'INTEGER', mode='REQUIRED') + table = self._makeOne(self.TABLE_NAME, dataset=dataset, + schema=[full_name, age]) + table.friendly_name = TITLE + table.description = DESCRIPTION + table.view_query = QUERY + + table.create(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'tableReference': { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.TABLE_NAME}, + 'description': DESCRIPTION, + 'friendlyName': TITLE, + 'view': {'query': QUERY}, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(table, RESOURCE) + + def test_create_w_missing_output_properties(self): + # In the wild, the resource returned from 'dataset.create' sometimes + # lacks 'creationTime' / 'lastModifiedTime' + from gcloud.bigquery.table import SchemaField + PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME) + RESOURCE = self._makeResource() + del RESOURCE['creationTime'] + del RESOURCE['lastModifiedTime'] + self.WHEN = None + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + dataset = _Dataset(client) + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + age = SchemaField('age', 
'INTEGER', mode='REQUIRED') + table = self._makeOne(self.TABLE_NAME, dataset, + schema=[full_name, age]) + + table.create() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'tableReference': { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.TABLE_NAME}, + 'schema': {'fields': [ + {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'}, + {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}]}, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(table, RESOURCE) + + def test_exists_miss_w_bound_client(self): + PATH = 'projects/%s/datasets/%s/tables/%s' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + conn = _Connection() + client = _Client(project=self.PROJECT, connection=conn) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset=dataset) + + self.assertFalse(table.exists()) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], {'fields': 'id'}) + + def test_exists_hit_w_alternate_client(self): + PATH = 'projects/%s/datasets/%s/tables/%s' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({}) + client2 = _Client(project=self.PROJECT, connection=conn2) + dataset = _Dataset(client1) + table = self._makeOne(self.TABLE_NAME, dataset=dataset) + + self.assertTrue(table.exists(client=client2)) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], {'fields': 'id'}) + + def test_reload_w_bound_client(self): + PATH = 
'projects/%s/datasets/%s/tables/%s' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + RESOURCE = self._makeResource() + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset=dataset) + + table.reload() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(table, RESOURCE) + + def test_reload_w_alternate_client(self): + PATH = 'projects/%s/datasets/%s/tables/%s' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + RESOURCE = self._makeResource() + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + dataset = _Dataset(client1) + table = self._makeOne(self.TABLE_NAME, dataset=dataset) + + table.reload(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(table, RESOURCE) + + def test_patch_w_invalid_expiration(self): + RESOURCE = self._makeResource() + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset=dataset) + + with self.assertRaises(ValueError): + table.patch(expires='BOGUS') + + def test_patch_w_bound_client(self): + PATH = 'projects/%s/datasets/%s/tables/%s' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + DESCRIPTION = 'DESCRIPTION' + TITLE = 'TITLE' + RESOURCE = self._makeResource() + RESOURCE['description'] = DESCRIPTION + RESOURCE['friendlyName'] = TITLE + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + dataset = _Dataset(client) + 
table = self._makeOne(self.TABLE_NAME, dataset=dataset) + + table.patch(description=DESCRIPTION, + friendly_name=TITLE, + view_query=None) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'PATCH') + SENT = { + 'description': DESCRIPTION, + 'friendlyName': TITLE, + 'view': None, + } + self.assertEqual(req['data'], SENT) + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(table, RESOURCE) + + def test_patch_w_alternate_client(self): + import datetime + from gcloud._helpers import UTC + from gcloud._helpers import _millis + from gcloud.bigquery.table import SchemaField + + PATH = 'projects/%s/datasets/%s/tables/%s' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + QUERY = 'select fullname, age from person_ages' + LOCATION = 'EU' + RESOURCE = self._makeResource() + RESOURCE['view'] = {'query': QUERY} + RESOURCE['type'] = 'VIEW' + RESOURCE['location'] = LOCATION + self.EXP_TIME = datetime.datetime(2015, 8, 1, 23, 59, 59, + tzinfo=UTC) + RESOURCE['expirationTime'] = _millis(self.EXP_TIME) + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + dataset = _Dataset(client1) + table = self._makeOne(self.TABLE_NAME, dataset=dataset) + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + age = SchemaField('age', 'INTEGER', mode='NULLABLE') + + table.patch(client=client2, view_query=QUERY, location=LOCATION, + expires=self.EXP_TIME, schema=[full_name, age]) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'PATCH') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'view': {'query': QUERY}, + 'location': LOCATION, + 'expirationTime': _millis(self.EXP_TIME), + 'schema': {'fields': [ + {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'}, + 
{'name': 'age', 'type': 'INTEGER', 'mode': 'NULLABLE'}]}, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(table, RESOURCE) + + def test_patch_w_schema_None(self): + # Simulate deleting schema: not sure if back-end will actually + # allow this operation, but the spec says it is optional. + PATH = 'projects/%s/datasets/%s/tables/%s' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + DESCRIPTION = 'DESCRIPTION' + TITLE = 'TITLE' + RESOURCE = self._makeResource() + RESOURCE['description'] = DESCRIPTION + RESOURCE['friendlyName'] = TITLE + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset=dataset) + + table.patch(schema=None) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'PATCH') + SENT = {'schema': None} + self.assertEqual(req['data'], SENT) + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(table, RESOURCE) + + def test_update_w_bound_client(self): + from gcloud.bigquery.table import SchemaField + PATH = 'projects/%s/datasets/%s/tables/%s' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + DESCRIPTION = 'DESCRIPTION' + TITLE = 'TITLE' + RESOURCE = self._makeResource() + RESOURCE['description'] = DESCRIPTION + RESOURCE['friendlyName'] = TITLE + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + dataset = _Dataset(client) + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + age = SchemaField('age', 'INTEGER', mode='REQUIRED') + table = self._makeOne(self.TABLE_NAME, dataset=dataset, + schema=[full_name, age]) + table.description = DESCRIPTION + table.friendly_name = TITLE + + table.update() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'PUT') + SENT = { + 'tableReference': + {'projectId': self.PROJECT, + 'datasetId': 
self.DS_NAME, + 'tableId': self.TABLE_NAME}, + 'schema': {'fields': [ + {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'}, + {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}]}, + 'description': DESCRIPTION, + 'friendlyName': TITLE, + } + self.assertEqual(req['data'], SENT) + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(table, RESOURCE) + + def test_update_w_alternate_client(self): + import datetime + from gcloud._helpers import UTC + from gcloud._helpers import _millis + from gcloud.bigquery.table import SchemaField + + PATH = 'projects/%s/datasets/%s/tables/%s' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + DEF_TABLE_EXP = 12345 + LOCATION = 'EU' + QUERY = 'select fullname, age from person_ages' + RESOURCE = self._makeResource() + RESOURCE['defaultTableExpirationMs'] = 12345 + RESOURCE['location'] = LOCATION + self.EXP_TIME = datetime.datetime(2015, 8, 1, 23, 59, 59, + tzinfo=UTC) + RESOURCE['expirationTime'] = _millis(self.EXP_TIME) + RESOURCE['view'] = {'query': QUERY} + RESOURCE['type'] = 'VIEW' + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + dataset = _Dataset(client1) + table = self._makeOne(self.TABLE_NAME, dataset=dataset) + table.default_table_expiration_ms = DEF_TABLE_EXP + table.location = LOCATION + table.expires = self.EXP_TIME + table.view_query = QUERY + + table.update(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'PUT') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'tableReference': + {'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.TABLE_NAME}, + 'expirationTime': _millis(self.EXP_TIME), + 'location': 'EU', + 'view': {'query': QUERY}, + } + self.assertEqual(req['data'], SENT) + 
self._verifyResourceProperties(table, RESOURCE) + + def test_delete_w_bound_client(self): + PATH = 'projects/%s/datasets/%s/tables/%s' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + conn = _Connection({}) + client = _Client(project=self.PROJECT, connection=conn) + dataset = _Dataset(client) + table = self._makeOne(self.TABLE_NAME, dataset=dataset) + + table.delete() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'DELETE') + self.assertEqual(req['path'], '/%s' % PATH) + + def test_delete_w_alternate_client(self): + PATH = 'projects/%s/datasets/%s/tables/%s' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({}) + client2 = _Client(project=self.PROJECT, connection=conn2) + dataset = _Dataset(client1) + table = self._makeOne(self.TABLE_NAME, dataset=dataset) + + table.delete(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'DELETE') + self.assertEqual(req['path'], '/%s' % PATH) + + def test_fetch_data_w_bound_client(self): + import datetime + from gcloud._helpers import UTC + from gcloud.bigquery.table import SchemaField + + PATH = 'projects/%s/datasets/%s/tables/%s/data' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + WHEN_TS = 1437767599.006 + WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace( + tzinfo=UTC) + WHEN_1 = WHEN + datetime.timedelta(seconds=1) + WHEN_2 = WHEN + datetime.timedelta(seconds=2) + ROWS = 1234 + TOKEN = 'TOKEN' + + def _bigquery_timestamp_float_repr(ts_float): + # Preserve microsecond precision for E+09 timestamps + return '%0.15E' % (ts_float,) + + DATA = { + 'totalRows': str(ROWS), + 'pageToken': TOKEN, + 'rows': [ + {'f': [ + {'v': 'Phred Phlyntstone'}, + {'v': '32'}, + {'v': _bigquery_timestamp_float_repr(WHEN_TS)}, + ]}, + {'f': [ + {'v': 
'Bharney Rhubble'}, + {'v': '33'}, + {'v': _bigquery_timestamp_float_repr(WHEN_TS + 1)}, + ]}, + {'f': [ + {'v': 'Wylma Phlyntstone'}, + {'v': '29'}, + {'v': _bigquery_timestamp_float_repr(WHEN_TS + 2)}, + ]}, + {'f': [ + {'v': 'Bhettye Rhubble'}, + {'v': None}, + {'v': None}, + ]}, + ] + } + + conn = _Connection(DATA) + client = _Client(project=self.PROJECT, connection=conn) + dataset = _Dataset(client) + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + age = SchemaField('age', 'INTEGER', mode='NULLABLE') + joined = SchemaField('joined', 'TIMESTAMP', mode='NULLABLE') + table = self._makeOne(self.TABLE_NAME, dataset=dataset, + schema=[full_name, age, joined]) + + rows, total_rows, page_token = table.fetch_data() + + self.assertEqual(len(rows), 4) + self.assertEqual(rows[0], ('Phred Phlyntstone', 32, WHEN)) + self.assertEqual(rows[1], ('Bharney Rhubble', 33, WHEN_1)) + self.assertEqual(rows[2], ('Wylma Phlyntstone', 29, WHEN_2)) + self.assertEqual(rows[3], ('Bhettye Rhubble', None, None)) + self.assertEqual(total_rows, ROWS) + self.assertEqual(page_token, TOKEN) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + + def test_fetch_data_w_alternate_client(self): + from gcloud.bigquery.table import SchemaField + PATH = 'projects/%s/datasets/%s/tables/%s/data' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + MAX = 10 + TOKEN = 'TOKEN' + DATA = { + 'rows': [ + {'f': [ + {'v': 'Phred Phlyntstone'}, + {'v': '32'}, + {'v': 'true'}, + {'v': '3.1415926'}, + ]}, + {'f': [ + {'v': 'Bharney Rhubble'}, + {'v': '33'}, + {'v': 'false'}, + {'v': '1.414'}, + ]}, + {'f': [ + {'v': 'Wylma Phlyntstone'}, + {'v': '29'}, + {'v': 'true'}, + {'v': '2.71828'}, + ]}, + {'f': [ + {'v': 'Bhettye Rhubble'}, + {'v': '27'}, + {'v': None}, + {'v': None}, + ]}, + ] + } + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = 
_Connection(DATA) + client2 = _Client(project=self.PROJECT, connection=conn2) + dataset = _Dataset(client1) + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + age = SchemaField('age', 'INTEGER', mode='REQUIRED') + voter = SchemaField('voter', 'BOOLEAN', mode='NULLABLE') + score = SchemaField('score', 'FLOAT', mode='NULLABLE') + table = self._makeOne(self.TABLE_NAME, dataset=dataset, + schema=[full_name, age, voter, score]) + + rows, total_rows, page_token = table.fetch_data(client=client2, + max_results=MAX, + page_token=TOKEN) + + self.assertEqual(len(rows), 4) + self.assertEqual(rows[0], ('Phred Phlyntstone', 32, True, 3.1415926)) + self.assertEqual(rows[1], ('Bharney Rhubble', 33, False, 1.414)) + self.assertEqual(rows[2], ('Wylma Phlyntstone', 29, True, 2.71828)) + self.assertEqual(rows[3], ('Bhettye Rhubble', 27, None, None)) + self.assertEqual(total_rows, None) + self.assertEqual(page_token, None) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], + {'maxResults': MAX, 'pageToken': TOKEN}) + + def test_fetch_data_w_repeated_fields(self): + from gcloud.bigquery.table import SchemaField + PATH = 'projects/%s/datasets/%s/tables/%s/data' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + ROWS = 1234 + TOKEN = 'TOKEN' + DATA = { + 'totalRows': ROWS, + 'pageToken': TOKEN, + 'rows': [ + {'f': [ + {'v': ['red', 'green']}, + {'v': [{'f': [{'v': ['1', '2']}, + {'v': ['3.1415', '1.414']}]}]}, + ]}, + ] + } + conn = _Connection(DATA) + client = _Client(project=self.PROJECT, connection=conn) + dataset = _Dataset(client) + full_name = SchemaField('color', 'STRING', mode='REPEATED') + index = SchemaField('index', 'INTEGER', 'REPEATED') + score = SchemaField('score', 'FLOAT', 'REPEATED') + struct = SchemaField('struct', 'RECORD', mode='REPEATED', + fields=[index, 
score]) + table = self._makeOne(self.TABLE_NAME, dataset=dataset, + schema=[full_name, struct]) + + rows, total_rows, page_token = table.fetch_data() + + self.assertEqual(len(rows), 1) + self.assertEqual(rows[0][0], ['red', 'green']) + self.assertEqual(rows[0][1], [{'index': [1, 2], + 'score': [3.1415, 1.414]}]) + self.assertEqual(total_rows, ROWS) + self.assertEqual(page_token, TOKEN) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + + def test_fetch_data_w_record_schema(self): + from gcloud.bigquery.table import SchemaField + PATH = 'projects/%s/datasets/%s/tables/%s/data' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + ROWS = 1234 + TOKEN = 'TOKEN' + DATA = { + 'totalRows': ROWS, + 'pageToken': TOKEN, + 'rows': [ + {'f': [ + {'v': 'Phred Phlyntstone'}, + {'v': {'f': [{'v': '800'}, {'v': '555-1212'}, {'v': 1}]}}, + ]}, + {'f': [ + {'v': 'Bharney Rhubble'}, + {'v': {'f': [{'v': '877'}, {'v': '768-5309'}, {'v': 2}]}}, + ]}, + {'f': [ + {'v': 'Wylma Phlyntstone'}, + {'v': None}, + ]}, + ] + } + conn = _Connection(DATA) + client = _Client(project=self.PROJECT, connection=conn) + dataset = _Dataset(client) + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + area_code = SchemaField('area_code', 'STRING', 'REQUIRED') + local_number = SchemaField('local_number', 'STRING', 'REQUIRED') + rank = SchemaField('rank', 'INTEGER', 'REQUIRED') + phone = SchemaField('phone', 'RECORD', mode='NULLABLE', + fields=[area_code, local_number, rank]) + table = self._makeOne(self.TABLE_NAME, dataset=dataset, + schema=[full_name, phone]) + + rows, total_rows, page_token = table.fetch_data() + + self.assertEqual(len(rows), 3) + self.assertEqual(rows[0][0], 'Phred Phlyntstone') + self.assertEqual(rows[0][1], {'area_code': '800', + 'local_number': '555-1212', + 'rank': 1}) + self.assertEqual(rows[1][0], 'Bharney Rhubble') + self.assertEqual(rows[1][1], 
{'area_code': '877', + 'local_number': '768-5309', + 'rank': 2}) + self.assertEqual(rows[2][0], 'Wylma Phlyntstone') + self.assertEqual(rows[2][1], None) + self.assertEqual(total_rows, ROWS) + self.assertEqual(page_token, TOKEN) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + + def test_insert_data_w_bound_client(self): + import datetime + from gcloud._helpers import UTC + from gcloud._helpers import _microseconds_from_datetime + from gcloud.bigquery.table import SchemaField + + WHEN_TS = 1437767599.006 + WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace( + tzinfo=UTC) + PATH = 'projects/%s/datasets/%s/tables/%s/insertAll' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + conn = _Connection({}) + client = _Client(project=self.PROJECT, connection=conn) + dataset = _Dataset(client) + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + age = SchemaField('age', 'INTEGER', mode='REQUIRED') + joined = SchemaField('joined', 'TIMESTAMP', mode='NULLABLE') + table = self._makeOne(self.TABLE_NAME, dataset=dataset, + schema=[full_name, age, joined]) + ROWS = [ + ('Phred Phlyntstone', 32, WHEN), + ('Bharney Rhubble', 33, WHEN + datetime.timedelta(seconds=1)), + ('Wylma Phlyntstone', 29, WHEN + datetime.timedelta(seconds=2)), + ('Bhettye Rhubble', 27, None), + ] + + def _row_data(row): + joined = None + if row[2] is not None: + joined = _microseconds_from_datetime(row[2]) * 1e-6 + return {'full_name': row[0], + 'age': row[1], + 'joined': joined} + + SENT = { + 'rows': [{'json': _row_data(row)} for row in ROWS], + } + + errors = table.insert_data(ROWS) + + self.assertEqual(len(errors), 0) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['data'], SENT) + + def test_insert_data_w_alternate_client(self): + from 
gcloud.bigquery.table import SchemaField + PATH = 'projects/%s/datasets/%s/tables/%s/insertAll' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + RESPONSE = { + 'insertErrors': [ + {'index': 1, + 'errors': [ + {'reason': 'REASON', + 'location': 'LOCATION', + 'debugInfo': 'INFO', + 'message': 'MESSAGE'} + ]}, + ]} + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESPONSE) + client2 = _Client(project=self.PROJECT, connection=conn2) + dataset = _Dataset(client1) + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + age = SchemaField('age', 'INTEGER', mode='REQUIRED') + voter = SchemaField('voter', 'BOOLEAN', mode='NULLABLE') + table = self._makeOne(self.TABLE_NAME, dataset=dataset, + schema=[full_name, age, voter]) + ROWS = [ + ('Phred Phlyntstone', 32, True), + ('Bharney Rhubble', 33, False), + ('Wylma Phlyntstone', 29, True), + ('Bhettye Rhubble', 27, True), + ] + + def _row_data(row): + return {'full_name': row[0], 'age': row[1], 'voter': row[2]} + + SENT = { + 'skipInvalidRows': True, + 'ignoreUnknownValues': True, + 'templateSuffix': '20160303', + 'rows': [{'insertId': index, 'json': _row_data(row)} + for index, row in enumerate(ROWS)], + } + + errors = table.insert_data( + client=client2, + rows=ROWS, + row_ids=[index for index, _ in enumerate(ROWS)], + skip_invalid_rows=True, + ignore_unknown_values=True, + template_suffix='20160303', + ) + + self.assertEqual(len(errors), 1) + self.assertEqual(errors[0]['index'], 1) + self.assertEqual(len(errors[0]['errors']), 1) + self.assertEqual(errors[0]['errors'][0], + RESPONSE['insertErrors'][0]['errors'][0]) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['data'], SENT) + + def test_insert_data_w_repeated_fields(self): + from gcloud.bigquery.table import SchemaField + 
PATH = 'projects/%s/datasets/%s/tables/%s/insertAll' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + conn = _Connection({}) + client = _Client(project=self.PROJECT, connection=conn) + dataset = _Dataset(client) + full_name = SchemaField('color', 'STRING', mode='REPEATED') + index = SchemaField('index', 'INTEGER', 'REPEATED') + score = SchemaField('score', 'FLOAT', 'REPEATED') + struct = SchemaField('struct', 'RECORD', mode='REPEATED', + fields=[index, score]) + table = self._makeOne(self.TABLE_NAME, dataset=dataset, + schema=[full_name, struct]) + ROWS = [ + (['red', 'green'], [{'index': [1, 2], 'score': [3.1415, 1.414]}]), + ] + + def _row_data(row): + return {'color': row[0], + 'struct': row[1]} + + SENT = { + 'rows': [{'json': _row_data(row)} for row in ROWS], + } + + errors = table.insert_data(ROWS) + + self.assertEqual(len(errors), 0) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['data'], SENT) + + def test_insert_data_w_record_schema(self): + from gcloud.bigquery.table import SchemaField + PATH = 'projects/%s/datasets/%s/tables/%s/insertAll' % ( + self.PROJECT, self.DS_NAME, self.TABLE_NAME) + conn = _Connection({}) + client = _Client(project=self.PROJECT, connection=conn) + dataset = _Dataset(client) + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + area_code = SchemaField('area_code', 'STRING', 'REQUIRED') + local_number = SchemaField('local_number', 'STRING', 'REQUIRED') + rank = SchemaField('rank', 'INTEGER', 'REQUIRED') + phone = SchemaField('phone', 'RECORD', mode='NULLABLE', + fields=[area_code, local_number, rank]) + table = self._makeOne(self.TABLE_NAME, dataset=dataset, + schema=[full_name, phone]) + ROWS = [ + ('Phred Phlyntstone', {'area_code': '800', + 'local_number': '555-1212', + 'rank': 1}), + ('Bharney Rhubble', {'area_code': '877', + 'local_number': '768-5309', + 'rank': 2}), + ('Wylma 
Phlyntstone', None), + ] + + def _row_data(row): + return {'full_name': row[0], + 'phone': row[1]} + + SENT = { + 'rows': [{'json': _row_data(row)} for row in ROWS], + } + + errors = table.insert_data(ROWS) + + self.assertEqual(len(errors), 0) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['data'], SENT) + + def test_upload_from_file_text_mode_file_failure(self): + + class TextModeFile(object): + mode = 'r' + + conn = _Connection() + client = _Client(project=self.PROJECT, connection=conn) + dataset = _Dataset(client) + file_obj = TextModeFile() + table = self._makeOne(self.TABLE_NAME, dataset=dataset) + with self.assertRaises(ValueError): + table.upload_from_file(file_obj, 'CSV', size=1234) + + def test_upload_from_file_size_failure(self): + conn = _Connection() + client = _Client(project=self.PROJECT, connection=conn) + dataset = _Dataset(client) + file_obj = object() + table = self._makeOne(self.TABLE_NAME, dataset=dataset) + with self.assertRaises(ValueError): + table.upload_from_file(file_obj, 'CSV', size=None) + + def _upload_from_file_helper(self, **kw): + import csv + import datetime + from six.moves.http_client import OK + from gcloud._helpers import UTC + from gcloud._testing import _NamedTemporaryFile + from gcloud.bigquery.table import SchemaField + + WHEN_TS = 1437767599.006 + WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace( + tzinfo=UTC) + PATH = 'projects/%s/jobs' % (self.PROJECT,) + response = {'status': OK} + conn = _Connection( + (response, b'{}'), + ) + client = _Client(project=self.PROJECT, connection=conn) + expected_job = object() + if 'client' in kw: + kw['client']._job = expected_job + else: + client._job = expected_job + dataset = _Dataset(client) + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + age = SchemaField('age', 'INTEGER', mode='REQUIRED') + joined = SchemaField('joined', 
'TIMESTAMP', mode='NULLABLE') + table = self._makeOne(self.TABLE_NAME, dataset=dataset, + schema=[full_name, age, joined]) + ROWS = [ + ('Phred Phlyntstone', 32, WHEN), + ('Bharney Rhubble', 33, WHEN + datetime.timedelta(seconds=1)), + ('Wylma Phlyntstone', 29, WHEN + datetime.timedelta(seconds=2)), + ('Bhettye Rhubble', 27, None), + ] + + with _NamedTemporaryFile() as temp: + with open(temp.name, 'w') as file_obj: + writer = csv.writer(file_obj) + writer.writerow(('full_name', 'age', 'joined')) + writer.writerows(ROWS) + + with open(temp.name, 'rb') as file_obj: + BODY = file_obj.read() + explicit_size = kw.pop('_explicit_size', False) + if explicit_size: + kw['size'] = len(BODY) + job = table.upload_from_file( + file_obj, 'CSV', rewind=True, **kw) + + self.assertTrue(job is expected_job) + return conn.http._requested, PATH, BODY + + def test_upload_from_file_w_bound_client_multipart(self): + import json + from six.moves.urllib.parse import parse_qsl + from six.moves.urllib.parse import urlsplit + from gcloud._helpers import _to_bytes + from gcloud.streaming.test_transfer import _email_chunk_parser + + requested, PATH, BODY = self._upload_from_file_helper() + parse_chunk = _email_chunk_parser() + + self.assertEqual(len(requested), 1) + req = requested[0] + self.assertEqual(req['method'], 'POST') + uri = req['uri'] + scheme, netloc, path, qs, _ = urlsplit(uri) + self.assertEqual(scheme, 'http') + self.assertEqual(netloc, 'example.com') + self.assertEqual(path, '/%s' % PATH) + self.assertEqual(dict(parse_qsl(qs)), + {'uploadType': 'multipart'}) + + ctype, boundary = [x.strip() + for x in req['headers']['content-type'].split(';')] + self.assertEqual(ctype, 'multipart/related') + self.assertTrue(boundary.startswith('boundary="==')) + self.assertTrue(boundary.endswith('=="')) + + divider = b'--' + _to_bytes(boundary[len('boundary="'):-1]) + chunks = req['body'].split(divider)[1:-1] # discard prolog / epilog + self.assertEqual(len(chunks), 2) + + text_msg = 
parse_chunk(chunks[0].strip()) + self.assertEqual(dict(text_msg._headers), + {'Content-Type': 'application/json', + 'MIME-Version': '1.0'}) + metadata = json.loads(text_msg._payload) + load_config = metadata['configuration']['load'] + DESTINATION_TABLE = { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.TABLE_NAME, + } + self.assertEqual(load_config['destinationTable'], DESTINATION_TABLE) + self.assertEqual(load_config['sourceFormat'], 'CSV') + + app_msg = parse_chunk(chunks[1].strip()) + self.assertEqual(dict(app_msg._headers), + {'Content-Type': 'application/octet-stream', + 'Content-Transfer-Encoding': 'binary', + 'MIME-Version': '1.0'}) + body = BODY.decode('ascii').rstrip() + body_lines = [line.strip() for line in body.splitlines()] + payload_lines = app_msg._payload.rstrip().splitlines() + self.assertEqual(payload_lines, body_lines) + + # pylint: disable=too-many-statements + def test_upload_from_file_w_explicit_client_resumable(self): + import json + from six.moves.http_client import OK + from six.moves.urllib.parse import parse_qsl + from six.moves.urllib.parse import urlsplit + from gcloud._testing import _Monkey + from gcloud.bigquery import table as MUT + + UPLOAD_PATH = 'https://example.com/upload/test' + initial_response = {'status': OK, 'location': UPLOAD_PATH} + upload_response = {'status': OK} + conn = _Connection( + (initial_response, b'{}'), + (upload_response, b'{}'), + ) + client = _Client(project=self.PROJECT, connection=conn) + + class _UploadConfig(object): + accept = ['*/*'] + max_size = None + resumable_multipart = True + resumable_path = u'/upload/bigquery/v2/projects/{project}/jobs' + simple_multipart = True + simple_path = u'' # force resumable + + with _Monkey(MUT, _UploadConfig=_UploadConfig): + orig_requested, PATH, BODY = self._upload_from_file_helper( + allow_jagged_rows=False, + allow_quoted_newlines=False, + create_disposition='CREATE_IF_NEEDED', + encoding='utf8', + field_delimiter=',', + 
ignore_unknown_values=False, + max_bad_records=0, + quote_character='"', + skip_leading_rows=1, + write_disposition='WRITE_APPEND', + client=client, + _explicit_size=True) + + self.assertEqual(len(orig_requested), 0) + + requested = conn.http._requested + self.assertEqual(len(requested), 2) + req = requested[0] + self.assertEqual(req['method'], 'POST') + uri = req['uri'] + scheme, netloc, path, qs, _ = urlsplit(uri) + self.assertEqual(scheme, 'http') + self.assertEqual(netloc, 'example.com') + self.assertEqual(path, '/%s' % PATH) + self.assertEqual(dict(parse_qsl(qs)), + {'uploadType': 'resumable'}) + + self.assertEqual(req['headers']['content-type'], 'application/json') + metadata = json.loads(req['body']) + load_config = metadata['configuration']['load'] + DESTINATION_TABLE = { + 'projectId': self.PROJECT, + 'datasetId': self.DS_NAME, + 'tableId': self.TABLE_NAME, + } + self.assertEqual(load_config['destinationTable'], DESTINATION_TABLE) + self.assertEqual(load_config['sourceFormat'], 'CSV') + self.assertEqual(load_config['allowJaggedRows'], False) + self.assertEqual(load_config['allowQuotedNewlines'], False) + self.assertEqual(load_config['createDisposition'], 'CREATE_IF_NEEDED') + self.assertEqual(load_config['encoding'], 'utf8') + self.assertEqual(load_config['fieldDelimiter'], ',') + self.assertEqual(load_config['ignoreUnknownValues'], False) + self.assertEqual(load_config['maxBadRecords'], 0) + self.assertEqual(load_config['quote'], '"') + self.assertEqual(load_config['skipLeadingRows'], 1) + self.assertEqual(load_config['writeDisposition'], 'WRITE_APPEND') + + req = requested[1] + self.assertEqual(req['method'], 'PUT') + self.assertEqual(req['uri'], UPLOAD_PATH) + headers = req['headers'] + length = len(BODY) + self.assertEqual(headers['Content-Type'], 'application/octet-stream') + self.assertEqual(headers['Content-Range'], + 'bytes 0-%d/%d' % (length - 1, length)) + self.assertEqual(headers['content-length'], '%d' % (length,)) + 
self.assertEqual(req['body'], BODY) + # pylint: enable=too-many-statements + + +class Test_parse_schema_resource(unittest2.TestCase, _SchemaBase): + + def _callFUT(self, resource): + from gcloud.bigquery.table import _parse_schema_resource + return _parse_schema_resource(resource) + + def _makeResource(self): + return { + 'schema': {'fields': [ + {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'}, + {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}, + ]}, + } + + def test__parse_schema_resource_defaults(self): + RESOURCE = self._makeResource() + schema = self._callFUT(RESOURCE['schema']) + self._verifySchema(schema, RESOURCE) + + def test__parse_schema_resource_subfields(self): + RESOURCE = self._makeResource() + RESOURCE['schema']['fields'].append( + {'name': 'phone', + 'type': 'RECORD', + 'mode': 'REPEATABLE', + 'fields': [{'name': 'type', + 'type': 'STRING', + 'mode': 'REQUIRED'}, + {'name': 'number', + 'type': 'STRING', + 'mode': 'REQUIRED'}]}) + schema = self._callFUT(RESOURCE['schema']) + self._verifySchema(schema, RESOURCE) + + def test__parse_schema_resource_fields_without_mode(self): + RESOURCE = self._makeResource() + RESOURCE['schema']['fields'].append( + {'name': 'phone', + 'type': 'STRING'}) + + schema = self._callFUT(RESOURCE['schema']) + self._verifySchema(schema, RESOURCE) + + +class Test_build_schema_resource(unittest2.TestCase, _SchemaBase): + + def _callFUT(self, resource): + from gcloud.bigquery.table import _build_schema_resource + return _build_schema_resource(resource) + + def test_defaults(self): + from gcloud.bigquery.table import SchemaField + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + age = SchemaField('age', 'INTEGER', mode='REQUIRED') + resource = self._callFUT([full_name, age]) + self.assertEqual(len(resource), 2) + self.assertEqual(resource[0], + {'name': 'full_name', + 'type': 'STRING', + 'mode': 'REQUIRED'}) + self.assertEqual(resource[1], + {'name': 'age', + 'type': 'INTEGER', + 'mode': 
'REQUIRED'}) + + def test_w_description(self): + from gcloud.bigquery.table import SchemaField + DESCRIPTION = 'DESCRIPTION' + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED', + description=DESCRIPTION) + age = SchemaField('age', 'INTEGER', mode='REQUIRED') + resource = self._callFUT([full_name, age]) + self.assertEqual(len(resource), 2) + self.assertEqual(resource[0], + {'name': 'full_name', + 'type': 'STRING', + 'mode': 'REQUIRED', + 'description': DESCRIPTION}) + self.assertEqual(resource[1], + {'name': 'age', + 'type': 'INTEGER', + 'mode': 'REQUIRED'}) + + def test_w_subfields(self): + from gcloud.bigquery.table import SchemaField + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + ph_type = SchemaField('type', 'STRING', 'REQUIRED') + ph_num = SchemaField('number', 'STRING', 'REQUIRED') + phone = SchemaField('phone', 'RECORD', mode='REPEATABLE', + fields=[ph_type, ph_num]) + resource = self._callFUT([full_name, phone]) + self.assertEqual(len(resource), 2) + self.assertEqual(resource[0], + {'name': 'full_name', + 'type': 'STRING', + 'mode': 'REQUIRED'}) + self.assertEqual(resource[1], + {'name': 'phone', + 'type': 'RECORD', + 'mode': 'REPEATABLE', + 'fields': [{'name': 'type', + 'type': 'STRING', + 'mode': 'REQUIRED'}, + {'name': 'number', + 'type': 'STRING', + 'mode': 'REQUIRED'}]}) + + +class _Client(object): + + def __init__(self, project='project', connection=None): + self.project = project + self.connection = connection + + def job_from_resource(self, resource): # pylint: disable=unused-argument + return self._job + + +class _Dataset(object): + + def __init__(self, client, name=TestTable.DS_NAME): + self._client = client + self.name = name + + @property + def path(self): + return '/projects/%s/datasets/%s' % ( + self._client.project, self.name) + + @property + def project(self): + return self._client.project + + +class _Responder(object): + + def __init__(self, *responses): + self._responses = responses[:] + self._requested = 
[] + + def _respond(self, **kw): + self._requested.append(kw) + response, self._responses = self._responses[0], self._responses[1:] + return response + + +class _HTTP(_Responder): + + connections = {} # For google-apitools debugging. + + def request(self, uri, method, headers, body, **kw): + if hasattr(body, 'read'): + body = body.read() + return self._respond(uri=uri, method=method, headers=headers, + body=body, **kw) + + +class _Connection(_Responder): + + API_BASE_URL = 'http://example.com' + USER_AGENT = 'testing 1.2.3' + + def __init__(self, *responses): + super(_Connection, self).__init__(*responses) + self.http = _HTTP(*responses) + + def api_request(self, **kw): + from gcloud.exceptions import NotFound + self._requested.append(kw) + + try: + response, self._responses = self._responses[0], self._responses[1:] + except: + raise NotFound('miss') + else: + return response + + def build_api_url(self, path, query_params=None, + api_base_url=API_BASE_URL): + from six.moves.urllib.parse import urlencode + from six.moves.urllib.parse import urlsplit + from six.moves.urllib.parse import urlunsplit + # Mimic the build_api_url interface. + qs = urlencode(query_params or {}) + scheme, netloc, _, _, _ = urlsplit(api_base_url) + return urlunsplit((scheme, netloc, path, qs, '')) diff --git a/env/Lib/site-packages/gcloud/bigtable/__init__.py b/env/Lib/site-packages/gcloud/bigtable/__init__.py new file mode 100644 index 0000000..6b4e266 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/__init__.py @@ -0,0 +1,38 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Bigtable API package.""" + + +from gcloud.bigtable.client import Client + + +_ERR_MSG = """\ +gRPC is required for using the Cloud Bigtable API, but +importing the gRPC library (grpcio in PyPI) has failed. + +As of June 2016, grpcio is only supported in Python 2.7, +which unfortunately means the Cloud Bigtable API isn't +available if you're using Python 3 or Python < 2.7. + +If you're using Python 2.7 and importing / installing +grpcio has failed, this likely means you have a non-standard version +of Python installed. Check http://grpc.io if you're +having trouble installing the grpcio package. +""" + +try: + import grpc.beta.implementations +except ImportError as exc: # pragma: NO COVER + raise ImportError(_ERR_MSG, exc) diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated/__init__.py b/env/Lib/site-packages/gcloud/bigtable/_generated/__init__.py new file mode 100644 index 0000000..ad35adc --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generated protobuf modules for Google Cloud Bigtable API.""" diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_cluster_data.proto b/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_cluster_data.proto new file mode 100644 index 0000000..c0f8a93 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_cluster_data.proto @@ -0,0 +1,93 @@ +// Copyright (c) 2015, Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.bigtable.admin.cluster.v1; + +import "google/api/annotations.proto"; +import "google/longrunning/operations.proto"; +import "google/protobuf/timestamp.proto"; + +option java_multiple_files = true; +option java_outer_classname = "BigtableClusterDataProto"; +option java_package = "com.google.bigtable.admin.cluster.v1"; + + +// A physical location in which a particular project can allocate Cloud BigTable +// resources. +message Zone { + // Possible states of a zone. + enum Status { + // The state of the zone is unknown or unspecified. + UNKNOWN = 0; + + // The zone is in a good state. + OK = 1; + + // The zone is down for planned maintenance. + PLANNED_MAINTENANCE = 2; + + // The zone is down for emergency or unplanned maintenance. + EMERGENCY_MAINENANCE = 3; + } + + // A permanent unique identifier for the zone. 
+ // Values are of the form projects//zones/[a-z][-a-z0-9]* + string name = 1; + + // The name of this zone as it appears in UIs. + string display_name = 2; + + // The current state of this zone. + Status status = 3; +} + +// An isolated set of Cloud BigTable resources on which tables can be hosted. +message Cluster { + // A permanent unique identifier for the cluster. For technical reasons, the + // zone in which the cluster resides is included here. + // Values are of the form + // projects//zones//clusters/[a-z][-a-z0-9]* + string name = 1; + + // The operation currently running on the cluster, if any. + // This cannot be set directly, only through CreateCluster, UpdateCluster, + // or UndeleteCluster. Calls to these methods will be rejected if + // "current_operation" is already set. + google.longrunning.Operation current_operation = 3; + + // The descriptive name for this cluster as it appears in UIs. + // Must be unique per zone. + string display_name = 4; + + // The number of serve nodes allocated to this cluster. + int32 serve_nodes = 5; + + // What storage type to use for tables in this cluster. Only configurable at + // cluster creation time. If unspecified, STORAGE_SSD will be used. + StorageType default_storage_type = 8; +} + +enum StorageType { + // The storage type used is unspecified. + STORAGE_UNSPECIFIED = 0; + + // Data will be stored in SSD, providing low and consistent latencies. + STORAGE_SSD = 1; + + // Data will be stored in HDD, providing high and less predictable + // latencies. + STORAGE_HDD = 2; +} diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_cluster_service.proto b/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_cluster_service.proto new file mode 100644 index 0000000..e65bca4 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_cluster_service.proto @@ -0,0 +1,129 @@ +// Copyright (c) 2015, Google Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.bigtable.admin.cluster.v1; + +import "google/api/annotations.proto"; +import "google/bigtable/admin/cluster/v1/bigtable_cluster_data.proto"; +import "google/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.proto"; +import "google/longrunning/operations.proto"; +import "google/protobuf/empty.proto"; + +option java_multiple_files = true; +option java_outer_classname = "BigtableClusterServicesProto"; +option java_package = "com.google.bigtable.admin.cluster.v1"; + + +// Service for managing zonal Cloud Bigtable resources. +service BigtableClusterService { + // Lists the supported zones for the given project. + rpc ListZones(ListZonesRequest) returns (ListZonesResponse) { + option (google.api.http) = { get: "/v1/{name=projects/*}/zones" }; + } + + // Gets information about a particular cluster. + rpc GetCluster(GetClusterRequest) returns (Cluster) { + option (google.api.http) = { get: "/v1/{name=projects/*/zones/*/clusters/*}" }; + } + + // Lists all clusters in the given project, along with any zones for which + // cluster information could not be retrieved. + rpc ListClusters(ListClustersRequest) returns (ListClustersResponse) { + option (google.api.http) = { get: "/v1/{name=projects/*}/aggregated/clusters" }; + } + + // Creates a cluster and begins preparing it to begin serving. 
The returned + // cluster embeds as its "current_operation" a long-running operation which + // can be used to track the progress of turning up the new cluster. + // Immediately upon completion of this request: + // * The cluster will be readable via the API, with all requested attributes + // but no allocated resources. + // Until completion of the embedded operation: + // * Cancelling the operation will render the cluster immediately unreadable + // via the API. + // * All other attempts to modify or delete the cluster will be rejected. + // Upon completion of the embedded operation: + // * Billing for all successfully-allocated resources will begin (some types + // may have lower than the requested levels). + // * New tables can be created in the cluster. + // * The cluster's allocated resource levels will be readable via the API. + // The embedded operation's "metadata" field type is + // [CreateClusterMetadata][google.bigtable.admin.cluster.v1.CreateClusterMetadata] The embedded operation's "response" field type is + // [Cluster][google.bigtable.admin.cluster.v1.Cluster], if successful. + rpc CreateCluster(CreateClusterRequest) returns (Cluster) { + option (google.api.http) = { post: "/v1/{name=projects/*/zones/*}/clusters" body: "*" }; + } + + // Updates a cluster, and begins allocating or releasing resources as + // requested. The returned cluster embeds as its "current_operation" a + // long-running operation which can be used to track the progress of updating + // the cluster. + // Immediately upon completion of this request: + // * For resource types where a decrease in the cluster's allocation has been + // requested, billing will be based on the newly-requested level. + // Until completion of the embedded operation: + // * Cancelling the operation will set its metadata's "cancelled_at_time", + // and begin restoring resources to their pre-request values. 
The operation + // is guaranteed to succeed at undoing all resource changes, after which + // point it will terminate with a CANCELLED status. + // * All other attempts to modify or delete the cluster will be rejected. + // * Reading the cluster via the API will continue to give the pre-request + // resource levels. + // Upon completion of the embedded operation: + // * Billing will begin for all successfully-allocated resources (some types + // may have lower than the requested levels). + // * All newly-reserved resources will be available for serving the cluster's + // tables. + // * The cluster's new resource levels will be readable via the API. + // [UpdateClusterMetadata][google.bigtable.admin.cluster.v1.UpdateClusterMetadata] The embedded operation's "response" field type is + // [Cluster][google.bigtable.admin.cluster.v1.Cluster], if successful. + rpc UpdateCluster(Cluster) returns (Cluster) { + option (google.api.http) = { put: "/v1/{name=projects/*/zones/*/clusters/*}" body: "*" }; + } + + // Marks a cluster and all of its tables for permanent deletion in 7 days. + // Immediately upon completion of the request: + // * Billing will cease for all of the cluster's reserved resources. + // * The cluster's "delete_time" field will be set 7 days in the future. + // Soon afterward: + // * All tables within the cluster will become unavailable. + // Prior to the cluster's "delete_time": + // * The cluster can be recovered with a call to UndeleteCluster. + // * All other attempts to modify or delete the cluster will be rejected. + // At the cluster's "delete_time": + // * The cluster and *all of its tables* will immediately and irrevocably + // disappear from the API, and their data will be permanently deleted. 
+ rpc DeleteCluster(DeleteClusterRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { delete: "/v1/{name=projects/*/zones/*/clusters/*}" }; + } + + // Cancels the scheduled deletion of an cluster and begins preparing it to + // resume serving. The returned operation will also be embedded as the + // cluster's "current_operation". + // Immediately upon completion of this request: + // * The cluster's "delete_time" field will be unset, protecting it from + // automatic deletion. + // Until completion of the returned operation: + // * The operation cannot be cancelled. + // Upon completion of the returned operation: + // * Billing for the cluster's resources will resume. + // * All tables within the cluster will be available. + // [UndeleteClusterMetadata][google.bigtable.admin.cluster.v1.UndeleteClusterMetadata] The embedded operation's "response" field type is + // [Cluster][google.bigtable.admin.cluster.v1.Cluster], if successful. + rpc UndeleteCluster(UndeleteClusterRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { post: "/v1/{name=projects/*/zones/*/clusters/*}:undelete" body: "null" }; + } +} diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_cluster_service_messages.proto b/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_cluster_service_messages.proto new file mode 100644 index 0000000..3291969 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_cluster_service_messages.proto @@ -0,0 +1,134 @@ +// Copyright (c) 2015, Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.bigtable.admin.cluster.v1; + +import "google/bigtable/admin/cluster/v1/bigtable_cluster_data.proto"; +import "google/protobuf/timestamp.proto"; + +option java_multiple_files = true; +option java_outer_classname = "BigtableClusterServiceMessagesProto"; +option java_package = "com.google.bigtable.admin.cluster.v1"; + + +// Request message for BigtableClusterService.ListZones. +message ListZonesRequest { + // The unique name of the project for which a list of supported zones is + // requested. + // Values are of the form projects/ + string name = 1; +} + +// Response message for BigtableClusterService.ListZones. +message ListZonesResponse { + // The list of requested zones. + repeated Zone zones = 1; +} + +// Request message for BigtableClusterService.GetCluster. +message GetClusterRequest { + // The unique name of the requested cluster. + // Values are of the form projects//zones//clusters/ + string name = 1; +} + +// Request message for BigtableClusterService.ListClusters. +message ListClustersRequest { + // The unique name of the project for which a list of clusters is requested. + // Values are of the form projects/ + string name = 1; +} + +// Response message for BigtableClusterService.ListClusters. +message ListClustersResponse { + // The list of requested Clusters. + repeated Cluster clusters = 1; + + // The zones for which clusters could not be retrieved. + repeated Zone failed_zones = 2; +} + +// Request message for BigtableClusterService.CreateCluster. 
+message CreateClusterRequest { + // The unique name of the zone in which to create the cluster. + // Values are of the form projects//zones/ + string name = 1; + + // The id to be used when referring to the new cluster within its zone, + // e.g. just the "test-cluster" section of the full name + // "projects//zones//clusters/test-cluster". + string cluster_id = 2; + + // The cluster to create. + // The "name", "delete_time", and "current_operation" fields must be left + // blank. + Cluster cluster = 3; +} + +// Metadata type for the operation returned by +// BigtableClusterService.CreateCluster. +message CreateClusterMetadata { + // The request which prompted the creation of this operation. + CreateClusterRequest original_request = 1; + + // The time at which original_request was received. + google.protobuf.Timestamp request_time = 2; + + // The time at which this operation failed or was completed successfully. + google.protobuf.Timestamp finish_time = 3; +} + +// Metadata type for the operation returned by +// BigtableClusterService.UpdateCluster. +message UpdateClusterMetadata { + // The request which prompted the creation of this operation. + Cluster original_request = 1; + + // The time at which original_request was received. + google.protobuf.Timestamp request_time = 2; + + // The time at which this operation was cancelled. If set, this operation is + // in the process of undoing itself (which is guaranteed to succeed) and + // cannot be cancelled again. + google.protobuf.Timestamp cancel_time = 3; + + // The time at which this operation failed or was completed successfully. + google.protobuf.Timestamp finish_time = 4; +} + +// Request message for BigtableClusterService.DeleteCluster. +message DeleteClusterRequest { + // The unique name of the cluster to be deleted. + // Values are of the form projects//zones//clusters/ + string name = 1; +} + +// Request message for BigtableClusterService.UndeleteCluster. 
+message UndeleteClusterRequest { + // The unique name of the cluster to be un-deleted. + // Values are of the form projects//zones//clusters/ + string name = 1; +} + +// Metadata type for the operation returned by +// BigtableClusterService.UndeleteCluster. +message UndeleteClusterMetadata { + // The time at which the original request was received. + google.protobuf.Timestamp request_time = 1; + + // The time at which this operation failed or was completed successfully. + google.protobuf.Timestamp finish_time = 2; +} diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_data.proto b/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_data.proto new file mode 100644 index 0000000..290eb91 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_data.proto @@ -0,0 +1,515 @@ +// Copyright (c) 2015, Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.bigtable.v1; + +option java_multiple_files = true; +option java_outer_classname = "BigtableDataProto"; +option java_package = "com.google.bigtable.v1"; + + +// Specifies the complete (requested) contents of a single row of a table. +// Rows which exceed 256MiB in size cannot be read in full. +message Row { + // The unique key which identifies this row within its table. This is the same + // key that's used to identify the row in, for example, a MutateRowRequest. 
+ // May contain any non-empty byte string up to 4KiB in length. + bytes key = 1; + + // May be empty, but only if the entire row is empty. + // The mutual ordering of column families is not specified. + repeated Family families = 2; +} + +// Specifies (some of) the contents of a single row/column family of a table. +message Family { + // The unique key which identifies this family within its row. This is the + // same key that's used to identify the family in, for example, a RowFilter + // which sets its "family_name_regex_filter" field. + // Must match [-_.a-zA-Z0-9]+, except that AggregatingRowProcessors may + // produce cells in a sentinel family with an empty name. + // Must be no greater than 64 characters in length. + string name = 1; + + // Must not be empty. Sorted in order of increasing "qualifier". + repeated Column columns = 2; +} + +// Specifies (some of) the contents of a single row/column of a table. +message Column { + // The unique key which identifies this column within its family. This is the + // same key that's used to identify the column in, for example, a RowFilter + // which sets its "column_qualifier_regex_filter" field. + // May contain any byte string, including the empty string, up to 16kiB in + // length. + bytes qualifier = 1; + + // Must not be empty. Sorted in order of decreasing "timestamp_micros". + repeated Cell cells = 2; +} + +// Specifies (some of) the contents of a single row/column/timestamp of a table. +message Cell { + // The cell's stored timestamp, which also uniquely identifies it within + // its column. + // Values are always expressed in microseconds, but individual tables may set + // a coarser "granularity" to further restrict the allowed values. For + // example, a table which specifies millisecond granularity will only allow + // values of "timestamp_micros" which are multiples of 1000. + int64 timestamp_micros = 1; + + // The value stored in the cell. 
+ // May contain any byte string, including the empty string, up to 100MiB in + // length. + bytes value = 2; + + // Labels applied to the cell by a [RowFilter][google.bigtable.v1.RowFilter]. + repeated string labels = 3; +} + +// Specifies a contiguous range of rows. +message RowRange { + // Inclusive lower bound. If left empty, interpreted as the empty string. + bytes start_key = 2; + + // Exclusive upper bound. If left empty, interpreted as infinity. + bytes end_key = 3; +} + +// Specifies a non-contiguous set of rows. +message RowSet { + // Single rows included in the set. + repeated bytes row_keys = 1; + + // Contiguous row ranges included in the set. + repeated RowRange row_ranges = 2; +} + +// Specifies a contiguous range of columns within a single column family. +// The range spans from : to +// :, where both bounds can be either inclusive or +// exclusive. +message ColumnRange { + // The name of the column family within which this range falls. + string family_name = 1; + + // The column qualifier at which to start the range (within 'column_family'). + // If neither field is set, interpreted as the empty string, inclusive. + oneof start_qualifier { + // Used when giving an inclusive lower bound for the range. + bytes start_qualifier_inclusive = 2; + + // Used when giving an exclusive lower bound for the range. + bytes start_qualifier_exclusive = 3; + } + + // The column qualifier at which to end the range (within 'column_family'). + // If neither field is set, interpreted as the infinite string, exclusive. + oneof end_qualifier { + // Used when giving an inclusive upper bound for the range. + bytes end_qualifier_inclusive = 4; + + // Used when giving an exclusive upper bound for the range. + bytes end_qualifier_exclusive = 5; + } +} + +// Specified a contiguous range of microsecond timestamps. +message TimestampRange { + // Inclusive lower bound. If left empty, interpreted as 0. + int64 start_timestamp_micros = 1; + + // Exclusive upper bound. 
If left empty, interpreted as infinity. + int64 end_timestamp_micros = 2; +} + +// Specifies a contiguous range of raw byte values. +message ValueRange { + // The value at which to start the range. + // If neither field is set, interpreted as the empty string, inclusive. + oneof start_value { + // Used when giving an inclusive lower bound for the range. + bytes start_value_inclusive = 1; + + // Used when giving an exclusive lower bound for the range. + bytes start_value_exclusive = 2; + } + + // The value at which to end the range. + // If neither field is set, interpreted as the infinite string, exclusive. + oneof end_value { + // Used when giving an inclusive upper bound for the range. + bytes end_value_inclusive = 3; + + // Used when giving an exclusive upper bound for the range. + bytes end_value_exclusive = 4; + } +} + +// Takes a row as input and produces an alternate view of the row based on +// specified rules. For example, a RowFilter might trim down a row to include +// just the cells from columns matching a given regular expression, or might +// return all the cells of a row but not their values. More complicated filters +// can be composed out of these components to express requests such as, "within +// every column of a particular family, give just the two most recent cells +// which are older than timestamp X." +// +// There are two broad categories of RowFilters (true filters and transformers), +// as well as two ways to compose simple filters into more complex ones +// (chains and interleaves). They work as follows: +// +// * True filters alter the input row by excluding some of its cells wholesale +// from the output row. An example of a true filter is the "value_regex_filter", +// which excludes cells whose values don't match the specified pattern. All +// regex true filters use RE2 syntax (https://github.com/google/re2/wiki/Syntax) +// in raw byte mode (RE2::Latin1), and are evaluated as full matches. 
An +// important point to keep in mind is that RE2(.) is equivalent by default to +// RE2([^\n]), meaning that it does not match newlines. When attempting to match +// an arbitrary byte, you should therefore use the escape sequence '\C', which +// may need to be further escaped as '\\C' in your client language. +// +// * Transformers alter the input row by changing the values of some of its +// cells in the output, without excluding them completely. Currently, the only +// supported transformer is the "strip_value_transformer", which replaces every +// cell's value with the empty string. +// +// * Chains and interleaves are described in more detail in the +// RowFilter.Chain and RowFilter.Interleave documentation. +// +// The total serialized size of a RowFilter message must not +// exceed 4096 bytes, and RowFilters may not be nested within each other +// (in Chains or Interleaves) to a depth of more than 20. +message RowFilter { + // A RowFilter which sends rows through several RowFilters in sequence. + message Chain { + // The elements of "filters" are chained together to process the input row: + // in row -> f(0) -> intermediate row -> f(1) -> ... -> f(N) -> out row + // The full chain is executed atomically. + repeated RowFilter filters = 1; + } + + // A RowFilter which sends each row to each of several component + // RowFilters and interleaves the results. + message Interleave { + // The elements of "filters" all process a copy of the input row, and the + // results are pooled, sorted, and combined into a single output row. + // If multiple cells are produced with the same column and timestamp, + // they will all appear in the output row in an unspecified mutual order. 
+ // Consider the following example, with three filters: + // + // input row + // | + // ----------------------------------------------------- + // | | | + // f(0) f(1) f(2) + // | | | + // 1: foo,bar,10,x foo,bar,10,z far,bar,7,a + // 2: foo,blah,11,z far,blah,5,x far,blah,5,x + // | | | + // ----------------------------------------------------- + // | + // 1: foo,bar,10,z // could have switched with #2 + // 2: foo,bar,10,x // could have switched with #1 + // 3: foo,blah,11,z + // 4: far,bar,7,a + // 5: far,blah,5,x // identical to #6 + // 6: far,blah,5,x // identical to #5 + // All interleaved filters are executed atomically. + repeated RowFilter filters = 1; + } + + // A RowFilter which evaluates one of two possible RowFilters, depending on + // whether or not a predicate RowFilter outputs any cells from the input row. + // + // IMPORTANT NOTE: The predicate filter does not execute atomically with the + // true and false filters, which may lead to inconsistent or unexpected + // results. Additionally, Condition filters have poor performance, especially + // when filters are set for the false condition. + message Condition { + // If "predicate_filter" outputs any cells, then "true_filter" will be + // evaluated on the input row. Otherwise, "false_filter" will be evaluated. + RowFilter predicate_filter = 1; + + // The filter to apply to the input row if "predicate_filter" returns any + // results. If not provided, no results will be returned in the true case. + RowFilter true_filter = 2; + + // The filter to apply to the input row if "predicate_filter" does not + // return any results. If not provided, no results will be returned in the + // false case. + RowFilter false_filter = 3; + } + + // Which of the possible RowFilter types to apply. If none are set, this + // RowFilter returns all cells in the input row. + oneof filter { + // Applies several RowFilters to the data in sequence, progressively + // narrowing the results. 
+ Chain chain = 1; + + // Applies several RowFilters to the data in parallel and combines the + // results. + Interleave interleave = 2; + + // Applies one of two possible RowFilters to the data based on the output of + // a predicate RowFilter. + Condition condition = 3; + + // ADVANCED USE ONLY. + // Hook for introspection into the RowFilter. Outputs all cells directly to + // the output of the read rather than to any parent filter. Consider the + // following example: + // + // Chain( + // FamilyRegex("A"), + // Interleave( + // All(), + // Chain(Label("foo"), Sink()) + // ), + // QualifierRegex("B") + // ) + // + // A,A,1,w + // A,B,2,x + // B,B,4,z + // | + // FamilyRegex("A") + // | + // A,A,1,w + // A,B,2,x + // | + // +------------+-------------+ + // | | + // All() Label(foo) + // | | + // A,A,1,w A,A,1,w,labels:[foo] + // A,B,2,x A,B,2,x,labels:[foo] + // | | + // | Sink() --------------+ + // | | | + // +------------+ x------+ A,A,1,w,labels:[foo] + // | A,B,2,x,labels:[foo] + // A,A,1,w | + // A,B,2,x | + // | | + // QualifierRegex("B") | + // | | + // A,B,2,x | + // | | + // +--------------------------------+ + // | + // A,A,1,w,labels:[foo] + // A,B,2,x,labels:[foo] // could be switched + // A,B,2,x // could be switched + // + // Despite being excluded by the qualifier filter, a copy of every cell + // that reaches the sink is present in the final result. + // + // As with an [Interleave][google.bigtable.v1.RowFilter.Interleave], + // duplicate cells are possible, and appear in an unspecified mutual order. + // In this case we have a duplicate with column "A:B" and timestamp 2, + // because one copy passed through the all filter while the other was + // passed through the label and sink. Note that one copy has label "foo", + // while the other does not. + // + // Cannot be used within the `predicate_filter`, `true_filter`, or + // `false_filter` of a [Condition][google.bigtable.v1.RowFilter.Condition]. 
+ bool sink = 16; + + // Matches all cells, regardless of input. Functionally equivalent to + // leaving `filter` unset, but included for completeness. + bool pass_all_filter = 17; + + // Does not match any cells, regardless of input. Useful for temporarily + // disabling just part of a filter. + bool block_all_filter = 18; + + // Matches only cells from rows whose keys satisfy the given RE2 regex. In + // other words, passes through the entire row when the key matches, and + // otherwise produces an empty row. + // Note that, since row keys can contain arbitrary bytes, the '\C' escape + // sequence must be used if a true wildcard is desired. The '.' character + // will not match the new line character '\n', which may be present in a + // binary key. + bytes row_key_regex_filter = 4; + + // Matches all cells from a row with probability p, and matches no cells + // from the row with probability 1-p. + double row_sample_filter = 14; + + // Matches only cells from columns whose families satisfy the given RE2 + // regex. For technical reasons, the regex must not contain the ':' + // character, even if it is not being used as a literal. + // Note that, since column families cannot contain the new line character + // '\n', it is sufficient to use '.' as a full wildcard when matching + // column family names. + string family_name_regex_filter = 5; + + // Matches only cells from columns whose qualifiers satisfy the given RE2 + // regex. + // Note that, since column qualifiers can contain arbitrary bytes, the '\C' + // escape sequence must be used if a true wildcard is desired. The '.' + // character will not match the new line character '\n', which may be + // present in a binary qualifier. + bytes column_qualifier_regex_filter = 6; + + // Matches only cells from columns within the given range. + ColumnRange column_range_filter = 7; + + // Matches only cells with timestamps within the given range. 
+ TimestampRange timestamp_range_filter = 8; + + // Matches only cells with values that satisfy the given regular expression. + // Note that, since cell values can contain arbitrary bytes, the '\C' escape + // sequence must be used if a true wildcard is desired. The '.' character + // will not match the new line character '\n', which may be present in a + // binary value. + bytes value_regex_filter = 9; + + // Matches only cells with values that fall within the given range. + ValueRange value_range_filter = 15; + + // Skips the first N cells of each row, matching all subsequent cells. + // If duplicate cells are present, as is possible when using an Interleave, + // each copy of the cell is counted separately. + int32 cells_per_row_offset_filter = 10; + + // Matches only the first N cells of each row. + // If duplicate cells are present, as is possible when using an Interleave, + // each copy of the cell is counted separately. + int32 cells_per_row_limit_filter = 11; + + // Matches only the most recent N cells within each column. For example, + // if N=2, this filter would match column "foo:bar" at timestamps 10 and 9, + // skip all earlier cells in "foo:bar", and then begin matching again in + // column "foo:bar2". + // If duplicate cells are present, as is possible when using an Interleave, + // each copy of the cell is counted separately. + int32 cells_per_column_limit_filter = 12; + + // Replaces each cell's value with the empty string. + bool strip_value_transformer = 13; + + // Applies the given label to all cells in the output row. This allows + // the client to determine which results were produced from which part of + // the filter. + // + // Values must be at most 15 characters in length, and match the RE2 + // pattern [a-z0-9\\-]+ + // + // Due to a technical limitation, it is not currently possible to apply + // multiple labels to a cell. As a result, a Chain may have no more than + // one sub-filter which contains a apply_label_transformer. 
It is okay for + // an Interleave to contain multiple apply_label_transformers, as they will + // be applied to separate copies of the input. This may be relaxed in the + // future. + string apply_label_transformer = 19; + } +} + +// Specifies a particular change to be made to the contents of a row. +message Mutation { + // A Mutation which sets the value of the specified cell. + message SetCell { + // The name of the family into which new data should be written. + // Must match [-_.a-zA-Z0-9]+ + string family_name = 1; + + // The qualifier of the column into which new data should be written. + // Can be any byte string, including the empty string. + bytes column_qualifier = 2; + + // The timestamp of the cell into which new data should be written. + // Use -1 for current Bigtable server time. + // Otherwise, the client should set this value itself, noting that the + // default value is a timestamp of zero if the field is left unspecified. + // Values must match the "granularity" of the table (e.g. micros, millis). + int64 timestamp_micros = 3; + + // The value to be written into the specified cell. + bytes value = 4; + } + + // A Mutation which deletes cells from the specified column, optionally + // restricting the deletions to a given timestamp range. + message DeleteFromColumn { + // The name of the family from which cells should be deleted. + // Must match [-_.a-zA-Z0-9]+ + string family_name = 1; + + // The qualifier of the column from which cells should be deleted. + // Can be any byte string, including the empty string. + bytes column_qualifier = 2; + + // The range of timestamps within which cells should be deleted. + TimestampRange time_range = 3; + } + + // A Mutation which deletes all cells from the specified column family. + message DeleteFromFamily { + // The name of the family from which cells should be deleted. + // Must match [-_.a-zA-Z0-9]+ + string family_name = 1; + } + + // A Mutation which deletes all cells from the containing row. 
+ message DeleteFromRow { + + } + + // Which of the possible Mutation types to apply. + oneof mutation { + // Set a cell's value. + SetCell set_cell = 1; + + // Deletes cells from a column. + DeleteFromColumn delete_from_column = 2; + + // Deletes cells from a column family. + DeleteFromFamily delete_from_family = 3; + + // Deletes cells from the entire row. + DeleteFromRow delete_from_row = 4; + } +} + +// Specifies an atomic read/modify/write operation on the latest value of the +// specified column. +message ReadModifyWriteRule { + // The name of the family to which the read/modify/write should be applied. + // Must match [-_.a-zA-Z0-9]+ + string family_name = 1; + + // The qualifier of the column to which the read/modify/write should be + // applied. + // Can be any byte string, including the empty string. + bytes column_qualifier = 2; + + // The rule used to determine the column's new latest value from its current + // latest value. + oneof rule { + // Rule specifying that "append_value" be appended to the existing value. + // If the targeted cell is unset, it will be treated as containing the + // empty string. + bytes append_value = 3; + + // Rule specifying that "increment_amount" be added to the existing value. + // If the targeted cell is unset, it will be treated as containing a zero. + // Otherwise, the targeted cell must contain an 8-byte value (interpreted + // as a 64-bit big-endian signed integer), or the entire request will fail. + int64 increment_amount = 4; + } +} diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_service.proto b/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_service.proto new file mode 100644 index 0000000..f1a83d3 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_service.proto @@ -0,0 +1,73 @@ +// Copyright (c) 2015, Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.bigtable.v1; + +import "google/api/annotations.proto"; +import "google/bigtable/v1/bigtable_data.proto"; +import "google/bigtable/v1/bigtable_service_messages.proto"; +import "google/protobuf/empty.proto"; + +option java_generic_services = true; +option java_multiple_files = true; +option java_outer_classname = "BigtableServicesProto"; +option java_package = "com.google.bigtable.v1"; + + +// Service for reading from and writing to existing Bigtables. +service BigtableService { + // Streams back the contents of all requested rows, optionally applying + // the same Reader filter to each. Depending on their size, rows may be + // broken up across multiple responses, but atomicity of each row will still + // be preserved. + rpc ReadRows(ReadRowsRequest) returns (stream ReadRowsResponse) { + option (google.api.http) = { post: "/v1/{table_name=projects/*/zones/*/clusters/*/tables/*}/rows:read" body: "*" }; + } + + // Returns a sample of row keys in the table. The returned row keys will + // delimit contiguous sections of the table of approximately equal size, + // which can be used to break up the data for distributed tasks like + // mapreduces. + rpc SampleRowKeys(SampleRowKeysRequest) returns (stream SampleRowKeysResponse) { + option (google.api.http) = { get: "/v1/{table_name=projects/*/zones/*/clusters/*/tables/*}/rows:sampleKeys" }; + } + + // Mutates a row atomically. Cells already present in the row are left + // unchanged unless explicitly changed by 'mutation'. 
+ rpc MutateRow(MutateRowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { post: "/v1/{table_name=projects/*/zones/*/clusters/*/tables/*}/rows/{row_key}:mutate" body: "*" }; + } + + // Mutates multiple rows in a batch. Each individual row is mutated + // atomically as in MutateRow, but the entire batch is not executed + // atomically. + rpc MutateRows(MutateRowsRequest) returns (MutateRowsResponse) { + option (google.api.http) = { post: "/v1/{table_name=projects/*/zones/*/clusters/*/tables/*}:mutateRows" body: "*" }; + } + + // Mutates a row atomically based on the output of a predicate Reader filter. + rpc CheckAndMutateRow(CheckAndMutateRowRequest) returns (CheckAndMutateRowResponse) { + option (google.api.http) = { post: "/v1/{table_name=projects/*/zones/*/clusters/*/tables/*}/rows/{row_key}:checkAndMutate" body: "*" }; + } + + // Modifies a row atomically, reading the latest existing timestamp/value from + // the specified columns and writing a new value at + // max(existing timestamp, current server time) based on pre-defined + // read/modify/write rules. Returns the new contents of all modified cells. + rpc ReadModifyWriteRow(ReadModifyWriteRowRequest) returns (Row) { + option (google.api.http) = { post: "/v1/{table_name=projects/*/zones/*/clusters/*/tables/*}/rows/{row_key}:readModifyWrite" body: "*" }; + } +} diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_service_messages.proto b/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_service_messages.proto new file mode 100644 index 0000000..1479fb6 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_service_messages.proto @@ -0,0 +1,214 @@ +// Copyright (c) 2015, Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.bigtable.v1; + +import "google/bigtable/v1/bigtable_data.proto"; +import "google/rpc/status.proto"; + +option java_multiple_files = true; +option java_outer_classname = "BigtableServiceMessagesProto"; +option java_package = "com.google.bigtable.v1"; + + +// Request message for BigtableServer.ReadRows. +message ReadRowsRequest { + // The unique name of the table from which to read. + string table_name = 1; + + // If neither row_key nor row_range is set, reads from all rows. + oneof target { + // The key of a single row from which to read. + bytes row_key = 2; + + // A range of rows from which to read. + RowRange row_range = 3; + + // A set of rows from which to read. Entries need not be in order, and will + // be deduplicated before reading. + // The total serialized size of the set must not exceed 1MB. + RowSet row_set = 8; + } + + // The filter to apply to the contents of the specified row(s). If unset, + // reads the entire table. + RowFilter filter = 5; + + // By default, rows are read sequentially, producing results which are + // guaranteed to arrive in increasing row order. Setting + // "allow_row_interleaving" to true allows multiple rows to be interleaved in + // the response stream, which increases throughput but breaks this guarantee, + // and may force the client to use more memory to buffer partially-received + // rows. Cannot be set to true when specifying "num_rows_limit". + bool allow_row_interleaving = 6; + + // The read will terminate after committing to N rows' worth of results. 
The + // default (zero) is to return all results. + // Note that "allow_row_interleaving" cannot be set to true when this is set. + int64 num_rows_limit = 7; +} + +// Response message for BigtableService.ReadRows. +message ReadRowsResponse { + // Specifies a piece of a row's contents returned as part of the read + // response stream. + message Chunk { + oneof chunk { + // A subset of the data from a particular row. As long as no "reset_row" + // is received in between, multiple "row_contents" from the same row are + // from the same atomic view of that row, and will be received in the + // expected family/column/timestamp order. + Family row_contents = 1; + + // Indicates that the client should drop all previous chunks for + // "row_key", as it will be re-read from the beginning. + bool reset_row = 2; + + // Indicates that the client can safely process all previous chunks for + // "row_key", as its data has been fully read. + bool commit_row = 3; + } + } + + // The key of the row for which we're receiving data. + // Results will be received in increasing row key order, unless + // "allow_row_interleaving" was specified in the request. + bytes row_key = 1; + + // One or more chunks of the row specified by "row_key". + repeated Chunk chunks = 2; +} + +// Request message for BigtableService.SampleRowKeys. +message SampleRowKeysRequest { + // The unique name of the table from which to sample row keys. + string table_name = 1; +} + +// Response message for BigtableService.SampleRowKeys. +message SampleRowKeysResponse { + // Sorted streamed sequence of sample row keys in the table. The table might + // have contents before the first row key in the list and after the last one, + // but a key containing the empty string indicates "end of table" and will be + // the last response given, if present. 
+ // Note that row keys in this list may not have ever been written to or read + // from, and users should therefore not make any assumptions about the row key + // structure that are specific to their use case. + bytes row_key = 1; + + // Approximate total storage space used by all rows in the table which precede + // "row_key". Buffering the contents of all rows between two subsequent + // samples would require space roughly equal to the difference in their + // "offset_bytes" fields. + int64 offset_bytes = 2; +} + +// Request message for BigtableService.MutateRow. +message MutateRowRequest { + // The unique name of the table to which the mutation should be applied. + string table_name = 1; + + // The key of the row to which the mutation should be applied. + bytes row_key = 2; + + // Changes to be atomically applied to the specified row. Entries are applied + // in order, meaning that earlier mutations can be masked by later ones. + // Must contain at least one entry and at most 100000. + repeated Mutation mutations = 3; +} + +// Request message for BigtableService.MutateRows. +message MutateRowsRequest { + message Entry { + // The key of the row to which the `mutations` should be applied. + bytes row_key = 1; + + // Changes to be atomically applied to the specified row. Mutations are + // applied in order, meaning that earlier mutations can be masked by + // later ones. + // At least one mutation must be specified. + repeated Mutation mutations = 2; + } + + // The unique name of the table to which the mutations should be applied. + string table_name = 1; + + // The row keys/mutations to be applied in bulk. + // Each entry is applied as an atomic mutation, but the entries may be + // applied in arbitrary order (even between entries for the same row). + // At least one entry must be specified, and in total the entries may + // contain at most 100000 mutations. + repeated Entry entries = 2; +} + +// Response message for BigtableService.MutateRows. 
+message MutateRowsResponse { + // The results for each Entry from the request, presented in the order + // in which the entries were originally given. + repeated google.rpc.Status statuses = 1; +} + +// Request message for BigtableService.CheckAndMutateRowRequest +message CheckAndMutateRowRequest { + // The unique name of the table to which the conditional mutation should be + // applied. + string table_name = 1; + + // The key of the row to which the conditional mutation should be applied. + bytes row_key = 2; + + // The filter to be applied to the contents of the specified row. Depending + // on whether or not any results are yielded, either "true_mutations" or + // "false_mutations" will be executed. If unset, checks that the row contains + // any values at all. + RowFilter predicate_filter = 6; + + // Changes to be atomically applied to the specified row if "predicate_filter" + // yields at least one cell when applied to "row_key". Entries are applied in + // order, meaning that earlier mutations can be masked by later ones. + // Must contain at least one entry if "false_mutations" is empty, and at most + // 100000. + repeated Mutation true_mutations = 4; + + // Changes to be atomically applied to the specified row if "predicate_filter" + // does not yield any cells when applied to "row_key". Entries are applied in + // order, meaning that earlier mutations can be masked by later ones. + // Must contain at least one entry if "true_mutations" is empty, and at most + // 100000. + repeated Mutation false_mutations = 5; +} + +// Response message for BigtableService.CheckAndMutateRowRequest. +message CheckAndMutateRowResponse { + // Whether or not the request's "predicate_filter" yielded any results for + // the specified row. + bool predicate_matched = 1; +} + +// Request message for BigtableService.ReadModifyWriteRowRequest. +message ReadModifyWriteRowRequest { + // The unique name of the table to which the read/modify/write rules should be + // applied. 
+ string table_name = 1; + + // The key of the row to which the read/modify/write rules should be applied. + bytes row_key = 2; + + // Rules specifying how the specified row's contents are to be transformed + // into writes. Entries are applied in order, meaning that earlier rules will + // affect the results of later ones. + repeated ReadModifyWriteRule rules = 3; +} diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_table_data.proto b/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_table_data.proto new file mode 100644 index 0000000..f81c878 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_table_data.proto @@ -0,0 +1,125 @@ +// Copyright (c) 2015, Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.bigtable.admin.table.v1; + +import "google/longrunning/operations.proto"; +import "google/protobuf/duration.proto"; + +option java_multiple_files = true; +option java_outer_classname = "BigtableTableDataProto"; +option java_package = "com.google.bigtable.admin.table.v1"; + + +// A collection of user data indexed by row, column, and timestamp. +// Each table is served using the resources of its parent cluster. 
+message Table { + enum TimestampGranularity { + MILLIS = 0; + } + + // A unique identifier of the form + // /tables/[_a-zA-Z0-9][-_.a-zA-Z0-9]* + string name = 1; + + // If this Table is in the process of being created, the Operation used to + // track its progress. As long as this operation is present, the Table will + // not accept any Table Admin or Read/Write requests. + google.longrunning.Operation current_operation = 2; + + // The column families configured for this table, mapped by column family id. + map column_families = 3; + + // The granularity (e.g. MILLIS, MICROS) at which timestamps are stored in + // this table. Timestamps not matching the granularity will be rejected. + // Cannot be changed once the table is created. + TimestampGranularity granularity = 4; +} + +// A set of columns within a table which share a common configuration. +message ColumnFamily { + // A unique identifier of the form /columnFamilies/[-_.a-zA-Z0-9]+ + // The last segment is the same as the "name" field in + // google.bigtable.v1.Family. 
+ string name = 1; + + // Garbage collection expression specified by the following grammar: + // GC = EXPR + // | "" ; + // EXPR = EXPR, "||", EXPR (* lowest precedence *) + // | EXPR, "&&", EXPR + // | "(", EXPR, ")" (* highest precedence *) + // | PROP ; + // PROP = "version() >", NUM32 + // | "age() >", NUM64, [ UNIT ] ; + // NUM32 = non-zero-digit { digit } ; (* # NUM32 <= 2^32 - 1 *) + // NUM64 = non-zero-digit { digit } ; (* # NUM64 <= 2^63 - 1 *) + // UNIT = "d" | "h" | "m" (* d=days, h=hours, m=minutes, else micros *) + // GC expressions can be up to 500 characters in length + // + // The different types of PROP are defined as follows: + // version() - cell index, counting from most recent and starting at 1 + // age() - age of the cell (current time minus cell timestamp) + // + // Example: "version() > 3 || (age() > 3d && version() > 1)" + // drop cells beyond the most recent three, and drop cells older than three + // days unless they're the most recent cell in the row/column + // + // Garbage collection executes opportunistically in the background, and so + // it's possible for reads to return a cell even if it matches the active GC + // expression for its family. + string gc_expression = 2; + + // Garbage collection rule specified as a protobuf. + // Supersedes `gc_expression`. + // Must serialize to at most 500 bytes. + // + // NOTE: Garbage collection executes opportunistically in the background, and + // so it's possible for reads to return a cell even if it matches the active + // GC expression for its family. + GcRule gc_rule = 3; +} + +// Rule for determining which cells to delete during garbage collection. +message GcRule { + // A GcRule which deletes cells matching all of the given rules. + message Intersection { + // Only delete cells which would be deleted by every element of `rules`. + repeated GcRule rules = 1; + } + + // A GcRule which deletes cells matching any of the given rules. 
+ message Union { + // Delete cells which would be deleted by any element of `rules`. + repeated GcRule rules = 1; + } + + oneof rule { + // Delete all cells in a column except the most recent N. + int32 max_num_versions = 1; + + // Delete cells in a column older than the given age. + // Values must be at least one millisecond, and will be truncated to + // microsecond granularity. + google.protobuf.Duration max_age = 2; + + // Delete cells that would be deleted by every nested rule. + Intersection intersection = 3; + + // Delete cells that would be deleted by any nested rule. + Union union = 4; + } +} diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_table_service.proto b/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_table_service.proto new file mode 100644 index 0000000..417409c --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_table_service.proto @@ -0,0 +1,74 @@ +// Copyright (c) 2015, Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.bigtable.admin.table.v1; + +import "google/api/annotations.proto"; +import "google/bigtable/admin/table/v1/bigtable_table_data.proto"; +import "google/bigtable/admin/table/v1/bigtable_table_service_messages.proto"; +import "google/protobuf/empty.proto"; + +option java_multiple_files = true; +option java_outer_classname = "BigtableTableServicesProto"; +option java_package = "com.google.bigtable.admin.table.v1"; + + +// Service for creating, configuring, and deleting Cloud Bigtable tables. +// Provides access to the table schemas only, not the data stored within the tables. +service BigtableTableService { + // Creates a new table, to be served from a specified cluster. + // The table can be created with a full set of initial column families, + // specified in the request. + rpc CreateTable(CreateTableRequest) returns (Table) { + option (google.api.http) = { post: "/v1/{name=projects/*/zones/*/clusters/*}/tables" body: "*" }; + } + + // Lists the names of all tables served from a specified cluster. + rpc ListTables(ListTablesRequest) returns (ListTablesResponse) { + option (google.api.http) = { get: "/v1/{name=projects/*/zones/*/clusters/*}/tables" }; + } + + // Gets the schema of the specified table, including its column families. + rpc GetTable(GetTableRequest) returns (Table) { + option (google.api.http) = { get: "/v1/{name=projects/*/zones/*/clusters/*/tables/*}" }; + } + + // Permanently deletes a specified table and all of its data. + rpc DeleteTable(DeleteTableRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { delete: "/v1/{name=projects/*/zones/*/clusters/*/tables/*}" }; + } + + // Changes the name of a specified table. + // Cannot be used to move tables between clusters, zones, or projects. 
+ rpc RenameTable(RenameTableRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { post: "/v1/{name=projects/*/zones/*/clusters/*/tables/*}:rename" body: "*" }; + } + + // Creates a new column family within a specified table. + rpc CreateColumnFamily(CreateColumnFamilyRequest) returns (ColumnFamily) { + option (google.api.http) = { post: "/v1/{name=projects/*/zones/*/clusters/*/tables/*}/columnFamilies" body: "*" }; + } + + // Changes the configuration of a specified column family. + rpc UpdateColumnFamily(ColumnFamily) returns (ColumnFamily) { + option (google.api.http) = { put: "/v1/{name=projects/*/zones/*/clusters/*/tables/*/columnFamilies/*}" body: "*" }; + } + + // Permanently deletes a specified column family and all of its data. + rpc DeleteColumnFamily(DeleteColumnFamilyRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { delete: "/v1/{name=projects/*/zones/*/clusters/*/tables/*/columnFamilies/*}" }; + } +} diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_table_service_messages.proto b/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_table_service_messages.proto new file mode 100644 index 0000000..73f2a8c --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated/_bigtable_table_service_messages.proto @@ -0,0 +1,101 @@ +// Copyright (c) 2015, Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.bigtable.admin.table.v1; + +import "google/bigtable/admin/table/v1/bigtable_table_data.proto"; + +option java_multiple_files = true; +option java_outer_classname = "BigtableTableServiceMessagesProto"; +option java_package = "com.google.bigtable.admin.table.v1"; + + +message CreateTableRequest { + // The unique name of the cluster in which to create the new table. + string name = 1; + + // The name by which the new table should be referred to within the cluster, + // e.g. "foobar" rather than "/tables/foobar". + string table_id = 2; + + // The Table to create. The `name` field of the Table and all of its + // ColumnFamilies must be left blank, and will be populated in the response. + Table table = 3; + + // The optional list of row keys that will be used to initially split the + // table into several tablets (Tablets are similar to HBase regions). + // Given two split keys, "s1" and "s2", three tablets will be created, + // spanning the key ranges: [, s1), [s1, s2), [s2, ). + // + // Example: + // * Row keys := ["a", "apple", "custom", "customer_1", "customer_2", + // "other", "zz"] + // * initial_split_keys := ["apple", "customer_1", "customer_2", "other"] + // * Key assignment: + // - Tablet 1 [, apple) => {"a"}. + // - Tablet 2 [apple, customer_1) => {"apple", "custom"}. + // - Tablet 3 [customer_1, customer_2) => {"customer_1"}. + // - Tablet 4 [customer_2, other) => {"customer_2"}. + // - Tablet 5 [other, ) => {"other", "zz"}. + repeated string initial_split_keys = 4; +} + +message ListTablesRequest { + // The unique name of the cluster for which tables should be listed. + string name = 1; +} + +message ListTablesResponse { + // The tables present in the requested cluster. + // At present, only the names of the tables are populated. + repeated Table tables = 1; +} + +message GetTableRequest { + // The unique name of the requested table. 
+ string name = 1; +} + +message DeleteTableRequest { + // The unique name of the table to be deleted. + string name = 1; +} + +message RenameTableRequest { + // The current unique name of the table. + string name = 1; + + // The new name by which the table should be referred to within its containing + // cluster, e.g. "foobar" rather than "/tables/foobar". + string new_id = 2; +} + +message CreateColumnFamilyRequest { + // The unique name of the table in which to create the new column family. + string name = 1; + + // The name by which the new column family should be referred to within the + // table, e.g. "foobar" rather than "/columnFamilies/foobar". + string column_family_id = 2; + + // The column family to create. The `name` field must be left blank. + ColumnFamily column_family = 3; +} + +message DeleteColumnFamilyRequest { + // The unique name of the column family to be deleted. + string name = 1; +} diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated/_operations.proto b/env/Lib/site-packages/gcloud/bigtable/_generated/_operations.proto new file mode 100644 index 0000000..a358d0a --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated/_operations.proto @@ -0,0 +1,144 @@ +// Copyright (c) 2015, Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.longrunning; + +import "google/api/annotations.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/empty.proto"; +import "google/rpc/status.proto"; + +option java_multiple_files = true; +option java_outer_classname = "OperationsProto"; +option java_package = "com.google.longrunning"; + + +// Manages long-running operations with an API service. +// +// When an API method normally takes long time to complete, it can be designed +// to return [Operation][google.longrunning.Operation] to the client, and the client can use this +// interface to receive the real response asynchronously by polling the +// operation resource, or using `google.watcher.v1.Watcher` interface to watch +// the response, or pass the operation resource to another API (such as Google +// Cloud Pub/Sub API) to receive the response. Any API service that returns +// long-running operations should implement the `Operations` interface so +// developers can have a consistent client experience. +service Operations { + // Gets the latest state of a long-running operation. Clients may use this + // method to poll the operation result at intervals as recommended by the API + // service. + rpc GetOperation(GetOperationRequest) returns (Operation) { + option (google.api.http) = { get: "/v1/{name=operations/**}" }; + } + + // Lists operations that match the specified filter in the request. If the + // server doesn't support this method, it returns + // `google.rpc.Code.UNIMPLEMENTED`. + rpc ListOperations(ListOperationsRequest) returns (ListOperationsResponse) { + option (google.api.http) = { get: "/v1/{name=operations}" }; + } + + // Starts asynchronous cancellation on a long-running operation. The server + // makes a best effort to cancel the operation, but success is not + // guaranteed. If the server doesn't support this method, it returns + // `google.rpc.Code.UNIMPLEMENTED`. 
Clients may use + // [Operations.GetOperation] or other methods to check whether the + // cancellation succeeded or the operation completed despite cancellation. + rpc CancelOperation(CancelOperationRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { post: "/v1/{name=operations/**}:cancel" body: "*" }; + } + + // Deletes a long-running operation. It indicates the client is no longer + // interested in the operation result. It does not cancel the operation. + rpc DeleteOperation(DeleteOperationRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { delete: "/v1/{name=operations/**}" }; + } +} + +// This resource represents a long-running operation that is the result of a +// network API call. +message Operation { + // The name of the operation resource, which is only unique within the same + // service that originally returns it. + string name = 1; + + // Some service-specific metadata associated with the operation. It typically + // contains progress information and common metadata such as create time. + // Some services may not provide such metadata. Any method that returns a + // long-running operation should document the metadata type, if any. + google.protobuf.Any metadata = 2; + + // If the value is false, it means the operation is still in progress. + // If true, the operation is completed and the `result` is available. + bool done = 3; + + oneof result { + // The error result of the operation in case of failure. + google.rpc.Status error = 4; + + // The normal response of the operation in case of success. If the original + // method returns no data on success, such as `Delete`, the response will be + // `google.protobuf.Empty`. If the original method is standard + // `Get`/`Create`/`Update`, the response should be the resource. For other + // methods, the response should have the type `XxxResponse`, where `Xxx` + // is the original method name. 
For example, if the original method name + // is `TakeSnapshot()`, the inferred response type will be + // `TakeSnapshotResponse`. + google.protobuf.Any response = 5; + } +} + +// The request message for [Operations.GetOperation][google.longrunning.Operations.GetOperation]. +message GetOperationRequest { + // The name of the operation resource. + string name = 1; +} + +// The request message for [Operations.ListOperations][google.longrunning.Operations.ListOperations]. +message ListOperationsRequest { + // The name of the operation collection. + string name = 4; + + // The standard List filter. + string filter = 1; + + // The standard List page size. + int32 page_size = 2; + + // The standard List page token. + string page_token = 3; +} + +// The response message for [Operations.ListOperations][google.longrunning.Operations.ListOperations]. +message ListOperationsResponse { + // A list of operations that match the specified filter in the request. + repeated Operation operations = 1; + + // The standard List next-page token. + string next_page_token = 2; +} + +// The request message for [Operations.CancelOperation][google.longrunning.Operations.CancelOperation]. +message CancelOperationRequest { + // The name of the operation resource to be cancelled. + string name = 1; +} + +// The request message for [Operations.DeleteOperation][google.longrunning.Operations.DeleteOperation]. +message DeleteOperationRequest { + // The name of the operation resource to be deleted. + string name = 1; +} diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_cluster_data_pb2.py b/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_cluster_data_pb2.py new file mode 100644 index 0000000..4106aab --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_cluster_data_pb2.py @@ -0,0 +1,221 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/bigtable/admin/cluster/v1/bigtable_cluster_data.proto + +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/bigtable/admin/cluster/v1/bigtable_cluster_data.proto', + package='google.bigtable.admin.cluster.v1', + syntax='proto3', + serialized_pb=b'\n""" + __metaclass__ = abc.ABCMeta + @abc.abstractmethod + def ListZones(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def GetCluster(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def ListClusters(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def CreateCluster(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def UpdateCluster(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def DeleteCluster(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def UndeleteCluster(self, request, context): + raise NotImplementedError() + +class BetaBigtableClusterServiceStub(object): + """The interface to which stubs will conform.""" + __metaclass__ = abc.ABCMeta + @abc.abstractmethod + def ListZones(self, request, timeout): + raise NotImplementedError() + ListZones.future = None + @abc.abstractmethod + def GetCluster(self, request, timeout): + raise NotImplementedError() + 
GetCluster.future = None + @abc.abstractmethod + def ListClusters(self, request, timeout): + raise NotImplementedError() + ListClusters.future = None + @abc.abstractmethod + def CreateCluster(self, request, timeout): + raise NotImplementedError() + CreateCluster.future = None + @abc.abstractmethod + def UpdateCluster(self, request, timeout): + raise NotImplementedError() + UpdateCluster.future = None + @abc.abstractmethod + def DeleteCluster(self, request, timeout): + raise NotImplementedError() + DeleteCluster.future = None + @abc.abstractmethod + def UndeleteCluster(self, request, timeout): + raise NotImplementedError() + UndeleteCluster.future = None + +def beta_create_BigtableClusterService_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + import gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_cluster_data_pb2 + import gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_cluster_data_pb2 + import gcloud.bigtable._generated.bigtable_cluster_data_pb2 + import gcloud.bigtable._generated.bigtable_cluster_data_pb2 + import gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2 + import google.protobuf.empty_pb2 + import gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2 + import google.longrunning.operations_pb2 + request_deserializers = { + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'CreateCluster'): gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2.CreateClusterRequest.FromString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'DeleteCluster'): 
gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2.DeleteClusterRequest.FromString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'GetCluster'): gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2.GetClusterRequest.FromString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'ListClusters'): gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2.ListClustersRequest.FromString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'ListZones'): gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2.ListZonesRequest.FromString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'UndeleteCluster'): gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2.UndeleteClusterRequest.FromString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'UpdateCluster'): gcloud.bigtable._generated.bigtable_cluster_data_pb2.Cluster.FromString, + } + response_serializers = { + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'CreateCluster'): gcloud.bigtable._generated.bigtable_cluster_data_pb2.Cluster.SerializeToString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'DeleteCluster'): google.protobuf.empty_pb2.Empty.SerializeToString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'GetCluster'): gcloud.bigtable._generated.bigtable_cluster_data_pb2.Cluster.SerializeToString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'ListClusters'): gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2.ListClustersResponse.SerializeToString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'ListZones'): gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2.ListZonesResponse.SerializeToString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'UndeleteCluster'): google.longrunning.operations_pb2.Operation.SerializeToString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 
'UpdateCluster'): gcloud.bigtable._generated.bigtable_cluster_data_pb2.Cluster.SerializeToString, + } + method_implementations = { + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'CreateCluster'): face_utilities.unary_unary_inline(servicer.CreateCluster), + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'DeleteCluster'): face_utilities.unary_unary_inline(servicer.DeleteCluster), + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'GetCluster'): face_utilities.unary_unary_inline(servicer.GetCluster), + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'ListClusters'): face_utilities.unary_unary_inline(servicer.ListClusters), + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'ListZones'): face_utilities.unary_unary_inline(servicer.ListZones), + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'UndeleteCluster'): face_utilities.unary_unary_inline(servicer.UndeleteCluster), + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'UpdateCluster'): face_utilities.unary_unary_inline(servicer.UpdateCluster), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + +def beta_create_BigtableClusterService_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + import gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_cluster_data_pb2 + import gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2 + import 
gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_cluster_data_pb2 + import gcloud.bigtable._generated.bigtable_cluster_data_pb2 + import gcloud.bigtable._generated.bigtable_cluster_data_pb2 + import gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2 + import google.protobuf.empty_pb2 + import gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2 + import google.longrunning.operations_pb2 + request_serializers = { + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'CreateCluster'): gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2.CreateClusterRequest.SerializeToString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'DeleteCluster'): gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2.DeleteClusterRequest.SerializeToString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'GetCluster'): gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2.GetClusterRequest.SerializeToString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'ListClusters'): gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2.ListClustersRequest.SerializeToString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'ListZones'): gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2.ListZonesRequest.SerializeToString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'UndeleteCluster'): gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2.UndeleteClusterRequest.SerializeToString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'UpdateCluster'): gcloud.bigtable._generated.bigtable_cluster_data_pb2.Cluster.SerializeToString, + } + response_deserializers = { + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'CreateCluster'): gcloud.bigtable._generated.bigtable_cluster_data_pb2.Cluster.FromString, + 
('google.bigtable.admin.cluster.v1.BigtableClusterService', 'DeleteCluster'): google.protobuf.empty_pb2.Empty.FromString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'GetCluster'): gcloud.bigtable._generated.bigtable_cluster_data_pb2.Cluster.FromString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'ListClusters'): gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2.ListClustersResponse.FromString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'ListZones'): gcloud.bigtable._generated.bigtable_cluster_service_messages_pb2.ListZonesResponse.FromString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'UndeleteCluster'): google.longrunning.operations_pb2.Operation.FromString, + ('google.bigtable.admin.cluster.v1.BigtableClusterService', 'UpdateCluster'): gcloud.bigtable._generated.bigtable_cluster_data_pb2.Cluster.FromString, + } + cardinalities = { + 'CreateCluster': cardinality.Cardinality.UNARY_UNARY, + 'DeleteCluster': cardinality.Cardinality.UNARY_UNARY, + 'GetCluster': cardinality.Cardinality.UNARY_UNARY, + 'ListClusters': cardinality.Cardinality.UNARY_UNARY, + 'ListZones': cardinality.Cardinality.UNARY_UNARY, + 'UndeleteCluster': cardinality.Cardinality.UNARY_UNARY, + 'UpdateCluster': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.bigtable.admin.cluster.v1.BigtableClusterService', cardinalities, options=stub_options) +# @@protoc_insertion_point(module_scope) diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_data_pb2.py b/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_data_pb2.py new file mode 100644 index 0000000..47eb875 --- /dev/null +++ 
b/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_data_pb2.py @@ -0,0 +1,1226 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/bigtable/v1/bigtable_data.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/bigtable/v1/bigtable_data.proto', + package='google.bigtable.v1', + syntax='proto3', + serialized_pb=b'\n&google/bigtable/v1/bigtable_data.proto\x12\x12google.bigtable.v1\"@\n\x03Row\x12\x0b\n\x03key\x18\x01 \x01(\x0c\x12,\n\x08\x66\x61milies\x18\x02 \x03(\x0b\x32\x1a.google.bigtable.v1.Family\"C\n\x06\x46\x61mily\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x07\x63olumns\x18\x02 \x03(\x0b\x32\x1a.google.bigtable.v1.Column\"D\n\x06\x43olumn\x12\x11\n\tqualifier\x18\x01 \x01(\x0c\x12\'\n\x05\x63\x65lls\x18\x02 \x03(\x0b\x32\x18.google.bigtable.v1.Cell\"?\n\x04\x43\x65ll\x12\x18\n\x10timestamp_micros\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x0c\x12\x0e\n\x06labels\x18\x03 \x03(\t\".\n\x08RowRange\x12\x11\n\tstart_key\x18\x02 \x01(\x0c\x12\x0f\n\x07\x65nd_key\x18\x03 \x01(\x0c\"L\n\x06RowSet\x12\x10\n\x08row_keys\x18\x01 \x03(\x0c\x12\x30\n\nrow_ranges\x18\x02 \x03(\x0b\x32\x1c.google.bigtable.v1.RowRange\"\xd6\x01\n\x0b\x43olumnRange\x12\x13\n\x0b\x66\x61mily_name\x18\x01 \x01(\t\x12#\n\x19start_qualifier_inclusive\x18\x02 \x01(\x0cH\x00\x12#\n\x19start_qualifier_exclusive\x18\x03 \x01(\x0cH\x00\x12!\n\x17\x65nd_qualifier_inclusive\x18\x04 \x01(\x0cH\x01\x12!\n\x17\x65nd_qualifier_exclusive\x18\x05 \x01(\x0cH\x01\x42\x11\n\x0fstart_qualifierB\x0f\n\rend_qualifier\"N\n\x0eTimestampRange\x12\x1e\n\x16start_timestamp_micros\x18\x01 
\x01(\x03\x12\x1c\n\x14\x65nd_timestamp_micros\x18\x02 \x01(\x03\"\xa8\x01\n\nValueRange\x12\x1f\n\x15start_value_inclusive\x18\x01 \x01(\x0cH\x00\x12\x1f\n\x15start_value_exclusive\x18\x02 \x01(\x0cH\x00\x12\x1d\n\x13\x65nd_value_inclusive\x18\x03 \x01(\x0cH\x01\x12\x1d\n\x13\x65nd_value_exclusive\x18\x04 \x01(\x0cH\x01\x42\r\n\x0bstart_valueB\x0b\n\tend_value\"\xdf\x08\n\tRowFilter\x12\x34\n\x05\x63hain\x18\x01 \x01(\x0b\x32#.google.bigtable.v1.RowFilter.ChainH\x00\x12>\n\ninterleave\x18\x02 \x01(\x0b\x32(.google.bigtable.v1.RowFilter.InterleaveH\x00\x12<\n\tcondition\x18\x03 \x01(\x0b\x32\'.google.bigtable.v1.RowFilter.ConditionH\x00\x12\x0e\n\x04sink\x18\x10 \x01(\x08H\x00\x12\x19\n\x0fpass_all_filter\x18\x11 \x01(\x08H\x00\x12\x1a\n\x10\x62lock_all_filter\x18\x12 \x01(\x08H\x00\x12\x1e\n\x14row_key_regex_filter\x18\x04 \x01(\x0cH\x00\x12\x1b\n\x11row_sample_filter\x18\x0e \x01(\x01H\x00\x12\"\n\x18\x66\x61mily_name_regex_filter\x18\x05 \x01(\tH\x00\x12\'\n\x1d\x63olumn_qualifier_regex_filter\x18\x06 \x01(\x0cH\x00\x12>\n\x13\x63olumn_range_filter\x18\x07 \x01(\x0b\x32\x1f.google.bigtable.v1.ColumnRangeH\x00\x12\x44\n\x16timestamp_range_filter\x18\x08 \x01(\x0b\x32\".google.bigtable.v1.TimestampRangeH\x00\x12\x1c\n\x12value_regex_filter\x18\t \x01(\x0cH\x00\x12<\n\x12value_range_filter\x18\x0f \x01(\x0b\x32\x1e.google.bigtable.v1.ValueRangeH\x00\x12%\n\x1b\x63\x65lls_per_row_offset_filter\x18\n \x01(\x05H\x00\x12$\n\x1a\x63\x65lls_per_row_limit_filter\x18\x0b \x01(\x05H\x00\x12\'\n\x1d\x63\x65lls_per_column_limit_filter\x18\x0c \x01(\x05H\x00\x12!\n\x17strip_value_transformer\x18\r \x01(\x08H\x00\x12!\n\x17\x61pply_label_transformer\x18\x13 \x01(\tH\x00\x1a\x37\n\x05\x43hain\x12.\n\x07\x66ilters\x18\x01 \x03(\x0b\x32\x1d.google.bigtable.v1.RowFilter\x1a<\n\nInterleave\x12.\n\x07\x66ilters\x18\x01 \x03(\x0b\x32\x1d.google.bigtable.v1.RowFilter\x1a\xad\x01\n\tCondition\x12\x37\n\x10predicate_filter\x18\x01 
\x01(\x0b\x32\x1d.google.bigtable.v1.RowFilter\x12\x32\n\x0btrue_filter\x18\x02 \x01(\x0b\x32\x1d.google.bigtable.v1.RowFilter\x12\x33\n\x0c\x66\x61lse_filter\x18\x03 \x01(\x0b\x32\x1d.google.bigtable.v1.RowFilterB\x08\n\x06\x66ilter\"\xc9\x04\n\x08Mutation\x12\x38\n\x08set_cell\x18\x01 \x01(\x0b\x32$.google.bigtable.v1.Mutation.SetCellH\x00\x12K\n\x12\x64\x65lete_from_column\x18\x02 \x01(\x0b\x32-.google.bigtable.v1.Mutation.DeleteFromColumnH\x00\x12K\n\x12\x64\x65lete_from_family\x18\x03 \x01(\x0b\x32-.google.bigtable.v1.Mutation.DeleteFromFamilyH\x00\x12\x45\n\x0f\x64\x65lete_from_row\x18\x04 \x01(\x0b\x32*.google.bigtable.v1.Mutation.DeleteFromRowH\x00\x1a\x61\n\x07SetCell\x12\x13\n\x0b\x66\x61mily_name\x18\x01 \x01(\t\x12\x18\n\x10\x63olumn_qualifier\x18\x02 \x01(\x0c\x12\x18\n\x10timestamp_micros\x18\x03 \x01(\x03\x12\r\n\x05value\x18\x04 \x01(\x0c\x1ay\n\x10\x44\x65leteFromColumn\x12\x13\n\x0b\x66\x61mily_name\x18\x01 \x01(\t\x12\x18\n\x10\x63olumn_qualifier\x18\x02 \x01(\x0c\x12\x36\n\ntime_range\x18\x03 \x01(\x0b\x32\".google.bigtable.v1.TimestampRange\x1a\'\n\x10\x44\x65leteFromFamily\x12\x13\n\x0b\x66\x61mily_name\x18\x01 \x01(\t\x1a\x0f\n\rDeleteFromRowB\n\n\x08mutation\"\x80\x01\n\x13ReadModifyWriteRule\x12\x13\n\x0b\x66\x61mily_name\x18\x01 \x01(\t\x12\x18\n\x10\x63olumn_qualifier\x18\x02 \x01(\x0c\x12\x16\n\x0c\x61ppend_value\x18\x03 \x01(\x0cH\x00\x12\x1a\n\x10increment_amount\x18\x04 \x01(\x03H\x00\x42\x06\n\x04ruleB-\n\x16\x63om.google.bigtable.v1B\x11\x42igtableDataProtoP\x01\x62\x06proto3' +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_ROW = _descriptor.Descriptor( + name='Row', + full_name='google.bigtable.v1.Row', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.bigtable.v1.Row.key', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='families', full_name='google.bigtable.v1.Row.families', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=62, + serialized_end=126, +) + + +_FAMILY = _descriptor.Descriptor( + name='Family', + full_name='google.bigtable.v1.Family', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.v1.Family.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='columns', full_name='google.bigtable.v1.Family.columns', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=128, + serialized_end=195, +) + + +_COLUMN = _descriptor.Descriptor( + name='Column', + full_name='google.bigtable.v1.Column', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='qualifier', full_name='google.bigtable.v1.Column.qualifier', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cells', full_name='google.bigtable.v1.Column.cells', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=197, + serialized_end=265, +) + + +_CELL = _descriptor.Descriptor( + name='Cell', + full_name='google.bigtable.v1.Cell', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='timestamp_micros', full_name='google.bigtable.v1.Cell.timestamp_micros', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.bigtable.v1.Cell.value', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.bigtable.v1.Cell.labels', index=2, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=267, + serialized_end=330, +) + + +_ROWRANGE = _descriptor.Descriptor( + name='RowRange', + 
full_name='google.bigtable.v1.RowRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='start_key', full_name='google.bigtable.v1.RowRange.start_key', index=0, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_key', full_name='google.bigtable.v1.RowRange.end_key', index=1, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=332, + serialized_end=378, +) + + +_ROWSET = _descriptor.Descriptor( + name='RowSet', + full_name='google.bigtable.v1.RowSet', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='row_keys', full_name='google.bigtable.v1.RowSet.row_keys', index=0, + number=1, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='row_ranges', full_name='google.bigtable.v1.RowSet.row_ranges', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=380, + serialized_end=456, +) + + +_COLUMNRANGE = 
_descriptor.Descriptor( + name='ColumnRange', + full_name='google.bigtable.v1.ColumnRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='family_name', full_name='google.bigtable.v1.ColumnRange.family_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start_qualifier_inclusive', full_name='google.bigtable.v1.ColumnRange.start_qualifier_inclusive', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start_qualifier_exclusive', full_name='google.bigtable.v1.ColumnRange.start_qualifier_exclusive', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_qualifier_inclusive', full_name='google.bigtable.v1.ColumnRange.end_qualifier_inclusive', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_qualifier_exclusive', full_name='google.bigtable.v1.ColumnRange.end_qualifier_exclusive', index=4, + number=5, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + 
is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='start_qualifier', full_name='google.bigtable.v1.ColumnRange.start_qualifier', + index=0, containing_type=None, fields=[]), + _descriptor.OneofDescriptor( + name='end_qualifier', full_name='google.bigtable.v1.ColumnRange.end_qualifier', + index=1, containing_type=None, fields=[]), + ], + serialized_start=459, + serialized_end=673, +) + + +_TIMESTAMPRANGE = _descriptor.Descriptor( + name='TimestampRange', + full_name='google.bigtable.v1.TimestampRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='start_timestamp_micros', full_name='google.bigtable.v1.TimestampRange.start_timestamp_micros', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_timestamp_micros', full_name='google.bigtable.v1.TimestampRange.end_timestamp_micros', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=675, + serialized_end=753, +) + + +_VALUERANGE = _descriptor.Descriptor( + name='ValueRange', + full_name='google.bigtable.v1.ValueRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='start_value_inclusive', full_name='google.bigtable.v1.ValueRange.start_value_inclusive', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start_value_exclusive', full_name='google.bigtable.v1.ValueRange.start_value_exclusive', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_value_inclusive', full_name='google.bigtable.v1.ValueRange.end_value_inclusive', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_value_exclusive', full_name='google.bigtable.v1.ValueRange.end_value_exclusive', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='start_value', full_name='google.bigtable.v1.ValueRange.start_value', + index=0, containing_type=None, fields=[]), + _descriptor.OneofDescriptor( + name='end_value', full_name='google.bigtable.v1.ValueRange.end_value', + index=1, containing_type=None, fields=[]), + ], + serialized_start=756, + serialized_end=924, +) + + +_ROWFILTER_CHAIN = _descriptor.Descriptor( + name='Chain', + full_name='google.bigtable.v1.RowFilter.Chain', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='filters', full_name='google.bigtable.v1.RowFilter.Chain.filters', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, 
default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1743, + serialized_end=1798, +) + +_ROWFILTER_INTERLEAVE = _descriptor.Descriptor( + name='Interleave', + full_name='google.bigtable.v1.RowFilter.Interleave', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='filters', full_name='google.bigtable.v1.RowFilter.Interleave.filters', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1800, + serialized_end=1860, +) + +_ROWFILTER_CONDITION = _descriptor.Descriptor( + name='Condition', + full_name='google.bigtable.v1.RowFilter.Condition', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='predicate_filter', full_name='google.bigtable.v1.RowFilter.Condition.predicate_filter', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='true_filter', full_name='google.bigtable.v1.RowFilter.Condition.true_filter', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + 
name='false_filter', full_name='google.bigtable.v1.RowFilter.Condition.false_filter', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1863, + serialized_end=2036, +) + +_ROWFILTER = _descriptor.Descriptor( + name='RowFilter', + full_name='google.bigtable.v1.RowFilter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='chain', full_name='google.bigtable.v1.RowFilter.chain', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='interleave', full_name='google.bigtable.v1.RowFilter.interleave', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='condition', full_name='google.bigtable.v1.RowFilter.condition', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sink', full_name='google.bigtable.v1.RowFilter.sink', index=3, + number=16, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + 
name='pass_all_filter', full_name='google.bigtable.v1.RowFilter.pass_all_filter', index=4, + number=17, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='block_all_filter', full_name='google.bigtable.v1.RowFilter.block_all_filter', index=5, + number=18, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='row_key_regex_filter', full_name='google.bigtable.v1.RowFilter.row_key_regex_filter', index=6, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='row_sample_filter', full_name='google.bigtable.v1.RowFilter.row_sample_filter', index=7, + number=14, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='family_name_regex_filter', full_name='google.bigtable.v1.RowFilter.family_name_regex_filter', index=8, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='column_qualifier_regex_filter', full_name='google.bigtable.v1.RowFilter.column_qualifier_regex_filter', index=9, + number=6, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='column_range_filter', full_name='google.bigtable.v1.RowFilter.column_range_filter', index=10, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timestamp_range_filter', full_name='google.bigtable.v1.RowFilter.timestamp_range_filter', index=11, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value_regex_filter', full_name='google.bigtable.v1.RowFilter.value_regex_filter', index=12, + number=9, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value_range_filter', full_name='google.bigtable.v1.RowFilter.value_range_filter', index=13, + number=15, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cells_per_row_offset_filter', full_name='google.bigtable.v1.RowFilter.cells_per_row_offset_filter', index=14, + number=10, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cells_per_row_limit_filter', full_name='google.bigtable.v1.RowFilter.cells_per_row_limit_filter', index=15, + number=11, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, 
+ message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cells_per_column_limit_filter', full_name='google.bigtable.v1.RowFilter.cells_per_column_limit_filter', index=16, + number=12, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='strip_value_transformer', full_name='google.bigtable.v1.RowFilter.strip_value_transformer', index=17, + number=13, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='apply_label_transformer', full_name='google.bigtable.v1.RowFilter.apply_label_transformer', index=18, + number=19, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_ROWFILTER_CHAIN, _ROWFILTER_INTERLEAVE, _ROWFILTER_CONDITION, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='filter', full_name='google.bigtable.v1.RowFilter.filter', + index=0, containing_type=None, fields=[]), + ], + serialized_start=927, + serialized_end=2046, +) + + +_MUTATION_SETCELL = _descriptor.Descriptor( + name='SetCell', + full_name='google.bigtable.v1.Mutation.SetCell', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='family_name', full_name='google.bigtable.v1.Mutation.SetCell.family_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, 
default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='column_qualifier', full_name='google.bigtable.v1.Mutation.SetCell.column_qualifier', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timestamp_micros', full_name='google.bigtable.v1.Mutation.SetCell.timestamp_micros', index=2, + number=3, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.bigtable.v1.Mutation.SetCell.value', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2344, + serialized_end=2441, +) + +_MUTATION_DELETEFROMCOLUMN = _descriptor.Descriptor( + name='DeleteFromColumn', + full_name='google.bigtable.v1.Mutation.DeleteFromColumn', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='family_name', full_name='google.bigtable.v1.Mutation.DeleteFromColumn.family_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='column_qualifier', 
full_name='google.bigtable.v1.Mutation.DeleteFromColumn.column_qualifier', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='time_range', full_name='google.bigtable.v1.Mutation.DeleteFromColumn.time_range', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2443, + serialized_end=2564, +) + +_MUTATION_DELETEFROMFAMILY = _descriptor.Descriptor( + name='DeleteFromFamily', + full_name='google.bigtable.v1.Mutation.DeleteFromFamily', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='family_name', full_name='google.bigtable.v1.Mutation.DeleteFromFamily.family_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2566, + serialized_end=2605, +) + +_MUTATION_DELETEFROMROW = _descriptor.Descriptor( + name='DeleteFromRow', + full_name='google.bigtable.v1.Mutation.DeleteFromRow', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + 
serialized_start=2607, + serialized_end=2622, +) + +_MUTATION = _descriptor.Descriptor( + name='Mutation', + full_name='google.bigtable.v1.Mutation', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='set_cell', full_name='google.bigtable.v1.Mutation.set_cell', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='delete_from_column', full_name='google.bigtable.v1.Mutation.delete_from_column', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='delete_from_family', full_name='google.bigtable.v1.Mutation.delete_from_family', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='delete_from_row', full_name='google.bigtable.v1.Mutation.delete_from_row', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_MUTATION_SETCELL, _MUTATION_DELETEFROMCOLUMN, _MUTATION_DELETEFROMFAMILY, _MUTATION_DELETEFROMROW, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='mutation', full_name='google.bigtable.v1.Mutation.mutation', + index=0, containing_type=None, fields=[]), + ], + serialized_start=2049, + 
serialized_end=2634, +) + + +_READMODIFYWRITERULE = _descriptor.Descriptor( + name='ReadModifyWriteRule', + full_name='google.bigtable.v1.ReadModifyWriteRule', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='family_name', full_name='google.bigtable.v1.ReadModifyWriteRule.family_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='column_qualifier', full_name='google.bigtable.v1.ReadModifyWriteRule.column_qualifier', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='append_value', full_name='google.bigtable.v1.ReadModifyWriteRule.append_value', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='increment_amount', full_name='google.bigtable.v1.ReadModifyWriteRule.increment_amount', index=3, + number=4, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='rule', full_name='google.bigtable.v1.ReadModifyWriteRule.rule', + index=0, containing_type=None, fields=[]), + ], + serialized_start=2637, + serialized_end=2765, +) + 
+_ROW.fields_by_name['families'].message_type = _FAMILY +_FAMILY.fields_by_name['columns'].message_type = _COLUMN +_COLUMN.fields_by_name['cells'].message_type = _CELL +_ROWSET.fields_by_name['row_ranges'].message_type = _ROWRANGE +_COLUMNRANGE.oneofs_by_name['start_qualifier'].fields.append( + _COLUMNRANGE.fields_by_name['start_qualifier_inclusive']) +_COLUMNRANGE.fields_by_name['start_qualifier_inclusive'].containing_oneof = _COLUMNRANGE.oneofs_by_name['start_qualifier'] +_COLUMNRANGE.oneofs_by_name['start_qualifier'].fields.append( + _COLUMNRANGE.fields_by_name['start_qualifier_exclusive']) +_COLUMNRANGE.fields_by_name['start_qualifier_exclusive'].containing_oneof = _COLUMNRANGE.oneofs_by_name['start_qualifier'] +_COLUMNRANGE.oneofs_by_name['end_qualifier'].fields.append( + _COLUMNRANGE.fields_by_name['end_qualifier_inclusive']) +_COLUMNRANGE.fields_by_name['end_qualifier_inclusive'].containing_oneof = _COLUMNRANGE.oneofs_by_name['end_qualifier'] +_COLUMNRANGE.oneofs_by_name['end_qualifier'].fields.append( + _COLUMNRANGE.fields_by_name['end_qualifier_exclusive']) +_COLUMNRANGE.fields_by_name['end_qualifier_exclusive'].containing_oneof = _COLUMNRANGE.oneofs_by_name['end_qualifier'] +_VALUERANGE.oneofs_by_name['start_value'].fields.append( + _VALUERANGE.fields_by_name['start_value_inclusive']) +_VALUERANGE.fields_by_name['start_value_inclusive'].containing_oneof = _VALUERANGE.oneofs_by_name['start_value'] +_VALUERANGE.oneofs_by_name['start_value'].fields.append( + _VALUERANGE.fields_by_name['start_value_exclusive']) +_VALUERANGE.fields_by_name['start_value_exclusive'].containing_oneof = _VALUERANGE.oneofs_by_name['start_value'] +_VALUERANGE.oneofs_by_name['end_value'].fields.append( + _VALUERANGE.fields_by_name['end_value_inclusive']) +_VALUERANGE.fields_by_name['end_value_inclusive'].containing_oneof = _VALUERANGE.oneofs_by_name['end_value'] +_VALUERANGE.oneofs_by_name['end_value'].fields.append( + _VALUERANGE.fields_by_name['end_value_exclusive']) 
+_VALUERANGE.fields_by_name['end_value_exclusive'].containing_oneof = _VALUERANGE.oneofs_by_name['end_value'] +_ROWFILTER_CHAIN.fields_by_name['filters'].message_type = _ROWFILTER +_ROWFILTER_CHAIN.containing_type = _ROWFILTER +_ROWFILTER_INTERLEAVE.fields_by_name['filters'].message_type = _ROWFILTER +_ROWFILTER_INTERLEAVE.containing_type = _ROWFILTER +_ROWFILTER_CONDITION.fields_by_name['predicate_filter'].message_type = _ROWFILTER +_ROWFILTER_CONDITION.fields_by_name['true_filter'].message_type = _ROWFILTER +_ROWFILTER_CONDITION.fields_by_name['false_filter'].message_type = _ROWFILTER +_ROWFILTER_CONDITION.containing_type = _ROWFILTER +_ROWFILTER.fields_by_name['chain'].message_type = _ROWFILTER_CHAIN +_ROWFILTER.fields_by_name['interleave'].message_type = _ROWFILTER_INTERLEAVE +_ROWFILTER.fields_by_name['condition'].message_type = _ROWFILTER_CONDITION +_ROWFILTER.fields_by_name['column_range_filter'].message_type = _COLUMNRANGE +_ROWFILTER.fields_by_name['timestamp_range_filter'].message_type = _TIMESTAMPRANGE +_ROWFILTER.fields_by_name['value_range_filter'].message_type = _VALUERANGE +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['chain']) +_ROWFILTER.fields_by_name['chain'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['interleave']) +_ROWFILTER.fields_by_name['interleave'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['condition']) +_ROWFILTER.fields_by_name['condition'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['sink']) +_ROWFILTER.fields_by_name['sink'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['pass_all_filter']) 
+_ROWFILTER.fields_by_name['pass_all_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['block_all_filter']) +_ROWFILTER.fields_by_name['block_all_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['row_key_regex_filter']) +_ROWFILTER.fields_by_name['row_key_regex_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['row_sample_filter']) +_ROWFILTER.fields_by_name['row_sample_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['family_name_regex_filter']) +_ROWFILTER.fields_by_name['family_name_regex_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['column_qualifier_regex_filter']) +_ROWFILTER.fields_by_name['column_qualifier_regex_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['column_range_filter']) +_ROWFILTER.fields_by_name['column_range_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['timestamp_range_filter']) +_ROWFILTER.fields_by_name['timestamp_range_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['value_regex_filter']) +_ROWFILTER.fields_by_name['value_regex_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['value_range_filter']) +_ROWFILTER.fields_by_name['value_range_filter'].containing_oneof = 
_ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['cells_per_row_offset_filter']) +_ROWFILTER.fields_by_name['cells_per_row_offset_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['cells_per_row_limit_filter']) +_ROWFILTER.fields_by_name['cells_per_row_limit_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['cells_per_column_limit_filter']) +_ROWFILTER.fields_by_name['cells_per_column_limit_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['strip_value_transformer']) +_ROWFILTER.fields_by_name['strip_value_transformer'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['apply_label_transformer']) +_ROWFILTER.fields_by_name['apply_label_transformer'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_MUTATION_SETCELL.containing_type = _MUTATION +_MUTATION_DELETEFROMCOLUMN.fields_by_name['time_range'].message_type = _TIMESTAMPRANGE +_MUTATION_DELETEFROMCOLUMN.containing_type = _MUTATION +_MUTATION_DELETEFROMFAMILY.containing_type = _MUTATION +_MUTATION_DELETEFROMROW.containing_type = _MUTATION +_MUTATION.fields_by_name['set_cell'].message_type = _MUTATION_SETCELL +_MUTATION.fields_by_name['delete_from_column'].message_type = _MUTATION_DELETEFROMCOLUMN +_MUTATION.fields_by_name['delete_from_family'].message_type = _MUTATION_DELETEFROMFAMILY +_MUTATION.fields_by_name['delete_from_row'].message_type = _MUTATION_DELETEFROMROW +_MUTATION.oneofs_by_name['mutation'].fields.append( + _MUTATION.fields_by_name['set_cell']) +_MUTATION.fields_by_name['set_cell'].containing_oneof = _MUTATION.oneofs_by_name['mutation'] 
+_MUTATION.oneofs_by_name['mutation'].fields.append( + _MUTATION.fields_by_name['delete_from_column']) +_MUTATION.fields_by_name['delete_from_column'].containing_oneof = _MUTATION.oneofs_by_name['mutation'] +_MUTATION.oneofs_by_name['mutation'].fields.append( + _MUTATION.fields_by_name['delete_from_family']) +_MUTATION.fields_by_name['delete_from_family'].containing_oneof = _MUTATION.oneofs_by_name['mutation'] +_MUTATION.oneofs_by_name['mutation'].fields.append( + _MUTATION.fields_by_name['delete_from_row']) +_MUTATION.fields_by_name['delete_from_row'].containing_oneof = _MUTATION.oneofs_by_name['mutation'] +_READMODIFYWRITERULE.oneofs_by_name['rule'].fields.append( + _READMODIFYWRITERULE.fields_by_name['append_value']) +_READMODIFYWRITERULE.fields_by_name['append_value'].containing_oneof = _READMODIFYWRITERULE.oneofs_by_name['rule'] +_READMODIFYWRITERULE.oneofs_by_name['rule'].fields.append( + _READMODIFYWRITERULE.fields_by_name['increment_amount']) +_READMODIFYWRITERULE.fields_by_name['increment_amount'].containing_oneof = _READMODIFYWRITERULE.oneofs_by_name['rule'] +DESCRIPTOR.message_types_by_name['Row'] = _ROW +DESCRIPTOR.message_types_by_name['Family'] = _FAMILY +DESCRIPTOR.message_types_by_name['Column'] = _COLUMN +DESCRIPTOR.message_types_by_name['Cell'] = _CELL +DESCRIPTOR.message_types_by_name['RowRange'] = _ROWRANGE +DESCRIPTOR.message_types_by_name['RowSet'] = _ROWSET +DESCRIPTOR.message_types_by_name['ColumnRange'] = _COLUMNRANGE +DESCRIPTOR.message_types_by_name['TimestampRange'] = _TIMESTAMPRANGE +DESCRIPTOR.message_types_by_name['ValueRange'] = _VALUERANGE +DESCRIPTOR.message_types_by_name['RowFilter'] = _ROWFILTER +DESCRIPTOR.message_types_by_name['Mutation'] = _MUTATION +DESCRIPTOR.message_types_by_name['ReadModifyWriteRule'] = _READMODIFYWRITERULE + +Row = _reflection.GeneratedProtocolMessageType('Row', (_message.Message,), dict( + DESCRIPTOR = _ROW, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # 
@@protoc_insertion_point(class_scope:google.bigtable.v1.Row) + )) +_sym_db.RegisterMessage(Row) + +Family = _reflection.GeneratedProtocolMessageType('Family', (_message.Message,), dict( + DESCRIPTOR = _FAMILY, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.Family) + )) +_sym_db.RegisterMessage(Family) + +Column = _reflection.GeneratedProtocolMessageType('Column', (_message.Message,), dict( + DESCRIPTOR = _COLUMN, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.Column) + )) +_sym_db.RegisterMessage(Column) + +Cell = _reflection.GeneratedProtocolMessageType('Cell', (_message.Message,), dict( + DESCRIPTOR = _CELL, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.Cell) + )) +_sym_db.RegisterMessage(Cell) + +RowRange = _reflection.GeneratedProtocolMessageType('RowRange', (_message.Message,), dict( + DESCRIPTOR = _ROWRANGE, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.RowRange) + )) +_sym_db.RegisterMessage(RowRange) + +RowSet = _reflection.GeneratedProtocolMessageType('RowSet', (_message.Message,), dict( + DESCRIPTOR = _ROWSET, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.RowSet) + )) +_sym_db.RegisterMessage(RowSet) + +ColumnRange = _reflection.GeneratedProtocolMessageType('ColumnRange', (_message.Message,), dict( + DESCRIPTOR = _COLUMNRANGE, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.ColumnRange) + )) +_sym_db.RegisterMessage(ColumnRange) + +TimestampRange = _reflection.GeneratedProtocolMessageType('TimestampRange', (_message.Message,), dict( + DESCRIPTOR = _TIMESTAMPRANGE, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # 
@@protoc_insertion_point(class_scope:google.bigtable.v1.TimestampRange) + )) +_sym_db.RegisterMessage(TimestampRange) + +ValueRange = _reflection.GeneratedProtocolMessageType('ValueRange', (_message.Message,), dict( + DESCRIPTOR = _VALUERANGE, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.ValueRange) + )) +_sym_db.RegisterMessage(ValueRange) + +RowFilter = _reflection.GeneratedProtocolMessageType('RowFilter', (_message.Message,), dict( + + Chain = _reflection.GeneratedProtocolMessageType('Chain', (_message.Message,), dict( + DESCRIPTOR = _ROWFILTER_CHAIN, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.RowFilter.Chain) + )) + , + + Interleave = _reflection.GeneratedProtocolMessageType('Interleave', (_message.Message,), dict( + DESCRIPTOR = _ROWFILTER_INTERLEAVE, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.RowFilter.Interleave) + )) + , + + Condition = _reflection.GeneratedProtocolMessageType('Condition', (_message.Message,), dict( + DESCRIPTOR = _ROWFILTER_CONDITION, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.RowFilter.Condition) + )) + , + DESCRIPTOR = _ROWFILTER, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.RowFilter) + )) +_sym_db.RegisterMessage(RowFilter) +_sym_db.RegisterMessage(RowFilter.Chain) +_sym_db.RegisterMessage(RowFilter.Interleave) +_sym_db.RegisterMessage(RowFilter.Condition) + +Mutation = _reflection.GeneratedProtocolMessageType('Mutation', (_message.Message,), dict( + + SetCell = _reflection.GeneratedProtocolMessageType('SetCell', (_message.Message,), dict( + DESCRIPTOR = _MUTATION_SETCELL, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # 
@@protoc_insertion_point(class_scope:google.bigtable.v1.Mutation.SetCell) + )) + , + + DeleteFromColumn = _reflection.GeneratedProtocolMessageType('DeleteFromColumn', (_message.Message,), dict( + DESCRIPTOR = _MUTATION_DELETEFROMCOLUMN, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.Mutation.DeleteFromColumn) + )) + , + + DeleteFromFamily = _reflection.GeneratedProtocolMessageType('DeleteFromFamily', (_message.Message,), dict( + DESCRIPTOR = _MUTATION_DELETEFROMFAMILY, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.Mutation.DeleteFromFamily) + )) + , + + DeleteFromRow = _reflection.GeneratedProtocolMessageType('DeleteFromRow', (_message.Message,), dict( + DESCRIPTOR = _MUTATION_DELETEFROMROW, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.Mutation.DeleteFromRow) + )) + , + DESCRIPTOR = _MUTATION, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.Mutation) + )) +_sym_db.RegisterMessage(Mutation) +_sym_db.RegisterMessage(Mutation.SetCell) +_sym_db.RegisterMessage(Mutation.DeleteFromColumn) +_sym_db.RegisterMessage(Mutation.DeleteFromFamily) +_sym_db.RegisterMessage(Mutation.DeleteFromRow) + +ReadModifyWriteRule = _reflection.GeneratedProtocolMessageType('ReadModifyWriteRule', (_message.Message,), dict( + DESCRIPTOR = _READMODIFYWRITERULE, + __module__ = 'google.bigtable.v1.bigtable_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.ReadModifyWriteRule) + )) +_sym_db.RegisterMessage(ReadModifyWriteRule) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), b'\n\026com.google.bigtable.v1B\021BigtableDataProtoP\001') +# @@protoc_insertion_point(module_scope) diff --git 
a/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_service_messages_pb2.py b/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_service_messages_pb2.py new file mode 100644 index 0000000..38a478a --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_service_messages_pb2.py @@ -0,0 +1,678 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/bigtable/v1/bigtable_service_messages.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from gcloud.bigtable._generated import bigtable_data_pb2 as google_dot_bigtable_dot_v1_dot_bigtable__data__pb2 +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/bigtable/v1/bigtable_service_messages.proto', + package='google.bigtable.v1', + syntax='proto3', + serialized_pb=b'\n2google/bigtable/v1/bigtable_service_messages.proto\x12\x12google.bigtable.v1\x1a&google/bigtable/v1/bigtable_data.proto\x1a\x17google/rpc/status.proto\"\x8b\x02\n\x0fReadRowsRequest\x12\x12\n\ntable_name\x18\x01 \x01(\t\x12\x11\n\x07row_key\x18\x02 \x01(\x0cH\x00\x12\x31\n\trow_range\x18\x03 \x01(\x0b\x32\x1c.google.bigtable.v1.RowRangeH\x00\x12-\n\x07row_set\x18\x08 \x01(\x0b\x32\x1a.google.bigtable.v1.RowSetH\x00\x12-\n\x06\x66ilter\x18\x05 \x01(\x0b\x32\x1d.google.bigtable.v1.RowFilter\x12\x1e\n\x16\x61llow_row_interleaving\x18\x06 \x01(\x08\x12\x16\n\x0enum_rows_limit\x18\x07 \x01(\x03\x42\x08\n\x06target\"\xd0\x01\n\x10ReadRowsResponse\x12\x0f\n\x07row_key\x18\x01 \x01(\x0c\x12:\n\x06\x63hunks\x18\x02 \x03(\x0b\x32*.google.bigtable.v1.ReadRowsResponse.Chunk\x1ao\n\x05\x43hunk\x12\x32\n\x0crow_contents\x18\x01 
\x01(\x0b\x32\x1a.google.bigtable.v1.FamilyH\x00\x12\x13\n\treset_row\x18\x02 \x01(\x08H\x00\x12\x14\n\ncommit_row\x18\x03 \x01(\x08H\x00\x42\x07\n\x05\x63hunk\"*\n\x14SampleRowKeysRequest\x12\x12\n\ntable_name\x18\x01 \x01(\t\">\n\x15SampleRowKeysResponse\x12\x0f\n\x07row_key\x18\x01 \x01(\x0c\x12\x14\n\x0coffset_bytes\x18\x02 \x01(\x03\"h\n\x10MutateRowRequest\x12\x12\n\ntable_name\x18\x01 \x01(\t\x12\x0f\n\x07row_key\x18\x02 \x01(\x0c\x12/\n\tmutations\x18\x03 \x03(\x0b\x32\x1c.google.bigtable.v1.Mutation\"\xb0\x01\n\x11MutateRowsRequest\x12\x12\n\ntable_name\x18\x01 \x01(\t\x12<\n\x07\x65ntries\x18\x02 \x03(\x0b\x32+.google.bigtable.v1.MutateRowsRequest.Entry\x1aI\n\x05\x45ntry\x12\x0f\n\x07row_key\x18\x01 \x01(\x0c\x12/\n\tmutations\x18\x02 \x03(\x0b\x32\x1c.google.bigtable.v1.Mutation\":\n\x12MutateRowsResponse\x12$\n\x08statuses\x18\x01 \x03(\x0b\x32\x12.google.rpc.Status\"\xe5\x01\n\x18\x43heckAndMutateRowRequest\x12\x12\n\ntable_name\x18\x01 \x01(\t\x12\x0f\n\x07row_key\x18\x02 \x01(\x0c\x12\x37\n\x10predicate_filter\x18\x06 \x01(\x0b\x32\x1d.google.bigtable.v1.RowFilter\x12\x34\n\x0etrue_mutations\x18\x04 \x03(\x0b\x32\x1c.google.bigtable.v1.Mutation\x12\x35\n\x0f\x66\x61lse_mutations\x18\x05 \x03(\x0b\x32\x1c.google.bigtable.v1.Mutation\"6\n\x19\x43heckAndMutateRowResponse\x12\x19\n\x11predicate_matched\x18\x01 \x01(\x08\"x\n\x19ReadModifyWriteRowRequest\x12\x12\n\ntable_name\x18\x01 \x01(\t\x12\x0f\n\x07row_key\x18\x02 \x01(\x0c\x12\x36\n\x05rules\x18\x03 \x03(\x0b\x32\'.google.bigtable.v1.ReadModifyWriteRuleB8\n\x16\x63om.google.bigtable.v1B\x1c\x42igtableServiceMessagesProtoP\x01\x62\x06proto3' + , + dependencies=[google_dot_bigtable_dot_v1_dot_bigtable__data__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_READROWSREQUEST = _descriptor.Descriptor( + name='ReadRowsRequest', + full_name='google.bigtable.v1.ReadRowsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + 
fields=[ + _descriptor.FieldDescriptor( + name='table_name', full_name='google.bigtable.v1.ReadRowsRequest.table_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='row_key', full_name='google.bigtable.v1.ReadRowsRequest.row_key', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='row_range', full_name='google.bigtable.v1.ReadRowsRequest.row_range', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='row_set', full_name='google.bigtable.v1.ReadRowsRequest.row_set', index=3, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filter', full_name='google.bigtable.v1.ReadRowsRequest.filter', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='allow_row_interleaving', full_name='google.bigtable.v1.ReadRowsRequest.allow_row_interleaving', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
options=None), + _descriptor.FieldDescriptor( + name='num_rows_limit', full_name='google.bigtable.v1.ReadRowsRequest.num_rows_limit', index=6, + number=7, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='target', full_name='google.bigtable.v1.ReadRowsRequest.target', + index=0, containing_type=None, fields=[]), + ], + serialized_start=140, + serialized_end=407, +) + + +_READROWSRESPONSE_CHUNK = _descriptor.Descriptor( + name='Chunk', + full_name='google.bigtable.v1.ReadRowsResponse.Chunk', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='row_contents', full_name='google.bigtable.v1.ReadRowsResponse.Chunk.row_contents', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='reset_row', full_name='google.bigtable.v1.ReadRowsResponse.Chunk.reset_row', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='commit_row', full_name='google.bigtable.v1.ReadRowsResponse.Chunk.commit_row', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + 
is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='chunk', full_name='google.bigtable.v1.ReadRowsResponse.Chunk.chunk', + index=0, containing_type=None, fields=[]), + ], + serialized_start=507, + serialized_end=618, +) + +_READROWSRESPONSE = _descriptor.Descriptor( + name='ReadRowsResponse', + full_name='google.bigtable.v1.ReadRowsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='row_key', full_name='google.bigtable.v1.ReadRowsResponse.row_key', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='chunks', full_name='google.bigtable.v1.ReadRowsResponse.chunks', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_READROWSRESPONSE_CHUNK, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=410, + serialized_end=618, +) + + +_SAMPLEROWKEYSREQUEST = _descriptor.Descriptor( + name='SampleRowKeysRequest', + full_name='google.bigtable.v1.SampleRowKeysRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='table_name', full_name='google.bigtable.v1.SampleRowKeysRequest.table_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + 
is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=620, + serialized_end=662, +) + + +_SAMPLEROWKEYSRESPONSE = _descriptor.Descriptor( + name='SampleRowKeysResponse', + full_name='google.bigtable.v1.SampleRowKeysResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='row_key', full_name='google.bigtable.v1.SampleRowKeysResponse.row_key', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='offset_bytes', full_name='google.bigtable.v1.SampleRowKeysResponse.offset_bytes', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=664, + serialized_end=726, +) + + +_MUTATEROWREQUEST = _descriptor.Descriptor( + name='MutateRowRequest', + full_name='google.bigtable.v1.MutateRowRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='table_name', full_name='google.bigtable.v1.MutateRowRequest.table_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='row_key', full_name='google.bigtable.v1.MutateRowRequest.row_key', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mutations', full_name='google.bigtable.v1.MutateRowRequest.mutations', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=728, + serialized_end=832, +) + + +_MUTATEROWSREQUEST_ENTRY = _descriptor.Descriptor( + name='Entry', + full_name='google.bigtable.v1.MutateRowsRequest.Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='row_key', full_name='google.bigtable.v1.MutateRowsRequest.Entry.row_key', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mutations', full_name='google.bigtable.v1.MutateRowsRequest.Entry.mutations', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=938, + serialized_end=1011, +) + +_MUTATEROWSREQUEST = _descriptor.Descriptor( + name='MutateRowsRequest', + full_name='google.bigtable.v1.MutateRowsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='table_name', 
full_name='google.bigtable.v1.MutateRowsRequest.table_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='entries', full_name='google.bigtable.v1.MutateRowsRequest.entries', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_MUTATEROWSREQUEST_ENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=835, + serialized_end=1011, +) + + +_MUTATEROWSRESPONSE = _descriptor.Descriptor( + name='MutateRowsResponse', + full_name='google.bigtable.v1.MutateRowsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='statuses', full_name='google.bigtable.v1.MutateRowsResponse.statuses', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1013, + serialized_end=1071, +) + + +_CHECKANDMUTATEROWREQUEST = _descriptor.Descriptor( + name='CheckAndMutateRowRequest', + full_name='google.bigtable.v1.CheckAndMutateRowRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='table_name', full_name='google.bigtable.v1.CheckAndMutateRowRequest.table_name', index=0, + number=1, type=9, cpp_type=9, label=1, + 
has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='row_key', full_name='google.bigtable.v1.CheckAndMutateRowRequest.row_key', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='predicate_filter', full_name='google.bigtable.v1.CheckAndMutateRowRequest.predicate_filter', index=2, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='true_mutations', full_name='google.bigtable.v1.CheckAndMutateRowRequest.true_mutations', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='false_mutations', full_name='google.bigtable.v1.CheckAndMutateRowRequest.false_mutations', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1074, + serialized_end=1303, +) + + +_CHECKANDMUTATEROWRESPONSE = _descriptor.Descriptor( + name='CheckAndMutateRowResponse', + full_name='google.bigtable.v1.CheckAndMutateRowResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + 
_descriptor.FieldDescriptor( + name='predicate_matched', full_name='google.bigtable.v1.CheckAndMutateRowResponse.predicate_matched', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1305, + serialized_end=1359, +) + + +_READMODIFYWRITEROWREQUEST = _descriptor.Descriptor( + name='ReadModifyWriteRowRequest', + full_name='google.bigtable.v1.ReadModifyWriteRowRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='table_name', full_name='google.bigtable.v1.ReadModifyWriteRowRequest.table_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='row_key', full_name='google.bigtable.v1.ReadModifyWriteRowRequest.row_key', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='rules', full_name='google.bigtable.v1.ReadModifyWriteRowRequest.rules', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1361, + 
serialized_end=1481, +) + +_READROWSREQUEST.fields_by_name['row_range'].message_type = google_dot_bigtable_dot_v1_dot_bigtable__data__pb2._ROWRANGE +_READROWSREQUEST.fields_by_name['row_set'].message_type = google_dot_bigtable_dot_v1_dot_bigtable__data__pb2._ROWSET +_READROWSREQUEST.fields_by_name['filter'].message_type = google_dot_bigtable_dot_v1_dot_bigtable__data__pb2._ROWFILTER +_READROWSREQUEST.oneofs_by_name['target'].fields.append( + _READROWSREQUEST.fields_by_name['row_key']) +_READROWSREQUEST.fields_by_name['row_key'].containing_oneof = _READROWSREQUEST.oneofs_by_name['target'] +_READROWSREQUEST.oneofs_by_name['target'].fields.append( + _READROWSREQUEST.fields_by_name['row_range']) +_READROWSREQUEST.fields_by_name['row_range'].containing_oneof = _READROWSREQUEST.oneofs_by_name['target'] +_READROWSREQUEST.oneofs_by_name['target'].fields.append( + _READROWSREQUEST.fields_by_name['row_set']) +_READROWSREQUEST.fields_by_name['row_set'].containing_oneof = _READROWSREQUEST.oneofs_by_name['target'] +_READROWSRESPONSE_CHUNK.fields_by_name['row_contents'].message_type = google_dot_bigtable_dot_v1_dot_bigtable__data__pb2._FAMILY +_READROWSRESPONSE_CHUNK.containing_type = _READROWSRESPONSE +_READROWSRESPONSE_CHUNK.oneofs_by_name['chunk'].fields.append( + _READROWSRESPONSE_CHUNK.fields_by_name['row_contents']) +_READROWSRESPONSE_CHUNK.fields_by_name['row_contents'].containing_oneof = _READROWSRESPONSE_CHUNK.oneofs_by_name['chunk'] +_READROWSRESPONSE_CHUNK.oneofs_by_name['chunk'].fields.append( + _READROWSRESPONSE_CHUNK.fields_by_name['reset_row']) +_READROWSRESPONSE_CHUNK.fields_by_name['reset_row'].containing_oneof = _READROWSRESPONSE_CHUNK.oneofs_by_name['chunk'] +_READROWSRESPONSE_CHUNK.oneofs_by_name['chunk'].fields.append( + _READROWSRESPONSE_CHUNK.fields_by_name['commit_row']) +_READROWSRESPONSE_CHUNK.fields_by_name['commit_row'].containing_oneof = _READROWSRESPONSE_CHUNK.oneofs_by_name['chunk'] +_READROWSRESPONSE.fields_by_name['chunks'].message_type = 
_READROWSRESPONSE_CHUNK +_MUTATEROWREQUEST.fields_by_name['mutations'].message_type = google_dot_bigtable_dot_v1_dot_bigtable__data__pb2._MUTATION +_MUTATEROWSREQUEST_ENTRY.fields_by_name['mutations'].message_type = google_dot_bigtable_dot_v1_dot_bigtable__data__pb2._MUTATION +_MUTATEROWSREQUEST_ENTRY.containing_type = _MUTATEROWSREQUEST +_MUTATEROWSREQUEST.fields_by_name['entries'].message_type = _MUTATEROWSREQUEST_ENTRY +_MUTATEROWSRESPONSE.fields_by_name['statuses'].message_type = google_dot_rpc_dot_status__pb2._STATUS +_CHECKANDMUTATEROWREQUEST.fields_by_name['predicate_filter'].message_type = google_dot_bigtable_dot_v1_dot_bigtable__data__pb2._ROWFILTER +_CHECKANDMUTATEROWREQUEST.fields_by_name['true_mutations'].message_type = google_dot_bigtable_dot_v1_dot_bigtable__data__pb2._MUTATION +_CHECKANDMUTATEROWREQUEST.fields_by_name['false_mutations'].message_type = google_dot_bigtable_dot_v1_dot_bigtable__data__pb2._MUTATION +_READMODIFYWRITEROWREQUEST.fields_by_name['rules'].message_type = google_dot_bigtable_dot_v1_dot_bigtable__data__pb2._READMODIFYWRITERULE +DESCRIPTOR.message_types_by_name['ReadRowsRequest'] = _READROWSREQUEST +DESCRIPTOR.message_types_by_name['ReadRowsResponse'] = _READROWSRESPONSE +DESCRIPTOR.message_types_by_name['SampleRowKeysRequest'] = _SAMPLEROWKEYSREQUEST +DESCRIPTOR.message_types_by_name['SampleRowKeysResponse'] = _SAMPLEROWKEYSRESPONSE +DESCRIPTOR.message_types_by_name['MutateRowRequest'] = _MUTATEROWREQUEST +DESCRIPTOR.message_types_by_name['MutateRowsRequest'] = _MUTATEROWSREQUEST +DESCRIPTOR.message_types_by_name['MutateRowsResponse'] = _MUTATEROWSRESPONSE +DESCRIPTOR.message_types_by_name['CheckAndMutateRowRequest'] = _CHECKANDMUTATEROWREQUEST +DESCRIPTOR.message_types_by_name['CheckAndMutateRowResponse'] = _CHECKANDMUTATEROWRESPONSE +DESCRIPTOR.message_types_by_name['ReadModifyWriteRowRequest'] = _READMODIFYWRITEROWREQUEST + +ReadRowsRequest = _reflection.GeneratedProtocolMessageType('ReadRowsRequest', (_message.Message,), 
dict( + DESCRIPTOR = _READROWSREQUEST, + __module__ = 'google.bigtable.v1.bigtable_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.ReadRowsRequest) + )) +_sym_db.RegisterMessage(ReadRowsRequest) + +ReadRowsResponse = _reflection.GeneratedProtocolMessageType('ReadRowsResponse', (_message.Message,), dict( + + Chunk = _reflection.GeneratedProtocolMessageType('Chunk', (_message.Message,), dict( + DESCRIPTOR = _READROWSRESPONSE_CHUNK, + __module__ = 'google.bigtable.v1.bigtable_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.ReadRowsResponse.Chunk) + )) + , + DESCRIPTOR = _READROWSRESPONSE, + __module__ = 'google.bigtable.v1.bigtable_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.ReadRowsResponse) + )) +_sym_db.RegisterMessage(ReadRowsResponse) +_sym_db.RegisterMessage(ReadRowsResponse.Chunk) + +SampleRowKeysRequest = _reflection.GeneratedProtocolMessageType('SampleRowKeysRequest', (_message.Message,), dict( + DESCRIPTOR = _SAMPLEROWKEYSREQUEST, + __module__ = 'google.bigtable.v1.bigtable_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.SampleRowKeysRequest) + )) +_sym_db.RegisterMessage(SampleRowKeysRequest) + +SampleRowKeysResponse = _reflection.GeneratedProtocolMessageType('SampleRowKeysResponse', (_message.Message,), dict( + DESCRIPTOR = _SAMPLEROWKEYSRESPONSE, + __module__ = 'google.bigtable.v1.bigtable_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.SampleRowKeysResponse) + )) +_sym_db.RegisterMessage(SampleRowKeysResponse) + +MutateRowRequest = _reflection.GeneratedProtocolMessageType('MutateRowRequest', (_message.Message,), dict( + DESCRIPTOR = _MUTATEROWREQUEST, + __module__ = 'google.bigtable.v1.bigtable_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.MutateRowRequest) + )) +_sym_db.RegisterMessage(MutateRowRequest) + +MutateRowsRequest = 
_reflection.GeneratedProtocolMessageType('MutateRowsRequest', (_message.Message,), dict( + + Entry = _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), dict( + DESCRIPTOR = _MUTATEROWSREQUEST_ENTRY, + __module__ = 'google.bigtable.v1.bigtable_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.MutateRowsRequest.Entry) + )) + , + DESCRIPTOR = _MUTATEROWSREQUEST, + __module__ = 'google.bigtable.v1.bigtable_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.MutateRowsRequest) + )) +_sym_db.RegisterMessage(MutateRowsRequest) +_sym_db.RegisterMessage(MutateRowsRequest.Entry) + +MutateRowsResponse = _reflection.GeneratedProtocolMessageType('MutateRowsResponse', (_message.Message,), dict( + DESCRIPTOR = _MUTATEROWSRESPONSE, + __module__ = 'google.bigtable.v1.bigtable_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.MutateRowsResponse) + )) +_sym_db.RegisterMessage(MutateRowsResponse) + +CheckAndMutateRowRequest = _reflection.GeneratedProtocolMessageType('CheckAndMutateRowRequest', (_message.Message,), dict( + DESCRIPTOR = _CHECKANDMUTATEROWREQUEST, + __module__ = 'google.bigtable.v1.bigtable_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.CheckAndMutateRowRequest) + )) +_sym_db.RegisterMessage(CheckAndMutateRowRequest) + +CheckAndMutateRowResponse = _reflection.GeneratedProtocolMessageType('CheckAndMutateRowResponse', (_message.Message,), dict( + DESCRIPTOR = _CHECKANDMUTATEROWRESPONSE, + __module__ = 'google.bigtable.v1.bigtable_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v1.CheckAndMutateRowResponse) + )) +_sym_db.RegisterMessage(CheckAndMutateRowResponse) + +ReadModifyWriteRowRequest = _reflection.GeneratedProtocolMessageType('ReadModifyWriteRowRequest', (_message.Message,), dict( + DESCRIPTOR = _READMODIFYWRITEROWREQUEST, + __module__ = 'google.bigtable.v1.bigtable_service_messages_pb2' + 
# @@protoc_insertion_point(class_scope:google.bigtable.v1.ReadModifyWriteRowRequest) + )) +_sym_db.RegisterMessage(ReadModifyWriteRowRequest) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), b'\n\026com.google.bigtable.v1B\034BigtableServiceMessagesProtoP\001') +# @@protoc_insertion_point(module_scope) diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_service_pb2.py b/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_service_pb2.py new file mode 100644 index 0000000..901ffb6 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_service_pb2.py @@ -0,0 +1,167 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/bigtable/v1/bigtable_service.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from gcloud.bigtable._generated import bigtable_data_pb2 as google_dot_bigtable_dot_v1_dot_bigtable__data__pb2 +from gcloud.bigtable._generated import bigtable_service_messages_pb2 as google_dot_bigtable_dot_v1_dot_bigtable__service__messages__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/bigtable/v1/bigtable_service.proto', + package='google.bigtable.v1', + syntax='proto3', + 
serialized_pb=b'\n)google/bigtable/v1/bigtable_service.proto\x12\x12google.bigtable.v1\x1a\x1cgoogle/api/annotations.proto\x1a&google/bigtable/v1/bigtable_data.proto\x1a\x32google/bigtable/v1/bigtable_service_messages.proto\x1a\x1bgoogle/protobuf/empty.proto2\xdd\x08\n\x0f\x42igtableService\x12\xa5\x01\n\x08ReadRows\x12#.google.bigtable.v1.ReadRowsRequest\x1a$.google.bigtable.v1.ReadRowsResponse\"L\x82\xd3\xe4\x93\x02\x46\"A/v1/{table_name=projects/*/zones/*/clusters/*/tables/*}/rows:read:\x01*0\x01\x12\xb7\x01\n\rSampleRowKeys\x12(.google.bigtable.v1.SampleRowKeysRequest\x1a).google.bigtable.v1.SampleRowKeysResponse\"O\x82\xd3\xe4\x93\x02I\x12G/v1/{table_name=projects/*/zones/*/clusters/*/tables/*}/rows:sampleKeys0\x01\x12\xa3\x01\n\tMutateRow\x12$.google.bigtable.v1.MutateRowRequest\x1a\x16.google.protobuf.Empty\"X\x82\xd3\xe4\x93\x02R\"M/v1/{table_name=projects/*/zones/*/clusters/*/tables/*}/rows/{row_key}:mutate:\x01*\x12\xaa\x01\n\nMutateRows\x12%.google.bigtable.v1.MutateRowsRequest\x1a&.google.bigtable.v1.MutateRowsResponse\"M\x82\xd3\xe4\x93\x02G\"B/v1/{table_name=projects/*/zones/*/clusters/*/tables/*}:mutateRows:\x01*\x12\xd2\x01\n\x11\x43heckAndMutateRow\x12,.google.bigtable.v1.CheckAndMutateRowRequest\x1a-.google.bigtable.v1.CheckAndMutateRowResponse\"`\x82\xd3\xe4\x93\x02Z\"U/v1/{table_name=projects/*/zones/*/clusters/*/tables/*}/rows/{row_key}:checkAndMutate:\x01*\x12\xbf\x01\n\x12ReadModifyWriteRow\x12-.google.bigtable.v1.ReadModifyWriteRowRequest\x1a\x17.google.bigtable.v1.Row\"a\x82\xd3\xe4\x93\x02[\"V/v1/{table_name=projects/*/zones/*/clusters/*/tables/*}/rows/{row_key}:readModifyWrite:\x01*B4\n\x16\x63om.google.bigtable.v1B\x15\x42igtableServicesProtoP\x01\x88\x01\x01\x62\x06proto3' + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_bigtable_dot_v1_dot_bigtable__data__pb2.DESCRIPTOR,google_dot_bigtable_dot_v1_dot_bigtable__service__messages__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,]) 
+_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), b'\n\026com.google.bigtable.v1B\025BigtableServicesProtoP\001\210\001\001') +import abc +from grpc.beta import implementations as beta_implementations +from grpc.framework.common import cardinality +from grpc.framework.interfaces.face import utilities as face_utilities + +class BetaBigtableServiceServicer(object): + """""" + __metaclass__ = abc.ABCMeta + @abc.abstractmethod + def ReadRows(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def SampleRowKeys(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def MutateRow(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def MutateRows(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def CheckAndMutateRow(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def ReadModifyWriteRow(self, request, context): + raise NotImplementedError() + +class BetaBigtableServiceStub(object): + """The interface to which stubs will conform.""" + __metaclass__ = abc.ABCMeta + @abc.abstractmethod + def ReadRows(self, request, timeout): + raise NotImplementedError() + @abc.abstractmethod + def SampleRowKeys(self, request, timeout): + raise NotImplementedError() + @abc.abstractmethod + def MutateRow(self, request, timeout): + raise NotImplementedError() + MutateRow.future = None + @abc.abstractmethod + def MutateRows(self, request, timeout): + raise NotImplementedError() + MutateRows.future = None + @abc.abstractmethod + def CheckAndMutateRow(self, request, timeout): + raise NotImplementedError() + CheckAndMutateRow.future = None + @abc.abstractmethod + def ReadModifyWriteRow(self, request, timeout): + raise NotImplementedError() + ReadModifyWriteRow.future = None + +def beta_create_BigtableService_server(servicer, pool=None, 
pool_size=None, default_timeout=None, maximum_timeout=None): + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import google.protobuf.empty_pb2 + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_data_pb2 + request_deserializers = { + ('google.bigtable.v1.BigtableService', 'CheckAndMutateRow'): gcloud.bigtable._generated.bigtable_service_messages_pb2.CheckAndMutateRowRequest.FromString, + ('google.bigtable.v1.BigtableService', 'MutateRow'): gcloud.bigtable._generated.bigtable_service_messages_pb2.MutateRowRequest.FromString, + ('google.bigtable.v1.BigtableService', 'MutateRows'): gcloud.bigtable._generated.bigtable_service_messages_pb2.MutateRowsRequest.FromString, + ('google.bigtable.v1.BigtableService', 'ReadModifyWriteRow'): gcloud.bigtable._generated.bigtable_service_messages_pb2.ReadModifyWriteRowRequest.FromString, + ('google.bigtable.v1.BigtableService', 'ReadRows'): gcloud.bigtable._generated.bigtable_service_messages_pb2.ReadRowsRequest.FromString, + ('google.bigtable.v1.BigtableService', 'SampleRowKeys'): gcloud.bigtable._generated.bigtable_service_messages_pb2.SampleRowKeysRequest.FromString, + } + response_serializers = { + ('google.bigtable.v1.BigtableService', 'CheckAndMutateRow'): gcloud.bigtable._generated.bigtable_service_messages_pb2.CheckAndMutateRowResponse.SerializeToString, + ('google.bigtable.v1.BigtableService', 'MutateRow'): 
google.protobuf.empty_pb2.Empty.SerializeToString, + ('google.bigtable.v1.BigtableService', 'MutateRows'): gcloud.bigtable._generated.bigtable_service_messages_pb2.MutateRowsResponse.SerializeToString, + ('google.bigtable.v1.BigtableService', 'ReadModifyWriteRow'): gcloud.bigtable._generated.bigtable_data_pb2.Row.SerializeToString, + ('google.bigtable.v1.BigtableService', 'ReadRows'): gcloud.bigtable._generated.bigtable_service_messages_pb2.ReadRowsResponse.SerializeToString, + ('google.bigtable.v1.BigtableService', 'SampleRowKeys'): gcloud.bigtable._generated.bigtable_service_messages_pb2.SampleRowKeysResponse.SerializeToString, + } + method_implementations = { + ('google.bigtable.v1.BigtableService', 'CheckAndMutateRow'): face_utilities.unary_unary_inline(servicer.CheckAndMutateRow), + ('google.bigtable.v1.BigtableService', 'MutateRow'): face_utilities.unary_unary_inline(servicer.MutateRow), + ('google.bigtable.v1.BigtableService', 'MutateRows'): face_utilities.unary_unary_inline(servicer.MutateRows), + ('google.bigtable.v1.BigtableService', 'ReadModifyWriteRow'): face_utilities.unary_unary_inline(servicer.ReadModifyWriteRow), + ('google.bigtable.v1.BigtableService', 'ReadRows'): face_utilities.unary_stream_inline(servicer.ReadRows), + ('google.bigtable.v1.BigtableService', 'SampleRowKeys'): face_utilities.unary_stream_inline(servicer.SampleRowKeys), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + +def beta_create_BigtableService_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import 
gcloud.bigtable._generated.bigtable_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import google.protobuf.empty_pb2 + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_data_pb2 + request_serializers = { + ('google.bigtable.v1.BigtableService', 'CheckAndMutateRow'): gcloud.bigtable._generated.bigtable_service_messages_pb2.CheckAndMutateRowRequest.SerializeToString, + ('google.bigtable.v1.BigtableService', 'MutateRow'): gcloud.bigtable._generated.bigtable_service_messages_pb2.MutateRowRequest.SerializeToString, + ('google.bigtable.v1.BigtableService', 'MutateRows'): gcloud.bigtable._generated.bigtable_service_messages_pb2.MutateRowsRequest.SerializeToString, + ('google.bigtable.v1.BigtableService', 'ReadModifyWriteRow'): gcloud.bigtable._generated.bigtable_service_messages_pb2.ReadModifyWriteRowRequest.SerializeToString, + ('google.bigtable.v1.BigtableService', 'ReadRows'): gcloud.bigtable._generated.bigtable_service_messages_pb2.ReadRowsRequest.SerializeToString, + ('google.bigtable.v1.BigtableService', 'SampleRowKeys'): gcloud.bigtable._generated.bigtable_service_messages_pb2.SampleRowKeysRequest.SerializeToString, + } + response_deserializers = { + ('google.bigtable.v1.BigtableService', 'CheckAndMutateRow'): gcloud.bigtable._generated.bigtable_service_messages_pb2.CheckAndMutateRowResponse.FromString, + ('google.bigtable.v1.BigtableService', 'MutateRow'): google.protobuf.empty_pb2.Empty.FromString, + ('google.bigtable.v1.BigtableService', 'MutateRows'): gcloud.bigtable._generated.bigtable_service_messages_pb2.MutateRowsResponse.FromString, + 
('google.bigtable.v1.BigtableService', 'ReadModifyWriteRow'): gcloud.bigtable._generated.bigtable_data_pb2.Row.FromString, + ('google.bigtable.v1.BigtableService', 'ReadRows'): gcloud.bigtable._generated.bigtable_service_messages_pb2.ReadRowsResponse.FromString, + ('google.bigtable.v1.BigtableService', 'SampleRowKeys'): gcloud.bigtable._generated.bigtable_service_messages_pb2.SampleRowKeysResponse.FromString, + } + cardinalities = { + 'CheckAndMutateRow': cardinality.Cardinality.UNARY_UNARY, + 'MutateRow': cardinality.Cardinality.UNARY_UNARY, + 'MutateRows': cardinality.Cardinality.UNARY_UNARY, + 'ReadModifyWriteRow': cardinality.Cardinality.UNARY_UNARY, + 'ReadRows': cardinality.Cardinality.UNARY_STREAM, + 'SampleRowKeys': cardinality.Cardinality.UNARY_STREAM, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.bigtable.v1.BigtableService', cardinalities, options=stub_options) +# @@protoc_insertion_point(module_scope) diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_table_data_pb2.py b/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_table_data_pb2.py new file mode 100644 index 0000000..fd47b56 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_table_data_pb2.py @@ -0,0 +1,377 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/bigtable/admin/table/v1/bigtable_table_data.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/bigtable/admin/table/v1/bigtable_table_data.proto', + package='google.bigtable.admin.table.v1', + syntax='proto3', + serialized_pb=b'\n8google/bigtable/admin/table/v1/bigtable_table_data.proto\x12\x1egoogle.bigtable.admin.table.v1\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\"\xfd\x02\n\x05Table\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x11\x63urrent_operation\x18\x02 \x01(\x0b\x32\x1d.google.longrunning.Operation\x12R\n\x0f\x63olumn_families\x18\x03 \x03(\x0b\x32\x39.google.bigtable.admin.table.v1.Table.ColumnFamiliesEntry\x12O\n\x0bgranularity\x18\x04 \x01(\x0e\x32:.google.bigtable.admin.table.v1.Table.TimestampGranularity\x1a\x63\n\x13\x43olumnFamiliesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12;\n\x05value\x18\x02 \x01(\x0b\x32,.google.bigtable.admin.table.v1.ColumnFamily:\x02\x38\x01\"\"\n\x14TimestampGranularity\x12\n\n\x06MILLIS\x10\x00\"l\n\x0c\x43olumnFamily\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x15\n\rgc_expression\x18\x02 \x01(\t\x12\x37\n\x07gc_rule\x18\x03 \x01(\x0b\x32&.google.bigtable.admin.table.v1.GcRule\"\xed\x02\n\x06GcRule\x12\x1a\n\x10max_num_versions\x18\x01 \x01(\x05H\x00\x12,\n\x07max_age\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00\x12K\n\x0cintersection\x18\x03 
\x01(\x0b\x32\x33.google.bigtable.admin.table.v1.GcRule.IntersectionH\x00\x12=\n\x05union\x18\x04 \x01(\x0b\x32,.google.bigtable.admin.table.v1.GcRule.UnionH\x00\x1a\x45\n\x0cIntersection\x12\x35\n\x05rules\x18\x01 \x03(\x0b\x32&.google.bigtable.admin.table.v1.GcRule\x1a>\n\x05Union\x12\x35\n\x05rules\x18\x01 \x03(\x0b\x32&.google.bigtable.admin.table.v1.GcRuleB\x06\n\x04ruleB>\n\"com.google.bigtable.admin.table.v1B\x16\x42igtableTableDataProtoP\x01\x62\x06proto3' + , + dependencies=[google_dot_longrunning_dot_operations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_TABLE_TIMESTAMPGRANULARITY = _descriptor.EnumDescriptor( + name='TimestampGranularity', + full_name='google.bigtable.admin.table.v1.Table.TimestampGranularity', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='MILLIS', index=0, number=0, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=509, + serialized_end=543, +) +_sym_db.RegisterEnumDescriptor(_TABLE_TIMESTAMPGRANULARITY) + + +_TABLE_COLUMNFAMILIESENTRY = _descriptor.Descriptor( + name='ColumnFamiliesEntry', + full_name='google.bigtable.admin.table.v1.Table.ColumnFamiliesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.bigtable.admin.table.v1.Table.ColumnFamiliesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.bigtable.admin.table.v1.Table.ColumnFamiliesEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), b'8\001'), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=408, + serialized_end=507, +) + +_TABLE = _descriptor.Descriptor( + name='Table', + full_name='google.bigtable.admin.table.v1.Table', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.admin.table.v1.Table.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='current_operation', full_name='google.bigtable.admin.table.v1.Table.current_operation', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='column_families', full_name='google.bigtable.admin.table.v1.Table.column_families', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='granularity', full_name='google.bigtable.admin.table.v1.Table.granularity', index=3, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TABLE_COLUMNFAMILIESENTRY, ], + enum_types=[ + _TABLE_TIMESTAMPGRANULARITY, + ], + options=None, + is_extendable=False, 
+ syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=162, + serialized_end=543, +) + + +_COLUMNFAMILY = _descriptor.Descriptor( + name='ColumnFamily', + full_name='google.bigtable.admin.table.v1.ColumnFamily', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.admin.table.v1.ColumnFamily.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='gc_expression', full_name='google.bigtable.admin.table.v1.ColumnFamily.gc_expression', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='gc_rule', full_name='google.bigtable.admin.table.v1.ColumnFamily.gc_rule', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=545, + serialized_end=653, +) + + +_GCRULE_INTERSECTION = _descriptor.Descriptor( + name='Intersection', + full_name='google.bigtable.admin.table.v1.GcRule.Intersection', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='rules', full_name='google.bigtable.admin.table.v1.GcRule.Intersection.rules', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=880, + serialized_end=949, +) + +_GCRULE_UNION = _descriptor.Descriptor( + name='Union', + full_name='google.bigtable.admin.table.v1.GcRule.Union', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='rules', full_name='google.bigtable.admin.table.v1.GcRule.Union.rules', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=951, + serialized_end=1013, +) + +_GCRULE = _descriptor.Descriptor( + name='GcRule', + full_name='google.bigtable.admin.table.v1.GcRule', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='max_num_versions', full_name='google.bigtable.admin.table.v1.GcRule.max_num_versions', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='max_age', full_name='google.bigtable.admin.table.v1.GcRule.max_age', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='intersection', full_name='google.bigtable.admin.table.v1.GcRule.intersection', 
index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='union', full_name='google.bigtable.admin.table.v1.GcRule.union', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_GCRULE_INTERSECTION, _GCRULE_UNION, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='rule', full_name='google.bigtable.admin.table.v1.GcRule.rule', + index=0, containing_type=None, fields=[]), + ], + serialized_start=656, + serialized_end=1021, +) + +_TABLE_COLUMNFAMILIESENTRY.fields_by_name['value'].message_type = _COLUMNFAMILY +_TABLE_COLUMNFAMILIESENTRY.containing_type = _TABLE +_TABLE.fields_by_name['current_operation'].message_type = google_dot_longrunning_dot_operations__pb2._OPERATION +_TABLE.fields_by_name['column_families'].message_type = _TABLE_COLUMNFAMILIESENTRY +_TABLE.fields_by_name['granularity'].enum_type = _TABLE_TIMESTAMPGRANULARITY +_TABLE_TIMESTAMPGRANULARITY.containing_type = _TABLE +_COLUMNFAMILY.fields_by_name['gc_rule'].message_type = _GCRULE +_GCRULE_INTERSECTION.fields_by_name['rules'].message_type = _GCRULE +_GCRULE_INTERSECTION.containing_type = _GCRULE +_GCRULE_UNION.fields_by_name['rules'].message_type = _GCRULE +_GCRULE_UNION.containing_type = _GCRULE +_GCRULE.fields_by_name['max_age'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_GCRULE.fields_by_name['intersection'].message_type = _GCRULE_INTERSECTION +_GCRULE.fields_by_name['union'].message_type = _GCRULE_UNION +_GCRULE.oneofs_by_name['rule'].fields.append( + 
_GCRULE.fields_by_name['max_num_versions']) +_GCRULE.fields_by_name['max_num_versions'].containing_oneof = _GCRULE.oneofs_by_name['rule'] +_GCRULE.oneofs_by_name['rule'].fields.append( + _GCRULE.fields_by_name['max_age']) +_GCRULE.fields_by_name['max_age'].containing_oneof = _GCRULE.oneofs_by_name['rule'] +_GCRULE.oneofs_by_name['rule'].fields.append( + _GCRULE.fields_by_name['intersection']) +_GCRULE.fields_by_name['intersection'].containing_oneof = _GCRULE.oneofs_by_name['rule'] +_GCRULE.oneofs_by_name['rule'].fields.append( + _GCRULE.fields_by_name['union']) +_GCRULE.fields_by_name['union'].containing_oneof = _GCRULE.oneofs_by_name['rule'] +DESCRIPTOR.message_types_by_name['Table'] = _TABLE +DESCRIPTOR.message_types_by_name['ColumnFamily'] = _COLUMNFAMILY +DESCRIPTOR.message_types_by_name['GcRule'] = _GCRULE + +Table = _reflection.GeneratedProtocolMessageType('Table', (_message.Message,), dict( + + ColumnFamiliesEntry = _reflection.GeneratedProtocolMessageType('ColumnFamiliesEntry', (_message.Message,), dict( + DESCRIPTOR = _TABLE_COLUMNFAMILIESENTRY, + __module__ = 'google.bigtable.admin.table.v1.bigtable_table_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.Table.ColumnFamiliesEntry) + )) + , + DESCRIPTOR = _TABLE, + __module__ = 'google.bigtable.admin.table.v1.bigtable_table_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.Table) + )) +_sym_db.RegisterMessage(Table) +_sym_db.RegisterMessage(Table.ColumnFamiliesEntry) + +ColumnFamily = _reflection.GeneratedProtocolMessageType('ColumnFamily', (_message.Message,), dict( + DESCRIPTOR = _COLUMNFAMILY, + __module__ = 'google.bigtable.admin.table.v1.bigtable_table_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.ColumnFamily) + )) +_sym_db.RegisterMessage(ColumnFamily) + +GcRule = _reflection.GeneratedProtocolMessageType('GcRule', (_message.Message,), dict( + + Intersection = 
_reflection.GeneratedProtocolMessageType('Intersection', (_message.Message,), dict( + DESCRIPTOR = _GCRULE_INTERSECTION, + __module__ = 'google.bigtable.admin.table.v1.bigtable_table_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.GcRule.Intersection) + )) + , + + Union = _reflection.GeneratedProtocolMessageType('Union', (_message.Message,), dict( + DESCRIPTOR = _GCRULE_UNION, + __module__ = 'google.bigtable.admin.table.v1.bigtable_table_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.GcRule.Union) + )) + , + DESCRIPTOR = _GCRULE, + __module__ = 'google.bigtable.admin.table.v1.bigtable_table_data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.GcRule) + )) +_sym_db.RegisterMessage(GcRule) +_sym_db.RegisterMessage(GcRule.Intersection) +_sym_db.RegisterMessage(GcRule.Union) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), b'\n\"com.google.bigtable.admin.table.v1B\026BigtableTableDataProtoP\001') +_TABLE_COLUMNFAMILIESENTRY.has_options = True +_TABLE_COLUMNFAMILIESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), b'8\001') +# @@protoc_insertion_point(module_scope) diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_table_service_messages_pb2.py b/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_table_service_messages_pb2.py new file mode 100644 index 0000000..582dfed --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_table_service_messages_pb2.py @@ -0,0 +1,389 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/bigtable/admin/table/v1/bigtable_table_service_messages.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from gcloud.bigtable._generated import bigtable_table_data_pb2 as google_dot_bigtable_dot_admin_dot_table_dot_v1_dot_bigtable__table__data__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/bigtable/admin/table/v1/bigtable_table_service_messages.proto', + package='google.bigtable.admin.table.v1', + syntax='proto3', + serialized_pb=b'\nDgoogle/bigtable/admin/table/v1/bigtable_table_service_messages.proto\x12\x1egoogle.bigtable.admin.table.v1\x1a\x38google/bigtable/admin/table/v1/bigtable_table_data.proto\"\x86\x01\n\x12\x43reateTableRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x10\n\x08table_id\x18\x02 \x01(\t\x12\x34\n\x05table\x18\x03 \x01(\x0b\x32%.google.bigtable.admin.table.v1.Table\x12\x1a\n\x12initial_split_keys\x18\x04 \x03(\t\"!\n\x11ListTablesRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"K\n\x12ListTablesResponse\x12\x35\n\x06tables\x18\x01 \x03(\x0b\x32%.google.bigtable.admin.table.v1.Table\"\x1f\n\x0fGetTableRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\"\n\x12\x44\x65leteTableRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"2\n\x12RenameTableRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06new_id\x18\x02 \x01(\t\"\x88\x01\n\x19\x43reateColumnFamilyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10\x63olumn_family_id\x18\x02 \x01(\t\x12\x43\n\rcolumn_family\x18\x03 \x01(\x0b\x32,.google.bigtable.admin.table.v1.ColumnFamily\")\n\x19\x44\x65leteColumnFamilyRequest\x12\x0c\n\x04name\x18\x01 \x01(\tBI\n\"com.google.bigtable.admin.table.v1B!BigtableTableServiceMessagesProtoP\x01\x62\x06proto3' + , + 
dependencies=[google_dot_bigtable_dot_admin_dot_table_dot_v1_dot_bigtable__table__data__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_CREATETABLEREQUEST = _descriptor.Descriptor( + name='CreateTableRequest', + full_name='google.bigtable.admin.table.v1.CreateTableRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.admin.table.v1.CreateTableRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='table_id', full_name='google.bigtable.admin.table.v1.CreateTableRequest.table_id', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='table', full_name='google.bigtable.admin.table.v1.CreateTableRequest.table', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='initial_split_keys', full_name='google.bigtable.admin.table.v1.CreateTableRequest.initial_split_keys', index=3, + number=4, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=163, + serialized_end=297, +) + + +_LISTTABLESREQUEST = 
_descriptor.Descriptor( + name='ListTablesRequest', + full_name='google.bigtable.admin.table.v1.ListTablesRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.admin.table.v1.ListTablesRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=299, + serialized_end=332, +) + + +_LISTTABLESRESPONSE = _descriptor.Descriptor( + name='ListTablesResponse', + full_name='google.bigtable.admin.table.v1.ListTablesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='tables', full_name='google.bigtable.admin.table.v1.ListTablesResponse.tables', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=334, + serialized_end=409, +) + + +_GETTABLEREQUEST = _descriptor.Descriptor( + name='GetTableRequest', + full_name='google.bigtable.admin.table.v1.GetTableRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.admin.table.v1.GetTableRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=411, + serialized_end=442, +) + + +_DELETETABLEREQUEST = _descriptor.Descriptor( + name='DeleteTableRequest', + full_name='google.bigtable.admin.table.v1.DeleteTableRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.admin.table.v1.DeleteTableRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=444, + serialized_end=478, +) + + +_RENAMETABLEREQUEST = _descriptor.Descriptor( + name='RenameTableRequest', + full_name='google.bigtable.admin.table.v1.RenameTableRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.admin.table.v1.RenameTableRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='new_id', full_name='google.bigtable.admin.table.v1.RenameTableRequest.new_id', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + 
enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=480, + serialized_end=530, +) + + +_CREATECOLUMNFAMILYREQUEST = _descriptor.Descriptor( + name='CreateColumnFamilyRequest', + full_name='google.bigtable.admin.table.v1.CreateColumnFamilyRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.admin.table.v1.CreateColumnFamilyRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='column_family_id', full_name='google.bigtable.admin.table.v1.CreateColumnFamilyRequest.column_family_id', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='column_family', full_name='google.bigtable.admin.table.v1.CreateColumnFamilyRequest.column_family', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=533, + serialized_end=669, +) + + +_DELETECOLUMNFAMILYREQUEST = _descriptor.Descriptor( + name='DeleteColumnFamilyRequest', + full_name='google.bigtable.admin.table.v1.DeleteColumnFamilyRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', 
full_name='google.bigtable.admin.table.v1.DeleteColumnFamilyRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=671, + serialized_end=712, +) + +_CREATETABLEREQUEST.fields_by_name['table'].message_type = google_dot_bigtable_dot_admin_dot_table_dot_v1_dot_bigtable__table__data__pb2._TABLE +_LISTTABLESRESPONSE.fields_by_name['tables'].message_type = google_dot_bigtable_dot_admin_dot_table_dot_v1_dot_bigtable__table__data__pb2._TABLE +_CREATECOLUMNFAMILYREQUEST.fields_by_name['column_family'].message_type = google_dot_bigtable_dot_admin_dot_table_dot_v1_dot_bigtable__table__data__pb2._COLUMNFAMILY +DESCRIPTOR.message_types_by_name['CreateTableRequest'] = _CREATETABLEREQUEST +DESCRIPTOR.message_types_by_name['ListTablesRequest'] = _LISTTABLESREQUEST +DESCRIPTOR.message_types_by_name['ListTablesResponse'] = _LISTTABLESRESPONSE +DESCRIPTOR.message_types_by_name['GetTableRequest'] = _GETTABLEREQUEST +DESCRIPTOR.message_types_by_name['DeleteTableRequest'] = _DELETETABLEREQUEST +DESCRIPTOR.message_types_by_name['RenameTableRequest'] = _RENAMETABLEREQUEST +DESCRIPTOR.message_types_by_name['CreateColumnFamilyRequest'] = _CREATECOLUMNFAMILYREQUEST +DESCRIPTOR.message_types_by_name['DeleteColumnFamilyRequest'] = _DELETECOLUMNFAMILYREQUEST + +CreateTableRequest = _reflection.GeneratedProtocolMessageType('CreateTableRequest', (_message.Message,), dict( + DESCRIPTOR = _CREATETABLEREQUEST, + __module__ = 'google.bigtable.admin.table.v1.bigtable_table_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.CreateTableRequest) + )) +_sym_db.RegisterMessage(CreateTableRequest) + 
+ListTablesRequest = _reflection.GeneratedProtocolMessageType('ListTablesRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTTABLESREQUEST, + __module__ = 'google.bigtable.admin.table.v1.bigtable_table_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.ListTablesRequest) + )) +_sym_db.RegisterMessage(ListTablesRequest) + +ListTablesResponse = _reflection.GeneratedProtocolMessageType('ListTablesResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTTABLESRESPONSE, + __module__ = 'google.bigtable.admin.table.v1.bigtable_table_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.ListTablesResponse) + )) +_sym_db.RegisterMessage(ListTablesResponse) + +GetTableRequest = _reflection.GeneratedProtocolMessageType('GetTableRequest', (_message.Message,), dict( + DESCRIPTOR = _GETTABLEREQUEST, + __module__ = 'google.bigtable.admin.table.v1.bigtable_table_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.GetTableRequest) + )) +_sym_db.RegisterMessage(GetTableRequest) + +DeleteTableRequest = _reflection.GeneratedProtocolMessageType('DeleteTableRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETETABLEREQUEST, + __module__ = 'google.bigtable.admin.table.v1.bigtable_table_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.DeleteTableRequest) + )) +_sym_db.RegisterMessage(DeleteTableRequest) + +RenameTableRequest = _reflection.GeneratedProtocolMessageType('RenameTableRequest', (_message.Message,), dict( + DESCRIPTOR = _RENAMETABLEREQUEST, + __module__ = 'google.bigtable.admin.table.v1.bigtable_table_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.RenameTableRequest) + )) +_sym_db.RegisterMessage(RenameTableRequest) + +CreateColumnFamilyRequest = _reflection.GeneratedProtocolMessageType('CreateColumnFamilyRequest', (_message.Message,), dict( + DESCRIPTOR 
= _CREATECOLUMNFAMILYREQUEST, + __module__ = 'google.bigtable.admin.table.v1.bigtable_table_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.CreateColumnFamilyRequest) + )) +_sym_db.RegisterMessage(CreateColumnFamilyRequest) + +DeleteColumnFamilyRequest = _reflection.GeneratedProtocolMessageType('DeleteColumnFamilyRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETECOLUMNFAMILYREQUEST, + __module__ = 'google.bigtable.admin.table.v1.bigtable_table_service_messages_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.DeleteColumnFamilyRequest) + )) +_sym_db.RegisterMessage(DeleteColumnFamilyRequest) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), b'\n\"com.google.bigtable.admin.table.v1B!BigtableTableServiceMessagesProtoP\001') +# @@protoc_insertion_point(module_scope) diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_table_service_pb2.py b/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_table_service_pb2.py new file mode 100644 index 0000000..c77a092 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated/bigtable_table_service_pb2.py @@ -0,0 +1,203 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/bigtable/admin/table/v1/bigtable_table_service.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from gcloud.bigtable._generated import bigtable_table_data_pb2 as google_dot_bigtable_dot_admin_dot_table_dot_v1_dot_bigtable__table__data__pb2 +from gcloud.bigtable._generated import bigtable_table_service_messages_pb2 as google_dot_bigtable_dot_admin_dot_table_dot_v1_dot_bigtable__table__service__messages__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/bigtable/admin/table/v1/bigtable_table_service.proto', + package='google.bigtable.admin.table.v1', + syntax='proto3', + 
serialized_pb=b'\n;google/bigtable/admin/table/v1/bigtable_table_service.proto\x12\x1egoogle.bigtable.admin.table.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x38google/bigtable/admin/table/v1/bigtable_table_data.proto\x1a\x44google/bigtable/admin/table/v1/bigtable_table_service_messages.proto\x1a\x1bgoogle/protobuf/empty.proto2\x89\x0b\n\x14\x42igtableTableService\x12\xa4\x01\n\x0b\x43reateTable\x12\x32.google.bigtable.admin.table.v1.CreateTableRequest\x1a%.google.bigtable.admin.table.v1.Table\":\x82\xd3\xe4\x93\x02\x34\"//v1/{name=projects/*/zones/*/clusters/*}/tables:\x01*\x12\xac\x01\n\nListTables\x12\x31.google.bigtable.admin.table.v1.ListTablesRequest\x1a\x32.google.bigtable.admin.table.v1.ListTablesResponse\"7\x82\xd3\xe4\x93\x02\x31\x12//v1/{name=projects/*/zones/*/clusters/*}/tables\x12\x9d\x01\n\x08GetTable\x12/.google.bigtable.admin.table.v1.GetTableRequest\x1a%.google.bigtable.admin.table.v1.Table\"9\x82\xd3\xe4\x93\x02\x33\x12\x31/v1/{name=projects/*/zones/*/clusters/*/tables/*}\x12\x94\x01\n\x0b\x44\x65leteTable\x12\x32.google.bigtable.admin.table.v1.DeleteTableRequest\x1a\x16.google.protobuf.Empty\"9\x82\xd3\xe4\x93\x02\x33*1/v1/{name=projects/*/zones/*/clusters/*/tables/*}\x12\x9e\x01\n\x0bRenameTable\x12\x32.google.bigtable.admin.table.v1.RenameTableRequest\x1a\x16.google.protobuf.Empty\"C\x82\xd3\xe4\x93\x02=\"8/v1/{name=projects/*/zones/*/clusters/*/tables/*}:rename:\x01*\x12\xca\x01\n\x12\x43reateColumnFamily\x12\x39.google.bigtable.admin.table.v1.CreateColumnFamilyRequest\x1a,.google.bigtable.admin.table.v1.ColumnFamily\"K\x82\xd3\xe4\x93\x02\x45\"@/v1/{name=projects/*/zones/*/clusters/*/tables/*}/columnFamilies:\x01*\x12\xbf\x01\n\x12UpdateColumnFamily\x12,.google.bigtable.admin.table.v1.ColumnFamily\x1a,.google.bigtable.admin.table.v1.ColumnFamily\"M\x82\xd3\xe4\x93\x02G\x1a\x42/v1/{name=projects/*/zones/*/clusters/*/tables/*/columnFamilies/*}:\x01*\x12\xb3\x01\n\x12\x44\x65leteColumnFamily\x12\x39.google.bigtable.admin.table.v1.DeleteColumnFami
lyRequest\x1a\x16.google.protobuf.Empty\"J\x82\xd3\xe4\x93\x02\x44*B/v1/{name=projects/*/zones/*/clusters/*/tables/*/columnFamilies/*}BB\n\"com.google.bigtable.admin.table.v1B\x1a\x42igtableTableServicesProtoP\x01\x62\x06proto3' + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_bigtable_dot_admin_dot_table_dot_v1_dot_bigtable__table__data__pb2.DESCRIPTOR,google_dot_bigtable_dot_admin_dot_table_dot_v1_dot_bigtable__table__service__messages__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), b'\n\"com.google.bigtable.admin.table.v1B\032BigtableTableServicesProtoP\001') +import abc +from grpc.beta import implementations as beta_implementations +from grpc.framework.common import cardinality +from grpc.framework.interfaces.face import utilities as face_utilities + +class BetaBigtableTableServiceServicer(object): + """""" + __metaclass__ = abc.ABCMeta + @abc.abstractmethod + def CreateTable(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def ListTables(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def GetTable(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def DeleteTable(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def RenameTable(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def CreateColumnFamily(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def UpdateColumnFamily(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def DeleteColumnFamily(self, request, context): + raise NotImplementedError() + +class BetaBigtableTableServiceStub(object): + """The interface to which stubs will conform.""" + __metaclass__ = abc.ABCMeta + @abc.abstractmethod + def 
CreateTable(self, request, timeout): + raise NotImplementedError() + CreateTable.future = None + @abc.abstractmethod + def ListTables(self, request, timeout): + raise NotImplementedError() + ListTables.future = None + @abc.abstractmethod + def GetTable(self, request, timeout): + raise NotImplementedError() + GetTable.future = None + @abc.abstractmethod + def DeleteTable(self, request, timeout): + raise NotImplementedError() + DeleteTable.future = None + @abc.abstractmethod + def RenameTable(self, request, timeout): + raise NotImplementedError() + RenameTable.future = None + @abc.abstractmethod + def CreateColumnFamily(self, request, timeout): + raise NotImplementedError() + CreateColumnFamily.future = None + @abc.abstractmethod + def UpdateColumnFamily(self, request, timeout): + raise NotImplementedError() + UpdateColumnFamily.future = None + @abc.abstractmethod + def DeleteColumnFamily(self, request, timeout): + raise NotImplementedError() + DeleteColumnFamily.future = None + +def beta_create_BigtableTableService_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + import gcloud.bigtable._generated.bigtable_table_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_table_data_pb2 + import gcloud.bigtable._generated.bigtable_table_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_table_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_table_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_table_data_pb2 + import gcloud.bigtable._generated.bigtable_table_service_messages_pb2 + import google.protobuf.empty_pb2 + import gcloud.bigtable._generated.bigtable_table_service_messages_pb2 + import google.protobuf.empty_pb2 + import gcloud.bigtable._generated.bigtable_table_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_table_data_pb2 + import gcloud.bigtable._generated.bigtable_table_data_pb2 + import gcloud.bigtable._generated.bigtable_table_data_pb2 + 
import gcloud.bigtable._generated.bigtable_table_service_messages_pb2 + import google.protobuf.empty_pb2 + request_deserializers = { + ('google.bigtable.admin.table.v1.BigtableTableService', 'CreateColumnFamily'): gcloud.bigtable._generated.bigtable_table_service_messages_pb2.CreateColumnFamilyRequest.FromString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'CreateTable'): gcloud.bigtable._generated.bigtable_table_service_messages_pb2.CreateTableRequest.FromString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'DeleteColumnFamily'): gcloud.bigtable._generated.bigtable_table_service_messages_pb2.DeleteColumnFamilyRequest.FromString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'DeleteTable'): gcloud.bigtable._generated.bigtable_table_service_messages_pb2.DeleteTableRequest.FromString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'GetTable'): gcloud.bigtable._generated.bigtable_table_service_messages_pb2.GetTableRequest.FromString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'ListTables'): gcloud.bigtable._generated.bigtable_table_service_messages_pb2.ListTablesRequest.FromString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'RenameTable'): gcloud.bigtable._generated.bigtable_table_service_messages_pb2.RenameTableRequest.FromString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'UpdateColumnFamily'): gcloud.bigtable._generated.bigtable_table_data_pb2.ColumnFamily.FromString, + } + response_serializers = { + ('google.bigtable.admin.table.v1.BigtableTableService', 'CreateColumnFamily'): gcloud.bigtable._generated.bigtable_table_data_pb2.ColumnFamily.SerializeToString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'CreateTable'): gcloud.bigtable._generated.bigtable_table_data_pb2.Table.SerializeToString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'DeleteColumnFamily'): google.protobuf.empty_pb2.Empty.SerializeToString, + 
('google.bigtable.admin.table.v1.BigtableTableService', 'DeleteTable'): google.protobuf.empty_pb2.Empty.SerializeToString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'GetTable'): gcloud.bigtable._generated.bigtable_table_data_pb2.Table.SerializeToString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'ListTables'): gcloud.bigtable._generated.bigtable_table_service_messages_pb2.ListTablesResponse.SerializeToString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'RenameTable'): google.protobuf.empty_pb2.Empty.SerializeToString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'UpdateColumnFamily'): gcloud.bigtable._generated.bigtable_table_data_pb2.ColumnFamily.SerializeToString, + } + method_implementations = { + ('google.bigtable.admin.table.v1.BigtableTableService', 'CreateColumnFamily'): face_utilities.unary_unary_inline(servicer.CreateColumnFamily), + ('google.bigtable.admin.table.v1.BigtableTableService', 'CreateTable'): face_utilities.unary_unary_inline(servicer.CreateTable), + ('google.bigtable.admin.table.v1.BigtableTableService', 'DeleteColumnFamily'): face_utilities.unary_unary_inline(servicer.DeleteColumnFamily), + ('google.bigtable.admin.table.v1.BigtableTableService', 'DeleteTable'): face_utilities.unary_unary_inline(servicer.DeleteTable), + ('google.bigtable.admin.table.v1.BigtableTableService', 'GetTable'): face_utilities.unary_unary_inline(servicer.GetTable), + ('google.bigtable.admin.table.v1.BigtableTableService', 'ListTables'): face_utilities.unary_unary_inline(servicer.ListTables), + ('google.bigtable.admin.table.v1.BigtableTableService', 'RenameTable'): face_utilities.unary_unary_inline(servicer.RenameTable), + ('google.bigtable.admin.table.v1.BigtableTableService', 'UpdateColumnFamily'): face_utilities.unary_unary_inline(servicer.UpdateColumnFamily), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, 
thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + +def beta_create_BigtableTableService_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + import gcloud.bigtable._generated.bigtable_table_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_table_data_pb2 + import gcloud.bigtable._generated.bigtable_table_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_table_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_table_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_table_data_pb2 + import gcloud.bigtable._generated.bigtable_table_service_messages_pb2 + import google.protobuf.empty_pb2 + import gcloud.bigtable._generated.bigtable_table_service_messages_pb2 + import google.protobuf.empty_pb2 + import gcloud.bigtable._generated.bigtable_table_service_messages_pb2 + import gcloud.bigtable._generated.bigtable_table_data_pb2 + import gcloud.bigtable._generated.bigtable_table_data_pb2 + import gcloud.bigtable._generated.bigtable_table_data_pb2 + import gcloud.bigtable._generated.bigtable_table_service_messages_pb2 + import google.protobuf.empty_pb2 + request_serializers = { + ('google.bigtable.admin.table.v1.BigtableTableService', 'CreateColumnFamily'): gcloud.bigtable._generated.bigtable_table_service_messages_pb2.CreateColumnFamilyRequest.SerializeToString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'CreateTable'): gcloud.bigtable._generated.bigtable_table_service_messages_pb2.CreateTableRequest.SerializeToString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'DeleteColumnFamily'): gcloud.bigtable._generated.bigtable_table_service_messages_pb2.DeleteColumnFamilyRequest.SerializeToString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'DeleteTable'): 
gcloud.bigtable._generated.bigtable_table_service_messages_pb2.DeleteTableRequest.SerializeToString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'GetTable'): gcloud.bigtable._generated.bigtable_table_service_messages_pb2.GetTableRequest.SerializeToString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'ListTables'): gcloud.bigtable._generated.bigtable_table_service_messages_pb2.ListTablesRequest.SerializeToString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'RenameTable'): gcloud.bigtable._generated.bigtable_table_service_messages_pb2.RenameTableRequest.SerializeToString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'UpdateColumnFamily'): gcloud.bigtable._generated.bigtable_table_data_pb2.ColumnFamily.SerializeToString, + } + response_deserializers = { + ('google.bigtable.admin.table.v1.BigtableTableService', 'CreateColumnFamily'): gcloud.bigtable._generated.bigtable_table_data_pb2.ColumnFamily.FromString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'CreateTable'): gcloud.bigtable._generated.bigtable_table_data_pb2.Table.FromString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'DeleteColumnFamily'): google.protobuf.empty_pb2.Empty.FromString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'DeleteTable'): google.protobuf.empty_pb2.Empty.FromString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'GetTable'): gcloud.bigtable._generated.bigtable_table_data_pb2.Table.FromString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'ListTables'): gcloud.bigtable._generated.bigtable_table_service_messages_pb2.ListTablesResponse.FromString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'RenameTable'): google.protobuf.empty_pb2.Empty.FromString, + ('google.bigtable.admin.table.v1.BigtableTableService', 'UpdateColumnFamily'): gcloud.bigtable._generated.bigtable_table_data_pb2.ColumnFamily.FromString, + } + cardinalities = { + 'CreateColumnFamily': 
cardinality.Cardinality.UNARY_UNARY, + 'CreateTable': cardinality.Cardinality.UNARY_UNARY, + 'DeleteColumnFamily': cardinality.Cardinality.UNARY_UNARY, + 'DeleteTable': cardinality.Cardinality.UNARY_UNARY, + 'GetTable': cardinality.Cardinality.UNARY_UNARY, + 'ListTables': cardinality.Cardinality.UNARY_UNARY, + 'RenameTable': cardinality.Cardinality.UNARY_UNARY, + 'UpdateColumnFamily': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.bigtable.admin.table.v1.BigtableTableService', cardinalities, options=stub_options) +# @@protoc_insertion_point(module_scope) diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated/operations_grpc_pb2.py b/env/Lib/site-packages/gcloud/bigtable/_generated/operations_grpc_pb2.py new file mode 100644 index 0000000..e4911b3 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated/operations_grpc_pb2.py @@ -0,0 +1,100 @@ +import abc +from grpc.beta import implementations as beta_implementations +from grpc.framework.common import cardinality +from grpc.framework.interfaces.face import utilities as face_utilities + +class BetaOperationsServicer(object): + """""" + __metaclass__ = abc.ABCMeta + @abc.abstractmethod + def GetOperation(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def ListOperations(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def CancelOperation(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def DeleteOperation(self, request, context): + raise NotImplementedError() + +class BetaOperationsStub(object): + """The interface to which stubs will conform.""" + __metaclass__ = abc.ABCMeta + @abc.abstractmethod + def GetOperation(self, 
request, timeout): + raise NotImplementedError() + GetOperation.future = None + @abc.abstractmethod + def ListOperations(self, request, timeout): + raise NotImplementedError() + ListOperations.future = None + @abc.abstractmethod + def CancelOperation(self, request, timeout): + raise NotImplementedError() + CancelOperation.future = None + @abc.abstractmethod + def DeleteOperation(self, request, timeout): + raise NotImplementedError() + DeleteOperation.future = None + +def beta_create_Operations_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + import google.longrunning.operations_pb2 + import google.longrunning.operations_pb2 + import google.longrunning.operations_pb2 + import google.longrunning.operations_pb2 + import google.longrunning.operations_pb2 + import google.protobuf.empty_pb2 + import google.longrunning.operations_pb2 + import google.protobuf.empty_pb2 + request_deserializers = { + ('google.longrunning.Operations', 'CancelOperation'): google.longrunning.operations_pb2.CancelOperationRequest.FromString, + ('google.longrunning.Operations', 'DeleteOperation'): google.longrunning.operations_pb2.DeleteOperationRequest.FromString, + ('google.longrunning.Operations', 'GetOperation'): google.longrunning.operations_pb2.GetOperationRequest.FromString, + ('google.longrunning.Operations', 'ListOperations'): google.longrunning.operations_pb2.ListOperationsRequest.FromString, + } + response_serializers = { + ('google.longrunning.Operations', 'CancelOperation'): google.protobuf.empty_pb2.Empty.SerializeToString, + ('google.longrunning.Operations', 'DeleteOperation'): google.protobuf.empty_pb2.Empty.SerializeToString, + ('google.longrunning.Operations', 'GetOperation'): google.longrunning.operations_pb2.Operation.SerializeToString, + ('google.longrunning.Operations', 'ListOperations'): google.longrunning.operations_pb2.ListOperationsResponse.SerializeToString, + } + method_implementations = { + ('google.longrunning.Operations', 
'CancelOperation'): face_utilities.unary_unary_inline(servicer.CancelOperation), + ('google.longrunning.Operations', 'DeleteOperation'): face_utilities.unary_unary_inline(servicer.DeleteOperation), + ('google.longrunning.Operations', 'GetOperation'): face_utilities.unary_unary_inline(servicer.GetOperation), + ('google.longrunning.Operations', 'ListOperations'): face_utilities.unary_unary_inline(servicer.ListOperations), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + +def beta_create_Operations_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + import google.longrunning.operations_pb2 + import google.longrunning.operations_pb2 + import google.longrunning.operations_pb2 + import google.longrunning.operations_pb2 + import google.longrunning.operations_pb2 + import google.protobuf.empty_pb2 + import google.longrunning.operations_pb2 + import google.protobuf.empty_pb2 + request_serializers = { + ('google.longrunning.Operations', 'CancelOperation'): google.longrunning.operations_pb2.CancelOperationRequest.SerializeToString, + ('google.longrunning.Operations', 'DeleteOperation'): google.longrunning.operations_pb2.DeleteOperationRequest.SerializeToString, + ('google.longrunning.Operations', 'GetOperation'): google.longrunning.operations_pb2.GetOperationRequest.SerializeToString, + ('google.longrunning.Operations', 'ListOperations'): google.longrunning.operations_pb2.ListOperationsRequest.SerializeToString, + } + response_deserializers = { + ('google.longrunning.Operations', 'CancelOperation'): google.protobuf.empty_pb2.Empty.FromString, + ('google.longrunning.Operations', 'DeleteOperation'): google.protobuf.empty_pb2.Empty.FromString, + 
('google.longrunning.Operations', 'GetOperation'): google.longrunning.operations_pb2.Operation.FromString, + ('google.longrunning.Operations', 'ListOperations'): google.longrunning.operations_pb2.ListOperationsResponse.FromString, + } + cardinalities = { + 'CancelOperation': cardinality.Cardinality.UNARY_UNARY, + 'DeleteOperation': cardinality.Cardinality.UNARY_UNARY, + 'GetOperation': cardinality.Cardinality.UNARY_UNARY, + 'ListOperations': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.longrunning.Operations', cardinalities, options=stub_options) diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated_v2/__init__.py b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/__init__.py new file mode 100644 index 0000000..ad35adc --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Generated protobuf modules for Google Cloud Bigtable API.""" diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_bigtable.proto b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_bigtable.proto new file mode 100644 index 0000000..49e27ca --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_bigtable.proto @@ -0,0 +1,321 @@ +// Copyright 2016 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.bigtable.v2; + +import "google/api/annotations.proto"; +import "google/bigtable/v2/data.proto"; +import "google/protobuf/wrappers.proto"; +import "google/rpc/status.proto"; + +option java_multiple_files = true; +option java_outer_classname = "BigtableProto"; +option java_package = "com.google.bigtable.v2"; + + +// Service for reading from and writing to existing Bigtable tables. +service Bigtable { + // Streams back the contents of all requested rows, optionally + // applying the same Reader filter to each. Depending on their size, + // rows and cells may be broken up across multiple responses, but + // atomicity of each row will still be preserved. See the + // ReadRowsResponse documentation for details. + rpc ReadRows(ReadRowsRequest) returns (stream ReadRowsResponse) { + option (google.api.http) = { post: "/v2/{table_name=projects/*/instances/*/tables/*}:readRows" body: "*" }; + } + + // Returns a sample of row keys in the table. 
The returned row keys will + // delimit contiguous sections of the table of approximately equal size, + // which can be used to break up the data for distributed tasks like + // mapreduces. + rpc SampleRowKeys(SampleRowKeysRequest) returns (stream SampleRowKeysResponse) { + option (google.api.http) = { get: "/v2/{table_name=projects/*/instances/*/tables/*}:sampleRowKeys" }; + } + + // Mutates a row atomically. Cells already present in the row are left + // unchanged unless explicitly changed by `mutation`. + rpc MutateRow(MutateRowRequest) returns (MutateRowResponse) { + option (google.api.http) = { post: "/v2/{table_name=projects/*/instances/*/tables/*}:mutateRow" body: "*" }; + } + + // Mutates multiple rows in a batch. Each individual row is mutated + // atomically as in MutateRow, but the entire batch is not executed + // atomically. + rpc MutateRows(MutateRowsRequest) returns (stream MutateRowsResponse) { + option (google.api.http) = { post: "/v2/{table_name=projects/*/instances/*/tables/*}:mutateRows" body: "*" }; + } + + // Mutates a row atomically based on the output of a predicate Reader filter. + rpc CheckAndMutateRow(CheckAndMutateRowRequest) returns (CheckAndMutateRowResponse) { + option (google.api.http) = { post: "/v2/{table_name=projects/*/instances/*/tables/*}:checkAndMutateRow" body: "*" }; + } + + // Modifies a row atomically. The method reads the latest existing timestamp + // and value from the specified columns and writes a new entry based on + // pre-defined read/modify/write rules. The new value for the timestamp is the + // greater of the existing timestamp or the current server time. The method + // returns the new contents of all modified cells. + rpc ReadModifyWriteRow(ReadModifyWriteRowRequest) returns (ReadModifyWriteRowResponse) { + option (google.api.http) = { post: "/v2/{table_name=projects/*/instances/*/tables/*}:readModifyWriteRow" body: "*" }; + } +} + +// Request message for Bigtable.ReadRows. 
+message ReadRowsRequest { + // The unique name of the table from which to read. + // Values are of the form + // projects/<project>/instances/<instance>/tables/<table> + string table_name = 1; + + // The row keys and/or ranges to read. If not specified, reads from all rows. + RowSet rows = 2; + + // The filter to apply to the contents of the specified row(s). If unset, + // reads the entirety of each row. + RowFilter filter = 3; + + // The read will terminate after committing to N rows' worth of results. The + // default (zero) is to return all results. + int64 rows_limit = 4; +} + +// Response message for Bigtable.ReadRows. +message ReadRowsResponse { + // Specifies a piece of a row's contents returned as part of the read + // response stream. + message CellChunk { + // The row key for this chunk of data. If the row key is empty, + // this CellChunk is a continuation of the same row as the previous + // CellChunk in the response stream, even if that CellChunk was in a + // previous ReadRowsResponse message. + bytes row_key = 1; + + // The column family name for this chunk of data. If this message + // is not present this CellChunk is a continuation of the same column + // family as the previous CellChunk. The empty string can occur as a + // column family name in a response so clients must check + // explicitly for the presence of this message, not just for + // `family_name.value` being non-empty. + google.protobuf.StringValue family_name = 2; + + // The column qualifier for this chunk of data. If this message + // is not present, this CellChunk is a continuation of the same column + // as the previous CellChunk. Column qualifiers may be empty so + // clients must check for the presence of this message, not just + // for `qualifier.value` being non-empty. + google.protobuf.BytesValue qualifier = 3; + + // The cell's stored timestamp, which also uniquely identifies it + // within its column. 
Values are always expressed in + // microseconds, but individual tables may set a coarser + // granularity to further restrict the allowed values. For + // example, a table which specifies millisecond granularity will + // only allow values of `timestamp_micros` which are multiples of + // 1000. Timestamps are only set in the first CellChunk per cell + // (for cells split into multiple chunks). + int64 timestamp_micros = 4; + + // Labels applied to the cell by a + // [RowFilter][google.bigtable.v2.RowFilter]. Labels are only set + // on the first CellChunk per cell. + repeated string labels = 5; + + // The value stored in the cell. Cell values can be split across + // multiple CellChunks. In that case only the value field will be + // set in CellChunks after the first: the timestamp and labels + // will only be present in the first CellChunk, even if the first + // CellChunk came in a previous ReadRowsResponse. + bytes value = 6; + + // If this CellChunk is part of a chunked cell value and this is + // not the final chunk of that cell, value_size will be set to the + // total length of the cell value. The client can use this size + // to pre-allocate memory to hold the full cell value. + int32 value_size = 7; + + oneof row_status { + // Indicates that the client should drop all previous chunks for + // `row_key`, as it will be re-read from the beginning. + bool reset_row = 8; + + // Indicates that the client can safely process all previous chunks for + // `row_key`, as its data has been fully read. + bool commit_row = 9; + } + } + + repeated CellChunk chunks = 1; + + // Optionally the server might return the row key of the last row it + // has scanned. The client can use this to construct a more + // efficient retry request if needed: any row keys or portions of + // ranges less than this row key can be dropped from the request. 
+ // This is primarily useful for cases where the server has read a + // lot of data that was filtered out since the last committed row + // key, allowing the client to skip that work on a retry. + bytes last_scanned_row_key = 2; +} + +// Request message for Bigtable.SampleRowKeys. +message SampleRowKeysRequest { + // The unique name of the table from which to sample row keys. + // Values are of the form + // projects/<project>/instances/<instance>/tables/<table> + string table_name = 1; +} + +// Response message for Bigtable.SampleRowKeys. +message SampleRowKeysResponse { + // Sorted streamed sequence of sample row keys in the table. The table might + // have contents before the first row key in the list and after the last one, + // but a key containing the empty string indicates "end of table" and will be + // the last response given, if present. + // Note that row keys in this list may not have ever been written to or read + // from, and users should therefore not make any assumptions about the row key + // structure that are specific to their use case. + bytes row_key = 1; + + // Approximate total storage space used by all rows in the table which precede + // `row_key`. Buffering the contents of all rows between two subsequent + // samples would require space roughly equal to the difference in their + // `offset_bytes` fields. + int64 offset_bytes = 2; +} + +// Request message for Bigtable.MutateRow. +message MutateRowRequest { + // The unique name of the table to which the mutation should be applied. + // Values are of the form + // projects/<project>/instances/<instance>/tables/<table> + string table_name = 1; + + // The key of the row to which the mutation should be applied. + bytes row_key = 2; + + // Changes to be atomically applied to the specified row. Entries are applied + // in order, meaning that earlier mutations can be masked by later ones. + // Must contain at least one entry and at most 100000. 
+ repeated Mutation mutations = 3; +} + +// Response message for Bigtable.MutateRow. +message MutateRowResponse { + +} + +// Request message for BigtableService.MutateRows. +message MutateRowsRequest { + message Entry { + // The key of the row to which the `mutations` should be applied. + bytes row_key = 1; + + // Changes to be atomically applied to the specified row. Mutations are + // applied in order, meaning that earlier mutations can be masked by + // later ones. + // You must specify at least one mutation. + repeated Mutation mutations = 2; + } + + // The unique name of the table to which the mutations should be applied. + string table_name = 1; + + // The row keys and corresponding mutations to be applied in bulk. + // Each entry is applied as an atomic mutation, but the entries may be + // applied in arbitrary order (even between entries for the same row). + // At least one entry must be specified, and in total the entries can + // contain at most 100000 mutations. + repeated Entry entries = 2; +} + +// Response message for BigtableService.MutateRows. +message MutateRowsResponse { + message Entry { + // The index into the original request's `entries` list of the Entry + // for which a result is being reported. + int64 index = 1; + + // The result of the request Entry identified by `index`. + // Depending on how requests are batched during execution, it is possible + // for one Entry to fail due to an error with another Entry. In the event + // that this occurs, the same error will be reported for both entries. + google.rpc.Status status = 2; + } + + // One or more results for Entries from the batch request. + repeated Entry entries = 1; +} + +// Request message for Bigtable.CheckAndMutateRow. +message CheckAndMutateRowRequest { + // The unique name of the table to which the conditional mutation should be + // applied. 
+ // Values are of the form + // projects/<project>/instances/<instance>/tables/<table> + string table_name = 1; + + // The key of the row to which the conditional mutation should be applied. + bytes row_key = 2; + + // The filter to be applied to the contents of the specified row. Depending + // on whether or not any results are yielded, either `true_mutations` or + // `false_mutations` will be executed. If unset, checks that the row contains + // any values at all. + RowFilter predicate_filter = 6; + + // Changes to be atomically applied to the specified row if `predicate_filter` + // yields at least one cell when applied to `row_key`. Entries are applied in + // order, meaning that earlier mutations can be masked by later ones. + // Must contain at least one entry if `false_mutations` is empty, and at most + // 100000. + repeated Mutation true_mutations = 4; + + // Changes to be atomically applied to the specified row if `predicate_filter` + // does not yield any cells when applied to `row_key`. Entries are applied in + // order, meaning that earlier mutations can be masked by later ones. + // Must contain at least one entry if `true_mutations` is empty, and at most + // 100000. + repeated Mutation false_mutations = 5; +} + +// Response message for Bigtable.CheckAndMutateRow. +message CheckAndMutateRowResponse { + // Whether or not the request's `predicate_filter` yielded any results for + // the specified row. + bool predicate_matched = 1; +} + +// Request message for Bigtable.ReadModifyWriteRow. +message ReadModifyWriteRowRequest { + // The unique name of the table to which the read/modify/write rules should be + // applied. + // Values are of the form + // projects/<project>/instances/<instance>/tables/<table> + string table_name = 1; + + // The key of the row to which the read/modify/write rules should be applied. + bytes row_key = 2; + + // Rules specifying how the specified row's contents are to be transformed + // into writes. 
Entries are applied in order, meaning that earlier rules will + // affect the results of later ones. + repeated ReadModifyWriteRule rules = 3; +} + +// Response message for Bigtable.ReadModifyWriteRow. +message ReadModifyWriteRowResponse { + // A Row containing the new contents of all cells modified by the request. + Row row = 1; +} diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_bigtable_instance_admin.proto b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_bigtable_instance_admin.proto new file mode 100644 index 0000000..bda5d21 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_bigtable_instance_admin.proto @@ -0,0 +1,232 @@ +// Copyright 2016 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.bigtable.admin.v2; + +import "google/api/annotations.proto"; +import "google/bigtable/admin/v2/instance.proto"; +import "google/longrunning/operations.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/timestamp.proto"; + +option java_multiple_files = true; +option java_outer_classname = "BigtableInstanceAdminProto"; +option java_package = "com.google.bigtable.admin.v2"; + + +// Service for creating, configuring, and deleting Cloud Bigtable Instances and +// Clusters. Provides access to the Instance and Cluster schemas only, not the +// tables metadata or data stored in those tables. 
+service BigtableInstanceAdmin { + // Create an instance within a project. + rpc CreateInstance(CreateInstanceRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { post: "/v2/{parent=projects/*}/instances" body: "*" }; + } + + // Gets information about an instance. + rpc GetInstance(GetInstanceRequest) returns (Instance) { + option (google.api.http) = { get: "/v2/{name=projects/*/instances/*}" }; + } + + // Lists information about instances in a project. + rpc ListInstances(ListInstancesRequest) returns (ListInstancesResponse) { + option (google.api.http) = { get: "/v2/{parent=projects/*}/instances" }; + } + + // Updates an instance within a project. + rpc UpdateInstance(Instance) returns (Instance) { + option (google.api.http) = { put: "/v2/{name=projects/*/instances/*}" body: "*" }; + } + + // Delete an instance from a project. + rpc DeleteInstance(DeleteInstanceRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { delete: "/v2/{name=projects/*/instances/*}" }; + } + + // Creates a cluster within an instance. + rpc CreateCluster(CreateClusterRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { post: "/v2/{parent=projects/*/instances/*}/clusters" body: "cluster" }; + } + + // Gets information about a cluster. + rpc GetCluster(GetClusterRequest) returns (Cluster) { + option (google.api.http) = { get: "/v2/{name=projects/*/instances/*/clusters/*}" }; + } + + // Lists information about clusters in an instance. + rpc ListClusters(ListClustersRequest) returns (ListClustersResponse) { + option (google.api.http) = { get: "/v2/{parent=projects/*/instances/*}/clusters" }; + } + + // Updates a cluster within an instance. + rpc UpdateCluster(Cluster) returns (google.longrunning.Operation) { + option (google.api.http) = { put: "/v2/{name=projects/*/instances/*/clusters/*}" body: "*" }; + } + + // Deletes a cluster from an instance. 
+ rpc DeleteCluster(DeleteClusterRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { delete: "/v2/{name=projects/*/instances/*/clusters/*}" }; + } +} + +// Request message for BigtableInstanceAdmin.CreateInstance. +message CreateInstanceRequest { + // The unique name of the project in which to create the new instance. + // Values are of the form projects/<project> + string parent = 1; + + // The id to be used when referring to the new instance within its project, + // e.g. just the "myinstance" section of the full name + // "projects/myproject/instances/myinstance" + string instance_id = 2; + + // The instance to create. + // Fields marked "@OutputOnly" must be left blank. + Instance instance = 3; + + // The clusters to be created within the instance, mapped by desired + // cluster ID (e.g. just the "mycluster" part of the full name + // "projects/myproject/instances/myinstance/clusters/mycluster"). + // Fields marked "@OutputOnly" must be left blank. + // Currently exactly one cluster must be specified. + map<string, Cluster> clusters = 4; +} + +// Request message for BigtableInstanceAdmin.GetInstance. +message GetInstanceRequest { + // The unique name of the requested instance. Values are of the form + // projects/<project>/instances/<instance> + string name = 1; +} + +// Request message for BigtableInstanceAdmin.ListInstances. +message ListInstancesRequest { + // The unique name of the project for which a list of instances is requested. + // Values are of the form projects/<project> + string parent = 1; + + // The value of `next_page_token` returned by a previous call. + string page_token = 2; +} + +// Response message for BigtableInstanceAdmin.ListInstances. +message ListInstancesResponse { + // The list of requested instances. + repeated Instance instances = 1; + + // Locations from which Instance information could not be retrieved, + // due to an outage or some other transient condition.
+ // Instances whose Clusters are all in one of the failed locations + // may be missing from 'instances', and Instances with at least one + // Cluster in a failed location may only have partial information returned. + repeated string failed_locations = 2; + + // Set if not all instances could be returned in a single response. + // Pass this value to `page_token` in another request to get the next + // page of results. + string next_page_token = 3; +} + +// Request message for BigtableInstanceAdmin.DeleteInstance. +message DeleteInstanceRequest { + // The unique name of the instance to be deleted. + // Values are of the form projects//instances/ + string name = 1; +} + +// Request message for BigtableInstanceAdmin.CreateCluster. +message CreateClusterRequest { + // The unique name of the instance in which to create the new cluster. + // Values are of the form + // projects//instances//clusters/[a-z][-a-z0-9]* + string parent = 1; + + // The id to be used when referring to the new cluster within its instance, + // e.g. just the "mycluster" section of the full name + // "projects/myproject/instances/myinstance/clusters/mycluster" + string cluster_id = 2; + + // The cluster to be created. + // Fields marked "@OutputOnly" must be left blank. + Cluster cluster = 3; +} + +// Request message for BigtableInstanceAdmin.GetCluster. +message GetClusterRequest { + // The unique name of the requested cluster. Values are of the form + // projects//instances//clusters/ + string name = 1; +} + +// Request message for BigtableInstanceAdmin.ListClusters. +message ListClustersRequest { + // The unique name of the instance for which a list of clusters is requested. + // Values are of the form projects//instances/ + // Use = '-' to list Clusters for all Instances in a project, + // for example "projects/myproject/instances/-" + string parent = 1; + + // The value of `next_page_token` returned by a previous call. 
+ string page_token = 2; +} + +// Response message for BigtableInstanceAdmin.ListClusters. +message ListClustersResponse { + // The list of requested clusters. + repeated Cluster clusters = 1; + + // Locations from which Cluster information could not be retrieved, + // due to an outage or some other transient condition. + // Clusters from these locations may be missing from 'clusters', + // or may only have partial information returned. + repeated string failed_locations = 2; + + // Set if not all clusters could be returned in a single response. + // Pass this value to `page_token` in another request to get the next + // page of results. + string next_page_token = 3; +} + +// Request message for BigtableInstanceAdmin.DeleteCluster. +message DeleteClusterRequest { + // The unique name of the cluster to be deleted. Values are of the form + // projects//instances//clusters/ + string name = 1; +} + +// The metadata for the Operation returned by CreateInstance. +message CreateInstanceMetadata { + // The request that prompted the initiation of this CreateInstance operation. + CreateInstanceRequest original_request = 1; + + // The time at which the original request was received. + google.protobuf.Timestamp request_time = 2; + + // The time at which the operation failed or was completed successfully. + google.protobuf.Timestamp finish_time = 3; +} + +// The metadata for the Operation returned by UpdateCluster. +message UpdateClusterMetadata { + // The request that prompted the initiation of this UpdateCluster operation. + Cluster original_request = 1; + + // The time at which the original request was received. + google.protobuf.Timestamp request_time = 2; + + // The time at which the operation failed or was completed successfully. 
+ google.protobuf.Timestamp finish_time = 3; +} diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_bigtable_table_admin.proto b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_bigtable_table_admin.proto new file mode 100644 index 0000000..0a39e29 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_bigtable_table_admin.proto @@ -0,0 +1,195 @@ +// Copyright 2016 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.bigtable.admin.v2; + +import "google/api/annotations.proto"; +import "google/bigtable/admin/v2/table.proto"; +import "google/protobuf/empty.proto"; + +option java_multiple_files = true; +option java_outer_classname = "BigtableTableAdminProto"; +option java_package = "com.google.bigtable.admin.v2"; + + +// Service for creating, configuring, and deleting Cloud Bigtable tables. +// Provides access to the table schemas only, not the data stored within +// the tables. +service BigtableTableAdmin { + // Creates a new table in the specified instance. + // The table can be created with a full set of initial column families, + // specified in the request. + rpc CreateTable(CreateTableRequest) returns (Table) { + option (google.api.http) = { post: "/v2/{parent=projects/*/instances/*}/tables" body: "*" }; + } + + // Lists all tables served from a specified instance. 
+ rpc ListTables(ListTablesRequest) returns (ListTablesResponse) { + option (google.api.http) = { get: "/v2/{parent=projects/*/instances/*}/tables" }; + } + + // Gets metadata information about the specified table. + rpc GetTable(GetTableRequest) returns (Table) { + option (google.api.http) = { get: "/v2/{name=projects/*/instances/*/tables/*}" }; + } + + // Permanently deletes a specified table and all of its data. + rpc DeleteTable(DeleteTableRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { delete: "/v2/{name=projects/*/instances/*/tables/*}" }; + } + + // Atomically performs a series of column family modifications + // on the specified table. + rpc ModifyColumnFamilies(ModifyColumnFamiliesRequest) returns (Table) { + option (google.api.http) = { post: "/v2/{name=projects/*/instances/*/tables/*}:modifyColumnFamilies" body: "*" }; + } + + // Permanently drop/delete a row range from a specified table. The request can + // specify whether to delete all rows in a table, or only those that match a + // particular prefix. + rpc DropRowRange(DropRowRangeRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { post: "/v2/{name=projects/*/instances/*/tables/*}:dropRowRange" body: "*" }; + } +} + +// Request message for [google.bigtable.admin.v2.BigtableTableAdmin.CreateTable][google.bigtable.admin.v2.BigtableTableAdmin.CreateTable] +message CreateTableRequest { + // An initial split point for a newly created table. + message Split { + // Row key to use as an initial tablet boundary. + bytes key = 1; + } + + // The unique name of the instance in which to create the table. + // Values are of the form projects//instances/ + string parent = 1; + + // The name by which the new table should be referred to within the parent + // instance, e.g. "foobar" rather than "/tables/foobar". + string table_id = 2; + + // The Table to create. 
+ Table table = 3; + + // The optional list of row keys that will be used to initially split the + // table into several tablets (Tablets are similar to HBase regions). + // Given two split keys, "s1" and "s2", three tablets will be created, + // spanning the key ranges: [, s1), [s1, s2), [s2, ). + // + // Example: + // * Row keys := ["a", "apple", "custom", "customer_1", "customer_2", + // "other", "zz"] + // * initial_split_keys := ["apple", "customer_1", "customer_2", "other"] + // * Key assignment: + // - Tablet 1 [, apple) => {"a"}. + // - Tablet 2 [apple, customer_1) => {"apple", "custom"}. + // - Tablet 3 [customer_1, customer_2) => {"customer_1"}. + // - Tablet 4 [customer_2, other) => {"customer_2"}. + // - Tablet 5 [other, ) => {"other", "zz"}. + repeated Split initial_splits = 4; +} + +// Request message for [google.bigtable.admin.v2.BigtableTableAdmin.DropRowRange][google.bigtable.admin.v2.BigtableTableAdmin.DropRowRange] +message DropRowRangeRequest { + // The unique name of the table on which to drop a range of rows. + // Values are of the form projects//instances//tables/ + string name = 1; + + oneof target { + // Delete all rows that start with this row key prefix. Prefix cannot be + // zero length. + bytes row_key_prefix = 2; + + // Delete all rows in the table. Setting this to false is a no-op. + bool delete_all_data_from_table = 3; + } +} + +// Request message for [google.bigtable.admin.v2.BigtableTableAdmin.ListTables][google.bigtable.admin.v2.BigtableTableAdmin.ListTables] +message ListTablesRequest { + // The unique name of the instance for which tables should be listed. + // Values are of the form projects//instances/ + string parent = 1; + + // The view to be applied to the returned tables' fields. + // Defaults to NAME_ONLY if unspecified (no others are currently supported). + Table.View view = 2; + + // The value of `next_page_token` returned by a previous call. 
+ string page_token = 3; +} + +// Response message for [google.bigtable.admin.v2.BigtableTableAdmin.ListTables][google.bigtable.admin.v2.BigtableTableAdmin.ListTables] +message ListTablesResponse { + // The tables present in the requested cluster. + repeated Table tables = 1; + + // Set if not all tables could be returned in a single response. + // Pass this value to `page_token` in another request to get the next + // page of results. + string next_page_token = 2; +} + +// Request message for [google.bigtable.admin.v2.BigtableTableAdmin.GetTable][google.bigtable.admin.v2.BigtableTableAdmin.GetTable] +message GetTableRequest { + // The unique name of the requested table. + // Values are of the form projects/<project>/instances/<instance>/tables/<table>
+ string name = 1; + + // The view to be applied to the returned table's fields. + // Defaults to SCHEMA_ONLY if unspecified. + Table.View view = 2; +} + +// Request message for [google.bigtable.admin.v2.BigtableTableAdmin.DeleteTable][google.bigtable.admin.v2.BigtableTableAdmin.DeleteTable] +message DeleteTableRequest { + // The unique name of the table to be deleted. + // Values are of the form projects/<project>/instances/<instance>/tables/<table>
+ string name = 1; +} + +// Request message for [google.bigtable.admin.v2.BigtableTableAdmin.ModifyColumnFamilies][google.bigtable.admin.v2.BigtableTableAdmin.ModifyColumnFamilies] +message ModifyColumnFamiliesRequest { + // A create, update, or delete of a particular column family. + message Modification { + // The ID of the column family to be modified. + string id = 1; + + oneof mod { + // Create a new column family with the specified schema, or fail if + // one already exists with the given ID. + ColumnFamily create = 2; + + // Update an existing column family to the specified schema, or fail + // if no column family exists with the given ID. + ColumnFamily update = 3; + + // Drop (delete) the column family with the given ID, or fail if no such + // family exists. + bool drop = 4; + } + } + + // The unique name of the table whose families should be modified. + // Values are of the form projects/<project>/instances/<instance>/tables/<table>
    + string name = 1; + + // Modifications to be atomically applied to the specified table's families. + // Entries are applied in order, meaning that earlier modifications can be + // masked by later ones (in the case of repeated updates to the same family, + // for example). + repeated Modification modifications = 2; +} diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_common.proto b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_common.proto new file mode 100644 index 0000000..1912e03 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_common.proto @@ -0,0 +1,37 @@ +// Copyright 2016 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.bigtable.admin.v2; + +import "google/api/annotations.proto"; +import "google/protobuf/timestamp.proto"; + +option java_multiple_files = true; +option java_outer_classname = "CommonProto"; +option java_package = "com.google.bigtable.admin.v2"; + + +// Storage media types for persisting Bigtable data. +enum StorageType { + // The user did not specify a storage type. + STORAGE_TYPE_UNSPECIFIED = 0; + + // Flash (SSD) storage should be used. + SSD = 1; + + // Magnetic drive (HDD) storage should be used. 
+ HDD = 2; +} diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_data.proto b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_data.proto new file mode 100644 index 0000000..720f482 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_data.proto @@ -0,0 +1,532 @@ +// Copyright 2016 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.bigtable.v2; + +option java_multiple_files = true; +option java_outer_classname = "DataProto"; +option java_package = "com.google.bigtable.v2"; + + +// Specifies the complete (requested) contents of a single row of a table. +// Rows which exceed 256MiB in size cannot be read in full. +message Row { + // The unique key which identifies this row within its table. This is the same + // key that's used to identify the row in, for example, a MutateRowRequest. + // May contain any non-empty byte string up to 4KiB in length. + bytes key = 1; + + // May be empty, but only if the entire row is empty. + // The mutual ordering of column families is not specified. + repeated Family families = 2; +} + +// Specifies (some of) the contents of a single row/column family intersection +// of a table. +message Family { + // The unique key which identifies this family within its row. This is the + // same key that's used to identify the family in, for example, a RowFilter + // which sets its "family_name_regex_filter" field. 
+ // Must match `[-_.a-zA-Z0-9]+`, except that AggregatingRowProcessors may + // produce cells in a sentinel family with an empty name. + // Must be no greater than 64 characters in length. + string name = 1; + + // Must not be empty. Sorted in order of increasing "qualifier". + repeated Column columns = 2; +} + +// Specifies (some of) the contents of a single row/column intersection of a +// table. +message Column { + // The unique key which identifies this column within its family. This is the + // same key that's used to identify the column in, for example, a RowFilter + // which sets its `column_qualifier_regex_filter` field. + // May contain any byte string, including the empty string, up to 16kiB in + // length. + bytes qualifier = 1; + + // Must not be empty. Sorted in order of decreasing "timestamp_micros". + repeated Cell cells = 2; +} + +// Specifies (some of) the contents of a single row/column/timestamp of a table. +message Cell { + // The cell's stored timestamp, which also uniquely identifies it within + // its column. + // Values are always expressed in microseconds, but individual tables may set + // a coarser granularity to further restrict the allowed values. For + // example, a table which specifies millisecond granularity will only allow + // values of `timestamp_micros` which are multiples of 1000. + int64 timestamp_micros = 1; + + // The value stored in the cell. + // May contain any byte string, including the empty string, up to 100MiB in + // length. + bytes value = 2; + + // Labels applied to the cell by a [RowFilter][google.bigtable.v2.RowFilter]. + repeated string labels = 3; +} + +// Specifies a contiguous range of rows. +message RowRange { + // The row key at which to start the range. + // If neither field is set, interpreted as the empty string, inclusive. + oneof start_key { + // Used when giving an inclusive lower bound for the range. + bytes start_key_closed = 1; + + // Used when giving an exclusive lower bound for the range. 
+ bytes start_key_open = 2; + } + + // The row key at which to end the range. + // If neither field is set, interpreted as the infinite row key, exclusive. + oneof end_key { + // Used when giving an exclusive upper bound for the range. + bytes end_key_open = 3; + + // Used when giving an inclusive upper bound for the range. + bytes end_key_closed = 4; + } +} + +// Specifies a non-contiguous set of rows. +message RowSet { + // Single rows included in the set. + repeated bytes row_keys = 1; + + // Contiguous row ranges included in the set. + repeated RowRange row_ranges = 2; +} + +// Specifies a contiguous range of columns within a single column family. +// The range spans from <column_family>:<start_qualifier> to +// <column_family>:<end_qualifier>, where both bounds can be either +// inclusive or exclusive. +message ColumnRange { + // The name of the column family within which this range falls. + string family_name = 1; + + // The column qualifier at which to start the range (within `column_family`). + // If neither field is set, interpreted as the empty string, inclusive. + oneof start_qualifier { + // Used when giving an inclusive lower bound for the range. + bytes start_qualifier_closed = 2; + + // Used when giving an exclusive lower bound for the range. + bytes start_qualifier_open = 3; + } + + // The column qualifier at which to end the range (within `column_family`). + // If neither field is set, interpreted as the infinite string, exclusive. + oneof end_qualifier { + // Used when giving an inclusive upper bound for the range. + bytes end_qualifier_closed = 4; + + // Used when giving an exclusive upper bound for the range. + bytes end_qualifier_open = 5; + } +} + +// Specified a contiguous range of microsecond timestamps. +message TimestampRange { + // Inclusive lower bound. If left empty, interpreted as 0. + int64 start_timestamp_micros = 1; + + // Exclusive upper bound. If left empty, interpreted as infinity.
+ int64 end_timestamp_micros = 2; +} + +// Specifies a contiguous range of raw byte values. +message ValueRange { + // The value at which to start the range. + // If neither field is set, interpreted as the empty string, inclusive. + oneof start_value { + // Used when giving an inclusive lower bound for the range. + bytes start_value_closed = 1; + + // Used when giving an exclusive lower bound for the range. + bytes start_value_open = 2; + } + + // The value at which to end the range. + // If neither field is set, interpreted as the infinite string, exclusive. + oneof end_value { + // Used when giving an inclusive upper bound for the range. + bytes end_value_closed = 3; + + // Used when giving an exclusive upper bound for the range. + bytes end_value_open = 4; + } +} + +// Takes a row as input and produces an alternate view of the row based on +// specified rules. For example, a RowFilter might trim down a row to include +// just the cells from columns matching a given regular expression, or might +// return all the cells of a row but not their values. More complicated filters +// can be composed out of these components to express requests such as, "within +// every column of a particular family, give just the two most recent cells +// which are older than timestamp X." +// +// There are two broad categories of RowFilters (true filters and transformers), +// as well as two ways to compose simple filters into more complex ones +// (chains and interleaves). They work as follows: +// +// * True filters alter the input row by excluding some of its cells wholesale +// from the output row. An example of a true filter is the `value_regex_filter`, +// which excludes cells whose values don't match the specified pattern. All +// regex true filters use RE2 syntax (https://github.com/google/re2/wiki/Syntax) +// in raw byte mode (RE2::Latin1), and are evaluated as full matches. 
An +// important point to keep in mind is that `RE2(.)` is equivalent by default to +// `RE2([^\n])`, meaning that it does not match newlines. When attempting to +// match an arbitrary byte, you should therefore use the escape sequence `\C`, +// which may need to be further escaped as `\\C` in your client language. +// +// * Transformers alter the input row by changing the values of some of its +// cells in the output, without excluding them completely. Currently, the only +// supported transformer is the `strip_value_transformer`, which replaces every +// cell's value with the empty string. +// +// * Chains and interleaves are described in more detail in the +// RowFilter.Chain and RowFilter.Interleave documentation. +// +// The total serialized size of a RowFilter message must not +// exceed 4096 bytes, and RowFilters may not be nested within each other +// (in Chains or Interleaves) to a depth of more than 20. +message RowFilter { + // A RowFilter which sends rows through several RowFilters in sequence. + message Chain { + // The elements of "filters" are chained together to process the input row: + // in row -> f(0) -> intermediate row -> f(1) -> ... -> f(N) -> out row + // The full chain is executed atomically. + repeated RowFilter filters = 1; + } + + // A RowFilter which sends each row to each of several component + // RowFilters and interleaves the results. + message Interleave { + // The elements of "filters" all process a copy of the input row, and the + // results are pooled, sorted, and combined into a single output row. + // If multiple cells are produced with the same column and timestamp, + // they will all appear in the output row in an unspecified mutual order. 
+ // Consider the following example, with three filters: + // + // input row + // | + // ----------------------------------------------------- + // | | | + // f(0) f(1) f(2) + // | | | + // 1: foo,bar,10,x foo,bar,10,z far,bar,7,a + // 2: foo,blah,11,z far,blah,5,x far,blah,5,x + // | | | + // ----------------------------------------------------- + // | + // 1: foo,bar,10,z // could have switched with #2 + // 2: foo,bar,10,x // could have switched with #1 + // 3: foo,blah,11,z + // 4: far,bar,7,a + // 5: far,blah,5,x // identical to #6 + // 6: far,blah,5,x // identical to #5 + // + // All interleaved filters are executed atomically. + repeated RowFilter filters = 1; + } + + // A RowFilter which evaluates one of two possible RowFilters, depending on + // whether or not a predicate RowFilter outputs any cells from the input row. + // + // IMPORTANT NOTE: The predicate filter does not execute atomically with the + // true and false filters, which may lead to inconsistent or unexpected + // results. Additionally, Condition filters have poor performance, especially + // when filters are set for the false condition. + message Condition { + // If `predicate_filter` outputs any cells, then `true_filter` will be + // evaluated on the input row. Otherwise, `false_filter` will be evaluated. + RowFilter predicate_filter = 1; + + // The filter to apply to the input row if `predicate_filter` returns any + // results. If not provided, no results will be returned in the true case. + RowFilter true_filter = 2; + + // The filter to apply to the input row if `predicate_filter` does not + // return any results. If not provided, no results will be returned in the + // false case. + RowFilter false_filter = 3; + } + + // Which of the possible RowFilter types to apply. If none are set, this + // RowFilter returns all cells in the input row. + oneof filter { + // Applies several RowFilters to the data in sequence, progressively + // narrowing the results. 
+ Chain chain = 1; + + // Applies several RowFilters to the data in parallel and combines the + // results. + Interleave interleave = 2; + + // Applies one of two possible RowFilters to the data based on the output of + // a predicate RowFilter. + Condition condition = 3; + + // ADVANCED USE ONLY. + // Hook for introspection into the RowFilter. Outputs all cells directly to + // the output of the read rather than to any parent filter. Consider the + // following example: + // + // Chain( + // FamilyRegex("A"), + // Interleave( + // All(), + // Chain(Label("foo"), Sink()) + // ), + // QualifierRegex("B") + // ) + // + // A,A,1,w + // A,B,2,x + // B,B,4,z + // | + // FamilyRegex("A") + // | + // A,A,1,w + // A,B,2,x + // | + // +------------+-------------+ + // | | + // All() Label(foo) + // | | + // A,A,1,w A,A,1,w,labels:[foo] + // A,B,2,x A,B,2,x,labels:[foo] + // | | + // | Sink() --------------+ + // | | | + // +------------+ x------+ A,A,1,w,labels:[foo] + // | A,B,2,x,labels:[foo] + // A,A,1,w | + // A,B,2,x | + // | | + // QualifierRegex("B") | + // | | + // A,B,2,x | + // | | + // +--------------------------------+ + // | + // A,A,1,w,labels:[foo] + // A,B,2,x,labels:[foo] // could be switched + // A,B,2,x // could be switched + // + // Despite being excluded by the qualifier filter, a copy of every cell + // that reaches the sink is present in the final result. + // + // As with an [Interleave][google.bigtable.v2.RowFilter.Interleave], + // duplicate cells are possible, and appear in an unspecified mutual order. + // In this case we have a duplicate with column "A:B" and timestamp 2, + // because one copy passed through the all filter while the other was + // passed through the label and sink. Note that one copy has label "foo", + // while the other does not. + // + // Cannot be used within the `predicate_filter`, `true_filter`, or + // `false_filter` of a [Condition][google.bigtable.v2.RowFilter.Condition]. 
+ bool sink = 16; + + // Matches all cells, regardless of input. Functionally equivalent to + // leaving `filter` unset, but included for completeness. + bool pass_all_filter = 17; + + // Does not match any cells, regardless of input. Useful for temporarily + // disabling just part of a filter. + bool block_all_filter = 18; + + // Matches only cells from rows whose keys satisfy the given RE2 regex. In + // other words, passes through the entire row when the key matches, and + // otherwise produces an empty row. + // Note that, since row keys can contain arbitrary bytes, the `\C` escape + // sequence must be used if a true wildcard is desired. The `.` character + // will not match the new line character `\n`, which may be present in a + // binary key. + bytes row_key_regex_filter = 4; + + // Matches all cells from a row with probability p, and matches no cells + // from the row with probability 1-p. + double row_sample_filter = 14; + + // Matches only cells from columns whose families satisfy the given RE2 + // regex. For technical reasons, the regex must not contain the `:` + // character, even if it is not being used as a literal. + // Note that, since column families cannot contain the new line character + // `\n`, it is sufficient to use `.` as a full wildcard when matching + // column family names. + string family_name_regex_filter = 5; + + // Matches only cells from columns whose qualifiers satisfy the given RE2 + // regex. + // Note that, since column qualifiers can contain arbitrary bytes, the `\C` + // escape sequence must be used if a true wildcard is desired. The `.` + // character will not match the new line character `\n`, which may be + // present in a binary qualifier. + bytes column_qualifier_regex_filter = 6; + + // Matches only cells from columns within the given range. + ColumnRange column_range_filter = 7; + + // Matches only cells with timestamps within the given range. 
+ TimestampRange timestamp_range_filter = 8; + + // Matches only cells with values that satisfy the given regular expression. + // Note that, since cell values can contain arbitrary bytes, the `\C` escape + // sequence must be used if a true wildcard is desired. The `.` character + // will not match the new line character `\n`, which may be present in a + // binary value. + bytes value_regex_filter = 9; + + // Matches only cells with values that fall within the given range. + ValueRange value_range_filter = 15; + + // Skips the first N cells of each row, matching all subsequent cells. + // If duplicate cells are present, as is possible when using an Interleave, + // each copy of the cell is counted separately. + int32 cells_per_row_offset_filter = 10; + + // Matches only the first N cells of each row. + // If duplicate cells are present, as is possible when using an Interleave, + // each copy of the cell is counted separately. + int32 cells_per_row_limit_filter = 11; + + // Matches only the most recent N cells within each column. For example, + // if N=2, this filter would match column `foo:bar` at timestamps 10 and 9, + // skip all earlier cells in `foo:bar`, and then begin matching again in + // column `foo:bar2`. + // If duplicate cells are present, as is possible when using an Interleave, + // each copy of the cell is counted separately. + int32 cells_per_column_limit_filter = 12; + + // Replaces each cell's value with the empty string. + bool strip_value_transformer = 13; + + // Applies the given label to all cells in the output row. This allows + // the client to determine which results were produced from which part of + // the filter. + // + // Values must be at most 15 characters in length, and match the RE2 + // pattern `[a-z0-9\\-]+` + // + // Due to a technical limitation, it is not currently possible to apply + // multiple labels to a cell. As a result, a Chain may have no more than + // one sub-filter which contains a `apply_label_transformer`. 
It is okay for + // an Interleave to contain multiple `apply_label_transformers`, as they + // will be applied to separate copies of the input. This may be relaxed in + // the future. + string apply_label_transformer = 19; + } +} + +// Specifies a particular change to be made to the contents of a row. +message Mutation { + // A Mutation which sets the value of the specified cell. + message SetCell { + // The name of the family into which new data should be written. + // Must match `[-_.a-zA-Z0-9]+` + string family_name = 1; + + // The qualifier of the column into which new data should be written. + // Can be any byte string, including the empty string. + bytes column_qualifier = 2; + + // The timestamp of the cell into which new data should be written. + // Use -1 for current Bigtable server time. + // Otherwise, the client should set this value itself, noting that the + // default value is a timestamp of zero if the field is left unspecified. + // Values must match the granularity of the table (e.g. micros, millis). + int64 timestamp_micros = 3; + + // The value to be written into the specified cell. + bytes value = 4; + } + + // A Mutation which deletes cells from the specified column, optionally + // restricting the deletions to a given timestamp range. + message DeleteFromColumn { + // The name of the family from which cells should be deleted. + // Must match `[-_.a-zA-Z0-9]+` + string family_name = 1; + + // The qualifier of the column from which cells should be deleted. + // Can be any byte string, including the empty string. + bytes column_qualifier = 2; + + // The range of timestamps within which cells should be deleted. + TimestampRange time_range = 3; + } + + // A Mutation which deletes all cells from the specified column family. + message DeleteFromFamily { + // The name of the family from which cells should be deleted. + // Must match `[-_.a-zA-Z0-9]+` + string family_name = 1; + } + + // A Mutation which deletes all cells from the containing row. 
+ message DeleteFromRow { + + } + + // Which of the possible Mutation types to apply. + oneof mutation { + // Set a cell's value. + SetCell set_cell = 1; + + // Deletes cells from a column. + DeleteFromColumn delete_from_column = 2; + + // Deletes cells from a column family. + DeleteFromFamily delete_from_family = 3; + + // Deletes cells from the entire row. + DeleteFromRow delete_from_row = 4; + } +} + +// Specifies an atomic read/modify/write operation on the latest value of the +// specified column. +message ReadModifyWriteRule { + // The name of the family to which the read/modify/write should be applied. + // Must match `[-_.a-zA-Z0-9]+` + string family_name = 1; + + // The qualifier of the column to which the read/modify/write should be + // applied. + // Can be any byte string, including the empty string. + bytes column_qualifier = 2; + + // The rule used to determine the column's new latest value from its current + // latest value. + oneof rule { + // Rule specifying that `append_value` be appended to the existing value. + // If the targeted cell is unset, it will be treated as containing the + // empty string. + bytes append_value = 3; + + // Rule specifying that `increment_amount` be added to the existing value. + // If the targeted cell is unset, it will be treated as containing a zero. + // Otherwise, the targeted cell must contain an 8-byte value (interpreted + // as a 64-bit big-endian signed integer), or the entire request will fail. + int64 increment_amount = 4; + } +} diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_instance.proto b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_instance.proto new file mode 100644 index 0000000..4aa3f9d --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_instance.proto @@ -0,0 +1,113 @@ +// Copyright 2016 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.bigtable.admin.v2; + +import "google/api/annotations.proto"; +import "google/bigtable/admin/v2/common.proto"; + +option java_multiple_files = true; +option java_outer_classname = "InstanceProto"; +option java_package = "com.google.bigtable.admin.v2"; + + +// A collection of Bigtable [Tables][google.bigtable.admin.v2.Table] and +// the resources that serve them. +// All tables in an instance are served from a single +// [Cluster][google.bigtable.admin.v2.Cluster]. +message Instance { + // Possible states of an instance. + enum State { + // The state of the instance could not be determined. + STATE_NOT_KNOWN = 0; + + // The instance has been successfully created and can serve requests + // to its tables. + READY = 1; + + // The instance is currently being created, and may be destroyed + // if the creation process encounters an error. + CREATING = 2; + } + + // @OutputOnly + // The unique name of the instance. Values are of the form + // projects//instances/[a-z][a-z0-9\\-]+[a-z0-9] + string name = 1; + + // The descriptive name for this instance as it appears in UIs. + // Can be changed at any time, but should be kept globally unique + // to avoid confusion. + string display_name = 2; + + // + // The current state of the instance. + State state = 3; +} + +// A resizable group of nodes in a particular cloud location, capable +// of serving all [Tables][google.bigtable.admin.v2.Table] in the parent +// [Instance][google.bigtable.admin.v2.Instance]. +message Cluster { + // Possible states of a cluster. 
+ enum State { + // The state of the cluster could not be determined. + STATE_NOT_KNOWN = 0; + + // The cluster has been successfully created and is ready to serve requests. + READY = 1; + + // The cluster is currently being created, and may be destroyed + // if the creation process encounters an error. + // A cluster may not be able to serve requests while being created. + CREATING = 2; + + // The cluster is currently being resized, and may revert to its previous + // node count if the process encounters an error. + // A cluster is still capable of serving requests while being resized, + // but may exhibit performance as if its number of allocated nodes is + // between the starting and requested states. + RESIZING = 3; + + // The cluster has no backing nodes. The data (tables) still + // exist, but no operations can be performed on the cluster. + DISABLED = 4; + } + + // @OutputOnly + // The unique name of the cluster. Values are of the form + // projects//instances//clusters/[a-z][-a-z0-9]* + string name = 1; + + // @CreationOnly + // The location where this cluster's nodes and storage reside. For best + // performance, clients should be located as close as possible to this cluster. + // Currently only zones are supported, e.g. projects/*/locations/us-central1-b + string location = 2; + + // @OutputOnly + // The current state of the cluster. + State state = 3; + + // The number of nodes allocated to this cluster. More nodes enable higher + // throughput and more consistent performance. + int32 serve_nodes = 4; + + // @CreationOnly + // The type of storage used by this cluster to serve its + // parent instance's tables, unless explicitly overridden. 
+ StorageType default_storage_type = 5; +} diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_operations.proto b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_operations.proto new file mode 100644 index 0000000..a358d0a --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_operations.proto @@ -0,0 +1,144 @@ +// Copyright (c) 2015, Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.longrunning; + +import "google/api/annotations.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/empty.proto"; +import "google/rpc/status.proto"; + +option java_multiple_files = true; +option java_outer_classname = "OperationsProto"; +option java_package = "com.google.longrunning"; + + +// Manages long-running operations with an API service. +// +// When an API method normally takes long time to complete, it can be designed +// to return [Operation][google.longrunning.Operation] to the client, and the client can use this +// interface to receive the real response asynchronously by polling the +// operation resource, or using `google.watcher.v1.Watcher` interface to watch +// the response, or pass the operation resource to another API (such as Google +// Cloud Pub/Sub API) to receive the response. Any API service that returns +// long-running operations should implement the `Operations` interface so +// developers can have a consistent client experience. 
+service Operations { + // Gets the latest state of a long-running operation. Clients may use this + // method to poll the operation result at intervals as recommended by the API + // service. + rpc GetOperation(GetOperationRequest) returns (Operation) { + option (google.api.http) = { get: "/v1/{name=operations/**}" }; + } + + // Lists operations that match the specified filter in the request. If the + // server doesn't support this method, it returns + // `google.rpc.Code.UNIMPLEMENTED`. + rpc ListOperations(ListOperationsRequest) returns (ListOperationsResponse) { + option (google.api.http) = { get: "/v1/{name=operations}" }; + } + + // Starts asynchronous cancellation on a long-running operation. The server + // makes a best effort to cancel the operation, but success is not + // guaranteed. If the server doesn't support this method, it returns + // `google.rpc.Code.UNIMPLEMENTED`. Clients may use + // [Operations.GetOperation] or other methods to check whether the + // cancellation succeeded or the operation completed despite cancellation. + rpc CancelOperation(CancelOperationRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { post: "/v1/{name=operations/**}:cancel" body: "*" }; + } + + // Deletes a long-running operation. It indicates the client is no longer + // interested in the operation result. It does not cancel the operation. + rpc DeleteOperation(DeleteOperationRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { delete: "/v1/{name=operations/**}" }; + } +} + +// This resource represents a long-running operation that is the result of a +// network API call. +message Operation { + // The name of the operation resource, which is only unique within the same + // service that originally returns it. + string name = 1; + + // Some service-specific metadata associated with the operation. It typically + // contains progress information and common metadata such as create time. 
+ // Some services may not provide such metadata. Any method that returns a + // long-running operation should document the metadata type, if any. + google.protobuf.Any metadata = 2; + + // If the value is false, it means the operation is still in progress. + // If true, the operation is completed and the `result` is available. + bool done = 3; + + oneof result { + // The error result of the operation in case of failure. + google.rpc.Status error = 4; + + // The normal response of the operation in case of success. If the original + // method returns no data on success, such as `Delete`, the response will be + // `google.protobuf.Empty`. If the original method is standard + // `Get`/`Create`/`Update`, the response should be the resource. For other + // methods, the response should have the type `XxxResponse`, where `Xxx` + // is the original method name. For example, if the original method name + // is `TakeSnapshot()`, the inferred response type will be + // `TakeSnapshotResponse`. + google.protobuf.Any response = 5; + } +} + +// The request message for [Operations.GetOperation][google.longrunning.Operations.GetOperation]. +message GetOperationRequest { + // The name of the operation resource. + string name = 1; +} + +// The request message for [Operations.ListOperations][google.longrunning.Operations.ListOperations]. +message ListOperationsRequest { + // The name of the operation collection. + string name = 4; + + // The standard List filter. + string filter = 1; + + // The standard List page size. + int32 page_size = 2; + + // The standard List page token. + string page_token = 3; +} + +// The response message for [Operations.ListOperations][google.longrunning.Operations.ListOperations]. +message ListOperationsResponse { + // A list of operations that match the specified filter in the request. + repeated Operation operations = 1; + + // The standard List next-page token. 
+ string next_page_token = 2; +} + +// The request message for [Operations.CancelOperation][google.longrunning.Operations.CancelOperation]. +message CancelOperationRequest { + // The name of the operation resource to be cancelled. + string name = 1; +} + +// The request message for [Operations.DeleteOperation][google.longrunning.Operations.DeleteOperation]. +message DeleteOperationRequest { + // The name of the operation resource to be deleted. + string name = 1; +} diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_table.proto b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_table.proto new file mode 100644 index 0000000..63e4110 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/_table.proto @@ -0,0 +1,115 @@ +// Copyright 2016 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.bigtable.admin.v2; + +import "google/api/annotations.proto"; +import "google/protobuf/duration.proto"; + +option java_multiple_files = true; +option java_outer_classname = "TableProto"; +option java_package = "com.google.bigtable.admin.v2"; + + +// A collection of user data indexed by row, column, and timestamp. +// Each table is served using the resources of its parent cluster. +message Table { + // Possible timestamp granularities to use when keeping multiple versions + // of data in a table. + enum TimestampGranularity { + // The user did not specify a granularity. Should not be returned. 
+ // When specified during table creation, MILLIS will be used. + TIMESTAMP_GRANULARITY_UNSPECIFIED = 0; + + // The table keeps data versioned at a granularity of 1ms. + MILLIS = 1; + } + + // Defines a view over a table's fields. + enum View { + // Uses the default view for each method as documented in its request. + VIEW_UNSPECIFIED = 0; + + // Only populates `name`. + NAME_ONLY = 1; + + // Only populates `name` and fields related to the table's schema. + SCHEMA_VIEW = 2; + + // Populates all fields. + FULL = 4; + } + + // The unique name of the table. Values are of the form + // projects//instances//tables/[_a-zA-Z0-9][-_.a-zA-Z0-9]* + // Views: NAME_ONLY, SCHEMA_VIEW, REPLICATION_VIEW, FULL + // @OutputOnly + string name = 1; + + // The column families configured for this table, mapped by column family ID. + // Views: SCHEMA_VIEW, FULL + // @CreationOnly + map column_families = 3; + + // The granularity (e.g. MILLIS, MICROS) at which timestamps are stored in + // this table. Timestamps not matching the granularity will be rejected. + // If unspecified at creation time, the value will be set to MILLIS. + // Views: SCHEMA_VIEW, FULL + // @CreationOnly + TimestampGranularity granularity = 4; +} + +// A set of columns within a table which share a common configuration. +message ColumnFamily { + // Garbage collection rule specified as a protobuf. + // Must serialize to at most 500 bytes. + // + // NOTE: Garbage collection executes opportunistically in the background, and + // so it's possible for reads to return a cell even if it matches the active + // GC expression for its family. + GcRule gc_rule = 1; +} + +// Rule for determining which cells to delete during garbage collection. +message GcRule { + // A GcRule which deletes cells matching all of the given rules. + message Intersection { + // Only delete cells which would be deleted by every element of `rules`. + repeated GcRule rules = 1; + } + + // A GcRule which deletes cells matching any of the given rules. 
+ message Union { + // Delete cells which would be deleted by any element of `rules`. + repeated GcRule rules = 1; + } + + oneof rule { + // Delete all cells in a column except the most recent N. + int32 max_num_versions = 1; + + // Delete cells in a column older than the given age. + // Values must be at least one millisecond, and will be truncated to + // microsecond granularity. + google.protobuf.Duration max_age = 2; + + // Delete cells that would be deleted by every nested rule. + Intersection intersection = 3; + + // Delete cells that would be deleted by any nested rule. + Union union = 4; + } +} diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated_v2/bigtable_instance_admin_pb2.py b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/bigtable_instance_admin_pb2.py new file mode 100644 index 0000000..9da2364 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/bigtable_instance_admin_pb2.py @@ -0,0 +1,1061 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/bigtable/admin/v2/bigtable_instance_admin.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from gcloud.bigtable._generated_v2 import instance_pb2 as google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2 +from google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/bigtable/admin/v2/bigtable_instance_admin.proto', + package='google.bigtable.admin.v2', + syntax='proto3', + serialized_pb=_b('\n6google/bigtable/admin/v2/bigtable_instance_admin.proto\x12\x18google.bigtable.admin.v2\x1a\x1cgoogle/api/annotations.proto\x1a\'google/bigtable/admin/v2/instance.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x97\x02\n\x15\x43reateInstanceRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x13\n\x0binstance_id\x18\x02 \x01(\t\x12\x34\n\x08instance\x18\x03 \x01(\x0b\x32\".google.bigtable.admin.v2.Instance\x12O\n\x08\x63lusters\x18\x04 \x03(\x0b\x32=.google.bigtable.admin.v2.CreateInstanceRequest.ClustersEntry\x1aR\n\rClustersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x30\n\x05value\x18\x02 \x01(\x0b\x32!.google.bigtable.admin.v2.Cluster:\x02\x38\x01\"\"\n\x12GetInstanceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\":\n\x14ListInstancesRequest\x12\x0e\n\x06parent\x18\x01 
\x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\"\x81\x01\n\x15ListInstancesResponse\x12\x35\n\tinstances\x18\x01 \x03(\x0b\x32\".google.bigtable.admin.v2.Instance\x12\x18\n\x10\x66\x61iled_locations\x18\x02 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x03 \x01(\t\"%\n\x15\x44\x65leteInstanceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"n\n\x14\x43reateClusterRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\ncluster_id\x18\x02 \x01(\t\x12\x32\n\x07\x63luster\x18\x03 \x01(\x0b\x32!.google.bigtable.admin.v2.Cluster\"!\n\x11GetClusterRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"9\n\x13ListClustersRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\"~\n\x14ListClustersResponse\x12\x33\n\x08\x63lusters\x18\x01 \x03(\x0b\x32!.google.bigtable.admin.v2.Cluster\x12\x18\n\x10\x66\x61iled_locations\x18\x02 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x03 \x01(\t\"$\n\x14\x44\x65leteClusterRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\xc6\x01\n\x16\x43reateInstanceMetadata\x12I\n\x10original_request\x18\x01 \x01(\x0b\x32/.google.bigtable.admin.v2.CreateInstanceRequest\x12\x30\n\x0crequest_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x66inish_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xb7\x01\n\x15UpdateClusterMetadata\x12;\n\x10original_request\x18\x01 \x01(\x0b\x32!.google.bigtable.admin.v2.Cluster\x12\x30\n\x0crequest_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x66inish_time\x18\x03 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp2\xdb\x0b\n\x15\x42igtableInstanceAdmin\x12\x8e\x01\n\x0e\x43reateInstance\x12/.google.bigtable.admin.v2.CreateInstanceRequest\x1a\x1d.google.longrunning.Operation\",\x82\xd3\xe4\x93\x02&\"!/v2/{parent=projects/*}/instances:\x01*\x12\x8a\x01\n\x0bGetInstance\x12,.google.bigtable.admin.v2.GetInstanceRequest\x1a\".google.bigtable.admin.v2.Instance\")\x82\xd3\xe4\x93\x02#\x12!/v2/{name=projects/*/instances/*}\x12\x9b\x01\n\rListInstances\x12..google.bigtable.admin.v2.ListInstancesRequest\x1a/.google.bigtable.admin.v2.ListInstancesResponse\")\x82\xd3\xe4\x93\x02#\x12!/v2/{parent=projects/*}/instances\x12\x86\x01\n\x0eUpdateInstance\x12\".google.bigtable.admin.v2.Instance\x1a\".google.bigtable.admin.v2.Instance\",\x82\xd3\xe4\x93\x02&\x1a!/v2/{name=projects/*/instances/*}:\x01*\x12\x84\x01\n\x0e\x44\x65leteInstance\x12/.google.bigtable.admin.v2.DeleteInstanceRequest\x1a\x16.google.protobuf.Empty\")\x82\xd3\xe4\x93\x02#*!/v2/{name=projects/*/instances/*}\x12\x9d\x01\n\rCreateCluster\x12..google.bigtable.admin.v2.CreateClusterRequest\x1a\x1d.google.longrunning.Operation\"=\x82\xd3\xe4\x93\x02\x37\",/v2/{parent=projects/*/instances/*}/clusters:\x07\x63luster\x12\x92\x01\n\nGetCluster\x12+.google.bigtable.admin.v2.GetClusterRequest\x1a!.google.bigtable.admin.v2.Cluster\"4\x82\xd3\xe4\x93\x02.\x12,/v2/{name=projects/*/instances/*/clusters/*}\x12\xa3\x01\n\x0cListClusters\x12-.google.bigtable.admin.v2.ListClustersRequest\x1a..google.bigtable.admin.v2.ListClustersResponse\"4\x82\xd3\xe4\x93\x02.\x12,/v2/{parent=projects/*/instances/*}/clusters\x12\x8a\x01\n\rUpdateCluster\x12!.google.bigtable.admin.v2.Cluster\x1a\x1d.google.longrunning.Operation\"7\x82\xd3\xe4\x93\x02\x31\x1a,/v2/{name=projects/*/instances/*/clusters/*}:\x01*\x12\x8d\x01\n\rDeleteCluster\x12..google.bigtable.admin.v2.DeleteClusterRequest\x1a\x16.google.protobuf.Empty\"4\x82\xd3\xe4\x93\x02.*,/v2/{name=projects/*/instances/*/clusters/*}B<\n\x1c\x63om.google.bigtable.adm
in.v2B\x1a\x42igtableInstanceAdminProtoP\x01\x62\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.DESCRIPTOR,google_dot_longrunning_dot_operations__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_CREATEINSTANCEREQUEST_CLUSTERSENTRY = _descriptor.Descriptor( + name='ClustersEntry', + full_name='google.bigtable.admin.v2.CreateInstanceRequest.ClustersEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.bigtable.admin.v2.CreateInstanceRequest.ClustersEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.bigtable.admin.v2.CreateInstanceRequest.ClustersEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=452, + serialized_end=534, +) + +_CREATEINSTANCEREQUEST = _descriptor.Descriptor( + name='CreateInstanceRequest', + full_name='google.bigtable.admin.v2.CreateInstanceRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.bigtable.admin.v2.CreateInstanceRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + 
has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='instance_id', full_name='google.bigtable.admin.v2.CreateInstanceRequest.instance_id', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='instance', full_name='google.bigtable.admin.v2.CreateInstanceRequest.instance', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='clusters', full_name='google.bigtable.admin.v2.CreateInstanceRequest.clusters', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_CREATEINSTANCEREQUEST_CLUSTERSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=255, + serialized_end=534, +) + + +_GETINSTANCEREQUEST = _descriptor.Descriptor( + name='GetInstanceRequest', + full_name='google.bigtable.admin.v2.GetInstanceRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.admin.v2.GetInstanceRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=536, + serialized_end=570, +) + + +_LISTINSTANCESREQUEST = _descriptor.Descriptor( + name='ListInstancesRequest', + full_name='google.bigtable.admin.v2.ListInstancesRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.bigtable.admin.v2.ListInstancesRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.bigtable.admin.v2.ListInstancesRequest.page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=572, + serialized_end=630, +) + + +_LISTINSTANCESRESPONSE = _descriptor.Descriptor( + name='ListInstancesResponse', + full_name='google.bigtable.admin.v2.ListInstancesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='instances', full_name='google.bigtable.admin.v2.ListInstancesResponse.instances', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='failed_locations', 
full_name='google.bigtable.admin.v2.ListInstancesResponse.failed_locations', index=1, + number=2, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.bigtable.admin.v2.ListInstancesResponse.next_page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=633, + serialized_end=762, +) + + +_DELETEINSTANCEREQUEST = _descriptor.Descriptor( + name='DeleteInstanceRequest', + full_name='google.bigtable.admin.v2.DeleteInstanceRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.admin.v2.DeleteInstanceRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=764, + serialized_end=801, +) + + +_CREATECLUSTERREQUEST = _descriptor.Descriptor( + name='CreateClusterRequest', + full_name='google.bigtable.admin.v2.CreateClusterRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.bigtable.admin.v2.CreateClusterRequest.parent', index=0, + number=1, 
type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cluster_id', full_name='google.bigtable.admin.v2.CreateClusterRequest.cluster_id', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cluster', full_name='google.bigtable.admin.v2.CreateClusterRequest.cluster', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=803, + serialized_end=913, +) + + +_GETCLUSTERREQUEST = _descriptor.Descriptor( + name='GetClusterRequest', + full_name='google.bigtable.admin.v2.GetClusterRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.admin.v2.GetClusterRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=915, + serialized_end=948, +) + + +_LISTCLUSTERSREQUEST = _descriptor.Descriptor( + name='ListClustersRequest', + 
full_name='google.bigtable.admin.v2.ListClustersRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.bigtable.admin.v2.ListClustersRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.bigtable.admin.v2.ListClustersRequest.page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=950, + serialized_end=1007, +) + + +_LISTCLUSTERSRESPONSE = _descriptor.Descriptor( + name='ListClustersResponse', + full_name='google.bigtable.admin.v2.ListClustersResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='clusters', full_name='google.bigtable.admin.v2.ListClustersResponse.clusters', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='failed_locations', full_name='google.bigtable.admin.v2.ListClustersResponse.failed_locations', index=1, + number=2, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + 
name='next_page_token', full_name='google.bigtable.admin.v2.ListClustersResponse.next_page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1009, + serialized_end=1135, +) + + +_DELETECLUSTERREQUEST = _descriptor.Descriptor( + name='DeleteClusterRequest', + full_name='google.bigtable.admin.v2.DeleteClusterRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.admin.v2.DeleteClusterRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1137, + serialized_end=1173, +) + + +_CREATEINSTANCEMETADATA = _descriptor.Descriptor( + name='CreateInstanceMetadata', + full_name='google.bigtable.admin.v2.CreateInstanceMetadata', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='original_request', full_name='google.bigtable.admin.v2.CreateInstanceMetadata.original_request', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='request_time', 
full_name='google.bigtable.admin.v2.CreateInstanceMetadata.request_time', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='finish_time', full_name='google.bigtable.admin.v2.CreateInstanceMetadata.finish_time', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1176, + serialized_end=1374, +) + + +_UPDATECLUSTERMETADATA = _descriptor.Descriptor( + name='UpdateClusterMetadata', + full_name='google.bigtable.admin.v2.UpdateClusterMetadata', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='original_request', full_name='google.bigtable.admin.v2.UpdateClusterMetadata.original_request', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='request_time', full_name='google.bigtable.admin.v2.UpdateClusterMetadata.request_time', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='finish_time', full_name='google.bigtable.admin.v2.UpdateClusterMetadata.finish_time', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1377, + serialized_end=1560, +) + +_CREATEINSTANCEREQUEST_CLUSTERSENTRY.fields_by_name['value'].message_type = google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2._CLUSTER +_CREATEINSTANCEREQUEST_CLUSTERSENTRY.containing_type = _CREATEINSTANCEREQUEST +_CREATEINSTANCEREQUEST.fields_by_name['instance'].message_type = google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2._INSTANCE +_CREATEINSTANCEREQUEST.fields_by_name['clusters'].message_type = _CREATEINSTANCEREQUEST_CLUSTERSENTRY +_LISTINSTANCESRESPONSE.fields_by_name['instances'].message_type = google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2._INSTANCE +_CREATECLUSTERREQUEST.fields_by_name['cluster'].message_type = google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2._CLUSTER +_LISTCLUSTERSRESPONSE.fields_by_name['clusters'].message_type = google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2._CLUSTER +_CREATEINSTANCEMETADATA.fields_by_name['original_request'].message_type = _CREATEINSTANCEREQUEST +_CREATEINSTANCEMETADATA.fields_by_name['request_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_CREATEINSTANCEMETADATA.fields_by_name['finish_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_UPDATECLUSTERMETADATA.fields_by_name['original_request'].message_type = google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2._CLUSTER +_UPDATECLUSTERMETADATA.fields_by_name['request_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_UPDATECLUSTERMETADATA.fields_by_name['finish_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +DESCRIPTOR.message_types_by_name['CreateInstanceRequest'] = _CREATEINSTANCEREQUEST 
+DESCRIPTOR.message_types_by_name['GetInstanceRequest'] = _GETINSTANCEREQUEST +DESCRIPTOR.message_types_by_name['ListInstancesRequest'] = _LISTINSTANCESREQUEST +DESCRIPTOR.message_types_by_name['ListInstancesResponse'] = _LISTINSTANCESRESPONSE +DESCRIPTOR.message_types_by_name['DeleteInstanceRequest'] = _DELETEINSTANCEREQUEST +DESCRIPTOR.message_types_by_name['CreateClusterRequest'] = _CREATECLUSTERREQUEST +DESCRIPTOR.message_types_by_name['GetClusterRequest'] = _GETCLUSTERREQUEST +DESCRIPTOR.message_types_by_name['ListClustersRequest'] = _LISTCLUSTERSREQUEST +DESCRIPTOR.message_types_by_name['ListClustersResponse'] = _LISTCLUSTERSRESPONSE +DESCRIPTOR.message_types_by_name['DeleteClusterRequest'] = _DELETECLUSTERREQUEST +DESCRIPTOR.message_types_by_name['CreateInstanceMetadata'] = _CREATEINSTANCEMETADATA +DESCRIPTOR.message_types_by_name['UpdateClusterMetadata'] = _UPDATECLUSTERMETADATA + +CreateInstanceRequest = _reflection.GeneratedProtocolMessageType('CreateInstanceRequest', (_message.Message,), dict( + + ClustersEntry = _reflection.GeneratedProtocolMessageType('ClustersEntry', (_message.Message,), dict( + DESCRIPTOR = _CREATEINSTANCEREQUEST_CLUSTERSENTRY, + __module__ = 'google.bigtable.admin.v2.bigtable_instance_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.CreateInstanceRequest.ClustersEntry) + )) + , + DESCRIPTOR = _CREATEINSTANCEREQUEST, + __module__ = 'google.bigtable.admin.v2.bigtable_instance_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.CreateInstanceRequest) + )) +_sym_db.RegisterMessage(CreateInstanceRequest) +_sym_db.RegisterMessage(CreateInstanceRequest.ClustersEntry) + +GetInstanceRequest = _reflection.GeneratedProtocolMessageType('GetInstanceRequest', (_message.Message,), dict( + DESCRIPTOR = _GETINSTANCEREQUEST, + __module__ = 'google.bigtable.admin.v2.bigtable_instance_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.GetInstanceRequest) + )) 
+_sym_db.RegisterMessage(GetInstanceRequest) + +ListInstancesRequest = _reflection.GeneratedProtocolMessageType('ListInstancesRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTINSTANCESREQUEST, + __module__ = 'google.bigtable.admin.v2.bigtable_instance_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.ListInstancesRequest) + )) +_sym_db.RegisterMessage(ListInstancesRequest) + +ListInstancesResponse = _reflection.GeneratedProtocolMessageType('ListInstancesResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTINSTANCESRESPONSE, + __module__ = 'google.bigtable.admin.v2.bigtable_instance_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.ListInstancesResponse) + )) +_sym_db.RegisterMessage(ListInstancesResponse) + +DeleteInstanceRequest = _reflection.GeneratedProtocolMessageType('DeleteInstanceRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETEINSTANCEREQUEST, + __module__ = 'google.bigtable.admin.v2.bigtable_instance_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.DeleteInstanceRequest) + )) +_sym_db.RegisterMessage(DeleteInstanceRequest) + +CreateClusterRequest = _reflection.GeneratedProtocolMessageType('CreateClusterRequest', (_message.Message,), dict( + DESCRIPTOR = _CREATECLUSTERREQUEST, + __module__ = 'google.bigtable.admin.v2.bigtable_instance_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.CreateClusterRequest) + )) +_sym_db.RegisterMessage(CreateClusterRequest) + +GetClusterRequest = _reflection.GeneratedProtocolMessageType('GetClusterRequest', (_message.Message,), dict( + DESCRIPTOR = _GETCLUSTERREQUEST, + __module__ = 'google.bigtable.admin.v2.bigtable_instance_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.GetClusterRequest) + )) +_sym_db.RegisterMessage(GetClusterRequest) + +ListClustersRequest = _reflection.GeneratedProtocolMessageType('ListClustersRequest', (_message.Message,), dict( + DESCRIPTOR 
= _LISTCLUSTERSREQUEST, + __module__ = 'google.bigtable.admin.v2.bigtable_instance_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.ListClustersRequest) + )) +_sym_db.RegisterMessage(ListClustersRequest) + +ListClustersResponse = _reflection.GeneratedProtocolMessageType('ListClustersResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTCLUSTERSRESPONSE, + __module__ = 'google.bigtable.admin.v2.bigtable_instance_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.ListClustersResponse) + )) +_sym_db.RegisterMessage(ListClustersResponse) + +DeleteClusterRequest = _reflection.GeneratedProtocolMessageType('DeleteClusterRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETECLUSTERREQUEST, + __module__ = 'google.bigtable.admin.v2.bigtable_instance_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.DeleteClusterRequest) + )) +_sym_db.RegisterMessage(DeleteClusterRequest) + +CreateInstanceMetadata = _reflection.GeneratedProtocolMessageType('CreateInstanceMetadata', (_message.Message,), dict( + DESCRIPTOR = _CREATEINSTANCEMETADATA, + __module__ = 'google.bigtable.admin.v2.bigtable_instance_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.CreateInstanceMetadata) + )) +_sym_db.RegisterMessage(CreateInstanceMetadata) + +UpdateClusterMetadata = _reflection.GeneratedProtocolMessageType('UpdateClusterMetadata', (_message.Message,), dict( + DESCRIPTOR = _UPDATECLUSTERMETADATA, + __module__ = 'google.bigtable.admin.v2.bigtable_instance_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.UpdateClusterMetadata) + )) +_sym_db.RegisterMessage(UpdateClusterMetadata) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.bigtable.admin.v2B\032BigtableInstanceAdminProtoP\001')) +_CREATEINSTANCEREQUEST_CLUSTERSENTRY.has_options = True 
+_CREATEINSTANCEREQUEST_CLUSTERSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) + +from grpc.beta import implementations as beta_implementations +from grpc.beta import interfaces as beta_interfaces +from grpc.framework.common import cardinality +from grpc.framework.interfaces.face import utilities as face_utilities + + +class BigtableInstanceAdminStub(object): + """Service for creating, configuring, and deleting Cloud Bigtable Instances and + Clusters. Provides access to the Instance and Cluster schemas only, not the + tables metadata or data stored in those tables. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.CreateInstance = channel.unary_unary( + '/google.bigtable.admin.v2.BigtableInstanceAdmin/CreateInstance', + request_serializer=CreateInstanceRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.GetInstance = channel.unary_unary( + '/google.bigtable.admin.v2.BigtableInstanceAdmin/GetInstance', + request_serializer=GetInstanceRequest.SerializeToString, + response_deserializer=google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Instance.FromString, + ) + self.ListInstances = channel.unary_unary( + '/google.bigtable.admin.v2.BigtableInstanceAdmin/ListInstances', + request_serializer=ListInstancesRequest.SerializeToString, + response_deserializer=ListInstancesResponse.FromString, + ) + self.UpdateInstance = channel.unary_unary( + '/google.bigtable.admin.v2.BigtableInstanceAdmin/UpdateInstance', + request_serializer=google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Instance.SerializeToString, + response_deserializer=google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Instance.FromString, + ) + self.DeleteInstance = channel.unary_unary( + '/google.bigtable.admin.v2.BigtableInstanceAdmin/DeleteInstance', + request_serializer=DeleteInstanceRequest.SerializeToString, + 
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.CreateCluster = channel.unary_unary( + '/google.bigtable.admin.v2.BigtableInstanceAdmin/CreateCluster', + request_serializer=CreateClusterRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.GetCluster = channel.unary_unary( + '/google.bigtable.admin.v2.BigtableInstanceAdmin/GetCluster', + request_serializer=GetClusterRequest.SerializeToString, + response_deserializer=google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Cluster.FromString, + ) + self.ListClusters = channel.unary_unary( + '/google.bigtable.admin.v2.BigtableInstanceAdmin/ListClusters', + request_serializer=ListClustersRequest.SerializeToString, + response_deserializer=ListClustersResponse.FromString, + ) + self.UpdateCluster = channel.unary_unary( + '/google.bigtable.admin.v2.BigtableInstanceAdmin/UpdateCluster', + request_serializer=google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Cluster.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.DeleteCluster = channel.unary_unary( + '/google.bigtable.admin.v2.BigtableInstanceAdmin/DeleteCluster', + request_serializer=DeleteClusterRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + +class BigtableInstanceAdminServicer(object): + """Service for creating, configuring, and deleting Cloud Bigtable Instances and + Clusters. Provides access to the Instance and Cluster schemas only, not the + tables metadata or data stored in those tables. + """ + + def CreateInstance(self, request, context): + """Create an instance within a project. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetInstance(self, request, context): + """Gets information about an instance. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListInstances(self, request, context): + """Lists information about instances in a project. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateInstance(self, request, context): + """Updates an instance within a project. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteInstance(self, request, context): + """Delete an instance from a project. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateCluster(self, request, context): + """Creates a cluster within an instance. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetCluster(self, request, context): + """Gets information about a cluster. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListClusters(self, request, context): + """Lists information about clusters in an instance. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateCluster(self, request, context): + """Updates a cluster within an instance. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteCluster(self, request, context): + """Deletes a cluster from an instance. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_BigtableInstanceAdminServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateInstance': grpc.unary_unary_rpc_method_handler( + servicer.CreateInstance, + request_deserializer=CreateInstanceRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + 'GetInstance': grpc.unary_unary_rpc_method_handler( + servicer.GetInstance, + request_deserializer=GetInstanceRequest.FromString, + response_serializer=google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Instance.SerializeToString, + ), + 'ListInstances': grpc.unary_unary_rpc_method_handler( + servicer.ListInstances, + request_deserializer=ListInstancesRequest.FromString, + response_serializer=ListInstancesResponse.SerializeToString, + ), + 'UpdateInstance': grpc.unary_unary_rpc_method_handler( + servicer.UpdateInstance, + request_deserializer=google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Instance.FromString, + response_serializer=google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Instance.SerializeToString, + ), + 'DeleteInstance': grpc.unary_unary_rpc_method_handler( + servicer.DeleteInstance, + request_deserializer=DeleteInstanceRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'CreateCluster': 
grpc.unary_unary_rpc_method_handler( + servicer.CreateCluster, + request_deserializer=CreateClusterRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + 'GetCluster': grpc.unary_unary_rpc_method_handler( + servicer.GetCluster, + request_deserializer=GetClusterRequest.FromString, + response_serializer=google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Cluster.SerializeToString, + ), + 'ListClusters': grpc.unary_unary_rpc_method_handler( + servicer.ListClusters, + request_deserializer=ListClustersRequest.FromString, + response_serializer=ListClustersResponse.SerializeToString, + ), + 'UpdateCluster': grpc.unary_unary_rpc_method_handler( + servicer.UpdateCluster, + request_deserializer=google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Cluster.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + 'DeleteCluster': grpc.unary_unary_rpc_method_handler( + servicer.DeleteCluster, + request_deserializer=DeleteClusterRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.bigtable.admin.v2.BigtableInstanceAdmin', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + +class BetaBigtableInstanceAdminServicer(object): + """Service for creating, configuring, and deleting Cloud Bigtable Instances and + Clusters. Provides access to the Instance and Cluster schemas only, not the + tables metadata or data stored in those tables. + """ + def CreateInstance(self, request, context): + """Create an instance within a project. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def GetInstance(self, request, context): + """Gets information about an instance. 
+ """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListInstances(self, request, context): + """Lists information about instances in a project. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateInstance(self, request, context): + """Updates an instance within a project. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteInstance(self, request, context): + """Delete an instance from a project. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def CreateCluster(self, request, context): + """Creates a cluster within an instance. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def GetCluster(self, request, context): + """Gets information about a cluster. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListClusters(self, request, context): + """Lists information about clusters in an instance. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateCluster(self, request, context): + """Updates a cluster within an instance. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteCluster(self, request, context): + """Deletes a cluster from an instance. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + +class BetaBigtableInstanceAdminStub(object): + """Service for creating, configuring, and deleting Cloud Bigtable Instances and + Clusters. Provides access to the Instance and Cluster schemas only, not the + tables metadata or data stored in those tables. + """ + def CreateInstance(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Create an instance within a project. + """ + raise NotImplementedError() + CreateInstance.future = None + def GetInstance(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets information about an instance. 
+ """ + raise NotImplementedError() + GetInstance.future = None + def ListInstances(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists information about instances in a project. + """ + raise NotImplementedError() + ListInstances.future = None + def UpdateInstance(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Updates an instance within a project. + """ + raise NotImplementedError() + UpdateInstance.future = None + def DeleteInstance(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Delete an instance from a project. + """ + raise NotImplementedError() + DeleteInstance.future = None + def CreateCluster(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates a cluster within an instance. + """ + raise NotImplementedError() + CreateCluster.future = None + def GetCluster(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets information about a cluster. + """ + raise NotImplementedError() + GetCluster.future = None + def ListClusters(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists information about clusters in an instance. + """ + raise NotImplementedError() + ListClusters.future = None + def UpdateCluster(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Updates a cluster within an instance. + """ + raise NotImplementedError() + UpdateCluster.future = None + def DeleteCluster(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Deletes a cluster from an instance. 
+ """ + raise NotImplementedError() + DeleteCluster.future = None + + +def beta_create_BigtableInstanceAdmin_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + request_deserializers = { + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'CreateCluster'): CreateClusterRequest.FromString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'CreateInstance'): CreateInstanceRequest.FromString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'DeleteCluster'): DeleteClusterRequest.FromString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'DeleteInstance'): DeleteInstanceRequest.FromString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'GetCluster'): GetClusterRequest.FromString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'GetInstance'): GetInstanceRequest.FromString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'ListClusters'): ListClustersRequest.FromString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'ListInstances'): ListInstancesRequest.FromString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'UpdateCluster'): google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Cluster.FromString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'UpdateInstance'): google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Instance.FromString, + } + response_serializers = { + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'CreateCluster'): google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'CreateInstance'): google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'DeleteCluster'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'DeleteInstance'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'GetCluster'): 
google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Cluster.SerializeToString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'GetInstance'): google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Instance.SerializeToString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'ListClusters'): ListClustersResponse.SerializeToString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'ListInstances'): ListInstancesResponse.SerializeToString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'UpdateCluster'): google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'UpdateInstance'): google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Instance.SerializeToString, + } + method_implementations = { + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'CreateCluster'): face_utilities.unary_unary_inline(servicer.CreateCluster), + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'CreateInstance'): face_utilities.unary_unary_inline(servicer.CreateInstance), + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'DeleteCluster'): face_utilities.unary_unary_inline(servicer.DeleteCluster), + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'DeleteInstance'): face_utilities.unary_unary_inline(servicer.DeleteInstance), + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'GetCluster'): face_utilities.unary_unary_inline(servicer.GetCluster), + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'GetInstance'): face_utilities.unary_unary_inline(servicer.GetInstance), + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'ListClusters'): face_utilities.unary_unary_inline(servicer.ListClusters), + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'ListInstances'): face_utilities.unary_unary_inline(servicer.ListInstances), + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'UpdateCluster'): face_utilities.unary_unary_inline(servicer.UpdateCluster), + 
('google.bigtable.admin.v2.BigtableInstanceAdmin', 'UpdateInstance'): face_utilities.unary_unary_inline(servicer.UpdateInstance), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + +def beta_create_BigtableInstanceAdmin_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + request_serializers = { + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'CreateCluster'): CreateClusterRequest.SerializeToString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'CreateInstance'): CreateInstanceRequest.SerializeToString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'DeleteCluster'): DeleteClusterRequest.SerializeToString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'DeleteInstance'): DeleteInstanceRequest.SerializeToString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'GetCluster'): GetClusterRequest.SerializeToString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'GetInstance'): GetInstanceRequest.SerializeToString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'ListClusters'): ListClustersRequest.SerializeToString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'ListInstances'): ListInstancesRequest.SerializeToString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'UpdateCluster'): google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Cluster.SerializeToString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'UpdateInstance'): google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Instance.SerializeToString, + } + response_deserializers = { + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'CreateCluster'): google_dot_longrunning_dot_operations__pb2.Operation.FromString, + 
('google.bigtable.admin.v2.BigtableInstanceAdmin', 'CreateInstance'): google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'DeleteCluster'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'DeleteInstance'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'GetCluster'): google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Cluster.FromString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'GetInstance'): google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Instance.FromString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'ListClusters'): ListClustersResponse.FromString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'ListInstances'): ListInstancesResponse.FromString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'UpdateCluster'): google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ('google.bigtable.admin.v2.BigtableInstanceAdmin', 'UpdateInstance'): google_dot_bigtable_dot_admin_dot_v2_dot_instance__pb2.Instance.FromString, + } + cardinalities = { + 'CreateCluster': cardinality.Cardinality.UNARY_UNARY, + 'CreateInstance': cardinality.Cardinality.UNARY_UNARY, + 'DeleteCluster': cardinality.Cardinality.UNARY_UNARY, + 'DeleteInstance': cardinality.Cardinality.UNARY_UNARY, + 'GetCluster': cardinality.Cardinality.UNARY_UNARY, + 'GetInstance': cardinality.Cardinality.UNARY_UNARY, + 'ListClusters': cardinality.Cardinality.UNARY_UNARY, + 'ListInstances': cardinality.Cardinality.UNARY_UNARY, + 'UpdateCluster': cardinality.Cardinality.UNARY_UNARY, + 'UpdateInstance': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return 
beta_implementations.dynamic_stub(channel, 'google.bigtable.admin.v2.BigtableInstanceAdmin', cardinalities, options=stub_options) +# @@protoc_insertion_point(module_scope) diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated_v2/bigtable_pb2.py b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/bigtable_pb2.py new file mode 100644 index 0000000..606b3c8 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/bigtable_pb2.py @@ -0,0 +1,1100 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/bigtable/v2/bigtable.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from gcloud.bigtable._generated_v2 import data_pb2 as google_dot_bigtable_dot_v2_dot_data__pb2 +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/bigtable/v2/bigtable.proto', + package='google.bigtable.v2', + syntax='proto3', + serialized_pb=_b('\n!google/bigtable/v2/bigtable.proto\x12\x12google.bigtable.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1dgoogle/bigtable/v2/data.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x17google/rpc/status.proto\"\x92\x01\n\x0fReadRowsRequest\x12\x12\n\ntable_name\x18\x01 \x01(\t\x12(\n\x04rows\x18\x02 \x01(\x0b\x32\x1a.google.bigtable.v2.RowSet\x12-\n\x06\x66ilter\x18\x03 \x01(\x0b\x32\x1d.google.bigtable.v2.RowFilter\x12\x12\n\nrows_limit\x18\x04 
\x01(\x03\"\xf8\x02\n\x10ReadRowsResponse\x12>\n\x06\x63hunks\x18\x01 \x03(\x0b\x32..google.bigtable.v2.ReadRowsResponse.CellChunk\x12\x1c\n\x14last_scanned_row_key\x18\x02 \x01(\x0c\x1a\x85\x02\n\tCellChunk\x12\x0f\n\x07row_key\x18\x01 \x01(\x0c\x12\x31\n\x0b\x66\x61mily_name\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\tqualifier\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.BytesValue\x12\x18\n\x10timestamp_micros\x18\x04 \x01(\x03\x12\x0e\n\x06labels\x18\x05 \x03(\t\x12\r\n\x05value\x18\x06 \x01(\x0c\x12\x12\n\nvalue_size\x18\x07 \x01(\x05\x12\x13\n\treset_row\x18\x08 \x01(\x08H\x00\x12\x14\n\ncommit_row\x18\t \x01(\x08H\x00\x42\x0c\n\nrow_status\"*\n\x14SampleRowKeysRequest\x12\x12\n\ntable_name\x18\x01 \x01(\t\">\n\x15SampleRowKeysResponse\x12\x0f\n\x07row_key\x18\x01 \x01(\x0c\x12\x14\n\x0coffset_bytes\x18\x02 \x01(\x03\"h\n\x10MutateRowRequest\x12\x12\n\ntable_name\x18\x01 \x01(\t\x12\x0f\n\x07row_key\x18\x02 \x01(\x0c\x12/\n\tmutations\x18\x03 \x03(\x0b\x32\x1c.google.bigtable.v2.Mutation\"\x13\n\x11MutateRowResponse\"\xb0\x01\n\x11MutateRowsRequest\x12\x12\n\ntable_name\x18\x01 \x01(\t\x12<\n\x07\x65ntries\x18\x02 \x03(\x0b\x32+.google.bigtable.v2.MutateRowsRequest.Entry\x1aI\n\x05\x45ntry\x12\x0f\n\x07row_key\x18\x01 \x01(\x0c\x12/\n\tmutations\x18\x02 \x03(\x0b\x32\x1c.google.bigtable.v2.Mutation\"\x8f\x01\n\x12MutateRowsResponse\x12=\n\x07\x65ntries\x18\x01 \x03(\x0b\x32,.google.bigtable.v2.MutateRowsResponse.Entry\x1a:\n\x05\x45ntry\x12\r\n\x05index\x18\x01 \x01(\x03\x12\"\n\x06status\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status\"\xe5\x01\n\x18\x43heckAndMutateRowRequest\x12\x12\n\ntable_name\x18\x01 \x01(\t\x12\x0f\n\x07row_key\x18\x02 \x01(\x0c\x12\x37\n\x10predicate_filter\x18\x06 \x01(\x0b\x32\x1d.google.bigtable.v2.RowFilter\x12\x34\n\x0etrue_mutations\x18\x04 \x03(\x0b\x32\x1c.google.bigtable.v2.Mutation\x12\x35\n\x0f\x66\x61lse_mutations\x18\x05 
\x03(\x0b\x32\x1c.google.bigtable.v2.Mutation\"6\n\x19\x43heckAndMutateRowResponse\x12\x19\n\x11predicate_matched\x18\x01 \x01(\x08\"x\n\x19ReadModifyWriteRowRequest\x12\x12\n\ntable_name\x18\x01 \x01(\t\x12\x0f\n\x07row_key\x18\x02 \x01(\x0c\x12\x36\n\x05rules\x18\x03 \x03(\x0b\x32\'.google.bigtable.v2.ReadModifyWriteRule\"B\n\x1aReadModifyWriteRowResponse\x12$\n\x03row\x18\x01 \x01(\x0b\x32\x17.google.bigtable.v2.Row2\xad\x08\n\x08\x42igtable\x12\x9d\x01\n\x08ReadRows\x12#.google.bigtable.v2.ReadRowsRequest\x1a$.google.bigtable.v2.ReadRowsResponse\"D\x82\xd3\xe4\x93\x02>\"9/v2/{table_name=projects/*/instances/*/tables/*}:readRows:\x01*0\x01\x12\xae\x01\n\rSampleRowKeys\x12(.google.bigtable.v2.SampleRowKeysRequest\x1a).google.bigtable.v2.SampleRowKeysResponse\"F\x82\xd3\xe4\x93\x02@\x12>/v2/{table_name=projects/*/instances/*/tables/*}:sampleRowKeys0\x01\x12\x9f\x01\n\tMutateRow\x12$.google.bigtable.v2.MutateRowRequest\x1a%.google.bigtable.v2.MutateRowResponse\"E\x82\xd3\xe4\x93\x02?\":/v2/{table_name=projects/*/instances/*/tables/*}:mutateRow:\x01*\x12\xa5\x01\n\nMutateRows\x12%.google.bigtable.v2.MutateRowsRequest\x1a&.google.bigtable.v2.MutateRowsResponse\"F\x82\xd3\xe4\x93\x02@\";/v2/{table_name=projects/*/instances/*/tables/*}:mutateRows:\x01*0\x01\x12\xbf\x01\n\x11\x43heckAndMutateRow\x12,.google.bigtable.v2.CheckAndMutateRowRequest\x1a-.google.bigtable.v2.CheckAndMutateRowResponse\"M\x82\xd3\xe4\x93\x02G\"B/v2/{table_name=projects/*/instances/*/tables/*}:checkAndMutateRow:\x01*\x12\xc3\x01\n\x12ReadModifyWriteRow\x12-.google.bigtable.v2.ReadModifyWriteRowRequest\x1a..google.bigtable.v2.ReadModifyWriteRowResponse\"N\x82\xd3\xe4\x93\x02H\"C/v2/{table_name=projects/*/instances/*/tables/*}:readModifyWriteRow:\x01*B)\n\x16\x63om.google.bigtable.v2B\rBigtableProtoP\x01\x62\x06proto3') + , + 
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_bigtable_dot_v2_dot_data__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_READROWSREQUEST = _descriptor.Descriptor( + name='ReadRowsRequest', + full_name='google.bigtable.v2.ReadRowsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='table_name', full_name='google.bigtable.v2.ReadRowsRequest.table_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='rows', full_name='google.bigtable.v2.ReadRowsRequest.rows', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filter', full_name='google.bigtable.v2.ReadRowsRequest.filter', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='rows_limit', full_name='google.bigtable.v2.ReadRowsRequest.rows_limit', index=3, + number=4, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=176, + serialized_end=322, +) + + 
+_READROWSRESPONSE_CELLCHUNK = _descriptor.Descriptor( + name='CellChunk', + full_name='google.bigtable.v2.ReadRowsResponse.CellChunk', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='row_key', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.row_key', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='family_name', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.family_name', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='qualifier', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.qualifier', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timestamp_micros', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.timestamp_micros', index=3, + number=4, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.labels', index=4, + number=5, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', 
full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.value', index=5, + number=6, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value_size', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.value_size', index=6, + number=7, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='reset_row', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.reset_row', index=7, + number=8, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='commit_row', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.commit_row', index=8, + number=9, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='row_status', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.row_status', + index=0, containing_type=None, fields=[]), + ], + serialized_start=440, + serialized_end=701, +) + +_READROWSRESPONSE = _descriptor.Descriptor( + name='ReadRowsResponse', + full_name='google.bigtable.v2.ReadRowsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='chunks', full_name='google.bigtable.v2.ReadRowsResponse.chunks', index=0, + number=1, 
type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='last_scanned_row_key', full_name='google.bigtable.v2.ReadRowsResponse.last_scanned_row_key', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_READROWSRESPONSE_CELLCHUNK, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=325, + serialized_end=701, +) + + +_SAMPLEROWKEYSREQUEST = _descriptor.Descriptor( + name='SampleRowKeysRequest', + full_name='google.bigtable.v2.SampleRowKeysRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='table_name', full_name='google.bigtable.v2.SampleRowKeysRequest.table_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=703, + serialized_end=745, +) + + +_SAMPLEROWKEYSRESPONSE = _descriptor.Descriptor( + name='SampleRowKeysResponse', + full_name='google.bigtable.v2.SampleRowKeysResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='row_key', full_name='google.bigtable.v2.SampleRowKeysResponse.row_key', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='offset_bytes', full_name='google.bigtable.v2.SampleRowKeysResponse.offset_bytes', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=747, + serialized_end=809, +) + + +_MUTATEROWREQUEST = _descriptor.Descriptor( + name='MutateRowRequest', + full_name='google.bigtable.v2.MutateRowRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='table_name', full_name='google.bigtable.v2.MutateRowRequest.table_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='row_key', full_name='google.bigtable.v2.MutateRowRequest.row_key', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mutations', full_name='google.bigtable.v2.MutateRowRequest.mutations', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + 
extension_ranges=[], + oneofs=[ + ], + serialized_start=811, + serialized_end=915, +) + + +_MUTATEROWRESPONSE = _descriptor.Descriptor( + name='MutateRowResponse', + full_name='google.bigtable.v2.MutateRowResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=917, + serialized_end=936, +) + + +_MUTATEROWSREQUEST_ENTRY = _descriptor.Descriptor( + name='Entry', + full_name='google.bigtable.v2.MutateRowsRequest.Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='row_key', full_name='google.bigtable.v2.MutateRowsRequest.Entry.row_key', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mutations', full_name='google.bigtable.v2.MutateRowsRequest.Entry.mutations', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1042, + serialized_end=1115, +) + +_MUTATEROWSREQUEST = _descriptor.Descriptor( + name='MutateRowsRequest', + full_name='google.bigtable.v2.MutateRowsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='table_name', full_name='google.bigtable.v2.MutateRowsRequest.table_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, 
default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='entries', full_name='google.bigtable.v2.MutateRowsRequest.entries', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_MUTATEROWSREQUEST_ENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=939, + serialized_end=1115, +) + + +_MUTATEROWSRESPONSE_ENTRY = _descriptor.Descriptor( + name='Entry', + full_name='google.bigtable.v2.MutateRowsResponse.Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='index', full_name='google.bigtable.v2.MutateRowsResponse.Entry.index', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='status', full_name='google.bigtable.v2.MutateRowsResponse.Entry.status', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1203, + serialized_end=1261, +) + +_MUTATEROWSRESPONSE = _descriptor.Descriptor( + name='MutateRowsResponse', + full_name='google.bigtable.v2.MutateRowsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + 
_descriptor.FieldDescriptor( + name='entries', full_name='google.bigtable.v2.MutateRowsResponse.entries', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_MUTATEROWSRESPONSE_ENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1118, + serialized_end=1261, +) + + +_CHECKANDMUTATEROWREQUEST = _descriptor.Descriptor( + name='CheckAndMutateRowRequest', + full_name='google.bigtable.v2.CheckAndMutateRowRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='table_name', full_name='google.bigtable.v2.CheckAndMutateRowRequest.table_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='row_key', full_name='google.bigtable.v2.CheckAndMutateRowRequest.row_key', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='predicate_filter', full_name='google.bigtable.v2.CheckAndMutateRowRequest.predicate_filter', index=2, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='true_mutations', full_name='google.bigtable.v2.CheckAndMutateRowRequest.true_mutations', index=3, + number=4, type=11, cpp_type=10, 
label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='false_mutations', full_name='google.bigtable.v2.CheckAndMutateRowRequest.false_mutations', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1264, + serialized_end=1493, +) + + +_CHECKANDMUTATEROWRESPONSE = _descriptor.Descriptor( + name='CheckAndMutateRowResponse', + full_name='google.bigtable.v2.CheckAndMutateRowResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='predicate_matched', full_name='google.bigtable.v2.CheckAndMutateRowResponse.predicate_matched', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1495, + serialized_end=1549, +) + + +_READMODIFYWRITEROWREQUEST = _descriptor.Descriptor( + name='ReadModifyWriteRowRequest', + full_name='google.bigtable.v2.ReadModifyWriteRowRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='table_name', full_name='google.bigtable.v2.ReadModifyWriteRowRequest.table_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='row_key', full_name='google.bigtable.v2.ReadModifyWriteRowRequest.row_key', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='rules', full_name='google.bigtable.v2.ReadModifyWriteRowRequest.rules', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1551, + serialized_end=1671, +) + + +_READMODIFYWRITEROWRESPONSE = _descriptor.Descriptor( + name='ReadModifyWriteRowResponse', + full_name='google.bigtable.v2.ReadModifyWriteRowResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='row', full_name='google.bigtable.v2.ReadModifyWriteRowResponse.row', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1673, + serialized_end=1739, +) + +_READROWSREQUEST.fields_by_name['rows'].message_type = google_dot_bigtable_dot_v2_dot_data__pb2._ROWSET +_READROWSREQUEST.fields_by_name['filter'].message_type = google_dot_bigtable_dot_v2_dot_data__pb2._ROWFILTER 
+_READROWSRESPONSE_CELLCHUNK.fields_by_name['family_name'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE +_READROWSRESPONSE_CELLCHUNK.fields_by_name['qualifier'].message_type = google_dot_protobuf_dot_wrappers__pb2._BYTESVALUE +_READROWSRESPONSE_CELLCHUNK.containing_type = _READROWSRESPONSE +_READROWSRESPONSE_CELLCHUNK.oneofs_by_name['row_status'].fields.append( + _READROWSRESPONSE_CELLCHUNK.fields_by_name['reset_row']) +_READROWSRESPONSE_CELLCHUNK.fields_by_name['reset_row'].containing_oneof = _READROWSRESPONSE_CELLCHUNK.oneofs_by_name['row_status'] +_READROWSRESPONSE_CELLCHUNK.oneofs_by_name['row_status'].fields.append( + _READROWSRESPONSE_CELLCHUNK.fields_by_name['commit_row']) +_READROWSRESPONSE_CELLCHUNK.fields_by_name['commit_row'].containing_oneof = _READROWSRESPONSE_CELLCHUNK.oneofs_by_name['row_status'] +_READROWSRESPONSE.fields_by_name['chunks'].message_type = _READROWSRESPONSE_CELLCHUNK +_MUTATEROWREQUEST.fields_by_name['mutations'].message_type = google_dot_bigtable_dot_v2_dot_data__pb2._MUTATION +_MUTATEROWSREQUEST_ENTRY.fields_by_name['mutations'].message_type = google_dot_bigtable_dot_v2_dot_data__pb2._MUTATION +_MUTATEROWSREQUEST_ENTRY.containing_type = _MUTATEROWSREQUEST +_MUTATEROWSREQUEST.fields_by_name['entries'].message_type = _MUTATEROWSREQUEST_ENTRY +_MUTATEROWSRESPONSE_ENTRY.fields_by_name['status'].message_type = google_dot_rpc_dot_status__pb2._STATUS +_MUTATEROWSRESPONSE_ENTRY.containing_type = _MUTATEROWSRESPONSE +_MUTATEROWSRESPONSE.fields_by_name['entries'].message_type = _MUTATEROWSRESPONSE_ENTRY +_CHECKANDMUTATEROWREQUEST.fields_by_name['predicate_filter'].message_type = google_dot_bigtable_dot_v2_dot_data__pb2._ROWFILTER +_CHECKANDMUTATEROWREQUEST.fields_by_name['true_mutations'].message_type = google_dot_bigtable_dot_v2_dot_data__pb2._MUTATION +_CHECKANDMUTATEROWREQUEST.fields_by_name['false_mutations'].message_type = google_dot_bigtable_dot_v2_dot_data__pb2._MUTATION 
+_READMODIFYWRITEROWREQUEST.fields_by_name['rules'].message_type = google_dot_bigtable_dot_v2_dot_data__pb2._READMODIFYWRITERULE +_READMODIFYWRITEROWRESPONSE.fields_by_name['row'].message_type = google_dot_bigtable_dot_v2_dot_data__pb2._ROW +DESCRIPTOR.message_types_by_name['ReadRowsRequest'] = _READROWSREQUEST +DESCRIPTOR.message_types_by_name['ReadRowsResponse'] = _READROWSRESPONSE +DESCRIPTOR.message_types_by_name['SampleRowKeysRequest'] = _SAMPLEROWKEYSREQUEST +DESCRIPTOR.message_types_by_name['SampleRowKeysResponse'] = _SAMPLEROWKEYSRESPONSE +DESCRIPTOR.message_types_by_name['MutateRowRequest'] = _MUTATEROWREQUEST +DESCRIPTOR.message_types_by_name['MutateRowResponse'] = _MUTATEROWRESPONSE +DESCRIPTOR.message_types_by_name['MutateRowsRequest'] = _MUTATEROWSREQUEST +DESCRIPTOR.message_types_by_name['MutateRowsResponse'] = _MUTATEROWSRESPONSE +DESCRIPTOR.message_types_by_name['CheckAndMutateRowRequest'] = _CHECKANDMUTATEROWREQUEST +DESCRIPTOR.message_types_by_name['CheckAndMutateRowResponse'] = _CHECKANDMUTATEROWRESPONSE +DESCRIPTOR.message_types_by_name['ReadModifyWriteRowRequest'] = _READMODIFYWRITEROWREQUEST +DESCRIPTOR.message_types_by_name['ReadModifyWriteRowResponse'] = _READMODIFYWRITEROWRESPONSE + +ReadRowsRequest = _reflection.GeneratedProtocolMessageType('ReadRowsRequest', (_message.Message,), dict( + DESCRIPTOR = _READROWSREQUEST, + __module__ = 'google.bigtable.v2.bigtable_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.ReadRowsRequest) + )) +_sym_db.RegisterMessage(ReadRowsRequest) + +ReadRowsResponse = _reflection.GeneratedProtocolMessageType('ReadRowsResponse', (_message.Message,), dict( + + CellChunk = _reflection.GeneratedProtocolMessageType('CellChunk', (_message.Message,), dict( + DESCRIPTOR = _READROWSRESPONSE_CELLCHUNK, + __module__ = 'google.bigtable.v2.bigtable_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.ReadRowsResponse.CellChunk) + )) + , + DESCRIPTOR = _READROWSRESPONSE, + __module__ = 
'google.bigtable.v2.bigtable_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.ReadRowsResponse) + )) +_sym_db.RegisterMessage(ReadRowsResponse) +_sym_db.RegisterMessage(ReadRowsResponse.CellChunk) + +SampleRowKeysRequest = _reflection.GeneratedProtocolMessageType('SampleRowKeysRequest', (_message.Message,), dict( + DESCRIPTOR = _SAMPLEROWKEYSREQUEST, + __module__ = 'google.bigtable.v2.bigtable_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.SampleRowKeysRequest) + )) +_sym_db.RegisterMessage(SampleRowKeysRequest) + +SampleRowKeysResponse = _reflection.GeneratedProtocolMessageType('SampleRowKeysResponse', (_message.Message,), dict( + DESCRIPTOR = _SAMPLEROWKEYSRESPONSE, + __module__ = 'google.bigtable.v2.bigtable_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.SampleRowKeysResponse) + )) +_sym_db.RegisterMessage(SampleRowKeysResponse) + +MutateRowRequest = _reflection.GeneratedProtocolMessageType('MutateRowRequest', (_message.Message,), dict( + DESCRIPTOR = _MUTATEROWREQUEST, + __module__ = 'google.bigtable.v2.bigtable_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.MutateRowRequest) + )) +_sym_db.RegisterMessage(MutateRowRequest) + +MutateRowResponse = _reflection.GeneratedProtocolMessageType('MutateRowResponse', (_message.Message,), dict( + DESCRIPTOR = _MUTATEROWRESPONSE, + __module__ = 'google.bigtable.v2.bigtable_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.MutateRowResponse) + )) +_sym_db.RegisterMessage(MutateRowResponse) + +MutateRowsRequest = _reflection.GeneratedProtocolMessageType('MutateRowsRequest', (_message.Message,), dict( + + Entry = _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), dict( + DESCRIPTOR = _MUTATEROWSREQUEST_ENTRY, + __module__ = 'google.bigtable.v2.bigtable_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.MutateRowsRequest.Entry) + )) + , + DESCRIPTOR = _MUTATEROWSREQUEST, + __module__ = 
'google.bigtable.v2.bigtable_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.MutateRowsRequest) + )) +_sym_db.RegisterMessage(MutateRowsRequest) +_sym_db.RegisterMessage(MutateRowsRequest.Entry) + +MutateRowsResponse = _reflection.GeneratedProtocolMessageType('MutateRowsResponse', (_message.Message,), dict( + + Entry = _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), dict( + DESCRIPTOR = _MUTATEROWSRESPONSE_ENTRY, + __module__ = 'google.bigtable.v2.bigtable_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.MutateRowsResponse.Entry) + )) + , + DESCRIPTOR = _MUTATEROWSRESPONSE, + __module__ = 'google.bigtable.v2.bigtable_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.MutateRowsResponse) + )) +_sym_db.RegisterMessage(MutateRowsResponse) +_sym_db.RegisterMessage(MutateRowsResponse.Entry) + +CheckAndMutateRowRequest = _reflection.GeneratedProtocolMessageType('CheckAndMutateRowRequest', (_message.Message,), dict( + DESCRIPTOR = _CHECKANDMUTATEROWREQUEST, + __module__ = 'google.bigtable.v2.bigtable_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.CheckAndMutateRowRequest) + )) +_sym_db.RegisterMessage(CheckAndMutateRowRequest) + +CheckAndMutateRowResponse = _reflection.GeneratedProtocolMessageType('CheckAndMutateRowResponse', (_message.Message,), dict( + DESCRIPTOR = _CHECKANDMUTATEROWRESPONSE, + __module__ = 'google.bigtable.v2.bigtable_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.CheckAndMutateRowResponse) + )) +_sym_db.RegisterMessage(CheckAndMutateRowResponse) + +ReadModifyWriteRowRequest = _reflection.GeneratedProtocolMessageType('ReadModifyWriteRowRequest', (_message.Message,), dict( + DESCRIPTOR = _READMODIFYWRITEROWREQUEST, + __module__ = 'google.bigtable.v2.bigtable_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.ReadModifyWriteRowRequest) + )) +_sym_db.RegisterMessage(ReadModifyWriteRowRequest) + +ReadModifyWriteRowResponse = 
_reflection.GeneratedProtocolMessageType('ReadModifyWriteRowResponse', (_message.Message,), dict( + DESCRIPTOR = _READMODIFYWRITEROWRESPONSE, + __module__ = 'google.bigtable.v2.bigtable_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.ReadModifyWriteRowResponse) + )) +_sym_db.RegisterMessage(ReadModifyWriteRowResponse) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\026com.google.bigtable.v2B\rBigtableProtoP\001')) + +from grpc.beta import implementations as beta_implementations +from grpc.beta import interfaces as beta_interfaces +from grpc.framework.common import cardinality +from grpc.framework.interfaces.face import utilities as face_utilities + + +class BigtableStub(object): + """Service for reading from and writing to existing Bigtable tables. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.ReadRows = channel.unary_stream( + '/google.bigtable.v2.Bigtable/ReadRows', + request_serializer=ReadRowsRequest.SerializeToString, + response_deserializer=ReadRowsResponse.FromString, + ) + self.SampleRowKeys = channel.unary_stream( + '/google.bigtable.v2.Bigtable/SampleRowKeys', + request_serializer=SampleRowKeysRequest.SerializeToString, + response_deserializer=SampleRowKeysResponse.FromString, + ) + self.MutateRow = channel.unary_unary( + '/google.bigtable.v2.Bigtable/MutateRow', + request_serializer=MutateRowRequest.SerializeToString, + response_deserializer=MutateRowResponse.FromString, + ) + self.MutateRows = channel.unary_stream( + '/google.bigtable.v2.Bigtable/MutateRows', + request_serializer=MutateRowsRequest.SerializeToString, + response_deserializer=MutateRowsResponse.FromString, + ) + self.CheckAndMutateRow = channel.unary_unary( + '/google.bigtable.v2.Bigtable/CheckAndMutateRow', + request_serializer=CheckAndMutateRowRequest.SerializeToString, + response_deserializer=CheckAndMutateRowResponse.FromString, + ) + 
self.ReadModifyWriteRow = channel.unary_unary( + '/google.bigtable.v2.Bigtable/ReadModifyWriteRow', + request_serializer=ReadModifyWriteRowRequest.SerializeToString, + response_deserializer=ReadModifyWriteRowResponse.FromString, + ) + + +class BigtableServicer(object): + """Service for reading from and writing to existing Bigtable tables. + """ + + def ReadRows(self, request, context): + """Streams back the contents of all requested rows, optionally + applying the same Reader filter to each. Depending on their size, + rows and cells may be broken up across multiple responses, but + atomicity of each row will still be preserved. See the + ReadRowsResponse documentation for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SampleRowKeys(self, request, context): + """Returns a sample of row keys in the table. The returned row keys will + delimit contiguous sections of the table of approximately equal size, + which can be used to break up the data for distributed tasks like + mapreduces. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def MutateRow(self, request, context): + """Mutates a row atomically. Cells already present in the row are left + unchanged unless explicitly changed by `mutation`. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def MutateRows(self, request, context): + """Mutates multiple rows in a batch. Each individual row is mutated + atomically as in MutateRow, but the entire batch is not executed + atomically. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CheckAndMutateRow(self, request, context): + """Mutates a row atomically based on the output of a predicate Reader filter. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ReadModifyWriteRow(self, request, context): + """Modifies a row atomically. The method reads the latest existing timestamp + and value from the specified columns and writes a new entry based on + pre-defined read/modify/write rules. The new value for the timestamp is the + greater of the existing timestamp or the current server time. The method + returns the new contents of all modified cells. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_BigtableServicer_to_server(servicer, server): + rpc_method_handlers = { + 'ReadRows': grpc.unary_stream_rpc_method_handler( + servicer.ReadRows, + request_deserializer=ReadRowsRequest.FromString, + response_serializer=ReadRowsResponse.SerializeToString, + ), + 'SampleRowKeys': grpc.unary_stream_rpc_method_handler( + servicer.SampleRowKeys, + request_deserializer=SampleRowKeysRequest.FromString, + response_serializer=SampleRowKeysResponse.SerializeToString, + ), + 'MutateRow': grpc.unary_unary_rpc_method_handler( + servicer.MutateRow, + request_deserializer=MutateRowRequest.FromString, + response_serializer=MutateRowResponse.SerializeToString, + ), + 'MutateRows': grpc.unary_stream_rpc_method_handler( + servicer.MutateRows, + request_deserializer=MutateRowsRequest.FromString, + response_serializer=MutateRowsResponse.SerializeToString, + ), + 'CheckAndMutateRow': grpc.unary_unary_rpc_method_handler( + servicer.CheckAndMutateRow, + 
request_deserializer=CheckAndMutateRowRequest.FromString, + response_serializer=CheckAndMutateRowResponse.SerializeToString, + ), + 'ReadModifyWriteRow': grpc.unary_unary_rpc_method_handler( + servicer.ReadModifyWriteRow, + request_deserializer=ReadModifyWriteRowRequest.FromString, + response_serializer=ReadModifyWriteRowResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.bigtable.v2.Bigtable', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + +class BetaBigtableServicer(object): + """Service for reading from and writing to existing Bigtable tables. + """ + def ReadRows(self, request, context): + """Streams back the contents of all requested rows, optionally + applying the same Reader filter to each. Depending on their size, + rows and cells may be broken up across multiple responses, but + atomicity of each row will still be preserved. See the + ReadRowsResponse documentation for details. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def SampleRowKeys(self, request, context): + """Returns a sample of row keys in the table. The returned row keys will + delimit contiguous sections of the table of approximately equal size, + which can be used to break up the data for distributed tasks like + mapreduces. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def MutateRow(self, request, context): + """Mutates a row atomically. Cells already present in the row are left + unchanged unless explicitly changed by `mutation`. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def MutateRows(self, request, context): + """Mutates multiple rows in a batch. Each individual row is mutated + atomically as in MutateRow, but the entire batch is not executed + atomically. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def CheckAndMutateRow(self, request, context): + """Mutates a row atomically based on the output of a predicate Reader filter. 
+ """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ReadModifyWriteRow(self, request, context): + """Modifies a row atomically. The method reads the latest existing timestamp + and value from the specified columns and writes a new entry based on + pre-defined read/modify/write rules. The new value for the timestamp is the + greater of the existing timestamp or the current server time. The method + returns the new contents of all modified cells. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + +class BetaBigtableStub(object): + """Service for reading from and writing to existing Bigtable tables. + """ + def ReadRows(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Streams back the contents of all requested rows, optionally + applying the same Reader filter to each. Depending on their size, + rows and cells may be broken up across multiple responses, but + atomicity of each row will still be preserved. See the + ReadRowsResponse documentation for details. + """ + raise NotImplementedError() + def SampleRowKeys(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Returns a sample of row keys in the table. The returned row keys will + delimit contiguous sections of the table of approximately equal size, + which can be used to break up the data for distributed tasks like + mapreduces. + """ + raise NotImplementedError() + def MutateRow(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Mutates a row atomically. Cells already present in the row are left + unchanged unless explicitly changed by `mutation`. + """ + raise NotImplementedError() + MutateRow.future = None + def MutateRows(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Mutates multiple rows in a batch. Each individual row is mutated + atomically as in MutateRow, but the entire batch is not executed + atomically. 
+ """ + raise NotImplementedError() + def CheckAndMutateRow(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Mutates a row atomically based on the output of a predicate Reader filter. + """ + raise NotImplementedError() + CheckAndMutateRow.future = None + def ReadModifyWriteRow(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Modifies a row atomically. The method reads the latest existing timestamp + and value from the specified columns and writes a new entry based on + pre-defined read/modify/write rules. The new value for the timestamp is the + greater of the existing timestamp or the current server time. The method + returns the new contents of all modified cells. + """ + raise NotImplementedError() + ReadModifyWriteRow.future = None + + +def beta_create_Bigtable_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + request_deserializers = { + ('google.bigtable.v2.Bigtable', 'CheckAndMutateRow'): CheckAndMutateRowRequest.FromString, + ('google.bigtable.v2.Bigtable', 'MutateRow'): MutateRowRequest.FromString, + ('google.bigtable.v2.Bigtable', 'MutateRows'): MutateRowsRequest.FromString, + ('google.bigtable.v2.Bigtable', 'ReadModifyWriteRow'): ReadModifyWriteRowRequest.FromString, + ('google.bigtable.v2.Bigtable', 'ReadRows'): ReadRowsRequest.FromString, + ('google.bigtable.v2.Bigtable', 'SampleRowKeys'): SampleRowKeysRequest.FromString, + } + response_serializers = { + ('google.bigtable.v2.Bigtable', 'CheckAndMutateRow'): CheckAndMutateRowResponse.SerializeToString, + ('google.bigtable.v2.Bigtable', 'MutateRow'): MutateRowResponse.SerializeToString, + ('google.bigtable.v2.Bigtable', 'MutateRows'): MutateRowsResponse.SerializeToString, + ('google.bigtable.v2.Bigtable', 'ReadModifyWriteRow'): ReadModifyWriteRowResponse.SerializeToString, + ('google.bigtable.v2.Bigtable', 'ReadRows'): ReadRowsResponse.SerializeToString, + ('google.bigtable.v2.Bigtable', 
'SampleRowKeys'): SampleRowKeysResponse.SerializeToString, + } + method_implementations = { + ('google.bigtable.v2.Bigtable', 'CheckAndMutateRow'): face_utilities.unary_unary_inline(servicer.CheckAndMutateRow), + ('google.bigtable.v2.Bigtable', 'MutateRow'): face_utilities.unary_unary_inline(servicer.MutateRow), + ('google.bigtable.v2.Bigtable', 'MutateRows'): face_utilities.unary_stream_inline(servicer.MutateRows), + ('google.bigtable.v2.Bigtable', 'ReadModifyWriteRow'): face_utilities.unary_unary_inline(servicer.ReadModifyWriteRow), + ('google.bigtable.v2.Bigtable', 'ReadRows'): face_utilities.unary_stream_inline(servicer.ReadRows), + ('google.bigtable.v2.Bigtable', 'SampleRowKeys'): face_utilities.unary_stream_inline(servicer.SampleRowKeys), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + +def beta_create_Bigtable_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + request_serializers = { + ('google.bigtable.v2.Bigtable', 'CheckAndMutateRow'): CheckAndMutateRowRequest.SerializeToString, + ('google.bigtable.v2.Bigtable', 'MutateRow'): MutateRowRequest.SerializeToString, + ('google.bigtable.v2.Bigtable', 'MutateRows'): MutateRowsRequest.SerializeToString, + ('google.bigtable.v2.Bigtable', 'ReadModifyWriteRow'): ReadModifyWriteRowRequest.SerializeToString, + ('google.bigtable.v2.Bigtable', 'ReadRows'): ReadRowsRequest.SerializeToString, + ('google.bigtable.v2.Bigtable', 'SampleRowKeys'): SampleRowKeysRequest.SerializeToString, + } + response_deserializers = { + ('google.bigtable.v2.Bigtable', 'CheckAndMutateRow'): CheckAndMutateRowResponse.FromString, + ('google.bigtable.v2.Bigtable', 'MutateRow'): MutateRowResponse.FromString, + 
('google.bigtable.v2.Bigtable', 'MutateRows'): MutateRowsResponse.FromString, + ('google.bigtable.v2.Bigtable', 'ReadModifyWriteRow'): ReadModifyWriteRowResponse.FromString, + ('google.bigtable.v2.Bigtable', 'ReadRows'): ReadRowsResponse.FromString, + ('google.bigtable.v2.Bigtable', 'SampleRowKeys'): SampleRowKeysResponse.FromString, + } + cardinalities = { + 'CheckAndMutateRow': cardinality.Cardinality.UNARY_UNARY, + 'MutateRow': cardinality.Cardinality.UNARY_UNARY, + 'MutateRows': cardinality.Cardinality.UNARY_STREAM, + 'ReadModifyWriteRow': cardinality.Cardinality.UNARY_UNARY, + 'ReadRows': cardinality.Cardinality.UNARY_STREAM, + 'SampleRowKeys': cardinality.Cardinality.UNARY_STREAM, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.bigtable.v2.Bigtable', cardinalities, options=stub_options) +# @@protoc_insertion_point(module_scope) diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated_v2/bigtable_table_admin_pb2.py b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/bigtable_table_admin_pb2.py new file mode 100644 index 0000000..c929b22 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/bigtable_table_admin_pb2.py @@ -0,0 +1,784 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/bigtable/admin/v2/bigtable_table_admin.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from gcloud.bigtable._generated_v2 import table_pb2 as google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/bigtable/admin/v2/bigtable_table_admin.proto', + package='google.bigtable.admin.v2', + syntax='proto3', + serialized_pb=_b('\n3google/bigtable/admin/v2/bigtable_table_admin.proto\x12\x18google.bigtable.admin.v2\x1a\x1cgoogle/api/annotations.proto\x1a$google/bigtable/admin/v2/table.proto\x1a\x1bgoogle/protobuf/empty.proto\"\xc8\x01\n\x12\x43reateTableRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x10\n\x08table_id\x18\x02 \x01(\t\x12.\n\x05table\x18\x03 \x01(\x0b\x32\x1f.google.bigtable.admin.v2.Table\x12J\n\x0einitial_splits\x18\x04 \x03(\x0b\x32\x32.google.bigtable.admin.v2.CreateTableRequest.Split\x1a\x14\n\x05Split\x12\x0b\n\x03key\x18\x01 \x01(\x0c\"m\n\x13\x44ropRowRangeRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x0erow_key_prefix\x18\x02 \x01(\x0cH\x00\x12$\n\x1a\x64\x65lete_all_data_from_table\x18\x03 \x01(\x08H\x00\x42\x08\n\x06target\"k\n\x11ListTablesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\x04view\x18\x02 \x01(\x0e\x32$.google.bigtable.admin.v2.Table.View\x12\x12\n\npage_token\x18\x03 \x01(\t\"^\n\x12ListTablesResponse\x12/\n\x06tables\x18\x01 
\x03(\x0b\x32\x1f.google.bigtable.admin.v2.Table\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"S\n\x0fGetTableRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\x04view\x18\x02 \x01(\x0e\x32$.google.bigtable.admin.v2.Table.View\"\"\n\x12\x44\x65leteTableRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\xae\x02\n\x1bModifyColumnFamiliesRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12Y\n\rmodifications\x18\x02 \x03(\x0b\x32\x42.google.bigtable.admin.v2.ModifyColumnFamiliesRequest.Modification\x1a\xa5\x01\n\x0cModification\x12\n\n\x02id\x18\x01 \x01(\t\x12\x38\n\x06\x63reate\x18\x02 \x01(\x0b\x32&.google.bigtable.admin.v2.ColumnFamilyH\x00\x12\x38\n\x06update\x18\x03 \x01(\x0b\x32&.google.bigtable.admin.v2.ColumnFamilyH\x00\x12\x0e\n\x04\x64rop\x18\x04 \x01(\x08H\x00\x42\x05\n\x03mod2\xb8\x07\n\x12\x42igtableTableAdmin\x12\x93\x01\n\x0b\x43reateTable\x12,.google.bigtable.admin.v2.CreateTableRequest\x1a\x1f.google.bigtable.admin.v2.Table\"5\x82\xd3\xe4\x93\x02/\"*/v2/{parent=projects/*/instances/*}/tables:\x01*\x12\x9b\x01\n\nListTables\x12+.google.bigtable.admin.v2.ListTablesRequest\x1a,.google.bigtable.admin.v2.ListTablesResponse\"2\x82\xd3\xe4\x93\x02,\x12*/v2/{parent=projects/*/instances/*}/tables\x12\x8a\x01\n\x08GetTable\x12).google.bigtable.admin.v2.GetTableRequest\x1a\x1f.google.bigtable.admin.v2.Table\"2\x82\xd3\xe4\x93\x02,\x12*/v2/{name=projects/*/instances/*/tables/*}\x12\x87\x01\n\x0b\x44\x65leteTable\x12,.google.bigtable.admin.v2.DeleteTableRequest\x1a\x16.google.protobuf.Empty\"2\x82\xd3\xe4\x93\x02,**/v2/{name=projects/*/instances/*/tables/*}\x12\xba\x01\n\x14ModifyColumnFamilies\x12\x35.google.bigtable.admin.v2.ModifyColumnFamiliesRequest\x1a\x1f.google.bigtable.admin.v2.Table\"J\x82\xd3\xe4\x93\x02\x44\"?/v2/{name=projects/*/instances/*/tables/*}:modifyColumnFamilies:\x01*\x12\x99\x01\n\x0c\x44ropRowRange\x12-.google.bigtable.admin.v2.DropRowRangeRequest\x1a\x16.google.protobuf.Empty\"B\x82\xd3\xe4\x93\x02<\"7/v2/{name=projects/*/instances/*/tables/*}:dropRowR
ange:\x01*B9\n\x1c\x63om.google.bigtable.admin.v2B\x17\x42igtableTableAdminProtoP\x01\x62\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_CREATETABLEREQUEST_SPLIT = _descriptor.Descriptor( + name='Split', + full_name='google.bigtable.admin.v2.CreateTableRequest.Split', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.bigtable.admin.v2.CreateTableRequest.Split.key', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=359, + serialized_end=379, +) + +_CREATETABLEREQUEST = _descriptor.Descriptor( + name='CreateTableRequest', + full_name='google.bigtable.admin.v2.CreateTableRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.bigtable.admin.v2.CreateTableRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='table_id', full_name='google.bigtable.admin.v2.CreateTableRequest.table_id', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='table', full_name='google.bigtable.admin.v2.CreateTableRequest.table', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='initial_splits', full_name='google.bigtable.admin.v2.CreateTableRequest.initial_splits', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_CREATETABLEREQUEST_SPLIT, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=179, + serialized_end=379, +) + + +_DROPROWRANGEREQUEST = _descriptor.Descriptor( + name='DropRowRangeRequest', + full_name='google.bigtable.admin.v2.DropRowRangeRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.admin.v2.DropRowRangeRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='row_key_prefix', full_name='google.bigtable.admin.v2.DropRowRangeRequest.row_key_prefix', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='delete_all_data_from_table', full_name='google.bigtable.admin.v2.DropRowRangeRequest.delete_all_data_from_table', index=2, + number=3, type=8, 
cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='target', full_name='google.bigtable.admin.v2.DropRowRangeRequest.target', + index=0, containing_type=None, fields=[]), + ], + serialized_start=381, + serialized_end=490, +) + + +_LISTTABLESREQUEST = _descriptor.Descriptor( + name='ListTablesRequest', + full_name='google.bigtable.admin.v2.ListTablesRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.bigtable.admin.v2.ListTablesRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='view', full_name='google.bigtable.admin.v2.ListTablesRequest.view', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.bigtable.admin.v2.ListTablesRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=492, + serialized_end=599, +) + + 
+_LISTTABLESRESPONSE = _descriptor.Descriptor( + name='ListTablesResponse', + full_name='google.bigtable.admin.v2.ListTablesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='tables', full_name='google.bigtable.admin.v2.ListTablesResponse.tables', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.bigtable.admin.v2.ListTablesResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=601, + serialized_end=695, +) + + +_GETTABLEREQUEST = _descriptor.Descriptor( + name='GetTableRequest', + full_name='google.bigtable.admin.v2.GetTableRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.admin.v2.GetTableRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='view', full_name='google.bigtable.admin.v2.GetTableRequest.view', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + 
nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=697, + serialized_end=780, +) + + +_DELETETABLEREQUEST = _descriptor.Descriptor( + name='DeleteTableRequest', + full_name='google.bigtable.admin.v2.DeleteTableRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.admin.v2.DeleteTableRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=782, + serialized_end=816, +) + + +_MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION = _descriptor.Descriptor( + name='Modification', + full_name='google.bigtable.admin.v2.ModifyColumnFamiliesRequest.Modification', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='google.bigtable.admin.v2.ModifyColumnFamiliesRequest.Modification.id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='create', full_name='google.bigtable.admin.v2.ModifyColumnFamiliesRequest.Modification.create', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update', 
full_name='google.bigtable.admin.v2.ModifyColumnFamiliesRequest.Modification.update', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='drop', full_name='google.bigtable.admin.v2.ModifyColumnFamiliesRequest.Modification.drop', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='mod', full_name='google.bigtable.admin.v2.ModifyColumnFamiliesRequest.Modification.mod', + index=0, containing_type=None, fields=[]), + ], + serialized_start=956, + serialized_end=1121, +) + +_MODIFYCOLUMNFAMILIESREQUEST = _descriptor.Descriptor( + name='ModifyColumnFamiliesRequest', + full_name='google.bigtable.admin.v2.ModifyColumnFamiliesRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.admin.v2.ModifyColumnFamiliesRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='modifications', full_name='google.bigtable.admin.v2.ModifyColumnFamiliesRequest.modifications', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + 
], + nested_types=[_MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=819, + serialized_end=1121, +) + +_CREATETABLEREQUEST_SPLIT.containing_type = _CREATETABLEREQUEST +_CREATETABLEREQUEST.fields_by_name['table'].message_type = google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2._TABLE +_CREATETABLEREQUEST.fields_by_name['initial_splits'].message_type = _CREATETABLEREQUEST_SPLIT +_DROPROWRANGEREQUEST.oneofs_by_name['target'].fields.append( + _DROPROWRANGEREQUEST.fields_by_name['row_key_prefix']) +_DROPROWRANGEREQUEST.fields_by_name['row_key_prefix'].containing_oneof = _DROPROWRANGEREQUEST.oneofs_by_name['target'] +_DROPROWRANGEREQUEST.oneofs_by_name['target'].fields.append( + _DROPROWRANGEREQUEST.fields_by_name['delete_all_data_from_table']) +_DROPROWRANGEREQUEST.fields_by_name['delete_all_data_from_table'].containing_oneof = _DROPROWRANGEREQUEST.oneofs_by_name['target'] +_LISTTABLESREQUEST.fields_by_name['view'].enum_type = google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2._TABLE_VIEW +_LISTTABLESRESPONSE.fields_by_name['tables'].message_type = google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2._TABLE +_GETTABLEREQUEST.fields_by_name['view'].enum_type = google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2._TABLE_VIEW +_MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION.fields_by_name['create'].message_type = google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2._COLUMNFAMILY +_MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION.fields_by_name['update'].message_type = google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2._COLUMNFAMILY +_MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION.containing_type = _MODIFYCOLUMNFAMILIESREQUEST +_MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION.oneofs_by_name['mod'].fields.append( + _MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION.fields_by_name['create']) +_MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION.fields_by_name['create'].containing_oneof = 
_MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION.oneofs_by_name['mod'] +_MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION.oneofs_by_name['mod'].fields.append( + _MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION.fields_by_name['update']) +_MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION.fields_by_name['update'].containing_oneof = _MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION.oneofs_by_name['mod'] +_MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION.oneofs_by_name['mod'].fields.append( + _MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION.fields_by_name['drop']) +_MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION.fields_by_name['drop'].containing_oneof = _MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION.oneofs_by_name['mod'] +_MODIFYCOLUMNFAMILIESREQUEST.fields_by_name['modifications'].message_type = _MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION +DESCRIPTOR.message_types_by_name['CreateTableRequest'] = _CREATETABLEREQUEST +DESCRIPTOR.message_types_by_name['DropRowRangeRequest'] = _DROPROWRANGEREQUEST +DESCRIPTOR.message_types_by_name['ListTablesRequest'] = _LISTTABLESREQUEST +DESCRIPTOR.message_types_by_name['ListTablesResponse'] = _LISTTABLESRESPONSE +DESCRIPTOR.message_types_by_name['GetTableRequest'] = _GETTABLEREQUEST +DESCRIPTOR.message_types_by_name['DeleteTableRequest'] = _DELETETABLEREQUEST +DESCRIPTOR.message_types_by_name['ModifyColumnFamiliesRequest'] = _MODIFYCOLUMNFAMILIESREQUEST + +CreateTableRequest = _reflection.GeneratedProtocolMessageType('CreateTableRequest', (_message.Message,), dict( + + Split = _reflection.GeneratedProtocolMessageType('Split', (_message.Message,), dict( + DESCRIPTOR = _CREATETABLEREQUEST_SPLIT, + __module__ = 'google.bigtable.admin.v2.bigtable_table_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.CreateTableRequest.Split) + )) + , + DESCRIPTOR = _CREATETABLEREQUEST, + __module__ = 'google.bigtable.admin.v2.bigtable_table_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.CreateTableRequest) + )) +_sym_db.RegisterMessage(CreateTableRequest) 
+_sym_db.RegisterMessage(CreateTableRequest.Split) + +DropRowRangeRequest = _reflection.GeneratedProtocolMessageType('DropRowRangeRequest', (_message.Message,), dict( + DESCRIPTOR = _DROPROWRANGEREQUEST, + __module__ = 'google.bigtable.admin.v2.bigtable_table_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.DropRowRangeRequest) + )) +_sym_db.RegisterMessage(DropRowRangeRequest) + +ListTablesRequest = _reflection.GeneratedProtocolMessageType('ListTablesRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTTABLESREQUEST, + __module__ = 'google.bigtable.admin.v2.bigtable_table_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.ListTablesRequest) + )) +_sym_db.RegisterMessage(ListTablesRequest) + +ListTablesResponse = _reflection.GeneratedProtocolMessageType('ListTablesResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTTABLESRESPONSE, + __module__ = 'google.bigtable.admin.v2.bigtable_table_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.ListTablesResponse) + )) +_sym_db.RegisterMessage(ListTablesResponse) + +GetTableRequest = _reflection.GeneratedProtocolMessageType('GetTableRequest', (_message.Message,), dict( + DESCRIPTOR = _GETTABLEREQUEST, + __module__ = 'google.bigtable.admin.v2.bigtable_table_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.GetTableRequest) + )) +_sym_db.RegisterMessage(GetTableRequest) + +DeleteTableRequest = _reflection.GeneratedProtocolMessageType('DeleteTableRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETETABLEREQUEST, + __module__ = 'google.bigtable.admin.v2.bigtable_table_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.DeleteTableRequest) + )) +_sym_db.RegisterMessage(DeleteTableRequest) + +ModifyColumnFamiliesRequest = _reflection.GeneratedProtocolMessageType('ModifyColumnFamiliesRequest', (_message.Message,), dict( + + Modification = 
_reflection.GeneratedProtocolMessageType('Modification', (_message.Message,), dict( + DESCRIPTOR = _MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION, + __module__ = 'google.bigtable.admin.v2.bigtable_table_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.ModifyColumnFamiliesRequest.Modification) + )) + , + DESCRIPTOR = _MODIFYCOLUMNFAMILIESREQUEST, + __module__ = 'google.bigtable.admin.v2.bigtable_table_admin_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.ModifyColumnFamiliesRequest) + )) +_sym_db.RegisterMessage(ModifyColumnFamiliesRequest) +_sym_db.RegisterMessage(ModifyColumnFamiliesRequest.Modification) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.bigtable.admin.v2B\027BigtableTableAdminProtoP\001')) + +from grpc.beta import implementations as beta_implementations +from grpc.beta import interfaces as beta_interfaces +from grpc.framework.common import cardinality +from grpc.framework.interfaces.face import utilities as face_utilities + + +class BigtableTableAdminStub(object): + """Service for creating, configuring, and deleting Cloud Bigtable tables. + Provides access to the table schemas only, not the data stored within + the tables. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.CreateTable = channel.unary_unary( + '/google.bigtable.admin.v2.BigtableTableAdmin/CreateTable', + request_serializer=CreateTableRequest.SerializeToString, + response_deserializer=google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2.Table.FromString, + ) + self.ListTables = channel.unary_unary( + '/google.bigtable.admin.v2.BigtableTableAdmin/ListTables', + request_serializer=ListTablesRequest.SerializeToString, + response_deserializer=ListTablesResponse.FromString, + ) + self.GetTable = channel.unary_unary( + '/google.bigtable.admin.v2.BigtableTableAdmin/GetTable', + request_serializer=GetTableRequest.SerializeToString, + response_deserializer=google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2.Table.FromString, + ) + self.DeleteTable = channel.unary_unary( + '/google.bigtable.admin.v2.BigtableTableAdmin/DeleteTable', + request_serializer=DeleteTableRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ModifyColumnFamilies = channel.unary_unary( + '/google.bigtable.admin.v2.BigtableTableAdmin/ModifyColumnFamilies', + request_serializer=ModifyColumnFamiliesRequest.SerializeToString, + response_deserializer=google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2.Table.FromString, + ) + self.DropRowRange = channel.unary_unary( + '/google.bigtable.admin.v2.BigtableTableAdmin/DropRowRange', + request_serializer=DropRowRangeRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + +class BigtableTableAdminServicer(object): + """Service for creating, configuring, and deleting Cloud Bigtable tables. + Provides access to the table schemas only, not the data stored within + the tables. + """ + + def CreateTable(self, request, context): + """Creates a new table in the specified instance. + The table can be created with a full set of initial column families, + specified in the request. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTables(self, request, context): + """Lists all tables served from a specified instance. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTable(self, request, context): + """Gets metadata information about the specified table. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteTable(self, request, context): + """Permanently deletes a specified table and all of its data. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ModifyColumnFamilies(self, request, context): + """Atomically performs a series of column family modifications + on the specified table. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DropRowRange(self, request, context): + """Permanently drop/delete a row range from a specified table. The request can + specify whether to delete all rows in a table, or only those that match a + particular prefix. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_BigtableTableAdminServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateTable': grpc.unary_unary_rpc_method_handler( + servicer.CreateTable, + request_deserializer=CreateTableRequest.FromString, + response_serializer=google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2.Table.SerializeToString, + ), + 'ListTables': grpc.unary_unary_rpc_method_handler( + servicer.ListTables, + request_deserializer=ListTablesRequest.FromString, + response_serializer=ListTablesResponse.SerializeToString, + ), + 'GetTable': grpc.unary_unary_rpc_method_handler( + servicer.GetTable, + request_deserializer=GetTableRequest.FromString, + response_serializer=google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2.Table.SerializeToString, + ), + 'DeleteTable': grpc.unary_unary_rpc_method_handler( + servicer.DeleteTable, + request_deserializer=DeleteTableRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ModifyColumnFamilies': grpc.unary_unary_rpc_method_handler( + servicer.ModifyColumnFamilies, + request_deserializer=ModifyColumnFamiliesRequest.FromString, + response_serializer=google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2.Table.SerializeToString, + ), + 'DropRowRange': grpc.unary_unary_rpc_method_handler( + servicer.DropRowRange, + request_deserializer=DropRowRangeRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.bigtable.admin.v2.BigtableTableAdmin', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + +class BetaBigtableTableAdminServicer(object): + """Service for creating, configuring, and deleting Cloud Bigtable tables. 
+ Provides access to the table schemas only, not the data stored within + the tables. + """ + def CreateTable(self, request, context): + """Creates a new table in the specified instance. + The table can be created with a full set of initial column families, + specified in the request. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListTables(self, request, context): + """Lists all tables served from a specified instance. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def GetTable(self, request, context): + """Gets metadata information about the specified table. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteTable(self, request, context): + """Permanently deletes a specified table and all of its data. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ModifyColumnFamilies(self, request, context): + """Atomically performs a series of column family modifications + on the specified table. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DropRowRange(self, request, context): + """Permanently drop/delete a row range from a specified table. The request can + specify whether to delete all rows in a table, or only those that match a + particular prefix. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + +class BetaBigtableTableAdminStub(object): + """Service for creating, configuring, and deleting Cloud Bigtable tables. + Provides access to the table schemas only, not the data stored within + the tables. + """ + def CreateTable(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates a new table in the specified instance. + The table can be created with a full set of initial column families, + specified in the request. 
+ """ + raise NotImplementedError() + CreateTable.future = None + def ListTables(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists all tables served from a specified instance. + """ + raise NotImplementedError() + ListTables.future = None + def GetTable(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets metadata information about the specified table. + """ + raise NotImplementedError() + GetTable.future = None + def DeleteTable(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Permanently deletes a specified table and all of its data. + """ + raise NotImplementedError() + DeleteTable.future = None + def ModifyColumnFamilies(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Atomically performs a series of column family modifications + on the specified table. + """ + raise NotImplementedError() + ModifyColumnFamilies.future = None + def DropRowRange(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Permanently drop/delete a row range from a specified table. The request can + specify whether to delete all rows in a table, or only those that match a + particular prefix. 
+ """ + raise NotImplementedError() + DropRowRange.future = None + + +def beta_create_BigtableTableAdmin_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + request_deserializers = { + ('google.bigtable.admin.v2.BigtableTableAdmin', 'CreateTable'): CreateTableRequest.FromString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'DeleteTable'): DeleteTableRequest.FromString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'DropRowRange'): DropRowRangeRequest.FromString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'GetTable'): GetTableRequest.FromString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'ListTables'): ListTablesRequest.FromString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'ModifyColumnFamilies'): ModifyColumnFamiliesRequest.FromString, + } + response_serializers = { + ('google.bigtable.admin.v2.BigtableTableAdmin', 'CreateTable'): google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2.Table.SerializeToString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'DeleteTable'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'DropRowRange'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'GetTable'): google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2.Table.SerializeToString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'ListTables'): ListTablesResponse.SerializeToString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'ModifyColumnFamilies'): google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2.Table.SerializeToString, + } + method_implementations = { + ('google.bigtable.admin.v2.BigtableTableAdmin', 'CreateTable'): face_utilities.unary_unary_inline(servicer.CreateTable), + ('google.bigtable.admin.v2.BigtableTableAdmin', 'DeleteTable'): face_utilities.unary_unary_inline(servicer.DeleteTable), + ('google.bigtable.admin.v2.BigtableTableAdmin', 'DropRowRange'): 
face_utilities.unary_unary_inline(servicer.DropRowRange), + ('google.bigtable.admin.v2.BigtableTableAdmin', 'GetTable'): face_utilities.unary_unary_inline(servicer.GetTable), + ('google.bigtable.admin.v2.BigtableTableAdmin', 'ListTables'): face_utilities.unary_unary_inline(servicer.ListTables), + ('google.bigtable.admin.v2.BigtableTableAdmin', 'ModifyColumnFamilies'): face_utilities.unary_unary_inline(servicer.ModifyColumnFamilies), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + +def beta_create_BigtableTableAdmin_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + request_serializers = { + ('google.bigtable.admin.v2.BigtableTableAdmin', 'CreateTable'): CreateTableRequest.SerializeToString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'DeleteTable'): DeleteTableRequest.SerializeToString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'DropRowRange'): DropRowRangeRequest.SerializeToString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'GetTable'): GetTableRequest.SerializeToString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'ListTables'): ListTablesRequest.SerializeToString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'ModifyColumnFamilies'): ModifyColumnFamiliesRequest.SerializeToString, + } + response_deserializers = { + ('google.bigtable.admin.v2.BigtableTableAdmin', 'CreateTable'): google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2.Table.FromString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'DeleteTable'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'DropRowRange'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + 
('google.bigtable.admin.v2.BigtableTableAdmin', 'GetTable'): google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2.Table.FromString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'ListTables'): ListTablesResponse.FromString, + ('google.bigtable.admin.v2.BigtableTableAdmin', 'ModifyColumnFamilies'): google_dot_bigtable_dot_admin_dot_v2_dot_table__pb2.Table.FromString, + } + cardinalities = { + 'CreateTable': cardinality.Cardinality.UNARY_UNARY, + 'DeleteTable': cardinality.Cardinality.UNARY_UNARY, + 'DropRowRange': cardinality.Cardinality.UNARY_UNARY, + 'GetTable': cardinality.Cardinality.UNARY_UNARY, + 'ListTables': cardinality.Cardinality.UNARY_UNARY, + 'ModifyColumnFamilies': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.bigtable.admin.v2.BigtableTableAdmin', cardinalities, options=stub_options) +# @@protoc_insertion_point(module_scope) diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated_v2/common_pb2.py b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/common_pb2.py new file mode 100644 index 0000000..2981304 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/common_pb2.py @@ -0,0 +1,67 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/bigtable/admin/v2/common.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/bigtable/admin/v2/common.proto', + package='google.bigtable.admin.v2', + syntax='proto3', + serialized_pb=_b('\n%google/bigtable/admin/v2/common.proto\x12\x18google.bigtable.admin.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto*=\n\x0bStorageType\x12\x1c\n\x18STORAGE_TYPE_UNSPECIFIED\x10\x00\x12\x07\n\x03SSD\x10\x01\x12\x07\n\x03HDD\x10\x02\x42-\n\x1c\x63om.google.bigtable.admin.v2B\x0b\x43ommonProtoP\x01\x62\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +_STORAGETYPE = _descriptor.EnumDescriptor( + name='StorageType', + full_name='google.bigtable.admin.v2.StorageType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='STORAGE_TYPE_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SSD', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='HDD', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=130, + serialized_end=191, +) 
+_sym_db.RegisterEnumDescriptor(_STORAGETYPE) + +StorageType = enum_type_wrapper.EnumTypeWrapper(_STORAGETYPE) +STORAGE_TYPE_UNSPECIFIED = 0 +SSD = 1 +HDD = 2 + + +DESCRIPTOR.enum_types_by_name['StorageType'] = _STORAGETYPE + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.bigtable.admin.v2B\013CommonProtoP\001')) +# @@protoc_insertion_point(module_scope) diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated_v2/data_pb2.py b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/data_pb2.py new file mode 100644 index 0000000..6db08fb --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/data_pb2.py @@ -0,0 +1,1260 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/bigtable/v2/data.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/bigtable/v2/data.proto', + package='google.bigtable.v2', + syntax='proto3', + serialized_pb=_b('\n\x1dgoogle/bigtable/v2/data.proto\x12\x12google.bigtable.v2\"@\n\x03Row\x12\x0b\n\x03key\x18\x01 \x01(\x0c\x12,\n\x08\x66\x61milies\x18\x02 \x03(\x0b\x32\x1a.google.bigtable.v2.Family\"C\n\x06\x46\x61mily\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x07\x63olumns\x18\x02 \x03(\x0b\x32\x1a.google.bigtable.v2.Column\"D\n\x06\x43olumn\x12\x11\n\tqualifier\x18\x01 \x01(\x0c\x12\'\n\x05\x63\x65lls\x18\x02 \x03(\x0b\x32\x18.google.bigtable.v2.Cell\"?\n\x04\x43\x65ll\x12\x18\n\x10timestamp_micros\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 
\x01(\x0c\x12\x0e\n\x06labels\x18\x03 \x03(\t\"\x8a\x01\n\x08RowRange\x12\x1a\n\x10start_key_closed\x18\x01 \x01(\x0cH\x00\x12\x18\n\x0estart_key_open\x18\x02 \x01(\x0cH\x00\x12\x16\n\x0c\x65nd_key_open\x18\x03 \x01(\x0cH\x01\x12\x18\n\x0e\x65nd_key_closed\x18\x04 \x01(\x0cH\x01\x42\x0b\n\tstart_keyB\t\n\x07\x65nd_key\"L\n\x06RowSet\x12\x10\n\x08row_keys\x18\x01 \x03(\x0c\x12\x30\n\nrow_ranges\x18\x02 \x03(\x0b\x32\x1c.google.bigtable.v2.RowRange\"\xc6\x01\n\x0b\x43olumnRange\x12\x13\n\x0b\x66\x61mily_name\x18\x01 \x01(\t\x12 \n\x16start_qualifier_closed\x18\x02 \x01(\x0cH\x00\x12\x1e\n\x14start_qualifier_open\x18\x03 \x01(\x0cH\x00\x12\x1e\n\x14\x65nd_qualifier_closed\x18\x04 \x01(\x0cH\x01\x12\x1c\n\x12\x65nd_qualifier_open\x18\x05 \x01(\x0cH\x01\x42\x11\n\x0fstart_qualifierB\x0f\n\rend_qualifier\"N\n\x0eTimestampRange\x12\x1e\n\x16start_timestamp_micros\x18\x01 \x01(\x03\x12\x1c\n\x14\x65nd_timestamp_micros\x18\x02 \x01(\x03\"\x98\x01\n\nValueRange\x12\x1c\n\x12start_value_closed\x18\x01 \x01(\x0cH\x00\x12\x1a\n\x10start_value_open\x18\x02 \x01(\x0cH\x00\x12\x1a\n\x10\x65nd_value_closed\x18\x03 \x01(\x0cH\x01\x12\x18\n\x0e\x65nd_value_open\x18\x04 \x01(\x0cH\x01\x42\r\n\x0bstart_valueB\x0b\n\tend_value\"\xdf\x08\n\tRowFilter\x12\x34\n\x05\x63hain\x18\x01 \x01(\x0b\x32#.google.bigtable.v2.RowFilter.ChainH\x00\x12>\n\ninterleave\x18\x02 \x01(\x0b\x32(.google.bigtable.v2.RowFilter.InterleaveH\x00\x12<\n\tcondition\x18\x03 \x01(\x0b\x32\'.google.bigtable.v2.RowFilter.ConditionH\x00\x12\x0e\n\x04sink\x18\x10 \x01(\x08H\x00\x12\x19\n\x0fpass_all_filter\x18\x11 \x01(\x08H\x00\x12\x1a\n\x10\x62lock_all_filter\x18\x12 \x01(\x08H\x00\x12\x1e\n\x14row_key_regex_filter\x18\x04 \x01(\x0cH\x00\x12\x1b\n\x11row_sample_filter\x18\x0e \x01(\x01H\x00\x12\"\n\x18\x66\x61mily_name_regex_filter\x18\x05 \x01(\tH\x00\x12\'\n\x1d\x63olumn_qualifier_regex_filter\x18\x06 \x01(\x0cH\x00\x12>\n\x13\x63olumn_range_filter\x18\x07 
\x01(\x0b\x32\x1f.google.bigtable.v2.ColumnRangeH\x00\x12\x44\n\x16timestamp_range_filter\x18\x08 \x01(\x0b\x32\".google.bigtable.v2.TimestampRangeH\x00\x12\x1c\n\x12value_regex_filter\x18\t \x01(\x0cH\x00\x12<\n\x12value_range_filter\x18\x0f \x01(\x0b\x32\x1e.google.bigtable.v2.ValueRangeH\x00\x12%\n\x1b\x63\x65lls_per_row_offset_filter\x18\n \x01(\x05H\x00\x12$\n\x1a\x63\x65lls_per_row_limit_filter\x18\x0b \x01(\x05H\x00\x12\'\n\x1d\x63\x65lls_per_column_limit_filter\x18\x0c \x01(\x05H\x00\x12!\n\x17strip_value_transformer\x18\r \x01(\x08H\x00\x12!\n\x17\x61pply_label_transformer\x18\x13 \x01(\tH\x00\x1a\x37\n\x05\x43hain\x12.\n\x07\x66ilters\x18\x01 \x03(\x0b\x32\x1d.google.bigtable.v2.RowFilter\x1a<\n\nInterleave\x12.\n\x07\x66ilters\x18\x01 \x03(\x0b\x32\x1d.google.bigtable.v2.RowFilter\x1a\xad\x01\n\tCondition\x12\x37\n\x10predicate_filter\x18\x01 \x01(\x0b\x32\x1d.google.bigtable.v2.RowFilter\x12\x32\n\x0btrue_filter\x18\x02 \x01(\x0b\x32\x1d.google.bigtable.v2.RowFilter\x12\x33\n\x0c\x66\x61lse_filter\x18\x03 \x01(\x0b\x32\x1d.google.bigtable.v2.RowFilterB\x08\n\x06\x66ilter\"\xc9\x04\n\x08Mutation\x12\x38\n\x08set_cell\x18\x01 \x01(\x0b\x32$.google.bigtable.v2.Mutation.SetCellH\x00\x12K\n\x12\x64\x65lete_from_column\x18\x02 \x01(\x0b\x32-.google.bigtable.v2.Mutation.DeleteFromColumnH\x00\x12K\n\x12\x64\x65lete_from_family\x18\x03 \x01(\x0b\x32-.google.bigtable.v2.Mutation.DeleteFromFamilyH\x00\x12\x45\n\x0f\x64\x65lete_from_row\x18\x04 \x01(\x0b\x32*.google.bigtable.v2.Mutation.DeleteFromRowH\x00\x1a\x61\n\x07SetCell\x12\x13\n\x0b\x66\x61mily_name\x18\x01 \x01(\t\x12\x18\n\x10\x63olumn_qualifier\x18\x02 \x01(\x0c\x12\x18\n\x10timestamp_micros\x18\x03 \x01(\x03\x12\r\n\x05value\x18\x04 \x01(\x0c\x1ay\n\x10\x44\x65leteFromColumn\x12\x13\n\x0b\x66\x61mily_name\x18\x01 \x01(\t\x12\x18\n\x10\x63olumn_qualifier\x18\x02 \x01(\x0c\x12\x36\n\ntime_range\x18\x03 
\x01(\x0b\x32\".google.bigtable.v2.TimestampRange\x1a\'\n\x10\x44\x65leteFromFamily\x12\x13\n\x0b\x66\x61mily_name\x18\x01 \x01(\t\x1a\x0f\n\rDeleteFromRowB\n\n\x08mutation\"\x80\x01\n\x13ReadModifyWriteRule\x12\x13\n\x0b\x66\x61mily_name\x18\x01 \x01(\t\x12\x18\n\x10\x63olumn_qualifier\x18\x02 \x01(\x0c\x12\x16\n\x0c\x61ppend_value\x18\x03 \x01(\x0cH\x00\x12\x1a\n\x10increment_amount\x18\x04 \x01(\x03H\x00\x42\x06\n\x04ruleB%\n\x16\x63om.google.bigtable.v2B\tDataProtoP\x01\x62\x06proto3') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_ROW = _descriptor.Descriptor( + name='Row', + full_name='google.bigtable.v2.Row', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.bigtable.v2.Row.key', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='families', full_name='google.bigtable.v2.Row.families', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=53, + serialized_end=117, +) + + +_FAMILY = _descriptor.Descriptor( + name='Family', + full_name='google.bigtable.v2.Family', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.v2.Family.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='columns', full_name='google.bigtable.v2.Family.columns', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=119, + serialized_end=186, +) + + +_COLUMN = _descriptor.Descriptor( + name='Column', + full_name='google.bigtable.v2.Column', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='qualifier', full_name='google.bigtable.v2.Column.qualifier', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cells', full_name='google.bigtable.v2.Column.cells', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=188, + serialized_end=256, +) + + +_CELL = _descriptor.Descriptor( + name='Cell', + full_name='google.bigtable.v2.Cell', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='timestamp_micros', full_name='google.bigtable.v2.Cell.timestamp_micros', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.bigtable.v2.Cell.value', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.bigtable.v2.Cell.labels', index=2, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=258, + serialized_end=321, +) + + +_ROWRANGE = _descriptor.Descriptor( + name='RowRange', + full_name='google.bigtable.v2.RowRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='start_key_closed', full_name='google.bigtable.v2.RowRange.start_key_closed', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start_key_open', full_name='google.bigtable.v2.RowRange.start_key_open', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_key_open', full_name='google.bigtable.v2.RowRange.end_key_open', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_key_closed', full_name='google.bigtable.v2.RowRange.end_key_closed', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='start_key', full_name='google.bigtable.v2.RowRange.start_key', + index=0, containing_type=None, fields=[]), + _descriptor.OneofDescriptor( + name='end_key', full_name='google.bigtable.v2.RowRange.end_key', + index=1, containing_type=None, fields=[]), + ], + serialized_start=324, + serialized_end=462, +) + + +_ROWSET = _descriptor.Descriptor( + name='RowSet', + full_name='google.bigtable.v2.RowSet', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='row_keys', full_name='google.bigtable.v2.RowSet.row_keys', index=0, + number=1, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='row_ranges', full_name='google.bigtable.v2.RowSet.row_ranges', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=464, + serialized_end=540, +) + + +_COLUMNRANGE = _descriptor.Descriptor( + name='ColumnRange', + 
full_name='google.bigtable.v2.ColumnRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='family_name', full_name='google.bigtable.v2.ColumnRange.family_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start_qualifier_closed', full_name='google.bigtable.v2.ColumnRange.start_qualifier_closed', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start_qualifier_open', full_name='google.bigtable.v2.ColumnRange.start_qualifier_open', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_qualifier_closed', full_name='google.bigtable.v2.ColumnRange.end_qualifier_closed', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_qualifier_open', full_name='google.bigtable.v2.ColumnRange.end_qualifier_open', index=4, + number=5, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + 
oneofs=[ + _descriptor.OneofDescriptor( + name='start_qualifier', full_name='google.bigtable.v2.ColumnRange.start_qualifier', + index=0, containing_type=None, fields=[]), + _descriptor.OneofDescriptor( + name='end_qualifier', full_name='google.bigtable.v2.ColumnRange.end_qualifier', + index=1, containing_type=None, fields=[]), + ], + serialized_start=543, + serialized_end=741, +) + + +_TIMESTAMPRANGE = _descriptor.Descriptor( + name='TimestampRange', + full_name='google.bigtable.v2.TimestampRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='start_timestamp_micros', full_name='google.bigtable.v2.TimestampRange.start_timestamp_micros', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_timestamp_micros', full_name='google.bigtable.v2.TimestampRange.end_timestamp_micros', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=743, + serialized_end=821, +) + + +_VALUERANGE = _descriptor.Descriptor( + name='ValueRange', + full_name='google.bigtable.v2.ValueRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='start_value_closed', full_name='google.bigtable.v2.ValueRange.start_value_closed', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), 
+ _descriptor.FieldDescriptor( + name='start_value_open', full_name='google.bigtable.v2.ValueRange.start_value_open', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_value_closed', full_name='google.bigtable.v2.ValueRange.end_value_closed', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_value_open', full_name='google.bigtable.v2.ValueRange.end_value_open', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='start_value', full_name='google.bigtable.v2.ValueRange.start_value', + index=0, containing_type=None, fields=[]), + _descriptor.OneofDescriptor( + name='end_value', full_name='google.bigtable.v2.ValueRange.end_value', + index=1, containing_type=None, fields=[]), + ], + serialized_start=824, + serialized_end=976, +) + + +_ROWFILTER_CHAIN = _descriptor.Descriptor( + name='Chain', + full_name='google.bigtable.v2.RowFilter.Chain', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='filters', full_name='google.bigtable.v2.RowFilter.Chain.filters', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1795, + serialized_end=1850, +) + +_ROWFILTER_INTERLEAVE = _descriptor.Descriptor( + name='Interleave', + full_name='google.bigtable.v2.RowFilter.Interleave', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='filters', full_name='google.bigtable.v2.RowFilter.Interleave.filters', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1852, + serialized_end=1912, +) + +_ROWFILTER_CONDITION = _descriptor.Descriptor( + name='Condition', + full_name='google.bigtable.v2.RowFilter.Condition', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='predicate_filter', full_name='google.bigtable.v2.RowFilter.Condition.predicate_filter', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='true_filter', full_name='google.bigtable.v2.RowFilter.Condition.true_filter', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='false_filter', full_name='google.bigtable.v2.RowFilter.Condition.false_filter', index=2, + 
number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1915, + serialized_end=2088, +) + +_ROWFILTER = _descriptor.Descriptor( + name='RowFilter', + full_name='google.bigtable.v2.RowFilter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='chain', full_name='google.bigtable.v2.RowFilter.chain', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='interleave', full_name='google.bigtable.v2.RowFilter.interleave', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='condition', full_name='google.bigtable.v2.RowFilter.condition', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sink', full_name='google.bigtable.v2.RowFilter.sink', index=3, + number=16, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pass_all_filter', full_name='google.bigtable.v2.RowFilter.pass_all_filter', index=4, + 
number=17, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='block_all_filter', full_name='google.bigtable.v2.RowFilter.block_all_filter', index=5, + number=18, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='row_key_regex_filter', full_name='google.bigtable.v2.RowFilter.row_key_regex_filter', index=6, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='row_sample_filter', full_name='google.bigtable.v2.RowFilter.row_sample_filter', index=7, + number=14, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='family_name_regex_filter', full_name='google.bigtable.v2.RowFilter.family_name_regex_filter', index=8, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='column_qualifier_regex_filter', full_name='google.bigtable.v2.RowFilter.column_qualifier_regex_filter', index=9, + number=6, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + 
name='column_range_filter', full_name='google.bigtable.v2.RowFilter.column_range_filter', index=10, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timestamp_range_filter', full_name='google.bigtable.v2.RowFilter.timestamp_range_filter', index=11, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value_regex_filter', full_name='google.bigtable.v2.RowFilter.value_regex_filter', index=12, + number=9, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value_range_filter', full_name='google.bigtable.v2.RowFilter.value_range_filter', index=13, + number=15, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cells_per_row_offset_filter', full_name='google.bigtable.v2.RowFilter.cells_per_row_offset_filter', index=14, + number=10, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cells_per_row_limit_filter', full_name='google.bigtable.v2.RowFilter.cells_per_row_limit_filter', index=15, + number=11, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cells_per_column_limit_filter', full_name='google.bigtable.v2.RowFilter.cells_per_column_limit_filter', index=16, + number=12, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='strip_value_transformer', full_name='google.bigtable.v2.RowFilter.strip_value_transformer', index=17, + number=13, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='apply_label_transformer', full_name='google.bigtable.v2.RowFilter.apply_label_transformer', index=18, + number=19, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_ROWFILTER_CHAIN, _ROWFILTER_INTERLEAVE, _ROWFILTER_CONDITION, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='filter', full_name='google.bigtable.v2.RowFilter.filter', + index=0, containing_type=None, fields=[]), + ], + serialized_start=979, + serialized_end=2098, +) + + +_MUTATION_SETCELL = _descriptor.Descriptor( + name='SetCell', + full_name='google.bigtable.v2.Mutation.SetCell', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='family_name', full_name='google.bigtable.v2.Mutation.SetCell.family_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='column_qualifier', full_name='google.bigtable.v2.Mutation.SetCell.column_qualifier', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timestamp_micros', full_name='google.bigtable.v2.Mutation.SetCell.timestamp_micros', index=2, + number=3, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.bigtable.v2.Mutation.SetCell.value', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2396, + serialized_end=2493, +) + +_MUTATION_DELETEFROMCOLUMN = _descriptor.Descriptor( + name='DeleteFromColumn', + full_name='google.bigtable.v2.Mutation.DeleteFromColumn', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='family_name', full_name='google.bigtable.v2.Mutation.DeleteFromColumn.family_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='column_qualifier', 
full_name='google.bigtable.v2.Mutation.DeleteFromColumn.column_qualifier', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='time_range', full_name='google.bigtable.v2.Mutation.DeleteFromColumn.time_range', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2495, + serialized_end=2616, +) + +_MUTATION_DELETEFROMFAMILY = _descriptor.Descriptor( + name='DeleteFromFamily', + full_name='google.bigtable.v2.Mutation.DeleteFromFamily', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='family_name', full_name='google.bigtable.v2.Mutation.DeleteFromFamily.family_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2618, + serialized_end=2657, +) + +_MUTATION_DELETEFROMROW = _descriptor.Descriptor( + name='DeleteFromRow', + full_name='google.bigtable.v2.Mutation.DeleteFromRow', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + 
serialized_start=2659, + serialized_end=2674, +) + +_MUTATION = _descriptor.Descriptor( + name='Mutation', + full_name='google.bigtable.v2.Mutation', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='set_cell', full_name='google.bigtable.v2.Mutation.set_cell', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='delete_from_column', full_name='google.bigtable.v2.Mutation.delete_from_column', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='delete_from_family', full_name='google.bigtable.v2.Mutation.delete_from_family', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='delete_from_row', full_name='google.bigtable.v2.Mutation.delete_from_row', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_MUTATION_SETCELL, _MUTATION_DELETEFROMCOLUMN, _MUTATION_DELETEFROMFAMILY, _MUTATION_DELETEFROMROW, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='mutation', full_name='google.bigtable.v2.Mutation.mutation', + index=0, containing_type=None, fields=[]), + ], + serialized_start=2101, + 
serialized_end=2686, +) + + +_READMODIFYWRITERULE = _descriptor.Descriptor( + name='ReadModifyWriteRule', + full_name='google.bigtable.v2.ReadModifyWriteRule', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='family_name', full_name='google.bigtable.v2.ReadModifyWriteRule.family_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='column_qualifier', full_name='google.bigtable.v2.ReadModifyWriteRule.column_qualifier', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='append_value', full_name='google.bigtable.v2.ReadModifyWriteRule.append_value', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='increment_amount', full_name='google.bigtable.v2.ReadModifyWriteRule.increment_amount', index=3, + number=4, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='rule', full_name='google.bigtable.v2.ReadModifyWriteRule.rule', + index=0, containing_type=None, fields=[]), + ], + serialized_start=2689, + serialized_end=2817, +) + 
+_ROW.fields_by_name['families'].message_type = _FAMILY +_FAMILY.fields_by_name['columns'].message_type = _COLUMN +_COLUMN.fields_by_name['cells'].message_type = _CELL +_ROWRANGE.oneofs_by_name['start_key'].fields.append( + _ROWRANGE.fields_by_name['start_key_closed']) +_ROWRANGE.fields_by_name['start_key_closed'].containing_oneof = _ROWRANGE.oneofs_by_name['start_key'] +_ROWRANGE.oneofs_by_name['start_key'].fields.append( + _ROWRANGE.fields_by_name['start_key_open']) +_ROWRANGE.fields_by_name['start_key_open'].containing_oneof = _ROWRANGE.oneofs_by_name['start_key'] +_ROWRANGE.oneofs_by_name['end_key'].fields.append( + _ROWRANGE.fields_by_name['end_key_open']) +_ROWRANGE.fields_by_name['end_key_open'].containing_oneof = _ROWRANGE.oneofs_by_name['end_key'] +_ROWRANGE.oneofs_by_name['end_key'].fields.append( + _ROWRANGE.fields_by_name['end_key_closed']) +_ROWRANGE.fields_by_name['end_key_closed'].containing_oneof = _ROWRANGE.oneofs_by_name['end_key'] +_ROWSET.fields_by_name['row_ranges'].message_type = _ROWRANGE +_COLUMNRANGE.oneofs_by_name['start_qualifier'].fields.append( + _COLUMNRANGE.fields_by_name['start_qualifier_closed']) +_COLUMNRANGE.fields_by_name['start_qualifier_closed'].containing_oneof = _COLUMNRANGE.oneofs_by_name['start_qualifier'] +_COLUMNRANGE.oneofs_by_name['start_qualifier'].fields.append( + _COLUMNRANGE.fields_by_name['start_qualifier_open']) +_COLUMNRANGE.fields_by_name['start_qualifier_open'].containing_oneof = _COLUMNRANGE.oneofs_by_name['start_qualifier'] +_COLUMNRANGE.oneofs_by_name['end_qualifier'].fields.append( + _COLUMNRANGE.fields_by_name['end_qualifier_closed']) +_COLUMNRANGE.fields_by_name['end_qualifier_closed'].containing_oneof = _COLUMNRANGE.oneofs_by_name['end_qualifier'] +_COLUMNRANGE.oneofs_by_name['end_qualifier'].fields.append( + _COLUMNRANGE.fields_by_name['end_qualifier_open']) +_COLUMNRANGE.fields_by_name['end_qualifier_open'].containing_oneof = _COLUMNRANGE.oneofs_by_name['end_qualifier'] 
+_VALUERANGE.oneofs_by_name['start_value'].fields.append( + _VALUERANGE.fields_by_name['start_value_closed']) +_VALUERANGE.fields_by_name['start_value_closed'].containing_oneof = _VALUERANGE.oneofs_by_name['start_value'] +_VALUERANGE.oneofs_by_name['start_value'].fields.append( + _VALUERANGE.fields_by_name['start_value_open']) +_VALUERANGE.fields_by_name['start_value_open'].containing_oneof = _VALUERANGE.oneofs_by_name['start_value'] +_VALUERANGE.oneofs_by_name['end_value'].fields.append( + _VALUERANGE.fields_by_name['end_value_closed']) +_VALUERANGE.fields_by_name['end_value_closed'].containing_oneof = _VALUERANGE.oneofs_by_name['end_value'] +_VALUERANGE.oneofs_by_name['end_value'].fields.append( + _VALUERANGE.fields_by_name['end_value_open']) +_VALUERANGE.fields_by_name['end_value_open'].containing_oneof = _VALUERANGE.oneofs_by_name['end_value'] +_ROWFILTER_CHAIN.fields_by_name['filters'].message_type = _ROWFILTER +_ROWFILTER_CHAIN.containing_type = _ROWFILTER +_ROWFILTER_INTERLEAVE.fields_by_name['filters'].message_type = _ROWFILTER +_ROWFILTER_INTERLEAVE.containing_type = _ROWFILTER +_ROWFILTER_CONDITION.fields_by_name['predicate_filter'].message_type = _ROWFILTER +_ROWFILTER_CONDITION.fields_by_name['true_filter'].message_type = _ROWFILTER +_ROWFILTER_CONDITION.fields_by_name['false_filter'].message_type = _ROWFILTER +_ROWFILTER_CONDITION.containing_type = _ROWFILTER +_ROWFILTER.fields_by_name['chain'].message_type = _ROWFILTER_CHAIN +_ROWFILTER.fields_by_name['interleave'].message_type = _ROWFILTER_INTERLEAVE +_ROWFILTER.fields_by_name['condition'].message_type = _ROWFILTER_CONDITION +_ROWFILTER.fields_by_name['column_range_filter'].message_type = _COLUMNRANGE +_ROWFILTER.fields_by_name['timestamp_range_filter'].message_type = _TIMESTAMPRANGE +_ROWFILTER.fields_by_name['value_range_filter'].message_type = _VALUERANGE +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['chain']) +_ROWFILTER.fields_by_name['chain'].containing_oneof = 
_ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['interleave']) +_ROWFILTER.fields_by_name['interleave'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['condition']) +_ROWFILTER.fields_by_name['condition'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['sink']) +_ROWFILTER.fields_by_name['sink'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['pass_all_filter']) +_ROWFILTER.fields_by_name['pass_all_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['block_all_filter']) +_ROWFILTER.fields_by_name['block_all_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['row_key_regex_filter']) +_ROWFILTER.fields_by_name['row_key_regex_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['row_sample_filter']) +_ROWFILTER.fields_by_name['row_sample_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['family_name_regex_filter']) +_ROWFILTER.fields_by_name['family_name_regex_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['column_qualifier_regex_filter']) +_ROWFILTER.fields_by_name['column_qualifier_regex_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['column_range_filter']) 
+_ROWFILTER.fields_by_name['column_range_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['timestamp_range_filter']) +_ROWFILTER.fields_by_name['timestamp_range_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['value_regex_filter']) +_ROWFILTER.fields_by_name['value_regex_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['value_range_filter']) +_ROWFILTER.fields_by_name['value_range_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['cells_per_row_offset_filter']) +_ROWFILTER.fields_by_name['cells_per_row_offset_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['cells_per_row_limit_filter']) +_ROWFILTER.fields_by_name['cells_per_row_limit_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['cells_per_column_limit_filter']) +_ROWFILTER.fields_by_name['cells_per_column_limit_filter'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['strip_value_transformer']) +_ROWFILTER.fields_by_name['strip_value_transformer'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_ROWFILTER.oneofs_by_name['filter'].fields.append( + _ROWFILTER.fields_by_name['apply_label_transformer']) +_ROWFILTER.fields_by_name['apply_label_transformer'].containing_oneof = _ROWFILTER.oneofs_by_name['filter'] +_MUTATION_SETCELL.containing_type = _MUTATION +_MUTATION_DELETEFROMCOLUMN.fields_by_name['time_range'].message_type = _TIMESTAMPRANGE 
+_MUTATION_DELETEFROMCOLUMN.containing_type = _MUTATION +_MUTATION_DELETEFROMFAMILY.containing_type = _MUTATION +_MUTATION_DELETEFROMROW.containing_type = _MUTATION +_MUTATION.fields_by_name['set_cell'].message_type = _MUTATION_SETCELL +_MUTATION.fields_by_name['delete_from_column'].message_type = _MUTATION_DELETEFROMCOLUMN +_MUTATION.fields_by_name['delete_from_family'].message_type = _MUTATION_DELETEFROMFAMILY +_MUTATION.fields_by_name['delete_from_row'].message_type = _MUTATION_DELETEFROMROW +_MUTATION.oneofs_by_name['mutation'].fields.append( + _MUTATION.fields_by_name['set_cell']) +_MUTATION.fields_by_name['set_cell'].containing_oneof = _MUTATION.oneofs_by_name['mutation'] +_MUTATION.oneofs_by_name['mutation'].fields.append( + _MUTATION.fields_by_name['delete_from_column']) +_MUTATION.fields_by_name['delete_from_column'].containing_oneof = _MUTATION.oneofs_by_name['mutation'] +_MUTATION.oneofs_by_name['mutation'].fields.append( + _MUTATION.fields_by_name['delete_from_family']) +_MUTATION.fields_by_name['delete_from_family'].containing_oneof = _MUTATION.oneofs_by_name['mutation'] +_MUTATION.oneofs_by_name['mutation'].fields.append( + _MUTATION.fields_by_name['delete_from_row']) +_MUTATION.fields_by_name['delete_from_row'].containing_oneof = _MUTATION.oneofs_by_name['mutation'] +_READMODIFYWRITERULE.oneofs_by_name['rule'].fields.append( + _READMODIFYWRITERULE.fields_by_name['append_value']) +_READMODIFYWRITERULE.fields_by_name['append_value'].containing_oneof = _READMODIFYWRITERULE.oneofs_by_name['rule'] +_READMODIFYWRITERULE.oneofs_by_name['rule'].fields.append( + _READMODIFYWRITERULE.fields_by_name['increment_amount']) +_READMODIFYWRITERULE.fields_by_name['increment_amount'].containing_oneof = _READMODIFYWRITERULE.oneofs_by_name['rule'] +DESCRIPTOR.message_types_by_name['Row'] = _ROW +DESCRIPTOR.message_types_by_name['Family'] = _FAMILY +DESCRIPTOR.message_types_by_name['Column'] = _COLUMN +DESCRIPTOR.message_types_by_name['Cell'] = _CELL 
+DESCRIPTOR.message_types_by_name['RowRange'] = _ROWRANGE +DESCRIPTOR.message_types_by_name['RowSet'] = _ROWSET +DESCRIPTOR.message_types_by_name['ColumnRange'] = _COLUMNRANGE +DESCRIPTOR.message_types_by_name['TimestampRange'] = _TIMESTAMPRANGE +DESCRIPTOR.message_types_by_name['ValueRange'] = _VALUERANGE +DESCRIPTOR.message_types_by_name['RowFilter'] = _ROWFILTER +DESCRIPTOR.message_types_by_name['Mutation'] = _MUTATION +DESCRIPTOR.message_types_by_name['ReadModifyWriteRule'] = _READMODIFYWRITERULE + +Row = _reflection.GeneratedProtocolMessageType('Row', (_message.Message,), dict( + DESCRIPTOR = _ROW, + __module__ = 'google.bigtable.v2.data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.Row) + )) +_sym_db.RegisterMessage(Row) + +Family = _reflection.GeneratedProtocolMessageType('Family', (_message.Message,), dict( + DESCRIPTOR = _FAMILY, + __module__ = 'google.bigtable.v2.data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.Family) + )) +_sym_db.RegisterMessage(Family) + +Column = _reflection.GeneratedProtocolMessageType('Column', (_message.Message,), dict( + DESCRIPTOR = _COLUMN, + __module__ = 'google.bigtable.v2.data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.Column) + )) +_sym_db.RegisterMessage(Column) + +Cell = _reflection.GeneratedProtocolMessageType('Cell', (_message.Message,), dict( + DESCRIPTOR = _CELL, + __module__ = 'google.bigtable.v2.data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.Cell) + )) +_sym_db.RegisterMessage(Cell) + +RowRange = _reflection.GeneratedProtocolMessageType('RowRange', (_message.Message,), dict( + DESCRIPTOR = _ROWRANGE, + __module__ = 'google.bigtable.v2.data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.RowRange) + )) +_sym_db.RegisterMessage(RowRange) + +RowSet = _reflection.GeneratedProtocolMessageType('RowSet', (_message.Message,), dict( + DESCRIPTOR = _ROWSET, + __module__ = 'google.bigtable.v2.data_pb2' + # 
@@protoc_insertion_point(class_scope:google.bigtable.v2.RowSet) + )) +_sym_db.RegisterMessage(RowSet) + +ColumnRange = _reflection.GeneratedProtocolMessageType('ColumnRange', (_message.Message,), dict( + DESCRIPTOR = _COLUMNRANGE, + __module__ = 'google.bigtable.v2.data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.ColumnRange) + )) +_sym_db.RegisterMessage(ColumnRange) + +TimestampRange = _reflection.GeneratedProtocolMessageType('TimestampRange', (_message.Message,), dict( + DESCRIPTOR = _TIMESTAMPRANGE, + __module__ = 'google.bigtable.v2.data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.TimestampRange) + )) +_sym_db.RegisterMessage(TimestampRange) + +ValueRange = _reflection.GeneratedProtocolMessageType('ValueRange', (_message.Message,), dict( + DESCRIPTOR = _VALUERANGE, + __module__ = 'google.bigtable.v2.data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.ValueRange) + )) +_sym_db.RegisterMessage(ValueRange) + +RowFilter = _reflection.GeneratedProtocolMessageType('RowFilter', (_message.Message,), dict( + + Chain = _reflection.GeneratedProtocolMessageType('Chain', (_message.Message,), dict( + DESCRIPTOR = _ROWFILTER_CHAIN, + __module__ = 'google.bigtable.v2.data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.RowFilter.Chain) + )) + , + + Interleave = _reflection.GeneratedProtocolMessageType('Interleave', (_message.Message,), dict( + DESCRIPTOR = _ROWFILTER_INTERLEAVE, + __module__ = 'google.bigtable.v2.data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.RowFilter.Interleave) + )) + , + + Condition = _reflection.GeneratedProtocolMessageType('Condition', (_message.Message,), dict( + DESCRIPTOR = _ROWFILTER_CONDITION, + __module__ = 'google.bigtable.v2.data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.RowFilter.Condition) + )) + , + DESCRIPTOR = _ROWFILTER, + __module__ = 'google.bigtable.v2.data_pb2' + # 
@@protoc_insertion_point(class_scope:google.bigtable.v2.RowFilter) + )) +_sym_db.RegisterMessage(RowFilter) +_sym_db.RegisterMessage(RowFilter.Chain) +_sym_db.RegisterMessage(RowFilter.Interleave) +_sym_db.RegisterMessage(RowFilter.Condition) + +Mutation = _reflection.GeneratedProtocolMessageType('Mutation', (_message.Message,), dict( + + SetCell = _reflection.GeneratedProtocolMessageType('SetCell', (_message.Message,), dict( + DESCRIPTOR = _MUTATION_SETCELL, + __module__ = 'google.bigtable.v2.data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.Mutation.SetCell) + )) + , + + DeleteFromColumn = _reflection.GeneratedProtocolMessageType('DeleteFromColumn', (_message.Message,), dict( + DESCRIPTOR = _MUTATION_DELETEFROMCOLUMN, + __module__ = 'google.bigtable.v2.data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.Mutation.DeleteFromColumn) + )) + , + + DeleteFromFamily = _reflection.GeneratedProtocolMessageType('DeleteFromFamily', (_message.Message,), dict( + DESCRIPTOR = _MUTATION_DELETEFROMFAMILY, + __module__ = 'google.bigtable.v2.data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.Mutation.DeleteFromFamily) + )) + , + + DeleteFromRow = _reflection.GeneratedProtocolMessageType('DeleteFromRow', (_message.Message,), dict( + DESCRIPTOR = _MUTATION_DELETEFROMROW, + __module__ = 'google.bigtable.v2.data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.Mutation.DeleteFromRow) + )) + , + DESCRIPTOR = _MUTATION, + __module__ = 'google.bigtable.v2.data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.Mutation) + )) +_sym_db.RegisterMessage(Mutation) +_sym_db.RegisterMessage(Mutation.SetCell) +_sym_db.RegisterMessage(Mutation.DeleteFromColumn) +_sym_db.RegisterMessage(Mutation.DeleteFromFamily) +_sym_db.RegisterMessage(Mutation.DeleteFromRow) + +ReadModifyWriteRule = _reflection.GeneratedProtocolMessageType('ReadModifyWriteRule', (_message.Message,), dict( + DESCRIPTOR = _READMODIFYWRITERULE, 
+ __module__ = 'google.bigtable.v2.data_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.v2.ReadModifyWriteRule) + )) +_sym_db.RegisterMessage(ReadModifyWriteRule) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\026com.google.bigtable.v2B\tDataProtoP\001')) +# @@protoc_insertion_point(module_scope) diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated_v2/instance_pb2.py b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/instance_pb2.py new file mode 100644 index 0000000..2161bf3 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/instance_pb2.py @@ -0,0 +1,222 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/bigtable/admin/v2/instance.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from gcloud.bigtable._generated_v2 import common_pb2 as google_dot_bigtable_dot_admin_dot_v2_dot_common__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/bigtable/admin/v2/instance.proto', + package='google.bigtable.admin.v2', + syntax='proto3', + serialized_pb=_b('\n\'google/bigtable/admin/v2/instance.proto\x12\x18google.bigtable.admin.v2\x1a\x1cgoogle/api/annotations.proto\x1a%google/bigtable/admin/v2/common.proto\"\x9e\x01\n\x08Instance\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x37\n\x05state\x18\x03 
\x01(\x0e\x32(.google.bigtable.admin.v2.Instance.State\"5\n\x05State\x12\x13\n\x0fSTATE_NOT_KNOWN\x10\x00\x12\t\n\x05READY\x10\x01\x12\x0c\n\x08\x43REATING\x10\x02\"\x8e\x02\n\x07\x43luster\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x10\n\x08location\x18\x02 \x01(\t\x12\x36\n\x05state\x18\x03 \x01(\x0e\x32\'.google.bigtable.admin.v2.Cluster.State\x12\x13\n\x0bserve_nodes\x18\x04 \x01(\x05\x12\x43\n\x14\x64\x65\x66\x61ult_storage_type\x18\x05 \x01(\x0e\x32%.google.bigtable.admin.v2.StorageType\"Q\n\x05State\x12\x13\n\x0fSTATE_NOT_KNOWN\x10\x00\x12\t\n\x05READY\x10\x01\x12\x0c\n\x08\x43REATING\x10\x02\x12\x0c\n\x08RESIZING\x10\x03\x12\x0c\n\x08\x44ISABLED\x10\x04\x42/\n\x1c\x63om.google.bigtable.admin.v2B\rInstanceProtoP\x01\x62\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_bigtable_dot_admin_dot_v2_dot_common__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_INSTANCE_STATE = _descriptor.EnumDescriptor( + name='State', + full_name='google.bigtable.admin.v2.Instance.State', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='STATE_NOT_KNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='READY', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CREATING', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=244, + serialized_end=297, +) +_sym_db.RegisterEnumDescriptor(_INSTANCE_STATE) + +_CLUSTER_STATE = _descriptor.EnumDescriptor( + name='State', + full_name='google.bigtable.admin.v2.Cluster.State', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='STATE_NOT_KNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='READY', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CREATING', index=2, number=2, + 
options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RESIZING', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DISABLED', index=4, number=4, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=489, + serialized_end=570, +) +_sym_db.RegisterEnumDescriptor(_CLUSTER_STATE) + + +_INSTANCE = _descriptor.Descriptor( + name='Instance', + full_name='google.bigtable.admin.v2.Instance', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.admin.v2.Instance.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='display_name', full_name='google.bigtable.admin.v2.Instance.display_name', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='state', full_name='google.bigtable.admin.v2.Instance.state', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _INSTANCE_STATE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=139, + serialized_end=297, +) + + +_CLUSTER = _descriptor.Descriptor( + name='Cluster', + full_name='google.bigtable.admin.v2.Cluster', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + 
name='name', full_name='google.bigtable.admin.v2.Cluster.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='location', full_name='google.bigtable.admin.v2.Cluster.location', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='state', full_name='google.bigtable.admin.v2.Cluster.state', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='serve_nodes', full_name='google.bigtable.admin.v2.Cluster.serve_nodes', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_storage_type', full_name='google.bigtable.admin.v2.Cluster.default_storage_type', index=4, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _CLUSTER_STATE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=300, + serialized_end=570, +) + +_INSTANCE.fields_by_name['state'].enum_type = _INSTANCE_STATE +_INSTANCE_STATE.containing_type = _INSTANCE 
+_CLUSTER.fields_by_name['state'].enum_type = _CLUSTER_STATE +_CLUSTER.fields_by_name['default_storage_type'].enum_type = google_dot_bigtable_dot_admin_dot_v2_dot_common__pb2._STORAGETYPE +_CLUSTER_STATE.containing_type = _CLUSTER +DESCRIPTOR.message_types_by_name['Instance'] = _INSTANCE +DESCRIPTOR.message_types_by_name['Cluster'] = _CLUSTER + +Instance = _reflection.GeneratedProtocolMessageType('Instance', (_message.Message,), dict( + DESCRIPTOR = _INSTANCE, + __module__ = 'google.bigtable.admin.v2.instance_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.Instance) + )) +_sym_db.RegisterMessage(Instance) + +Cluster = _reflection.GeneratedProtocolMessageType('Cluster', (_message.Message,), dict( + DESCRIPTOR = _CLUSTER, + __module__ = 'google.bigtable.admin.v2.instance_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.Cluster) + )) +_sym_db.RegisterMessage(Cluster) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.bigtable.admin.v2B\rInstanceProtoP\001')) +# @@protoc_insertion_point(module_scope) diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated_v2/operations_grpc_pb2.py b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/operations_grpc_pb2.py new file mode 100644 index 0000000..5723e1d --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/operations_grpc_pb2.py @@ -0,0 +1,264 @@ +from google.longrunning.operations_pb2 import ( + CancelOperationRequest, + DeleteOperationRequest, + GetOperationRequest, + ListOperationsRequest, + ListOperationsResponse, + Operation, + google_dot_protobuf_dot_empty__pb2, +) +from grpc.beta import implementations as beta_implementations +from grpc.beta import interfaces as beta_interfaces +from grpc.framework.common import cardinality +from grpc.framework.interfaces.face import utilities as face_utilities + + +class OperationsStub(object): + """Manages long-running operations 
with an API service. + + When an API method normally takes long time to complete, it can be designed + to return [Operation][google.longrunning.Operation] to the client, and the client can use this + interface to receive the real response asynchronously by polling the + operation resource, or using `google.watcher.v1.Watcher` interface to watch + the response, or pass the operation resource to another API (such as Google + Cloud Pub/Sub API) to receive the response. Any API service that returns + long-running operations should implement the `Operations` interface so + developers can have a consistent client experience. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.GetOperation = channel.unary_unary( + '/google.longrunning.Operations/GetOperation', + request_serializer=GetOperationRequest.SerializeToString, + response_deserializer=Operation.FromString, + ) + self.ListOperations = channel.unary_unary( + '/google.longrunning.Operations/ListOperations', + request_serializer=ListOperationsRequest.SerializeToString, + response_deserializer=ListOperationsResponse.FromString, + ) + self.CancelOperation = channel.unary_unary( + '/google.longrunning.Operations/CancelOperation', + request_serializer=CancelOperationRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.DeleteOperation = channel.unary_unary( + '/google.longrunning.Operations/DeleteOperation', + request_serializer=DeleteOperationRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + +class OperationsServicer(object): + """Manages long-running operations with an API service. 
+ + When an API method normally takes long time to complete, it can be designed + to return [Operation][google.longrunning.Operation] to the client, and the client can use this + interface to receive the real response asynchronously by polling the + operation resource, or using `google.watcher.v1.Watcher` interface to watch + the response, or pass the operation resource to another API (such as Google + Cloud Pub/Sub API) to receive the response. Any API service that returns + long-running operations should implement the `Operations` interface so + developers can have a consistent client experience. + """ + + def GetOperation(self, request, context): + """Gets the latest state of a long-running operation. Clients may use this + method to poll the operation result at intervals as recommended by the API + service. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListOperations(self, request, context): + """Lists operations that match the specified filter in the request. If the + server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CancelOperation(self, request, context): + """Starts asynchronous cancellation on a long-running operation. The server + makes a best effort to cancel the operation, but success is not + guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. Clients may use + [Operations.GetOperation] or other methods to check whether the + cancellation succeeded or the operation completed despite cancellation. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteOperation(self, request, context): + """Deletes a long-running operation. It indicates the client is no longer + interested in the operation result. It does not cancel the operation. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_OperationsServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetOperation': grpc.unary_unary_rpc_method_handler( + servicer.GetOperation, + request_deserializer=GetOperationRequest.FromString, + response_serializer=Operation.SerializeToString, + ), + 'ListOperations': grpc.unary_unary_rpc_method_handler( + servicer.ListOperations, + request_deserializer=ListOperationsRequest.FromString, + response_serializer=ListOperationsResponse.SerializeToString, + ), + 'CancelOperation': grpc.unary_unary_rpc_method_handler( + servicer.CancelOperation, + request_deserializer=CancelOperationRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'DeleteOperation': grpc.unary_unary_rpc_method_handler( + servicer.DeleteOperation, + request_deserializer=DeleteOperationRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.longrunning.Operations', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + +class BetaOperationsServicer(object): + """Manages long-running operations with an API service. 
+ + When an API method normally takes long time to complete, it can be designed + to return [Operation][google.longrunning.Operation] to the client, and the client can use this + interface to receive the real response asynchronously by polling the + operation resource, or using `google.watcher.v1.Watcher` interface to watch + the response, or pass the operation resource to another API (such as Google + Cloud Pub/Sub API) to receive the response. Any API service that returns + long-running operations should implement the `Operations` interface so + developers can have a consistent client experience. + """ + def GetOperation(self, request, context): + """Gets the latest state of a long-running operation. Clients may use this + method to poll the operation result at intervals as recommended by the API + service. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListOperations(self, request, context): + """Lists operations that match the specified filter in the request. If the + server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def CancelOperation(self, request, context): + """Starts asynchronous cancellation on a long-running operation. The server + makes a best effort to cancel the operation, but success is not + guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. Clients may use + [Operations.GetOperation] or other methods to check whether the + cancellation succeeded or the operation completed despite cancellation. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteOperation(self, request, context): + """Deletes a long-running operation. It indicates the client is no longer + interested in the operation result. It does not cancel the operation. 
+ """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + +class BetaOperationsStub(object): + """Manages long-running operations with an API service. + + When an API method normally takes long time to complete, it can be designed + to return [Operation][google.longrunning.Operation] to the client, and the client can use this + interface to receive the real response asynchronously by polling the + operation resource, or using `google.watcher.v1.Watcher` interface to watch + the response, or pass the operation resource to another API (such as Google + Cloud Pub/Sub API) to receive the response. Any API service that returns + long-running operations should implement the `Operations` interface so + developers can have a consistent client experience. + """ + def GetOperation(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets the latest state of a long-running operation. Clients may use this + method to poll the operation result at intervals as recommended by the API + service. + """ + raise NotImplementedError() + GetOperation.future = None + def ListOperations(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists operations that match the specified filter in the request. If the + server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + """ + raise NotImplementedError() + ListOperations.future = None + def CancelOperation(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Starts asynchronous cancellation on a long-running operation. The server + makes a best effort to cancel the operation, but success is not + guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. Clients may use + [Operations.GetOperation] or other methods to check whether the + cancellation succeeded or the operation completed despite cancellation. 
+ """ + raise NotImplementedError() + CancelOperation.future = None + def DeleteOperation(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Deletes a long-running operation. It indicates the client is no longer + interested in the operation result. It does not cancel the operation. + """ + raise NotImplementedError() + DeleteOperation.future = None + + +def beta_create_Operations_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + request_deserializers = { + ('google.longrunning.Operations', 'CancelOperation'): CancelOperationRequest.FromString, + ('google.longrunning.Operations', 'DeleteOperation'): DeleteOperationRequest.FromString, + ('google.longrunning.Operations', 'GetOperation'): GetOperationRequest.FromString, + ('google.longrunning.Operations', 'ListOperations'): ListOperationsRequest.FromString, + } + response_serializers = { + ('google.longrunning.Operations', 'CancelOperation'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.longrunning.Operations', 'DeleteOperation'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.longrunning.Operations', 'GetOperation'): Operation.SerializeToString, + ('google.longrunning.Operations', 'ListOperations'): ListOperationsResponse.SerializeToString, + } + method_implementations = { + ('google.longrunning.Operations', 'CancelOperation'): face_utilities.unary_unary_inline(servicer.CancelOperation), + ('google.longrunning.Operations', 'DeleteOperation'): face_utilities.unary_unary_inline(servicer.DeleteOperation), + ('google.longrunning.Operations', 'GetOperation'): face_utilities.unary_unary_inline(servicer.GetOperation), + ('google.longrunning.Operations', 'ListOperations'): face_utilities.unary_unary_inline(servicer.ListOperations), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, 
thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + +def beta_create_Operations_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + request_serializers = { + ('google.longrunning.Operations', 'CancelOperation'): CancelOperationRequest.SerializeToString, + ('google.longrunning.Operations', 'DeleteOperation'): DeleteOperationRequest.SerializeToString, + ('google.longrunning.Operations', 'GetOperation'): GetOperationRequest.SerializeToString, + ('google.longrunning.Operations', 'ListOperations'): ListOperationsRequest.SerializeToString, + } + response_deserializers = { + ('google.longrunning.Operations', 'CancelOperation'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.longrunning.Operations', 'DeleteOperation'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.longrunning.Operations', 'GetOperation'): Operation.FromString, + ('google.longrunning.Operations', 'ListOperations'): ListOperationsResponse.FromString, + } + cardinalities = { + 'CancelOperation': cardinality.Cardinality.UNARY_UNARY, + 'DeleteOperation': cardinality.Cardinality.UNARY_UNARY, + 'GetOperation': cardinality.Cardinality.UNARY_UNARY, + 'ListOperations': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.longrunning.Operations', cardinalities, options=stub_options) diff --git a/env/Lib/site-packages/gcloud/bigtable/_generated_v2/table_pb2.py b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/table_pb2.py new file mode 100644 index 0000000..8400765 --- /dev/null +++ 
b/env/Lib/site-packages/gcloud/bigtable/_generated_v2/table_pb2.py @@ -0,0 +1,393 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/bigtable/admin/v2/table.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/bigtable/admin/v2/table.proto', + package='google.bigtable.admin.v2', + syntax='proto3', + serialized_pb=_b('\n$google/bigtable/admin/v2/table.proto\x12\x18google.bigtable.admin.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\"\xa0\x03\n\x05Table\x12\x0c\n\x04name\x18\x01 \x01(\t\x12L\n\x0f\x63olumn_families\x18\x03 \x03(\x0b\x32\x33.google.bigtable.admin.v2.Table.ColumnFamiliesEntry\x12I\n\x0bgranularity\x18\x04 \x01(\x0e\x32\x34.google.bigtable.admin.v2.Table.TimestampGranularity\x1a]\n\x13\x43olumnFamiliesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.bigtable.admin.v2.ColumnFamily:\x02\x38\x01\"I\n\x14TimestampGranularity\x12%\n!TIMESTAMP_GRANULARITY_UNSPECIFIED\x10\x00\x12\n\n\x06MILLIS\x10\x01\"F\n\x04View\x12\x14\n\x10VIEW_UNSPECIFIED\x10\x00\x12\r\n\tNAME_ONLY\x10\x01\x12\x0f\n\x0bSCHEMA_VIEW\x10\x02\x12\x08\n\x04\x46ULL\x10\x04\"A\n\x0c\x43olumnFamily\x12\x31\n\x07gc_rule\x18\x01 \x01(\x0b\x32 .google.bigtable.admin.v2.GcRule\"\xd5\x02\n\x06GcRule\x12\x1a\n\x10max_num_versions\x18\x01 \x01(\x05H\x00\x12,\n\x07max_age\x18\x02 
\x01(\x0b\x32\x19.google.protobuf.DurationH\x00\x12\x45\n\x0cintersection\x18\x03 \x01(\x0b\x32-.google.bigtable.admin.v2.GcRule.IntersectionH\x00\x12\x37\n\x05union\x18\x04 \x01(\x0b\x32&.google.bigtable.admin.v2.GcRule.UnionH\x00\x1a?\n\x0cIntersection\x12/\n\x05rules\x18\x01 \x03(\x0b\x32 .google.bigtable.admin.v2.GcRule\x1a\x38\n\x05Union\x12/\n\x05rules\x18\x01 \x03(\x0b\x32 .google.bigtable.admin.v2.GcRuleB\x06\n\x04ruleB,\n\x1c\x63om.google.bigtable.admin.v2B\nTableProtoP\x01\x62\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_TABLE_TIMESTAMPGRANULARITY = _descriptor.EnumDescriptor( + name='TimestampGranularity', + full_name='google.bigtable.admin.v2.Table.TimestampGranularity', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TIMESTAMP_GRANULARITY_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MILLIS', index=1, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=400, + serialized_end=473, +) +_sym_db.RegisterEnumDescriptor(_TABLE_TIMESTAMPGRANULARITY) + +_TABLE_VIEW = _descriptor.EnumDescriptor( + name='View', + full_name='google.bigtable.admin.v2.Table.View', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='VIEW_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NAME_ONLY', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SCHEMA_VIEW', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FULL', index=3, number=4, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=475, + serialized_end=545, +) +_sym_db.RegisterEnumDescriptor(_TABLE_VIEW) + + 
+_TABLE_COLUMNFAMILIESENTRY = _descriptor.Descriptor( + name='ColumnFamiliesEntry', + full_name='google.bigtable.admin.v2.Table.ColumnFamiliesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.bigtable.admin.v2.Table.ColumnFamiliesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.bigtable.admin.v2.Table.ColumnFamiliesEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=305, + serialized_end=398, +) + +_TABLE = _descriptor.Descriptor( + name='Table', + full_name='google.bigtable.admin.v2.Table', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.bigtable.admin.v2.Table.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='column_families', full_name='google.bigtable.admin.v2.Table.column_families', index=1, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
options=None), + _descriptor.FieldDescriptor( + name='granularity', full_name='google.bigtable.admin.v2.Table.granularity', index=2, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TABLE_COLUMNFAMILIESENTRY, ], + enum_types=[ + _TABLE_TIMESTAMPGRANULARITY, + _TABLE_VIEW, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=129, + serialized_end=545, +) + + +_COLUMNFAMILY = _descriptor.Descriptor( + name='ColumnFamily', + full_name='google.bigtable.admin.v2.ColumnFamily', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='gc_rule', full_name='google.bigtable.admin.v2.ColumnFamily.gc_rule', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=547, + serialized_end=612, +) + + +_GCRULE_INTERSECTION = _descriptor.Descriptor( + name='Intersection', + full_name='google.bigtable.admin.v2.GcRule.Intersection', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='rules', full_name='google.bigtable.admin.v2.GcRule.Intersection.rules', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + 
syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=827, + serialized_end=890, +) + +_GCRULE_UNION = _descriptor.Descriptor( + name='Union', + full_name='google.bigtable.admin.v2.GcRule.Union', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='rules', full_name='google.bigtable.admin.v2.GcRule.Union.rules', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=892, + serialized_end=948, +) + +_GCRULE = _descriptor.Descriptor( + name='GcRule', + full_name='google.bigtable.admin.v2.GcRule', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='max_num_versions', full_name='google.bigtable.admin.v2.GcRule.max_num_versions', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='max_age', full_name='google.bigtable.admin.v2.GcRule.max_age', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='intersection', full_name='google.bigtable.admin.v2.GcRule.intersection', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='union', full_name='google.bigtable.admin.v2.GcRule.union', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_GCRULE_INTERSECTION, _GCRULE_UNION, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='rule', full_name='google.bigtable.admin.v2.GcRule.rule', + index=0, containing_type=None, fields=[]), + ], + serialized_start=615, + serialized_end=956, +) + +_TABLE_COLUMNFAMILIESENTRY.fields_by_name['value'].message_type = _COLUMNFAMILY +_TABLE_COLUMNFAMILIESENTRY.containing_type = _TABLE +_TABLE.fields_by_name['column_families'].message_type = _TABLE_COLUMNFAMILIESENTRY +_TABLE.fields_by_name['granularity'].enum_type = _TABLE_TIMESTAMPGRANULARITY +_TABLE_TIMESTAMPGRANULARITY.containing_type = _TABLE +_TABLE_VIEW.containing_type = _TABLE +_COLUMNFAMILY.fields_by_name['gc_rule'].message_type = _GCRULE +_GCRULE_INTERSECTION.fields_by_name['rules'].message_type = _GCRULE +_GCRULE_INTERSECTION.containing_type = _GCRULE +_GCRULE_UNION.fields_by_name['rules'].message_type = _GCRULE +_GCRULE_UNION.containing_type = _GCRULE +_GCRULE.fields_by_name['max_age'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_GCRULE.fields_by_name['intersection'].message_type = _GCRULE_INTERSECTION +_GCRULE.fields_by_name['union'].message_type = _GCRULE_UNION +_GCRULE.oneofs_by_name['rule'].fields.append( + _GCRULE.fields_by_name['max_num_versions']) +_GCRULE.fields_by_name['max_num_versions'].containing_oneof = _GCRULE.oneofs_by_name['rule'] +_GCRULE.oneofs_by_name['rule'].fields.append( + _GCRULE.fields_by_name['max_age']) +_GCRULE.fields_by_name['max_age'].containing_oneof = _GCRULE.oneofs_by_name['rule'] 
+_GCRULE.oneofs_by_name['rule'].fields.append( + _GCRULE.fields_by_name['intersection']) +_GCRULE.fields_by_name['intersection'].containing_oneof = _GCRULE.oneofs_by_name['rule'] +_GCRULE.oneofs_by_name['rule'].fields.append( + _GCRULE.fields_by_name['union']) +_GCRULE.fields_by_name['union'].containing_oneof = _GCRULE.oneofs_by_name['rule'] +DESCRIPTOR.message_types_by_name['Table'] = _TABLE +DESCRIPTOR.message_types_by_name['ColumnFamily'] = _COLUMNFAMILY +DESCRIPTOR.message_types_by_name['GcRule'] = _GCRULE + +Table = _reflection.GeneratedProtocolMessageType('Table', (_message.Message,), dict( + + ColumnFamiliesEntry = _reflection.GeneratedProtocolMessageType('ColumnFamiliesEntry', (_message.Message,), dict( + DESCRIPTOR = _TABLE_COLUMNFAMILIESENTRY, + __module__ = 'google.bigtable.admin.v2.table_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.Table.ColumnFamiliesEntry) + )) + , + DESCRIPTOR = _TABLE, + __module__ = 'google.bigtable.admin.v2.table_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.Table) + )) +_sym_db.RegisterMessage(Table) +_sym_db.RegisterMessage(Table.ColumnFamiliesEntry) + +ColumnFamily = _reflection.GeneratedProtocolMessageType('ColumnFamily', (_message.Message,), dict( + DESCRIPTOR = _COLUMNFAMILY, + __module__ = 'google.bigtable.admin.v2.table_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.ColumnFamily) + )) +_sym_db.RegisterMessage(ColumnFamily) + +GcRule = _reflection.GeneratedProtocolMessageType('GcRule', (_message.Message,), dict( + + Intersection = _reflection.GeneratedProtocolMessageType('Intersection', (_message.Message,), dict( + DESCRIPTOR = _GCRULE_INTERSECTION, + __module__ = 'google.bigtable.admin.v2.table_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.GcRule.Intersection) + )) + , + + Union = _reflection.GeneratedProtocolMessageType('Union', (_message.Message,), dict( + DESCRIPTOR = _GCRULE_UNION, + __module__ = 
'google.bigtable.admin.v2.table_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.GcRule.Union) + )) + , + DESCRIPTOR = _GCRULE, + __module__ = 'google.bigtable.admin.v2.table_pb2' + # @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.GcRule) + )) +_sym_db.RegisterMessage(GcRule) +_sym_db.RegisterMessage(GcRule.Intersection) +_sym_db.RegisterMessage(GcRule.Union) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.bigtable.admin.v2B\nTableProtoP\001')) +_TABLE_COLUMNFAMILIESENTRY.has_options = True +_TABLE_COLUMNFAMILIESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +# @@protoc_insertion_point(module_scope) diff --git a/env/Lib/site-packages/gcloud/bigtable/_testing.py b/env/Lib/site-packages/gcloud/bigtable/_testing.py new file mode 100644 index 0000000..d5f13c1 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/_testing.py @@ -0,0 +1,57 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Mocks used to emulate gRPC generated objects.""" + + +class _FakeStub(object): + """Acts as a gPRC stub.""" + + def __init__(self, *results): + self.results = results + self.method_calls = [] + self._entered = 0 + self._exited = [] + + def __enter__(self): + self._entered += 1 + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self._exited.append((exc_type, exc_val, exc_tb)) + return True + + def __getattr__(self, name): + # We need not worry about attributes set in constructor + # since __getattribute__ will handle them. + return _MethodMock(name, self) + + +class _MethodMock(object): + """Mock for API method attached to a gRPC stub. + + In the beta implementation, these are of type. + :class:`grpc.framework.crust.implementations._UnaryUnaryMultiCallable` + """ + + def __init__(self, name, factory): + self._name = name + self._factory = factory + + def __call__(self, *args, **kwargs): + """Sync method meant to mock a gRPC stub request.""" + self._factory.method_calls.append((self._name, args, kwargs)) + curr_result, self._factory.results = (self._factory.results[0], + self._factory.results[1:]) + return curr_result diff --git a/env/Lib/site-packages/gcloud/bigtable/client.py b/env/Lib/site-packages/gcloud/bigtable/client.py new file mode 100644 index 0000000..cf25d05 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/client.py @@ -0,0 +1,480 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Parent client for calling the Google Cloud Bigtable API. + +This is the base from which all interactions with the API occur. + +In the hierarchy of API concepts + +* a :class:`Client` owns a :class:`.Instance` +* a :class:`.Instance` owns a :class:`Table ` +* a :class:`Table ` owns a + :class:`ColumnFamily <.column_family.ColumnFamily>` +* a :class:`Table ` owns a :class:`Row <.row.Row>` + (and all the cells in the row) +""" + + +from pkg_resources import get_distribution + +from grpc.beta import implementations + +from gcloud.bigtable._generated_v2 import ( + bigtable_instance_admin_pb2 as instance_admin_v2_pb2) +# V1 table admin service +from gcloud.bigtable._generated_v2 import ( + bigtable_table_admin_pb2 as table_admin_v2_pb2) +# V1 data service +from gcloud.bigtable._generated_v2 import ( + bigtable_pb2 as data_v2_pb2) + +from gcloud.bigtable._generated_v2 import ( + operations_grpc_pb2 as operations_grpc_v2_pb2) + +from gcloud.bigtable.cluster import DEFAULT_SERVE_NODES +from gcloud.bigtable.instance import Instance +from gcloud.bigtable.instance import _EXISTING_INSTANCE_LOCATION_ID +from gcloud.client import _ClientFactoryMixin +from gcloud.client import _ClientProjectMixin +from gcloud.credentials import get_credentials + + +TABLE_STUB_FACTORY_V2 = ( + table_admin_v2_pb2.beta_create_BigtableTableAdmin_stub) +TABLE_ADMIN_HOST_V2 = 'bigtableadmin.googleapis.com' +"""Table Admin API request host.""" +TABLE_ADMIN_PORT_V2 = 443 +"""Table Admin API request port.""" + +INSTANCE_STUB_FACTORY_V2 = ( + instance_admin_v2_pb2.beta_create_BigtableInstanceAdmin_stub) +INSTANCE_ADMIN_HOST_V2 = 'bigtableadmin.googleapis.com' +"""Cluster Admin API request host.""" +INSTANCE_ADMIN_PORT_V2 = 443 +"""Cluster Admin API request port.""" + +DATA_STUB_FACTORY_V2 = data_v2_pb2.beta_create_Bigtable_stub +DATA_API_HOST_V2 = 'bigtable.googleapis.com' +"""Data API request 
host.""" +DATA_API_PORT_V2 = 443 +"""Data API request port.""" + +OPERATIONS_STUB_FACTORY_V2 = operations_grpc_v2_pb2.beta_create_Operations_stub +OPERATIONS_API_HOST_V2 = INSTANCE_ADMIN_HOST_V2 +OPERATIONS_API_PORT_V2 = INSTANCE_ADMIN_PORT_V2 + +ADMIN_SCOPE = 'https://www.googleapis.com/auth/bigtable.admin' +"""Scope for interacting with the Cluster Admin and Table Admin APIs.""" +DATA_SCOPE = 'https://www.googleapis.com/auth/bigtable.data' +"""Scope for reading and writing table data.""" +READ_ONLY_SCOPE = 'https://www.googleapis.com/auth/bigtable.data.readonly' +"""Scope for reading table data.""" + +DEFAULT_TIMEOUT_SECONDS = 10 +"""The default timeout to use for API requests.""" + +DEFAULT_USER_AGENT = 'gcloud-python/{0}'.format( + get_distribution('gcloud').version) +"""The default user agent for API requests.""" + + +class Client(_ClientFactoryMixin, _ClientProjectMixin): + """Client for interacting with Google Cloud Bigtable API. + + .. note:: + + Since the Cloud Bigtable API requires the gRPC transport, no + ``http`` argument is accepted by this class. + + :type project: :class:`str` or :func:`unicode ` + :param project: (Optional) The ID of the project which owns the + instances, tables and data. If not provided, will + attempt to determine from the environment. + + :type credentials: + :class:`OAuth2Credentials ` or + :data:`NoneType ` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not provided, defaults to the Google + Application Default Credentials. + + :type read_only: bool + :param read_only: (Optional) Boolean indicating if the data scope should be + for reading only (or for writing as well). Defaults to + :data:`False`. + + :type admin: bool + :param admin: (Optional) Boolean indicating if the client will be used to + interact with the Instance Admin or Table Admin APIs. This + requires the :const:`ADMIN_SCOPE`. Defaults to :data:`False`. 
+ + :type user_agent: str + :param user_agent: (Optional) The user agent to be used with API request. + Defaults to :const:`DEFAULT_USER_AGENT`. + + :type timeout_seconds: int + :param timeout_seconds: Number of seconds for request time-out. If not + passed, defaults to + :const:`DEFAULT_TIMEOUT_SECONDS`. + + :raises: :class:`ValueError ` if both ``read_only`` + and ``admin`` are :data:`True` + """ + + def __init__(self, project=None, credentials=None, + read_only=False, admin=False, user_agent=DEFAULT_USER_AGENT, + timeout_seconds=DEFAULT_TIMEOUT_SECONDS): + _ClientProjectMixin.__init__(self, project=project) + if credentials is None: + credentials = get_credentials() + + if read_only and admin: + raise ValueError('A read-only client cannot also perform' + 'administrative actions.') + + scopes = [] + if read_only: + scopes.append(READ_ONLY_SCOPE) + else: + scopes.append(DATA_SCOPE) + + if admin: + scopes.append(ADMIN_SCOPE) + + self._admin = bool(admin) + try: + credentials = credentials.create_scoped(scopes) + except AttributeError: + pass + self._credentials = credentials + self.user_agent = user_agent + self.timeout_seconds = timeout_seconds + + # These will be set in start(). + self._data_stub_internal = None + self._instance_stub_internal = None + self._operations_stub_internal = None + self._table_stub_internal = None + + def copy(self): + """Make a copy of this client. + + Copies the local data stored as simple types but does not copy the + current state of any open connections with the Cloud Bigtable API. + + :rtype: :class:`.Client` + :returns: A copy of the current client. + """ + credentials = self._credentials + copied_creds = credentials.create_scoped(credentials.scopes) + return self.__class__( + self.project, + copied_creds, + READ_ONLY_SCOPE in copied_creds.scopes, + self._admin, + self.user_agent, + self.timeout_seconds, + ) + + @property + def credentials(self): + """Getter for client's credentials. 
+ + :rtype: + :class:`OAuth2Credentials ` + :returns: The credentials stored on the client. + """ + return self._credentials + + @property + def project_name(self): + """Project name to be used with Instance Admin API. + + .. note:: + + This property will not change if ``project`` does not, but the + return value is not cached. + + The project name is of the form + + ``"projects/{project}"`` + + :rtype: str + :returns: The project name to be used with the Cloud Bigtable Admin + API RPC service. + """ + return 'projects/' + self.project + + @property + def _data_stub(self): + """Getter for the gRPC stub used for the Data API. + + :rtype: :class:`grpc.beta._stub._AutoIntermediary` + :returns: A gRPC stub object. + :raises: :class:`ValueError ` if the current + client has not been :meth:`start`-ed. + """ + if self._data_stub_internal is None: + raise ValueError('Client has not been started.') + return self._data_stub_internal + + @property + def _instance_stub(self): + """Getter for the gRPC stub used for the Instance Admin API. + + :rtype: :class:`grpc.beta._stub._AutoIntermediary` + :returns: A gRPC stub object. + :raises: :class:`ValueError ` if the current + client is not an admin client or if it has not been + :meth:`start`-ed. + """ + if not self._admin: + raise ValueError('Client is not an admin client.') + if self._instance_stub_internal is None: + raise ValueError('Client has not been started.') + return self._instance_stub_internal + + @property + def _operations_stub(self): + """Getter for the gRPC stub used for the Operations API. + + :rtype: :class:`grpc.beta._stub._AutoIntermediary` + :returns: A gRPC stub object. + :raises: :class:`ValueError ` if the current + client is not an admin client or if it has not been + :meth:`start`-ed. 
+ """ + if not self._admin: + raise ValueError('Client is not an admin client.') + if self._operations_stub_internal is None: + raise ValueError('Client has not been started.') + return self._operations_stub_internal + + @property + def _table_stub(self): + """Getter for the gRPC stub used for the Table Admin API. + + :rtype: :class:`grpc.beta._stub._AutoIntermediary` + :returns: A gRPC stub object. + :raises: :class:`ValueError ` if the current + client is not an admin client or if it has not been + :meth:`start`-ed. + """ + if not self._admin: + raise ValueError('Client is not an admin client.') + if self._table_stub_internal is None: + raise ValueError('Client has not been started.') + return self._table_stub_internal + + def _make_data_stub(self): + """Creates gRPC stub to make requests to the Data API. + + :rtype: :class:`grpc.beta._stub._AutoIntermediary` + :returns: A gRPC stub object. + """ + return _make_stub(self, DATA_STUB_FACTORY_V2, + DATA_API_HOST_V2, DATA_API_PORT_V2) + + def _make_instance_stub(self): + """Creates gRPC stub to make requests to the Instance Admin API. + + :rtype: :class:`grpc.beta._stub._AutoIntermediary` + :returns: A gRPC stub object. + """ + return _make_stub(self, INSTANCE_STUB_FACTORY_V2, + INSTANCE_ADMIN_HOST_V2, INSTANCE_ADMIN_PORT_V2) + + def _make_operations_stub(self): + """Creates gRPC stub to make requests to the Operations API. + + These are for long-running operations of the Instance Admin API, + hence the host and port matching. + + :rtype: :class:`grpc.beta._stub._AutoIntermediary` + :returns: A gRPC stub object. + """ + return _make_stub(self, OPERATIONS_STUB_FACTORY_V2, + OPERATIONS_API_HOST_V2, OPERATIONS_API_PORT_V2) + + def _make_table_stub(self): + """Creates gRPC stub to make requests to the Table Admin API. + + :rtype: :class:`grpc.beta._stub._AutoIntermediary` + :returns: A gRPC stub object. 
+ """ + return _make_stub(self, TABLE_STUB_FACTORY_V2, + TABLE_ADMIN_HOST_V2, TABLE_ADMIN_PORT_V2) + + def is_started(self): + """Check if the client has been started. + + :rtype: bool + :returns: Boolean indicating if the client has been started. + """ + return self._data_stub_internal is not None + + def start(self): + """Prepare the client to make requests. + + Activates gRPC contexts for making requests to the Bigtable + Service(s). + """ + if self.is_started(): + return + + # NOTE: We __enter__ the stubs more-or-less permanently. This is + # because only after entering the context managers is the + # connection created. We don't want to immediately close + # those connections since the client will make many + # requests with it over HTTP/2. + self._data_stub_internal = self._make_data_stub() + self._data_stub_internal.__enter__() + if self._admin: + self._instance_stub_internal = self._make_instance_stub() + self._operations_stub_internal = self._make_operations_stub() + self._table_stub_internal = self._make_table_stub() + + self._instance_stub_internal.__enter__() + self._operations_stub_internal.__enter__() + self._table_stub_internal.__enter__() + + def __enter__(self): + """Starts the client as a context manager.""" + self.start() + return self + + def stop(self): + """Closes all the open gRPC clients.""" + if not self.is_started(): + return + + # When exit-ing, we pass None as the exception type, value and + # traceback to __exit__. 
+ self._data_stub_internal.__exit__(None, None, None) + if self._admin: + self._instance_stub_internal.__exit__(None, None, None) + self._operations_stub_internal.__exit__(None, None, None) + self._table_stub_internal.__exit__(None, None, None) + + self._data_stub_internal = None + self._instance_stub_internal = None + self._operations_stub_internal = None + self._table_stub_internal = None + + def __exit__(self, exc_type, exc_val, exc_t): + """Stops the client as a context manager.""" + self.stop() + + def instance(self, instance_id, location=_EXISTING_INSTANCE_LOCATION_ID, + display_name=None, serve_nodes=DEFAULT_SERVE_NODES): + """Factory to create a instance associated with this client. + + :type instance_id: str + :param instance_id: The ID of the instance. + + :type location: string + :param location: location name, in form + ``projects//locations/``; used to + set up the instance's cluster. + + :type display_name: str + :param display_name: (Optional) The display name for the instance in + the Cloud Console UI. (Must be between 4 and 30 + characters.) If this value is not set in the + constructor, will fall back to the instance ID. + + :type serve_nodes: int + :param serve_nodes: (Optional) The number of nodes in the instance's + cluster; used to set up the instance's cluster. + + :rtype: :class:`.Instance` + :returns: an instance owned by this client. + """ + return Instance(instance_id, self, location, + display_name=display_name, serve_nodes=serve_nodes) + + def list_instances(self): + """List instances owned by the project. + + :rtype: tuple + :returns: A pair of results, the first is a list of + :class:`.Instance` objects returned and the second is a + list of strings (the failed locations in the request). 
+ """ + request_pb = instance_admin_v2_pb2.ListInstancesRequest( + parent=self.project_name) + + response = self._instance_stub.ListInstances( + request_pb, self.timeout_seconds) + + instances = [Instance.from_pb(instance_pb, self) + for instance_pb in response.instances] + return instances, response.failed_locations + + +class _MetadataPlugin(object): + """Callable class to transform metadata for gRPC requests. + + :type client: :class:`.client.Client` + :param client: The client that owns the instance. + Provides authorization and user agent. + """ + + def __init__(self, client): + self._credentials = client.credentials + self._user_agent = client.user_agent + + def __call__(self, unused_context, callback): + """Adds authorization header to request metadata.""" + access_token = self._credentials.get_access_token().access_token + headers = [ + ('Authorization', 'Bearer ' + access_token), + ('User-agent', self._user_agent), + ] + callback(headers, None) + + +def _make_stub(client, stub_factory, host, port): + """Makes a stub for an RPC service. + + Uses / depends on the beta implementation of gRPC. + + :type client: :class:`.client.Client` + :param client: The client that owns the instance. + Provides authorization and user agent. + + :type stub_factory: callable + :param stub_factory: A factory which will create a gRPC stub for + a given service. + + :type host: str + :param host: The host for the service. + + :type port: int + :param port: The port for the service. + + :rtype: :class:`grpc.beta._stub._AutoIntermediary` + :returns: The stub object used to make gRPC requests to a given API. + """ + # Leaving the first argument to ssl_channel_credentials() as None + # loads root certificates from `grpc/_adapter/credentials/roots.pem`. 
+ transport_creds = implementations.ssl_channel_credentials(None, None, None) + custom_metadata_plugin = _MetadataPlugin(client) + auth_creds = implementations.metadata_call_credentials( + custom_metadata_plugin, name='google_creds') + channel_creds = implementations.composite_channel_credentials( + transport_creds, auth_creds) + channel = implementations.secure_channel(host, port, channel_creds) + return stub_factory(channel) diff --git a/env/Lib/site-packages/gcloud/bigtable/cluster.py b/env/Lib/site-packages/gcloud/bigtable/cluster.py new file mode 100644 index 0000000..7867cff --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/cluster.py @@ -0,0 +1,384 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""User friendly container for Google Cloud Bigtable Cluster.""" + + +import re + +from google.longrunning import operations_pb2 + +from gcloud.bigtable._generated_v2 import ( + instance_pb2 as data_v2_pb2) +from gcloud.bigtable._generated_v2 import ( + bigtable_instance_admin_pb2 as messages_v2_pb2) + + +_CLUSTER_NAME_RE = re.compile(r'^projects/(?P[^/]+)/' + r'instances/(?P[^/]+)/clusters/' + r'(?P[a-z][-a-z0-9]*)$') +_OPERATION_NAME_RE = re.compile(r'^operations/' + r'projects/([^/]+)/' + r'instances/([^/]+)/' + r'clusters/([a-z][-a-z0-9]*)/' + r'operations/(?P\d+)$') +_TYPE_URL_MAP = { +} + +DEFAULT_SERVE_NODES = 3 +"""Default number of nodes to use when creating a cluster.""" + + +def _prepare_create_request(cluster): + """Creates a protobuf request for a CreateCluster request. + + :type cluster: :class:`Cluster` + :param cluster: The cluster to be created. + + :rtype: :class:`.messages_v2_pb2.CreateClusterRequest` + :returns: The CreateCluster request object containing the cluster info. + """ + return messages_v2_pb2.CreateClusterRequest( + parent=cluster._instance.name, + cluster_id=cluster.cluster_id, + cluster=data_v2_pb2.Cluster( + serve_nodes=cluster.serve_nodes, + ), + ) + + +def _parse_pb_any_to_native(any_val, expected_type=None): + """Convert a serialized "google.protobuf.Any" value to actual type. + + :type any_val: :class:`google.protobuf.any_pb2.Any` + :param any_val: A serialized protobuf value container. + + :type expected_type: str + :param expected_type: (Optional) The type URL we expect ``any_val`` + to have. + + :rtype: object + :returns: The de-serialized object. + :raises: :class:`ValueError ` if the + ``expected_type`` does not match the ``type_url`` on the input. 
+ """ + if expected_type is not None and expected_type != any_val.type_url: + raise ValueError('Expected type: %s, Received: %s' % ( + expected_type, any_val.type_url)) + container_class = _TYPE_URL_MAP[any_val.type_url] + return container_class.FromString(any_val.value) + + +def _process_operation(operation_pb): + """Processes a create protobuf response. + + :type operation_pb: :class:`google.longrunning.operations_pb2.Operation` + :param operation_pb: The long-running operation response from a + Create/Update/Undelete cluster request. + + :rtype: tuple + :returns: integer ID of the operation (``operation_id``). + :raises: :class:`ValueError ` if the operation name + doesn't match the :data:`_OPERATION_NAME_RE` regex. + """ + match = _OPERATION_NAME_RE.match(operation_pb.name) + if match is None: + raise ValueError('Operation name was not in the expected ' + 'format after a cluster modification.', + operation_pb.name) + operation_id = int(match.group('operation_id')) + + return operation_id + + +class Operation(object): + """Representation of a Google API Long-Running Operation. + + In particular, these will be the result of operations on + clusters using the Cloud Bigtable API. + + :type op_type: str + :param op_type: The type of operation being performed. Expect + ``create``, ``update`` or ``undelete``. + + :type op_id: int + :param op_id: The ID of the operation. + + :type cluster: :class:`Cluster` + :param cluster: The cluster that created the operation. 
+ """ + + def __init__(self, op_type, op_id, cluster=None): + self.op_type = op_type + self.op_id = op_id + self._cluster = cluster + self._complete = False + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return (other.op_type == self.op_type and + other.op_id == self.op_id and + other._cluster == self._cluster and + other._complete == self._complete) + + def __ne__(self, other): + return not self.__eq__(other) + + def finished(self): + """Check if the operation has finished. + + :rtype: bool + :returns: A boolean indicating if the current operation has completed. + :raises: :class:`ValueError ` if the operation + has already completed. + """ + if self._complete: + raise ValueError('The operation has completed.') + + operation_name = ('operations/' + self._cluster.name + + '/operations/%d' % (self.op_id,)) + request_pb = operations_pb2.GetOperationRequest(name=operation_name) + # We expect a `google.longrunning.operations_pb2.Operation`. + client = self._cluster._instance._client + operation_pb = client._operations_stub.GetOperation( + request_pb, client.timeout_seconds) + + if operation_pb.done: + self._complete = True + return True + else: + return False + + +class Cluster(object): + """Representation of a Google Cloud Bigtable Cluster. + + We can use a :class:`Cluster` to: + + * :meth:`reload` itself + * :meth:`create` itself + * :meth:`update` itself + * :meth:`delete` itself + * :meth:`undelete` itself + + .. note:: + + For now, we leave out the ``default_storage_type`` (an enum) + which if not sent will end up as :data:`.data_v2_pb2.STORAGE_SSD`. + + :type cluster_id: str + :param cluster_id: The ID of the cluster. + + :type instance: :class:`.instance.Instance` + :param instance: The instance where the cluster resides. + + :type serve_nodes: int + :param serve_nodes: (Optional) The number of nodes in the cluster. + Defaults to :data:`DEFAULT_SERVE_NODES`. 
+ """ + + def __init__(self, cluster_id, instance, + serve_nodes=DEFAULT_SERVE_NODES): + self.cluster_id = cluster_id + self._instance = instance + self.serve_nodes = serve_nodes + self.location = None + + def _update_from_pb(self, cluster_pb): + """Refresh self from the server-provided protobuf. + + Helper for :meth:`from_pb` and :meth:`reload`. + """ + if not cluster_pb.serve_nodes: # Simple field (int32) + raise ValueError('Cluster protobuf does not contain serve_nodes') + self.serve_nodes = cluster_pb.serve_nodes + self.location = cluster_pb.location + + @classmethod + def from_pb(cls, cluster_pb, instance): + """Creates a cluster instance from a protobuf. + + :type cluster_pb: :class:`instance_pb2.Cluster` + :param cluster_pb: A cluster protobuf object. + + :type instance: :class:`.instance.Instance>` + :param instance: The instance that owns the cluster. + + :rtype: :class:`Cluster` + :returns: The cluster parsed from the protobuf response. + :raises: + :class:`ValueError ` if the cluster + name does not match + ``projects/{project}/instances/{instance}/clusters/{cluster_id}`` + or if the parsed project ID does not match the project ID + on the client. + """ + match = _CLUSTER_NAME_RE.match(cluster_pb.name) + if match is None: + raise ValueError('Cluster protobuf name was not in the ' + 'expected format.', cluster_pb.name) + if match.group('project') != instance._client.project: + raise ValueError('Project ID on cluster does not match the ' + 'project ID on the client') + if match.group('instance') != instance.instance_id: + raise ValueError('Instance ID on cluster does not match the ' + 'instance ID on the client') + + result = cls(match.group('cluster_id'), instance) + result._update_from_pb(cluster_pb) + return result + + def copy(self): + """Make a copy of this cluster. + + Copies the local data stored as simple types and copies the client + attached to this instance. + + :rtype: :class:`.Cluster` + :returns: A copy of the current cluster. 
+ """ + new_instance = self._instance.copy() + return self.__class__(self.cluster_id, new_instance, + serve_nodes=self.serve_nodes) + + @property + def name(self): + """Cluster name used in requests. + + .. note:: + This property will not change if ``_instance`` and ``cluster_id`` + do not, but the return value is not cached. + + The cluster name is of the form + + ``"projects/{project}/instances/{instance}/clusters/{cluster_id}"`` + + :rtype: str + :returns: The cluster name. + """ + return self._instance.name + '/clusters/' + self.cluster_id + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + # NOTE: This does not compare the configuration values, such as + # the serve_nodes. Instead, it only compares + # identifying values instance, cluster ID and client. This is + # intentional, since the same cluster can be in different states + # if not synchronized. Clusters with similar instance/cluster + # settings but different clients can't be used in the same way. + return (other.cluster_id == self.cluster_id and + other._instance == self._instance) + + def __ne__(self, other): + return not self.__eq__(other) + + def reload(self): + """Reload the metadata for this cluster.""" + request_pb = messages_v2_pb2.GetClusterRequest(name=self.name) + # We expect a `._generated_v2.instance_pb2.Cluster`. + cluster_pb = self._instance._client._instance_stub.GetCluster( + request_pb, self._instance._client.timeout_seconds) + + # NOTE: _update_from_pb does not check that the project, instance and + # cluster ID on the response match the request. + self._update_from_pb(cluster_pb) + + def create(self): + """Create this cluster. + + .. note:: + + Uses the ``project``, ``instance`` and ``cluster_id`` on the + current :class:`Cluster` in addition to the ``serve_nodes``. + To change them before creating, reset the values via + + .. code:: python + + cluster.serve_nodes = 8 + cluster.cluster_id = 'i-changed-my-mind' + + before calling :meth:`create`. 
+ + :rtype: :class:`Operation` + :returns: The long-running operation corresponding to the + create operation. + """ + request_pb = _prepare_create_request(self) + # We expect a `google.longrunning.operations_pb2.Operation`. + operation_pb = self._instance._client._instance_stub.CreateCluster( + request_pb, self._instance._client.timeout_seconds) + + op_id = _process_operation(operation_pb) + return Operation('create', op_id, cluster=self) + + def update(self): + """Update this cluster. + + .. note:: + + Updates the ``serve_nodes``. If you'd like to + change them before updating, reset the values via + + .. code:: python + + cluster.serve_nodes = 8 + + before calling :meth:`update`. + + :rtype: :class:`Operation` + :returns: The long-running operation corresponding to the + update operation. + """ + request_pb = data_v2_pb2.Cluster( + name=self.name, + serve_nodes=self.serve_nodes, + ) + # Ignore expected `._generated_v2.instance_pb2.Cluster`. + operation_pb = self._instance._client._instance_stub.UpdateCluster( + request_pb, self._instance._client.timeout_seconds) + + op_id = _process_operation(operation_pb) + return Operation('update', op_id, cluster=self) + + def delete(self): + """Delete this cluster. + + Marks a cluster and all of its tables for permanent deletion in 7 days. + + Immediately upon completion of the request: + + * Billing will cease for all of the cluster's reserved resources. + * The cluster's ``delete_time`` field will be set 7 days in the future. + + Soon afterward: + + * All tables within the cluster will become unavailable. + + Prior to the cluster's ``delete_time``: + + * The cluster can be recovered with a call to ``UndeleteCluster``. + * All other attempts to modify or delete the cluster will be rejected. + + At the cluster's ``delete_time``: + + * The cluster and **all of its tables** will immediately and + irrevocably disappear from the API, and their data will be + permanently deleted. 
+ """ + request_pb = messages_v2_pb2.DeleteClusterRequest(name=self.name) + # We expect a `google.protobuf.empty_pb2.Empty` + self._instance._client._instance_stub.DeleteCluster( + request_pb, self._instance._client.timeout_seconds) diff --git a/env/Lib/site-packages/gcloud/bigtable/column_family.py b/env/Lib/site-packages/gcloud/bigtable/column_family.py new file mode 100644 index 0000000..10127aa --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/column_family.py @@ -0,0 +1,339 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""User friendly container for Google Cloud Bigtable Column Family.""" + + +import datetime + +from google.protobuf import duration_pb2 + +from gcloud._helpers import _total_seconds +from gcloud.bigtable._generated_v2 import ( + table_pb2 as table_v2_pb2) +from gcloud.bigtable._generated_v2 import ( + bigtable_table_admin_pb2 as table_admin_v2_pb2) + + +def _timedelta_to_duration_pb(timedelta_val): + """Convert a Python timedelta object to a duration protobuf. + + .. note:: + + The Python timedelta has a granularity of microseconds while + the protobuf duration type has a duration of nanoseconds. + + :type timedelta_val: :class:`datetime.timedelta` + :param timedelta_val: A timedelta object. + + :rtype: :class:`google.protobuf.duration_pb2.Duration` + :returns: A duration object equivalent to the time delta. 
+ """ + seconds_decimal = _total_seconds(timedelta_val) + # Truncate the parts other than the integer. + seconds = int(seconds_decimal) + if seconds_decimal < 0: + signed_micros = timedelta_val.microseconds - 10**6 + else: + signed_micros = timedelta_val.microseconds + # Convert nanoseconds to microseconds. + nanos = 1000 * signed_micros + return duration_pb2.Duration(seconds=seconds, nanos=nanos) + + +def _duration_pb_to_timedelta(duration_pb): + """Convert a duration protobuf to a Python timedelta object. + + .. note:: + + The Python timedelta has a granularity of microseconds while + the protobuf duration type has a duration of nanoseconds. + + :type duration_pb: :class:`google.protobuf.duration_pb2.Duration` + :param duration_pb: A protobuf duration object. + + :rtype: :class:`datetime.timedelta` + :returns: The converted timedelta object. + """ + return datetime.timedelta( + seconds=duration_pb.seconds, + microseconds=(duration_pb.nanos / 1000.0), + ) + + +class GarbageCollectionRule(object): + """Garbage collection rule for column families within a table. + + Cells in the column family (within a table) fitting the rule will be + deleted during garbage collection. + + .. note:: + + This class is a do-nothing base class for all GC rules. + + .. note:: + + A string ``gc_expression`` can also be used with API requests, but + that value would be superceded by a ``gc_rule``. As a result, we + don't support that feature and instead support via native classes. + """ + + def __ne__(self, other): + return not self.__eq__(other) + + +class MaxVersionsGCRule(GarbageCollectionRule): + """Garbage collection limiting the number of versions of a cell. 
+ + :type max_num_versions: int + :param max_num_versions: The maximum number of versions + """ + + def __init__(self, max_num_versions): + self.max_num_versions = max_num_versions + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return other.max_num_versions == self.max_num_versions + + def to_pb(self): + """Converts the garbage collection rule to a protobuf. + + :rtype: :class:`.table_v2_pb2.GcRule` + :returns: The converted current object. + """ + return table_v2_pb2.GcRule(max_num_versions=self.max_num_versions) + + +class MaxAgeGCRule(GarbageCollectionRule): + """Garbage collection limiting the age of a cell. + + :type max_age: :class:`datetime.timedelta` + :param max_age: The maximum age allowed for a cell in the table. + """ + + def __init__(self, max_age): + self.max_age = max_age + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return other.max_age == self.max_age + + def to_pb(self): + """Converts the garbage collection rule to a protobuf. + + :rtype: :class:`.table_v2_pb2.GcRule` + :returns: The converted current object. + """ + max_age = _timedelta_to_duration_pb(self.max_age) + return table_v2_pb2.GcRule(max_age=max_age) + + +class GCRuleUnion(GarbageCollectionRule): + """Union of garbage collection rules. + + :type rules: list + :param rules: List of :class:`GarbageCollectionRule`. + """ + + def __init__(self, rules): + self.rules = rules + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return other.rules == self.rules + + def to_pb(self): + """Converts the union into a single GC rule as a protobuf. + + :rtype: :class:`.table_v2_pb2.GcRule` + :returns: The converted current object. + """ + union = table_v2_pb2.GcRule.Union( + rules=[rule.to_pb() for rule in self.rules]) + return table_v2_pb2.GcRule(union=union) + + +class GCRuleIntersection(GarbageCollectionRule): + """Intersection of garbage collection rules. 
+ + :type rules: list + :param rules: List of :class:`GarbageCollectionRule`. + """ + + def __init__(self, rules): + self.rules = rules + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return other.rules == self.rules + + def to_pb(self): + """Converts the intersection into a single GC rule as a protobuf. + + :rtype: :class:`.table_v2_pb2.GcRule` + :returns: The converted current object. + """ + intersection = table_v2_pb2.GcRule.Intersection( + rules=[rule.to_pb() for rule in self.rules]) + return table_v2_pb2.GcRule(intersection=intersection) + + +class ColumnFamily(object): + """Representation of a Google Cloud Bigtable Column Family. + + We can use a :class:`ColumnFamily` to: + + * :meth:`create` itself + * :meth:`update` itself + * :meth:`delete` itself + + :type column_family_id: str + :param column_family_id: The ID of the column family. Must be of the + form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. + + :type table: :class:`Table ` + :param table: The table that owns the column family. + + :type gc_rule: :class:`GarbageCollectionRule` + :param gc_rule: (Optional) The garbage collection settings for this + column family. + """ + + def __init__(self, column_family_id, table, gc_rule=None): + self.column_family_id = column_family_id + self._table = table + self.gc_rule = gc_rule + + @property + def name(self): + """Column family name used in requests. + + .. note:: + + This property will not change if ``column_family_id`` does not, but + the return value is not cached. + + The table name is of the form + + ``"projects/../zones/../clusters/../tables/../columnFamilies/.."`` + + :rtype: str + :returns: The column family name. 
+ """ + return self._table.name + '/columnFamilies/' + self.column_family_id + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return (other.column_family_id == self.column_family_id and + other._table == self._table and + other.gc_rule == self.gc_rule) + + def __ne__(self, other): + return not self.__eq__(other) + + def create(self): + """Create this column family.""" + if self.gc_rule is None: + column_family = table_v2_pb2.ColumnFamily() + else: + column_family = table_v2_pb2.ColumnFamily( + gc_rule=self.gc_rule.to_pb()) + request_pb = table_admin_v2_pb2.ModifyColumnFamiliesRequest( + name=self._table.name) + request_pb.modifications.add( + id=self.column_family_id, + create=column_family, + ) + client = self._table._instance._client + # We expect a `.table_v2_pb2.ColumnFamily`. We ignore it since the only + # data it contains are the GC rule and the column family ID already + # stored on this instance. + client._table_stub.ModifyColumnFamilies(request_pb, + client.timeout_seconds) + + def update(self): + """Update this column family. + + .. note:: + + Only the GC rule can be updated. By changing the column family ID, + you will simply be referring to a different column family. + """ + if self.gc_rule is None: + column_family = table_v2_pb2.ColumnFamily() + else: + column_family = table_v2_pb2.ColumnFamily( + gc_rule=self.gc_rule.to_pb()) + request_pb = table_admin_v2_pb2.ModifyColumnFamiliesRequest( + name=self._table.name) + request_pb.modifications.add( + id=self.column_family_id, + update=column_family) + client = self._table._instance._client + # We expect a `.table_v2_pb2.ColumnFamily`. We ignore it since the only + # data it contains are the GC rule and the column family ID already + # stored on this instance. 
+ client._table_stub.ModifyColumnFamilies(request_pb, + client.timeout_seconds) + + def delete(self): + """Delete this column family.""" + request_pb = table_admin_v2_pb2.ModifyColumnFamiliesRequest( + name=self._table.name) + request_pb.modifications.add( + id=self.column_family_id, + drop=True) + client = self._table._instance._client + # We expect a `google.protobuf.empty_pb2.Empty` + client._table_stub.ModifyColumnFamilies(request_pb, + client.timeout_seconds) + + +def _gc_rule_from_pb(gc_rule_pb): + """Convert a protobuf GC rule to a native object. + + :type gc_rule_pb: :class:`.table_v2_pb2.GcRule` + :param gc_rule_pb: The GC rule to convert. + + :rtype: :class:`GarbageCollectionRule` or :data:`NoneType ` + :returns: An instance of one of the native rules defined + in :module:`column_family` or :data:`None` if no values were + set on the protobuf passed in. + :raises: :class:`ValueError ` if the rule name + is unexpected. + """ + rule_name = gc_rule_pb.WhichOneof('rule') + if rule_name is None: + return None + + if rule_name == 'max_num_versions': + return MaxVersionsGCRule(gc_rule_pb.max_num_versions) + elif rule_name == 'max_age': + max_age = _duration_pb_to_timedelta(gc_rule_pb.max_age) + return MaxAgeGCRule(max_age) + elif rule_name == 'union': + return GCRuleUnion([_gc_rule_from_pb(rule) + for rule in gc_rule_pb.union.rules]) + elif rule_name == 'intersection': + rules = [_gc_rule_from_pb(rule) + for rule in gc_rule_pb.intersection.rules] + return GCRuleIntersection(rules) + else: + raise ValueError('Unexpected rule name', rule_name) diff --git a/env/Lib/site-packages/gcloud/bigtable/happybase/__init__.py b/env/Lib/site-packages/gcloud/bigtable/happybase/__init__.py new file mode 100644 index 0000000..076a28d --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/happybase/__init__.py @@ -0,0 +1,167 @@ +# Copyright 2016 Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Bigtable HappyBase package. + +This package is intended to emulate the HappyBase library using +Google Cloud Bigtable as the backing store. + +Differences in Public API +------------------------- + +Some concepts from HBase/Thrift do not map directly to the Cloud +Bigtable API. As a result, the following instance methods and functions +could not be implemented: + +* :meth:`Connection.enable_table() \ + ` - no + concept of enabled/disabled +* :meth:`Connection.disable_table() \ + ` - no + concept of enabled/disabled +* :meth:`Connection.is_table_enabled() \ + ` + - no concept of enabled/disabled +* :meth:`Connection.compact_table() \ + ` - + table storage is opaque to user +* :meth:`Table.regions() ` + - tables in Cloud Bigtable do not expose internal storage details +* :meth:`Table.counter_set() \ + ` - method can't + be atomic, so we disable it +* The ``__version__`` value for the HappyBase package is :data:`None`. + However, it's worth nothing this implementation was based off HappyBase + 0.9. 
+ +In addition, many of the constants from +:mod:`connection ` +are specific to HBase and are defined as :data:`None` in our module: + +* ``COMPAT_MODES`` +* ``THRIFT_TRANSPORTS`` +* ``THRIFT_PROTOCOLS`` +* ``DEFAULT_HOST`` +* ``DEFAULT_PORT`` +* ``DEFAULT_TRANSPORT`` +* ``DEFAULT_COMPAT`` +* ``DEFAULT_PROTOCOL`` + +Two of these ``DEFAULT_HOST`` and ``DEFAULT_PORT``, are even imported in +the main :mod:`happybase ` package. + +Finally, we do not provide the ``util`` module. Though it is public in the +HappyBase library, it provides no core functionality. + +API Behavior Changes +-------------------- + +* Since there is no concept of an enabled / disabled table, calling + :meth:`Connection.delete_table() \ + ` + with ``disable=True`` can't be supported. + Using that argument will result in a warning. +* The :class:`Connection ` + constructor **disables** the use of several + arguments and will print a warning if any of them are passed in as keyword + arguments. The arguments are: + + * ``host`` + * ``port`` + * ``compat`` + * ``transport`` + * ``protocol`` +* In order to make + :class:`Connection ` + compatible with Cloud Bigtable, we add a ``instance`` keyword argument to + allow users to pass in their own + :class:`Instance ` (which they can + construct beforehand). + + For example: + + .. code:: python + + from gcloud.bigtable.client import Client + client = Client(project=PROJECT_ID, admin=True) + instance = client.instance(instance_id, location_id) + instance.reload() + + from gcloud.bigtable.happybase import Connection + connection = Connection(instance=instance) + +* Any uses of the ``wal`` (Write Ahead Log) argument will result in a + warning as well. This includes uses in: + + * :class:`Batch ` + * :meth:`Batch.put() ` + * :meth:`Batch.delete() ` + * :meth:`Table.put() ` + * :meth:`Table.delete() ` + * :meth:`Table.batch() ` factory +* When calling + :meth:`Connection.create_table() \ + `, the + majority of HBase column family options cannot be used. 
Among
+
+  * ``max_versions``
+  * ``compression``
+  * ``in_memory``
+  * ``bloom_filter_type``
+  * ``bloom_filter_vector_size``
+  * ``bloom_filter_nb_hashes``
+  * ``block_cache_enabled``
+  * ``time_to_live``
+
+  Only ``max_versions`` and ``time_to_live`` are available in Cloud Bigtable
+  (as
+  :class:`MaxVersionsGCRule <gcloud.bigtable.column_family.MaxVersionsGCRule>`
+  and
+  :class:`MaxAgeGCRule <gcloud.bigtable.column_family.MaxAgeGCRule>`).
+
+  In addition to using a dictionary for specifying column family options,
+  we also accept instances of :class:`.GarbageCollectionRule` or subclasses.
+* :meth:`Table.scan() <gcloud.bigtable.happybase.table.Table.scan>` no longer
+  accepts the following arguments (which will result in a warning):
+
+  * ``batch_size``
+  * ``scan_batching``
+  * ``sorted_columns``
+
+* Using a HBase filter string in
+  :meth:`Table.scan() <gcloud.bigtable.happybase.table.Table.scan>` is
+  not possible with Cloud Bigtable and will result in a
+  :class:`TypeError <exceptions.TypeError>`. However, the method now accepts
+  instances of :class:`.RowFilter` and subclasses.
+* :meth:`Batch.delete() <gcloud.bigtable.happybase.batch.Batch.delete>` (and
+  hence
+  :meth:`Table.delete() <gcloud.bigtable.happybase.table.Table.delete>`)
+  will fail with a :class:`ValueError <exceptions.ValueError>` when either a
+  row or column family delete is attempted with a ``timestamp``. This is
+  because the Cloud Bigtable API uses the ``DeleteFromFamily`` and
+  ``DeleteFromRow`` mutations for these deletes, and neither of these
+  mutations support a timestamp.
+"""
+
+from gcloud.bigtable.happybase.batch import Batch
+from gcloud.bigtable.happybase.connection import Connection
+from gcloud.bigtable.happybase.connection import DEFAULT_HOST
+from gcloud.bigtable.happybase.connection import DEFAULT_PORT
+from gcloud.bigtable.happybase.pool import ConnectionPool
+from gcloud.bigtable.happybase.pool import NoConnectionsAvailable
+from gcloud.bigtable.happybase.table import Table
+
+
+# Values from HappyBase that we don't reproduce / are not relevant.
+__version__ = None diff --git a/env/Lib/site-packages/gcloud/bigtable/happybase/batch.py b/env/Lib/site-packages/gcloud/bigtable/happybase/batch.py new file mode 100644 index 0000000..25e6d07 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/happybase/batch.py @@ -0,0 +1,326 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Bigtable HappyBase batch module.""" + + +import datetime +import warnings + +import six + +from gcloud._helpers import _datetime_from_microseconds +from gcloud.bigtable.row_filters import TimestampRange + + +_WAL_SENTINEL = object() +# Assumed granularity of timestamps in Cloud Bigtable. +_ONE_MILLISECOND = datetime.timedelta(microseconds=1000) +_WARN = warnings.warn +_WAL_WARNING = ('The wal argument (Write-Ahead-Log) is not ' + 'supported by Cloud Bigtable.') + + +class Batch(object): + """Batch class for accumulating mutations. + + .. note:: + + When using a batch with ``transaction=False`` as a context manager + (i.e. in a ``with`` statement), mutations will still be sent as + row mutations even if the context manager exits with an error. + This behavior is in place to match the behavior in the HappyBase + HBase / Thrift implementation. + + :type table: :class:`Table ` + :param table: The table where mutations will be applied. + + :type timestamp: int + :param timestamp: (Optional) Timestamp (in milliseconds since the epoch) + that all mutations will be applied at. 
+ + :type batch_size: int + :param batch_size: (Optional) The maximum number of mutations to allow + to accumulate before committing them. + + :type transaction: bool + :param transaction: Flag indicating if the mutations should be sent + transactionally or not. If ``transaction=True`` and + an error occurs while a :class:`Batch` is active, + then none of the accumulated mutations will be + committed. If ``batch_size`` is set, the mutation + can't be transactional. + + :type wal: object + :param wal: Unused parameter (Boolean for using the HBase Write Ahead Log). + Provided for compatibility with HappyBase, but irrelevant for + Cloud Bigtable since it does not have a Write Ahead Log. + + :raises: :class:`TypeError ` if ``batch_size`` + is set and ``transaction=True``. + :class:`ValueError ` if ``batch_size`` + is not positive. + """ + + def __init__(self, table, timestamp=None, batch_size=None, + transaction=False, wal=_WAL_SENTINEL): + if wal is not _WAL_SENTINEL: + _WARN(_WAL_WARNING) + + if batch_size is not None: + if transaction: + raise TypeError('When batch_size is set, a Batch cannot be ' + 'transactional') + if batch_size <= 0: + raise ValueError('batch_size must be positive') + + self._table = table + self._batch_size = batch_size + self._timestamp = self._delete_range = None + + # Timestamp is in milliseconds, convert to microseconds. + if timestamp is not None: + self._timestamp = _datetime_from_microseconds(1000 * timestamp) + # For deletes, we get the very next timestamp (assuming timestamp + # granularity is milliseconds). This is because HappyBase users + # expect HBase deletes to go **up to** and **including** the + # timestamp while Cloud Bigtable Time Ranges **exclude** the + # final timestamp. + next_timestamp = self._timestamp + _ONE_MILLISECOND + self._delete_range = TimestampRange(end=next_timestamp) + + self._transaction = transaction + + # Internal state for tracking mutations. 
+ self._row_map = {} + self._mutation_count = 0 + + def send(self): + """Send / commit the batch of mutations to the server.""" + for row in self._row_map.values(): + # commit() does nothing if row hasn't accumulated any mutations. + row.commit() + + self._row_map.clear() + self._mutation_count = 0 + + def _try_send(self): + """Send / commit the batch if mutations have exceeded batch size.""" + if self._batch_size and self._mutation_count >= self._batch_size: + self.send() + + def _get_row(self, row_key): + """Gets a row that will hold mutations. + + If the row is not already cached on the current batch, a new row will + be created. + + :type row_key: str + :param row_key: The row key for a row stored in the map. + + :rtype: :class:`Row ` + :returns: The newly created or stored row that will hold mutations. + """ + if row_key not in self._row_map: + table = self._table._low_level_table + self._row_map[row_key] = table.row(row_key) + + return self._row_map[row_key] + + def put(self, row, data, wal=_WAL_SENTINEL): + """Insert data into a row in the table owned by this batch. + + :type row: str + :param row: The row key where the mutation will be "put". + + :type data: dict + :param data: Dictionary containing the data to be inserted. The keys + are columns names (of the form ``fam:col``) and the values + are strings (bytes) to be stored in those columns. + + :type wal: object + :param wal: Unused parameter (to over-ride the default on the + instance). Provided for compatibility with HappyBase, but + irrelevant for Cloud Bigtable since it does not have a + Write Ahead Log. + """ + if wal is not _WAL_SENTINEL: + _WARN(_WAL_WARNING) + + row_object = self._get_row(row) + # Make sure all the keys are valid before beginning + # to add mutations. 
+ column_pairs = _get_column_pairs(six.iterkeys(data), + require_qualifier=True) + for column_family_id, column_qualifier in column_pairs: + value = data[column_family_id + ':' + column_qualifier] + row_object.set_cell(column_family_id, column_qualifier, + value, timestamp=self._timestamp) + + self._mutation_count += len(data) + self._try_send() + + def _delete_columns(self, columns, row_object): + """Adds delete mutations for a list of columns and column families. + + :type columns: list + :param columns: Iterable containing column names (as + strings). Each column name can be either + + * an entire column family: ``fam`` or ``fam:`` + * a single column: ``fam:col`` + + :type row_object: :class:`Row ` + :param row_object: The row which will hold the delete mutations. + + :raises: :class:`ValueError ` if the delete + timestamp range is set on the current batch, but a + column family delete is attempted. + """ + column_pairs = _get_column_pairs(columns) + for column_family_id, column_qualifier in column_pairs: + if column_qualifier is None: + if self._delete_range is not None: + raise ValueError('The Cloud Bigtable API does not support ' + 'adding a timestamp to ' + '"DeleteFromFamily" ') + row_object.delete_cells(column_family_id, + columns=row_object.ALL_COLUMNS) + else: + row_object.delete_cell(column_family_id, + column_qualifier, + time_range=self._delete_range) + + def delete(self, row, columns=None, wal=_WAL_SENTINEL): + """Delete data from a row in the table owned by this batch. + + :type row: str + :param row: The row key where the delete will occur. + + :type columns: list + :param columns: (Optional) Iterable containing column names (as + strings). Each column name can be either + + * an entire column family: ``fam`` or ``fam:`` + * a single column: ``fam:col`` + + If not used, will delete the entire row. + + :type wal: object + :param wal: Unused parameter (to over-ride the default on the + instance). 
Provided for compatibility with HappyBase, but + irrelevant for Cloud Bigtable since it does not have a + Write Ahead Log. + + :raises: If the delete timestamp range is set on the + current batch, but a full row delete is attempted. + """ + if wal is not _WAL_SENTINEL: + _WARN(_WAL_WARNING) + + row_object = self._get_row(row) + + if columns is None: + # Delete entire row. + if self._delete_range is not None: + raise ValueError('The Cloud Bigtable API does not support ' + 'adding a timestamp to "DeleteFromRow" ' + 'mutations') + row_object.delete() + self._mutation_count += 1 + else: + self._delete_columns(columns, row_object) + self._mutation_count += len(columns) + + self._try_send() + + def __enter__(self): + """Enter context manager, no set-up required.""" + return self + + def __exit__(self, exc_type, exc_value, traceback): + """Exit context manager, no set-up required. + + :type exc_type: type + :param exc_type: The type of the exception if one occurred while the + context manager was active. Otherwise, :data:`None`. + + :type exc_value: :class:`Exception ` + :param exc_value: An instance of ``exc_type`` if an exception occurred + while the context was active. + Otherwise, :data:`None`. + + :type traceback: ``traceback`` type + :param traceback: The traceback where the exception occurred (if one + did occur). Otherwise, :data:`None`. + """ + # If the context manager encountered an exception and the batch is + # transactional, we don't commit the mutations. + if self._transaction and exc_type is not None: + return + + # NOTE: For non-transactional batches, this will even commit mutations + # if an error occurred during the context manager. + self.send() + + +def _get_column_pairs(columns, require_qualifier=False): + """Turns a list of column or column families into parsed pairs. + + Turns a column family (``fam`` or ``fam:``) into a pair such + as ``['fam', None]`` and turns a column (``fam:col``) into + ``['fam', 'col']``. 
+ + :type columns: list + :param columns: Iterable containing column names (as + strings). Each column name can be either + + * an entire column family: ``fam`` or ``fam:`` + * a single column: ``fam:col`` + + :type require_qualifier: bool + :param require_qualifier: Boolean indicating if the columns should + all have a qualifier or not. + + :rtype: list + :returns: List of pairs, where the first element in each pair is the + column family and the second is the column qualifier + (or :data:`None`). + :raises: :class:`ValueError ` if any of the columns + are not of the expected format. + :class:`ValueError ` if + ``require_qualifier`` is :data:`True` and one of the values is + for an entire column family + """ + column_pairs = [] + for column in columns: + if isinstance(column, six.binary_type): + column = column.decode('utf-8') + # Remove trailing colons (i.e. for standalone column family). + if column.endswith(u':'): + column = column[:-1] + num_colons = column.count(u':') + if num_colons == 0: + # column is a column family. + if require_qualifier: + raise ValueError('column does not contain a qualifier', + column) + else: + column_pairs.append([column, None]) + elif num_colons == 1: + column_pairs.append(column.split(u':')) + else: + raise ValueError('Column contains the : separator more than once') + + return column_pairs diff --git a/env/Lib/site-packages/gcloud/bigtable/happybase/connection.py b/env/Lib/site-packages/gcloud/bigtable/happybase/connection.py new file mode 100644 index 0000000..ebea84e --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/happybase/connection.py @@ -0,0 +1,484 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Bigtable HappyBase connection module.""" + + +import datetime +import warnings + +import six + +from grpc.beta import interfaces +from grpc.framework.interfaces.face import face + +try: + from happybase.hbase.ttypes import AlreadyExists +except ImportError: + from gcloud.exceptions import Conflict as AlreadyExists + +from gcloud.bigtable.client import Client +from gcloud.bigtable.column_family import GCRuleIntersection +from gcloud.bigtable.column_family import MaxAgeGCRule +from gcloud.bigtable.column_family import MaxVersionsGCRule +from gcloud.bigtable.happybase.table import Table +from gcloud.bigtable.table import Table as _LowLevelTable + + +# Constants reproduced here for HappyBase compatibility, though values +# are all null. +COMPAT_MODES = None +THRIFT_TRANSPORTS = None +THRIFT_PROTOCOLS = None +DEFAULT_HOST = None +DEFAULT_PORT = None +DEFAULT_TRANSPORT = None +DEFAULT_COMPAT = None +DEFAULT_PROTOCOL = None + +_LEGACY_ARGS = frozenset(('host', 'port', 'compat', 'transport', 'protocol')) +_WARN = warnings.warn +_DISABLE_DELETE_MSG = ('The disable argument should not be used in ' + 'delete_table(). Cloud Bigtable has no concept ' + 'of enabled / disabled tables.') + + +def _get_instance(timeout=None): + """Gets instance for the default project. + + Creates a client with the inferred credentials and project ID from + the local environment. Then uses + :meth:`.bigtable.client.Client.list_instances` to + get the unique instance owned by the project. 
+ + If the request fails for any reason, or if there isn't exactly one instance + owned by the project, then this function will fail. + + :type timeout: int + :param timeout: (Optional) The socket timeout in milliseconds. + + :rtype: :class:`gcloud.bigtable.instance.Instance` + :returns: The unique instance owned by the project inferred from + the environment. + :raises: :class:`ValueError ` if there is a failed + location or any number of instances other than one. + """ + client_kwargs = {'admin': True} + if timeout is not None: + client_kwargs['timeout_seconds'] = timeout / 1000.0 + client = Client(**client_kwargs) + try: + client.start() + instances, failed_locations = client.list_instances() + finally: + client.stop() + + if len(failed_locations) != 0: + raise ValueError('Determining instance via ListInstances encountered ' + 'failed locations.') + if len(instances) == 0: + raise ValueError('This client doesn\'t have access to any instances.') + if len(instances) > 1: + raise ValueError('This client has access to more than one instance. ' + 'Please directly pass the instance you\'d ' + 'like to use.') + return instances[0] + + +class Connection(object): + """Connection to Cloud Bigtable backend. + + .. note:: + + If you pass a ``instance``, it will be :meth:`.Instance.copy`-ed before + being stored on the new connection. This also copies the + :class:`Client ` that created the + :class:`Instance ` instance and the + :class:`Credentials ` stored on the + client. + + The arguments ``host``, ``port``, ``compat``, ``transport`` and + ``protocol`` are allowed (as keyword arguments) for compatibility with + HappyBase. However, they will not be used in any way, and will cause a + warning if passed. + + :type timeout: int + :param timeout: (Optional) The socket timeout in milliseconds. + + :type autoconnect: bool + :param autoconnect: (Optional) Whether the connection should be + :meth:`open`-ed during construction. 
+ + :type table_prefix: str + :param table_prefix: (Optional) Prefix used to construct table names. + + :type table_prefix_separator: str + :param table_prefix_separator: (Optional) Separator used with + ``table_prefix``. Defaults to ``_``. + + :type instance: :class:`Instance ` + :param instance: (Optional) A Cloud Bigtable instance. The instance also + owns a client for making gRPC requests to the Cloud + Bigtable API. If not passed in, defaults to creating client + with ``admin=True`` and using the ``timeout`` here for the + ``timeout_seconds`` argument to the + :class:`Client ` + constructor. The credentials for the client + will be the implicit ones loaded from the environment. + Then that client is used to retrieve all the instances + owned by the client's project. + + :type kwargs: dict + :param kwargs: Remaining keyword arguments. Provided for HappyBase + compatibility. + """ + + _instance = None + + def __init__(self, timeout=None, autoconnect=True, table_prefix=None, + table_prefix_separator='_', instance=None, **kwargs): + self._handle_legacy_args(kwargs) + if table_prefix is not None: + if not isinstance(table_prefix, six.string_types): + raise TypeError('table_prefix must be a string', 'received', + table_prefix, type(table_prefix)) + + if not isinstance(table_prefix_separator, six.string_types): + raise TypeError('table_prefix_separator must be a string', + 'received', table_prefix_separator, + type(table_prefix_separator)) + + self.table_prefix = table_prefix + self.table_prefix_separator = table_prefix_separator + + if instance is None: + self._instance = _get_instance(timeout=timeout) + else: + if timeout is not None: + raise ValueError('Timeout cannot be used when an existing ' + 'instance is passed') + self._instance = instance.copy() + + if autoconnect: + self.open() + + self._initialized = True + + @staticmethod + def _handle_legacy_args(arguments_dict): + """Check legacy HappyBase arguments and warn if set. 
+ + :type arguments_dict: dict + :param arguments_dict: Unused keyword arguments. + + :raises: :class:`TypeError ` if a keyword other + than ``host``, ``port``, ``compat``, ``transport`` or + ``protocol`` is used. + """ + common_args = _LEGACY_ARGS.intersection(six.iterkeys(arguments_dict)) + if common_args: + all_args = ', '.join(common_args) + message = ('The HappyBase legacy arguments %s were used. These ' + 'arguments are unused by gcloud.' % (all_args,)) + _WARN(message) + for arg_name in common_args: + arguments_dict.pop(arg_name) + if arguments_dict: + unexpected_names = arguments_dict.keys() + raise TypeError('Received unexpected arguments', unexpected_names) + + def open(self): + """Open the underlying transport to Cloud Bigtable. + + This method opens the underlying HTTP/2 gRPC connection using a + :class:`Client ` bound to the + :class:`Instance ` owned by + this connection. + """ + self._instance._client.start() + + def close(self): + """Close the underlying transport to Cloud Bigtable. + + This method closes the underlying HTTP/2 gRPC connection using a + :class:`Client ` bound to the + :class:`Instance ` owned by + this connection. + """ + self._instance._client.stop() + + def __del__(self): + if self._instance is not None: + self.close() + + def _table_name(self, name): + """Construct a table name by optionally adding a table name prefix. + + :type name: str + :param name: The name to have a prefix added to it. + + :rtype: str + :returns: The prefixed name, if the current connection has a table + prefix set. + """ + if self.table_prefix is None: + return name + + return self.table_prefix + self.table_prefix_separator + name + + def table(self, name, use_prefix=True): + """Table factory. + + :type name: str + :param name: The name of the table to be created. + + :type use_prefix: bool + :param use_prefix: Whether to use the table prefix (if any). + + :rtype: :class:`Table ` + :returns: Table instance owned by this connection. 
+ """ + if use_prefix: + name = self._table_name(name) + return Table(name, self) + + def tables(self): + """Return a list of table names available to this connection. + + .. note:: + + This lists every table in the instance owned by this connection, + **not** every table that a given user may have access to. + + .. note:: + + If ``table_prefix`` is set on this connection, only returns the + table names which match that prefix. + + :rtype: list + :returns: List of string table names. + """ + low_level_table_instances = self._instance.list_tables() + table_names = [table_instance.table_id + for table_instance in low_level_table_instances] + + # Filter using prefix, and strip prefix from names + if self.table_prefix is not None: + prefix = self._table_name('') + offset = len(prefix) + table_names = [name[offset:] for name in table_names + if name.startswith(prefix)] + + return table_names + + def create_table(self, name, families): + """Create a table. + + .. warning:: + + The only column family options from HappyBase that are able to be + used with Cloud Bigtable are ``max_versions`` and ``time_to_live``. + + .. note:: + + This method is **not** atomic. The Cloud Bigtable API separates + the creation of a table from the creation of column families. Thus + this method needs to send 1 request for the table creation and 1 + request for each column family. If any of these fails, the method + will fail, but the progress made towards completion cannot be + rolled back. + + Values in ``families`` represent column family options. In HappyBase, + these are dictionaries, corresponding to the ``ColumnDescriptor`` + structure in the Thrift API. 
The accepted keys are: + + * ``max_versions`` (``int``) + * ``compression`` (``str``) + * ``in_memory`` (``bool``) + * ``bloom_filter_type`` (``str``) + * ``bloom_filter_vector_size`` (``int``) + * ``bloom_filter_nb_hashes`` (``int``) + * ``block_cache_enabled`` (``bool``) + * ``time_to_live`` (``int``) + + :type name: str + :param name: The name of the table to be created. + + :type families: dict + :param families: Dictionary with column family names as keys and column + family options as the values. The options can be among + + * :class:`dict` + * :class:`.GarbageCollectionRule` + + :raises: :class:`TypeError ` if ``families`` is + not a dictionary, + :class:`ValueError ` if ``families`` + has no entries + """ + if not isinstance(families, dict): + raise TypeError('families arg must be a dictionary') + + if not families: + raise ValueError('Cannot create table %r (no column ' + 'families specified)' % (name,)) + + # Parse all keys before making any API requests. + gc_rule_dict = {} + for column_family_name, option in families.items(): + if isinstance(column_family_name, six.binary_type): + column_family_name = column_family_name.decode('utf-8') + if column_family_name.endswith(':'): + column_family_name = column_family_name[:-1] + gc_rule_dict[column_family_name] = _parse_family_option(option) + + # Create table instance and then make API calls. + name = self._table_name(name) + low_level_table = _LowLevelTable(name, self._instance) + try: + low_level_table.create() + except face.NetworkError as network_err: + if network_err.code == interfaces.StatusCode.ALREADY_EXISTS: + raise AlreadyExists(name) + else: + raise + + for column_family_name, gc_rule in gc_rule_dict.items(): + column_family = low_level_table.column_family( + column_family_name, gc_rule=gc_rule) + column_family.create() + + def delete_table(self, name, disable=False): + """Delete the specified table. + + :type name: str + :param name: The name of the table to be deleted. 
If ``table_prefix`` + is set, a prefix will be added to the ``name``. + + :type disable: bool + :param disable: Whether to first disable the table if needed. This + is provided for compatibility with HappyBase, but is + not relevant for Cloud Bigtable since it has no concept + of enabled / disabled tables. + """ + if disable: + _WARN(_DISABLE_DELETE_MSG) + + name = self._table_name(name) + _LowLevelTable(name, self._instance).delete() + + def enable_table(self, name): + """Enable the specified table. + + .. warning:: + + Cloud Bigtable has no concept of enabled / disabled tables so this + method does not work. It is provided simply for compatibility. + + :raises: :class:`NotImplementedError ` + always + """ + raise NotImplementedError('The Cloud Bigtable API has no concept of ' + 'enabled or disabled tables.') + + def disable_table(self, name): + """Disable the specified table. + + .. warning:: + + Cloud Bigtable has no concept of enabled / disabled tables so this + method does not work. It is provided simply for compatibility. + + :raises: :class:`NotImplementedError ` + always + """ + raise NotImplementedError('The Cloud Bigtable API has no concept of ' + 'enabled or disabled tables.') + + def is_table_enabled(self, name): + """Return whether the specified table is enabled. + + .. warning:: + + Cloud Bigtable has no concept of enabled / disabled tables so this + method does not work. It is provided simply for compatibility. + + :raises: :class:`NotImplementedError ` + always + """ + raise NotImplementedError('The Cloud Bigtable API has no concept of ' + 'enabled or disabled tables.') + + def compact_table(self, name, major=False): + """Compact the specified table. + + .. warning:: + + Cloud Bigtable does not support compacting a table, so this + method does not work. It is provided simply for compatibility. 
+ + :raises: :class:`NotImplementedError ` + always + """ + raise NotImplementedError('The Cloud Bigtable API does not support ' + 'compacting a table.') + + +def _parse_family_option(option): + """Parses a column family option into a garbage collection rule. + + .. note:: + + If ``option`` is not a dictionary, the type is not checked. + If ``option`` is :data:`None`, there is nothing to do, since this + is the correct output. + + :type option: :class:`dict`, + :data:`NoneType `, + :class:`.GarbageCollectionRule` + :param option: A column family option passes as a dictionary value in + :meth:`Connection.create_table`. + + :rtype: :class:`.GarbageCollectionRule` + :returns: A garbage collection rule parsed from the input. + """ + result = option + if isinstance(result, dict): + if not set(result.keys()) <= set(['max_versions', 'time_to_live']): + all_keys = ', '.join(repr(key) for key in result.keys()) + warning_msg = ('Cloud Bigtable only supports max_versions and ' + 'time_to_live column family settings. ' + 'Received: %s' % (all_keys,)) + _WARN(warning_msg) + + max_num_versions = result.get('max_versions') + max_age = None + if 'time_to_live' in result: + max_age = datetime.timedelta(seconds=result['time_to_live']) + + versions_rule = age_rule = None + if max_num_versions is not None: + versions_rule = MaxVersionsGCRule(max_num_versions) + if max_age is not None: + age_rule = MaxAgeGCRule(max_age) + + if versions_rule is None: + result = age_rule + else: + if age_rule is None: + result = versions_rule + else: + result = GCRuleIntersection(rules=[age_rule, versions_rule]) + + return result diff --git a/env/Lib/site-packages/gcloud/bigtable/happybase/pool.py b/env/Lib/site-packages/gcloud/bigtable/happybase/pool.py new file mode 100644 index 0000000..1ed22cd --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/happybase/pool.py @@ -0,0 +1,153 @@ +# Copyright 2016 Google Inc. All rights reserved. 
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google Cloud Bigtable HappyBase pool module."""
+
+
+import contextlib
+import threading
+
+import six
+
+from gcloud.bigtable.happybase.connection import Connection
+from gcloud.bigtable.happybase.connection import _get_instance
+
+
+_MIN_POOL_SIZE = 1
+"""Minimum allowable size of a connection pool."""
+
+
+class NoConnectionsAvailable(RuntimeError):
+    """Exception raised when no connections are available.
+
+    This happens if a timeout was specified when obtaining a connection,
+    and no connection became available within the specified timeout.
+    """
+
+
+class ConnectionPool(object):
+    """Thread-safe connection pool.
+
+    .. note::
+
+        All keyword arguments are passed unmodified to the
+        :class:`Connection <.happybase.connection.Connection>` constructor
+        **except** for ``autoconnect``. This is because the ``open`` /
+        ``closed`` status of a connection is managed by the pool. In addition,
+        if ``instance`` is not passed, the default / inferred instance is
+        determined by the pool and then passed to each
+        :class:`Connection <.happybase.connection.Connection>` that is created.
+
+    :type size: int
+    :param size: The maximum number of concurrently open connections.
+
+    :type kwargs: dict
+    :param kwargs: Keyword arguments passed to
+                   :class:`Connection <.happybase.Connection>`
+                   constructor.
+
+    :raises: :class:`TypeError <exceptions.TypeError>` if ``size``
+             is not an integer.
+             :class:`ValueError <exceptions.ValueError>` if ``size``
+             is not positive.
+ """ + def __init__(self, size, **kwargs): + if not isinstance(size, six.integer_types): + raise TypeError('Pool size arg must be an integer') + + if size < _MIN_POOL_SIZE: + raise ValueError('Pool size must be positive') + + self._lock = threading.Lock() + self._queue = six.moves.queue.LifoQueue(maxsize=size) + self._thread_connections = threading.local() + + connection_kwargs = kwargs + connection_kwargs['autoconnect'] = False + if 'instance' not in connection_kwargs: + connection_kwargs['instance'] = _get_instance( + timeout=kwargs.get('timeout')) + + for _ in six.moves.range(size): + connection = Connection(**connection_kwargs) + self._queue.put(connection) + + def _acquire_connection(self, timeout=None): + """Acquire a connection from the pool. + + :type timeout: int + :param timeout: (Optional) Time (in seconds) to wait for a connection + to open. + + :rtype: :class:`Connection <.happybase.Connection>` + :returns: An active connection from the queue stored on the pool. + :raises: :class:`NoConnectionsAvailable` if ``Queue.get`` fails + before the ``timeout`` (only if a timeout is specified). + """ + try: + return self._queue.get(block=True, timeout=timeout) + except six.moves.queue.Empty: + raise NoConnectionsAvailable('No connection available from pool ' + 'within specified timeout') + + @contextlib.contextmanager + def connection(self, timeout=None): + """Obtain a connection from the pool. + + Must be used as a context manager, for example:: + + with pool.connection() as connection: + pass # do something with the connection + + If ``timeout`` is omitted, this method waits forever for a connection + to become available from the local queue. + + :type timeout: int + :param timeout: (Optional) Time (in seconds) to wait for a connection + to open. + + :rtype: :class:`Connection <.happybase.connection.Connection>` + :returns: An active connection from the pool. 
+ :raises: :class:`NoConnectionsAvailable` if no connection can be + retrieved from the pool before the ``timeout`` (only if + a timeout is specified). + """ + connection = getattr(self._thread_connections, 'current', None) + + retrieved_new_cnxn = False + if connection is None: + # In this case we need to actually grab a connection from the + # pool. After retrieval, the connection is stored on a thread + # local so that nested connection requests from the same + # thread can re-use the same connection instance. + # + # NOTE: This code acquires a lock before assigning to the + # thread local; see + # ('https://emptysqua.re/blog/' + # 'another-thing-about-pythons-threadlocals/') + retrieved_new_cnxn = True + connection = self._acquire_connection(timeout) + with self._lock: + self._thread_connections.current = connection + + # This is a no-op for connections that have already been opened + # since they just call Client.start(). + connection.open() + yield connection + + # Remove thread local reference after the outermost 'with' block + # ends. Afterwards the thread no longer owns the connection. + if retrieved_new_cnxn: + del self._thread_connections.current + self._queue.put(connection) diff --git a/env/Lib/site-packages/gcloud/bigtable/happybase/table.py b/env/Lib/site-packages/gcloud/bigtable/happybase/table.py new file mode 100644 index 0000000..e35bb80 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/happybase/table.py @@ -0,0 +1,980 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Bigtable HappyBase table module.""" + + +import struct +import warnings + +import six + +from gcloud._helpers import _datetime_from_microseconds +from gcloud._helpers import _microseconds_from_datetime +from gcloud._helpers import _to_bytes +from gcloud._helpers import _total_seconds +from gcloud.bigtable.column_family import GCRuleIntersection +from gcloud.bigtable.column_family import MaxAgeGCRule +from gcloud.bigtable.column_family import MaxVersionsGCRule +from gcloud.bigtable.happybase.batch import _get_column_pairs +from gcloud.bigtable.happybase.batch import _WAL_SENTINEL +from gcloud.bigtable.happybase.batch import Batch +from gcloud.bigtable.row_filters import CellsColumnLimitFilter +from gcloud.bigtable.row_filters import ColumnQualifierRegexFilter +from gcloud.bigtable.row_filters import FamilyNameRegexFilter +from gcloud.bigtable.row_filters import RowFilterChain +from gcloud.bigtable.row_filters import RowFilterUnion +from gcloud.bigtable.row_filters import RowKeyRegexFilter +from gcloud.bigtable.row_filters import TimestampRange +from gcloud.bigtable.row_filters import TimestampRangeFilter +from gcloud.bigtable.table import Table as _LowLevelTable + + +_WARN = warnings.warn +_UNPACK_I64 = struct.Struct('>q').unpack +_SIMPLE_GC_RULES = (MaxAgeGCRule, MaxVersionsGCRule) + + +def make_row(cell_map, include_timestamp): + """Make a row dict for a Thrift cell mapping. + + .. warning:: + + This method is only provided for HappyBase compatibility, but does not + actually work. + + :type cell_map: dict + :param cell_map: Dictionary with ``fam:col`` strings as keys and ``TCell`` + instances as values. + + :type include_timestamp: bool + :param include_timestamp: Flag to indicate if cell timestamps should be + included with the output. 
+ + :raises: :class:`NotImplementedError ` + always + """ + raise NotImplementedError('The Cloud Bigtable API output is not the same ' + 'as the output from the Thrift server, so this ' + 'helper can not be implemented.', 'Called with', + cell_map, include_timestamp) + + +def make_ordered_row(sorted_columns, include_timestamp): + """Make a row dict for sorted Thrift column results from scans. + + .. warning:: + + This method is only provided for HappyBase compatibility, but does not + actually work. + + :type sorted_columns: list + :param sorted_columns: List of ``TColumn`` instances from Thrift. + + :type include_timestamp: bool + :param include_timestamp: Flag to indicate if cell timestamps should be + included with the output. + + :raises: :class:`NotImplementedError ` + always + """ + raise NotImplementedError('The Cloud Bigtable API output is not the same ' + 'as the output from the Thrift server, so this ' + 'helper can not be implemented.', 'Called with', + sorted_columns, include_timestamp) + + +class Table(object): + """Representation of Cloud Bigtable table. + + Used for adding data and + + :type name: str + :param name: The name of the table. + + :type connection: :class:`Connection <.happybase.connection.Connection>` + :param connection: The connection which has access to the table. + """ + + def __init__(self, name, connection): + self.name = name + # This remains as legacy for HappyBase, but only the instance + # from the connection is needed. + self.connection = connection + self._low_level_table = None + if self.connection is not None: + self._low_level_table = _LowLevelTable(self.name, + self.connection._instance) + + def __repr__(self): + return '' % (self.name,) + + def families(self): + """Retrieve the column families for this table. + + :rtype: dict + :returns: Mapping from column family name to garbage collection rule + for a column family. 
+ """ + column_family_map = self._low_level_table.list_column_families() + result = {} + for col_fam, col_fam_obj in six.iteritems(column_family_map): + result[col_fam] = _gc_rule_to_dict(col_fam_obj.gc_rule) + return result + + def regions(self): + """Retrieve the regions for this table. + + .. warning:: + + Cloud Bigtable does not give information about how a table is laid + out in memory, so this method does not work. It is + provided simply for compatibility. + + :raises: :class:`NotImplementedError ` + always + """ + raise NotImplementedError('The Cloud Bigtable API does not have a ' + 'concept of splitting a table into regions.') + + def row(self, row, columns=None, timestamp=None, include_timestamp=False): + """Retrieve a single row of data. + + Returns the latest cells in each column (or all columns if ``columns`` + is not specified). If a ``timestamp`` is set, then **latest** becomes + **latest** up until ``timestamp``. + + :type row: str + :param row: Row key for the row we are reading from. + + :type columns: list + :param columns: (Optional) Iterable containing column names (as + strings). Each column name can be either + + * an entire column family: ``fam`` or ``fam:`` + * a single column: ``fam:col`` + + :type timestamp: int + :param timestamp: (Optional) Timestamp (in milliseconds since the + epoch). If specified, only cells returned before the + the timestamp will be returned. + + :type include_timestamp: bool + :param include_timestamp: Flag to indicate if cell timestamps should be + included with the output. + + :rtype: dict + :returns: Dictionary containing all the latest column values in + the row. + """ + filters = [] + if columns is not None: + filters.append(_columns_filter_helper(columns)) + # versions == 1 since we only want the latest. 
+ filter_ = _filter_chain_helper(versions=1, timestamp=timestamp, + filters=filters) + + partial_row_data = self._low_level_table.read_row( + row, filter_=filter_) + if partial_row_data is None: + return {} + + return _partial_row_to_dict(partial_row_data, + include_timestamp=include_timestamp) + + def rows(self, rows, columns=None, timestamp=None, + include_timestamp=False): + """Retrieve multiple rows of data. + + All optional arguments behave the same in this method as they do in + :meth:`row`. + + :type rows: list + :param rows: Iterable of the row keys for the rows we are reading from. + + :type columns: list + :param columns: (Optional) Iterable containing column names (as + strings). Each column name can be either + + * an entire column family: ``fam`` or ``fam:`` + * a single column: ``fam:col`` + + :type timestamp: int + :param timestamp: (Optional) Timestamp (in milliseconds since the + epoch). If specified, only cells returned before (or + at) the timestamp will be returned. + + :type include_timestamp: bool + :param include_timestamp: Flag to indicate if cell timestamps should be + included with the output. + + :rtype: list + :returns: A list of pairs, where the first is the row key and the + second is a dictionary with the filtered values returned. + """ + if not rows: + # Avoid round-trip if the result is empty anyway + return [] + + filters = [] + if columns is not None: + filters.append(_columns_filter_helper(columns)) + filters.append(_row_keys_filter_helper(rows)) + # versions == 1 since we only want the latest. + filter_ = _filter_chain_helper(versions=1, timestamp=timestamp, + filters=filters) + + partial_rows_data = self._low_level_table.read_rows(filter_=filter_) + # NOTE: We could use max_loops = 1000 or some similar value to ensure + # that the stream isn't open too long. 
+ partial_rows_data.consume_all() + + result = [] + for row_key in rows: + if row_key not in partial_rows_data.rows: + continue + curr_row_data = partial_rows_data.rows[row_key] + curr_row_dict = _partial_row_to_dict( + curr_row_data, include_timestamp=include_timestamp) + result.append((row_key, curr_row_dict)) + + return result + + def cells(self, row, column, versions=None, timestamp=None, + include_timestamp=False): + """Retrieve multiple versions of a single cell from the table. + + :type row: str + :param row: Row key for the row we are reading from. + + :type column: str + :param column: Column we are reading from; of the form ``fam:col``. + + :type versions: int + :param versions: (Optional) The maximum number of cells to return. If + not set, returns all cells found. + + :type timestamp: int + :param timestamp: (Optional) Timestamp (in milliseconds since the + epoch). If specified, only cells returned before (or + at) the timestamp will be returned. + + :type include_timestamp: bool + :param include_timestamp: Flag to indicate if cell timestamps should be + included with the output. + + :rtype: list + :returns: List of values in the cell (with timestamps if + ``include_timestamp`` is :data:`True`). + """ + filter_ = _filter_chain_helper(column=column, versions=versions, + timestamp=timestamp) + partial_row_data = self._low_level_table.read_row(row, filter_=filter_) + if partial_row_data is None: + return [] + else: + cells = partial_row_data._cells + # We know that `_filter_chain_helper` has already verified that + # column will split as such. + column_family_id, column_qualifier = column.split(':') + # NOTE: We expect the only key in `cells` is `column_family_id` + # and the only key `cells[column_family_id]` is + # `column_qualifier`. But we don't check that this is true. 
+ curr_cells = cells[column_family_id][column_qualifier] + return _cells_to_pairs( + curr_cells, include_timestamp=include_timestamp) + + def scan(self, row_start=None, row_stop=None, row_prefix=None, + columns=None, timestamp=None, + include_timestamp=False, limit=None, **kwargs): + """Create a scanner for data in this table. + + This method returns a generator that can be used for looping over the + matching rows. + + If ``row_prefix`` is specified, only rows with row keys matching the + prefix will be returned. If given, ``row_start`` and ``row_stop`` + cannot be used. + + .. note:: + + Both ``row_start`` and ``row_stop`` can be :data:`None` to specify + the start and the end of the table respectively. If both are + omitted, a full table scan is done. Note that this usually results + in severe performance problems. + + The keyword argument ``filter`` is also supported (beyond column and + row range filters supported here). HappyBase / HBase users will have + used this as an HBase filter string. (See the `Thrift docs`_ for more + details on those filters.) However, Google Cloud Bigtable doesn't + support those filter strings so a + :class:`~gcloud.bigtable.row.RowFilter` should be used instead. + + .. _Thrift docs: http://hbase.apache.org/0.94/book/thrift.html + + The arguments ``batch_size``, ``scan_batching`` and ``sorted_columns`` + are allowed (as keyword arguments) for compatibility with + HappyBase. However, they will not be used in any way, and will cause a + warning if passed. (The ``batch_size`` determines the number of + results to retrieve per request. The HBase scanner defaults to reading + one record at a time, so this argument allows HappyBase to increase + that number. However, the Cloud Bigtable API uses HTTP/2 streaming so + there is no concept of a batched scan. The ``sorted_columns`` flag + tells HBase to return columns in order, but Cloud Bigtable doesn't + have this feature.) 
+ + :type row_start: str + :param row_start: (Optional) Row key where the scanner should start + (includes ``row_start``). If not specified, reads + from the first key. If the table does not contain + ``row_start``, it will start from the next key after + it that **is** contained in the table. + + :type row_stop: str + :param row_stop: (Optional) Row key where the scanner should stop + (excludes ``row_stop``). If not specified, reads + until the last key. The table does not have to contain + ``row_stop``. + + :type row_prefix: str + :param row_prefix: (Optional) Prefix to match row keys. + + :type columns: list + :param columns: (Optional) Iterable containing column names (as + strings). Each column name can be either + + * an entire column family: ``fam`` or ``fam:`` + * a single column: ``fam:col`` + + :type timestamp: int + :param timestamp: (Optional) Timestamp (in milliseconds since the + epoch). If specified, only cells returned before (or + at) the timestamp will be returned. + + :type include_timestamp: bool + :param include_timestamp: Flag to indicate if cell timestamps should be + included with the output. + + :type limit: int + :param limit: (Optional) Maximum number of rows to return. + + :type kwargs: dict + :param kwargs: Remaining keyword arguments. Provided for HappyBase + compatibility. + + :raises: If ``limit`` is set but non-positive, or if ``row_prefix`` is + used with row start/stop, + :class:`TypeError ` if a string + ``filter`` is used. + """ + row_start, row_stop, filter_chain = _scan_filter_helper( + row_start, row_stop, row_prefix, columns, timestamp, limit, kwargs) + + partial_rows_data = self._low_level_table.read_rows( + start_key=row_start, end_key=row_stop, + limit=limit, filter_=filter_chain) + + # Mutable copy of data. 
+ rows_dict = partial_rows_data.rows + while True: + try: + partial_rows_data.consume_next() + for row_key in sorted(rows_dict): + curr_row_data = rows_dict.pop(row_key) + # NOTE: We expect len(rows_dict) == 0, but don't check it. + curr_row_dict = _partial_row_to_dict( + curr_row_data, include_timestamp=include_timestamp) + yield (row_key, curr_row_dict) + except StopIteration: + break + + def put(self, row, data, timestamp=None, wal=_WAL_SENTINEL): + """Insert data into a row in this table. + + .. note:: + + This method will send a request with a single "put" mutation. + In many situations, :meth:`batch` is a more appropriate + method to manipulate data since it helps combine many mutations + into a single request. + + :type row: str + :param row: The row key where the mutation will be "put". + + :type data: dict + :param data: Dictionary containing the data to be inserted. The keys + are columns names (of the form ``fam:col``) and the values + are strings (bytes) to be stored in those columns. + + :type timestamp: int + :param timestamp: (Optional) Timestamp (in milliseconds since the + epoch) that the mutation will be applied at. + + :type wal: object + :param wal: Unused parameter (to be passed to a created batch). + Provided for compatibility with HappyBase, but irrelevant + for Cloud Bigtable since it does not have a Write Ahead + Log. + """ + with self.batch(timestamp=timestamp, wal=wal) as batch: + batch.put(row, data) + + def delete(self, row, columns=None, timestamp=None, wal=_WAL_SENTINEL): + """Delete data from a row in this table. + + This method deletes the entire ``row`` if ``columns`` is not + specified. + + .. note:: + + This method will send a request with a single delete mutation. + In many situations, :meth:`batch` is a more appropriate + method to manipulate data since it helps combine many mutations + into a single request. + + :type row: str + :param row: The row key where the delete will occur. 
+ + :type columns: list + :param columns: (Optional) Iterable containing column names (as + strings). Each column name can be either + + * an entire column family: ``fam`` or ``fam:`` + * a single column: ``fam:col`` + + :type timestamp: int + :param timestamp: (Optional) Timestamp (in milliseconds since the + epoch) that the mutation will be applied at. + + :type wal: object + :param wal: Unused parameter (to be passed to a created batch). + Provided for compatibility with HappyBase, but irrelevant + for Cloud Bigtable since it does not have a Write Ahead + Log. + """ + with self.batch(timestamp=timestamp, wal=wal) as batch: + batch.delete(row, columns) + + def batch(self, timestamp=None, batch_size=None, transaction=False, + wal=_WAL_SENTINEL): + """Create a new batch operation for this table. + + This method returns a new + :class:`Batch <.happybase.batch.Batch>` instance that can be + used for mass data manipulation. + + :type timestamp: int + :param timestamp: (Optional) Timestamp (in milliseconds since the + epoch) that all mutations will be applied at. + + :type batch_size: int + :param batch_size: (Optional) The maximum number of mutations to allow + to accumulate before committing them. + + :type transaction: bool + :param transaction: Flag indicating if the mutations should be sent + transactionally or not. If ``transaction=True`` and + an error occurs while a + :class:`Batch <.happybase.batch.Batch>` is + active, then none of the accumulated mutations will + be committed. If ``batch_size`` is set, the + mutation can't be transactional. + + :type wal: object + :param wal: Unused parameter (to be passed to the created batch). + Provided for compatibility with HappyBase, but irrelevant + for Cloud Bigtable since it does not have a Write Ahead + Log. + + :rtype: :class:`Batch ` + :returns: A batch bound to this table. 
+ """ + return Batch(self, timestamp=timestamp, batch_size=batch_size, + transaction=transaction, wal=wal) + + def counter_get(self, row, column): + """Retrieve the current value of a counter column. + + This method retrieves the current value of a counter column. If the + counter column does not exist, this function initializes it to ``0``. + + .. note:: + + Application code should **never** store a counter value directly; + use the atomic :meth:`counter_inc` and :meth:`counter_dec` methods + for that. + + :type row: str + :param row: Row key for the row we are getting a counter from. + + :type column: str + :param column: Column we are ``get``-ing from; of the form ``fam:col``. + + :rtype: int + :returns: Counter value (after initializing / incrementing by 0). + """ + # Don't query directly, but increment with value=0 so that the counter + # is correctly initialized if didn't exist yet. + return self.counter_inc(row, column, value=0) + + def counter_set(self, row, column, value=0): + """Set a counter column to a specific value. + + This method is provided in HappyBase, but we do not provide it here + because it defeats the purpose of using atomic increment and decrement + of a counter. + + :type row: str + :param row: Row key for the row we are setting a counter in. + + :type column: str + :param column: Column we are setting a value in; of + the form ``fam:col``. + + :type value: int + :param value: Value to set the counter to. + + :raises: :class:`NotImplementedError ` + always + """ + raise NotImplementedError('Table.counter_set will not be implemented. ' + 'Instead use the increment/decrement ' + 'methods along with counter_get.') + + def counter_inc(self, row, column, value=1): + """Atomically increment a counter column. + + This method atomically increments a counter column in ``row``. + If the counter column does not exist, it is automatically initialized + to ``0`` before being incremented. 
+ + :type row: str + :param row: Row key for the row we are incrementing a counter in. + + :type column: str + :param column: Column we are incrementing a value in; of the + form ``fam:col``. + + :type value: int + :param value: Amount to increment the counter by. (If negative, + this is equivalent to decrement.) + + :rtype: int + :returns: Counter value after incrementing. + """ + row = self._low_level_table.row(row, append=True) + if isinstance(column, six.binary_type): + column = column.decode('utf-8') + column_family_id, column_qualifier = column.split(':') + row.increment_cell_value(column_family_id, column_qualifier, value) + # See AppendRow.commit() will return a dictionary: + # { + # u'col-fam-id': { + # b'col-name1': [ + # (b'cell-val', datetime.datetime(...)), + # ... + # ], + # ... + # }, + # } + modified_cells = row.commit() + # Get the cells in the modified column, + column_cells = modified_cells[column_family_id][column_qualifier] + # Make sure there is exactly one cell in the column. + if len(column_cells) != 1: + raise ValueError('Expected server to return one modified cell.') + column_cell = column_cells[0] + # Get the bytes value from the column and convert it to an integer. + bytes_value = column_cell[0] + int_value, = _UNPACK_I64(bytes_value) + return int_value + + def counter_dec(self, row, column, value=1): + """Atomically decrement a counter column. + + This method atomically decrements a counter column in ``row``. + If the counter column does not exist, it is automatically initialized + to ``0`` before being decremented. + + :type row: str + :param row: Row key for the row we are decrementing a counter in. + + :type column: str + :param column: Column we are decrementing a value in; of the + form ``fam:col``. + + :type value: int + :param value: Amount to decrement the counter by. (If negative, + this is equivalent to increment.) + + :rtype: int + :returns: Counter value after decrementing. 
+ """ + return self.counter_inc(row, column, -value) + + +def _gc_rule_to_dict(gc_rule): + """Converts garbage collection rule to dictionary if possible. + + This is in place to support dictionary values as was done + in HappyBase, which has somewhat different garbage collection rule + settings for column families. + + Only does this if the garbage collection rule is: + + * :class:`gcloud.bigtable.column_family.MaxAgeGCRule` + * :class:`gcloud.bigtable.column_family.MaxVersionsGCRule` + * Composite :class:`gcloud.bigtable.column_family.GCRuleIntersection` + with two rules, one each of type + :class:`gcloud.bigtable.column_family.MaxAgeGCRule` and + :class:`gcloud.bigtable.column_family.MaxVersionsGCRule` + + Otherwise, just returns the input without change. + + :type gc_rule: :data:`NoneType `, + :class:`.GarbageCollectionRule` + :param gc_rule: A garbage collection rule to convert to a dictionary + (if possible). + + :rtype: dict or + :class:`gcloud.bigtable.column_family.GarbageCollectionRule` + :returns: The converted garbage collection rule. + """ + result = gc_rule + if gc_rule is None: + result = {} + elif isinstance(gc_rule, MaxAgeGCRule): + result = {'time_to_live': _total_seconds(gc_rule.max_age)} + elif isinstance(gc_rule, MaxVersionsGCRule): + result = {'max_versions': gc_rule.max_num_versions} + elif isinstance(gc_rule, GCRuleIntersection): + if len(gc_rule.rules) == 2: + rule1, rule2 = gc_rule.rules + if (isinstance(rule1, _SIMPLE_GC_RULES) and + isinstance(rule2, _SIMPLE_GC_RULES)): + rule1 = _gc_rule_to_dict(rule1) + rule2 = _gc_rule_to_dict(rule2) + key1, = rule1.keys() + key2, = rule2.keys() + if key1 != key2: + result = {key1: rule1[key1], key2: rule2[key2]} + return result + + +def _next_char(str_val, index): + """Gets the next character based on a position in a string. + + :type str_val: str + :param str_val: A string containing the character to update. + + :type index: int + :param index: An integer index in ``str_val``. 
+ + :rtype: str + :returns: The next character after the character at ``index`` + in ``str_val``. + """ + ord_val = six.indexbytes(str_val, index) + return _to_bytes(chr(ord_val + 1), encoding='latin-1') + + +def _string_successor(str_val): + """Increment and truncate a byte string. + + Determines shortest string that sorts after the given string when + compared using regular string comparison semantics. + + Modeled after implementation in ``gcloud-golang``. + + Increments the last byte that is smaller than ``0xFF``, and + drops everything after it. If the string only contains ``0xFF`` bytes, + ``''`` is returned. + + :type str_val: str + :param str_val: String to increment. + + :rtype: str + :returns: The next string in lexical order after ``str_val``. + """ + str_val = _to_bytes(str_val, encoding='latin-1') + if str_val == b'': + return str_val + + index = len(str_val) - 1 + while index >= 0: + if six.indexbytes(str_val, index) != 0xff: + break + index -= 1 + + if index == -1: + return b'' + + return str_val[:index] + _next_char(str_val, index) + + +def _convert_to_time_range(timestamp=None): + """Create a timestamp range from an HBase / HappyBase timestamp. + + HBase uses timestamp as an argument to specify an exclusive end + deadline. Cloud Bigtable also uses exclusive end times, so + the behavior matches. + + :type timestamp: int + :param timestamp: (Optional) Timestamp (in milliseconds since the + epoch). Intended to be used as the end of an HBase + time range, which is exclusive. + + :rtype: :class:`gcloud.bigtable.row.TimestampRange`, + :data:`NoneType ` + :returns: The timestamp range corresponding to the passed in + ``timestamp``. + """ + if timestamp is None: + return None + + next_timestamp = _datetime_from_microseconds(1000 * timestamp) + return TimestampRange(end=next_timestamp) + + +def _cells_to_pairs(cells, include_timestamp=False): + """Converts list of cells to HappyBase format. 
+ + For example:: + + >>> import datetime + >>> from gcloud.bigtable.row_data import Cell + >>> cell1 = Cell(b'val1', datetime.datetime.utcnow()) + >>> cell2 = Cell(b'val2', datetime.datetime.utcnow()) + >>> _cells_to_pairs([cell1, cell2]) + [b'val1', b'val2'] + >>> _cells_to_pairs([cell1, cell2], include_timestamp=True) + [(b'val1', 1456361486255), (b'val2', 1456361491927)] + + :type cells: list + :param cells: List of :class:`gcloud.bigtable.row_data.Cell` returned + from a read request. + + :type include_timestamp: bool + :param include_timestamp: Flag to indicate if cell timestamps should be + included with the output. + + :rtype: list + :returns: List of values in the cell. If ``include_timestamp=True``, each + value will be a pair, with the first part the bytes value in + the cell and the second part the number of milliseconds in the + timestamp on the cell. + """ + result = [] + for cell in cells: + if include_timestamp: + ts_millis = _microseconds_from_datetime(cell.timestamp) // 1000 + result.append((cell.value, ts_millis)) + else: + result.append(cell.value) + return result + + +def _partial_row_to_dict(partial_row_data, include_timestamp=False): + """Convert a low-level row data object to a dictionary. + + Assumes only the latest value in each row is needed. This assumption + is due to the fact that this method is used by callers which use + a ``CellsColumnLimitFilter(1)`` filter. 
+ + For example:: + + >>> import datetime + >>> from gcloud.bigtable.row_data import Cell, PartialRowData + >>> cell1 = Cell(b'val1', datetime.datetime.utcnow()) + >>> cell2 = Cell(b'val2', datetime.datetime.utcnow()) + >>> row_data = PartialRowData(b'row-key') + >>> _partial_row_to_dict(row_data) + {} + >>> row_data._cells[u'fam1'] = {b'col1': [cell1], b'col2': [cell2]} + >>> _partial_row_to_dict(row_data) + {b'fam1:col2': b'val2', b'fam1:col1': b'val1'} + >>> _partial_row_to_dict(row_data, include_timestamp=True) + {b'fam1:col2': (b'val2', 1456361724480), + b'fam1:col1': (b'val1', 1456361721135)} + + :type partial_row_data: :class:`.row_data.PartialRowData` + :param partial_row_data: Row data consumed from a stream. + + :type include_timestamp: bool + :param include_timestamp: Flag to indicate if cell timestamps should be + included with the output. + + :rtype: dict + :returns: The row data converted to a dictionary. + """ + result = {} + for column, cells in six.iteritems(partial_row_data.to_dict()): + cell_vals = _cells_to_pairs(cells, + include_timestamp=include_timestamp) + # NOTE: We assume there is exactly 1 version since we used that in + # our filter, but we don't check this. + result[column] = cell_vals[0] + return result + + +def _filter_chain_helper(column=None, versions=None, timestamp=None, + filters=None): + """Create filter chain to limit a results set. + + :type column: str + :param column: (Optional) The column (``fam:col``) to be selected + with the filter. + + :type versions: int + :param versions: (Optional) The maximum number of cells to return. + + :type timestamp: int + :param timestamp: (Optional) Timestamp (in milliseconds since the + epoch). If specified, only cells returned before (or + at) the timestamp will be matched. + + :type filters: list + :param filters: (Optional) List of existing filters to be extended. + + :rtype: :class:`RowFilter ` + :returns: The chained filter created, or just a single filter if only + one was needed. 
+ :raises: :class:`ValueError ` if there are no + filters to chain. + """ + if filters is None: + filters = [] + + if column is not None: + if isinstance(column, six.binary_type): + column = column.decode('utf-8') + column_family_id, column_qualifier = column.split(':') + fam_filter = FamilyNameRegexFilter(column_family_id) + qual_filter = ColumnQualifierRegexFilter(column_qualifier) + filters.extend([fam_filter, qual_filter]) + if versions is not None: + filters.append(CellsColumnLimitFilter(versions)) + time_range = _convert_to_time_range(timestamp=timestamp) + if time_range is not None: + filters.append(TimestampRangeFilter(time_range)) + + num_filters = len(filters) + if num_filters == 0: + raise ValueError('Must have at least one filter.') + elif num_filters == 1: + return filters[0] + else: + return RowFilterChain(filters=filters) + + +def _scan_filter_helper(row_start, row_stop, row_prefix, columns, + timestamp, limit, kwargs): + """Helper for :meth:`scan`: build up a filter chain.""" + filter_ = kwargs.pop('filter', None) + legacy_args = [] + for kw_name in ('batch_size', 'scan_batching', 'sorted_columns'): + if kw_name in kwargs: + legacy_args.append(kw_name) + kwargs.pop(kw_name) + if legacy_args: + legacy_args = ', '.join(legacy_args) + message = ('The HappyBase legacy arguments %s were used. These ' + 'arguments are unused by gcloud.' % (legacy_args,)) + _WARN(message) + if kwargs: + raise TypeError('Received unexpected arguments', kwargs.keys()) + + if limit is not None and limit < 1: + raise ValueError('limit must be positive') + if row_prefix is not None: + if row_start is not None or row_stop is not None: + raise ValueError('row_prefix cannot be combined with ' + 'row_start or row_stop') + row_start = row_prefix + row_stop = _string_successor(row_prefix) + + filters = [] + if isinstance(filter_, six.string_types): + raise TypeError('Specifying filters as a string is not supported ' + 'by Cloud Bigtable. 
Use a ' + 'gcloud.bigtable.row.RowFilter instead.') + elif filter_ is not None: + filters.append(filter_) + + if columns is not None: + filters.append(_columns_filter_helper(columns)) + + # versions == 1 since we only want the latest. + filter_ = _filter_chain_helper(versions=1, timestamp=timestamp, + filters=filters) + return row_start, row_stop, filter_ + + +def _columns_filter_helper(columns): + """Creates a union filter for a list of columns. + + :type columns: list + :param columns: Iterable containing column names (as strings). Each column + name can be either + + * an entire column family: ``fam`` or ``fam:`` + * a single column: ``fam:col`` + + :rtype: :class:`RowFilter ` + :returns: The union filter created containing all of the matched columns. + :raises: :class:`ValueError ` if there are no + filters to union. + """ + filters = [] + for column_family_id, column_qualifier in _get_column_pairs(columns): + fam_filter = FamilyNameRegexFilter(column_family_id) + if column_qualifier is not None: + qual_filter = ColumnQualifierRegexFilter(column_qualifier) + combined_filter = RowFilterChain( + filters=[fam_filter, qual_filter]) + filters.append(combined_filter) + else: + filters.append(fam_filter) + + num_filters = len(filters) + if num_filters == 0: + raise ValueError('Must have at least one filter.') + elif num_filters == 1: + return filters[0] + else: + return RowFilterUnion(filters=filters) + + +def _row_keys_filter_helper(row_keys): + """Creates a union filter for a list of rows. + + :type row_keys: list + :param row_keys: Iterable containing row keys (as strings). + + :rtype: :class:`RowFilter ` + :returns: The union filter created containing all of the row keys. + :raises: :class:`ValueError ` if there are no + filters to union. 
+ """ + filters = [] + for row_key in row_keys: + filters.append(RowKeyRegexFilter(row_key)) + + num_filters = len(filters) + if num_filters == 0: + raise ValueError('Must have at least one filter.') + elif num_filters == 1: + return filters[0] + else: + return RowFilterUnion(filters=filters) diff --git a/env/Lib/site-packages/gcloud/bigtable/happybase/test_batch.py b/env/Lib/site-packages/gcloud/bigtable/happybase/test_batch.py new file mode 100644 index 0000000..cf2156f --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/happybase/test_batch.py @@ -0,0 +1,568 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import unittest2 + + +class _SendMixin(object): + + _send_called = False + + def send(self): + self._send_called = True + + +class TestBatch(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.happybase.batch import Batch + return Batch + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor_defaults(self): + table = object() + batch = self._makeOne(table) + self.assertEqual(batch._table, table) + self.assertEqual(batch._batch_size, None) + self.assertEqual(batch._timestamp, None) + self.assertEqual(batch._delete_range, None) + self.assertEqual(batch._transaction, False) + self.assertEqual(batch._row_map, {}) + self.assertEqual(batch._mutation_count, 0) + + def test_constructor_explicit(self): + from gcloud._helpers import _datetime_from_microseconds + from gcloud.bigtable.row_filters import TimestampRange + + table = object() + timestamp = 144185290431 + batch_size = 42 + transaction = False # Must be False when batch_size is non-null + + batch = self._makeOne(table, timestamp=timestamp, + batch_size=batch_size, transaction=transaction) + self.assertEqual(batch._table, table) + self.assertEqual(batch._batch_size, batch_size) + self.assertEqual(batch._timestamp, + _datetime_from_microseconds(1000 * timestamp)) + + next_timestamp = _datetime_from_microseconds(1000 * (timestamp + 1)) + time_range = TimestampRange(end=next_timestamp) + self.assertEqual(batch._delete_range, time_range) + self.assertEqual(batch._transaction, transaction) + self.assertEqual(batch._row_map, {}) + self.assertEqual(batch._mutation_count, 0) + + def test_constructor_with_non_default_wal(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import batch as MUT + + warned = [] + + def mock_warn(msg): + warned.append(msg) + + table = object() + wal = object() + with _Monkey(MUT, _WARN=mock_warn): + self._makeOne(table, wal=wal) + + self.assertEqual(warned, [MUT._WAL_WARNING]) + + def 
test_constructor_with_non_positive_batch_size(self): + table = object() + batch_size = -10 + with self.assertRaises(ValueError): + self._makeOne(table, batch_size=batch_size) + batch_size = 0 + with self.assertRaises(ValueError): + self._makeOne(table, batch_size=batch_size) + + def test_constructor_with_batch_size_and_transactional(self): + table = object() + batch_size = 1 + transaction = True + with self.assertRaises(TypeError): + self._makeOne(table, batch_size=batch_size, + transaction=transaction) + + def test_send(self): + table = object() + batch = self._makeOne(table) + + batch._row_map = row_map = _MockRowMap() + row_map['row-key1'] = row1 = _MockRow() + row_map['row-key2'] = row2 = _MockRow() + batch._mutation_count = 1337 + + self.assertEqual(row_map.clear_count, 0) + self.assertEqual(row1.commits, 0) + self.assertEqual(row2.commits, 0) + self.assertNotEqual(batch._mutation_count, 0) + self.assertNotEqual(row_map, {}) + + batch.send() + self.assertEqual(row_map.clear_count, 1) + self.assertEqual(row1.commits, 1) + self.assertEqual(row2.commits, 1) + self.assertEqual(batch._mutation_count, 0) + self.assertEqual(row_map, {}) + + def test__try_send_no_batch_size(self): + klass = self._getTargetClass() + + class BatchWithSend(_SendMixin, klass): + pass + + table = object() + batch = BatchWithSend(table) + + self.assertEqual(batch._batch_size, None) + self.assertFalse(batch._send_called) + batch._try_send() + self.assertFalse(batch._send_called) + + def test__try_send_too_few_mutations(self): + klass = self._getTargetClass() + + class BatchWithSend(_SendMixin, klass): + pass + + table = object() + batch_size = 10 + batch = BatchWithSend(table, batch_size=batch_size) + + self.assertEqual(batch._batch_size, batch_size) + self.assertFalse(batch._send_called) + mutation_count = 2 + batch._mutation_count = mutation_count + self.assertTrue(mutation_count < batch_size) + batch._try_send() + self.assertFalse(batch._send_called) + + def 
test__try_send_actual_send(self): + klass = self._getTargetClass() + + class BatchWithSend(_SendMixin, klass): + pass + + table = object() + batch_size = 10 + batch = BatchWithSend(table, batch_size=batch_size) + + self.assertEqual(batch._batch_size, batch_size) + self.assertFalse(batch._send_called) + mutation_count = 12 + batch._mutation_count = mutation_count + self.assertTrue(mutation_count > batch_size) + batch._try_send() + self.assertTrue(batch._send_called) + + def test__get_row_exists(self): + table = object() + batch = self._makeOne(table) + + row_key = 'row-key' + row_obj = object() + batch._row_map[row_key] = row_obj + result = batch._get_row(row_key) + self.assertEqual(result, row_obj) + + def test__get_row_create_new(self): + # Make mock batch and make sure we can create a low-level table. + low_level_table = _MockLowLevelTable() + table = _MockTable(low_level_table) + batch = self._makeOne(table) + + # Make sure row map is empty. + self.assertEqual(batch._row_map, {}) + + # Customize/capture mock table creation. + low_level_table.mock_row = mock_row = object() + + # Actually get the row (which creates a row via a low-level table). + row_key = 'row-key' + result = batch._get_row(row_key) + self.assertEqual(result, mock_row) + + # Check all the things that were constructed. + self.assertEqual(low_level_table.rows_made, [row_key]) + # Check how the batch was updated. + self.assertEqual(batch._row_map, {row_key: mock_row}) + + def test_put_bad_wal(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import batch as MUT + + warned = [] + + def mock_warn(message): + warned.append(message) + # Raise an exception so we don't have to mock the entire + # environment needed for put(). 
+ raise RuntimeError('No need to execute the rest.') + + table = object() + batch = self._makeOne(table) + + row = 'row-key' + data = {} + wal = None + + self.assertNotEqual(wal, MUT._WAL_SENTINEL) + with _Monkey(MUT, _WARN=mock_warn): + with self.assertRaises(RuntimeError): + batch.put(row, data, wal=wal) + + self.assertEqual(warned, [MUT._WAL_WARNING]) + + def test_put(self): + import operator + + table = object() + batch = self._makeOne(table) + batch._timestamp = timestamp = object() + row_key = 'row-key' + batch._row_map[row_key] = row = _MockRow() + + col1_fam = 'cf1' + col1_qual = 'qual1' + value1 = 'value1' + col2_fam = 'cf2' + col2_qual = 'qual2' + value2 = 'value2' + data = {col1_fam + ':' + col1_qual: value1, + col2_fam + ':' + col2_qual: value2} + + self.assertEqual(batch._mutation_count, 0) + self.assertEqual(row.set_cell_calls, []) + batch.put(row_key, data) + self.assertEqual(batch._mutation_count, 2) + # Since the calls depend on data.keys(), the order + # is non-deterministic. 
+ first_elt = operator.itemgetter(0) + ordered_calls = sorted(row.set_cell_calls, key=first_elt) + + cell1_args = (col1_fam, col1_qual, value1) + cell1_kwargs = {'timestamp': timestamp} + cell2_args = (col2_fam, col2_qual, value2) + cell2_kwargs = {'timestamp': timestamp} + self.assertEqual(ordered_calls, [ + (cell1_args, cell1_kwargs), + (cell2_args, cell2_kwargs), + ]) + + def test_put_call_try_send(self): + klass = self._getTargetClass() + + class CallTrySend(klass): + + try_send_calls = 0 + + def _try_send(self): + self.try_send_calls += 1 + + table = object() + batch = CallTrySend(table) + + row_key = 'row-key' + batch._row_map[row_key] = _MockRow() + + self.assertEqual(batch._mutation_count, 0) + self.assertEqual(batch.try_send_calls, 0) + # No data so that nothing happens + batch.put(row_key, data={}) + self.assertEqual(batch._mutation_count, 0) + self.assertEqual(batch.try_send_calls, 1) + + def _delete_columns_test_helper(self, time_range=None): + table = object() + batch = self._makeOne(table) + batch._delete_range = time_range + + col1_fam = 'cf1' + col2_fam = 'cf2' + col2_qual = 'col-name' + columns = [col1_fam + ':', col2_fam + ':' + col2_qual] + row_object = _MockRow() + + batch._delete_columns(columns, row_object) + self.assertEqual(row_object.commits, 0) + + cell_deleted_args = (col2_fam, col2_qual) + cell_deleted_kwargs = {'time_range': time_range} + self.assertEqual(row_object.delete_cell_calls, + [(cell_deleted_args, cell_deleted_kwargs)]) + fam_deleted_args = (col1_fam,) + fam_deleted_kwargs = {'columns': row_object.ALL_COLUMNS} + self.assertEqual(row_object.delete_cells_calls, + [(fam_deleted_args, fam_deleted_kwargs)]) + + def test__delete_columns(self): + self._delete_columns_test_helper() + + def test__delete_columns_w_time_and_col_fam(self): + time_range = object() + with self.assertRaises(ValueError): + self._delete_columns_test_helper(time_range=time_range) + + def test_delete_bad_wal(self): + from gcloud._testing import _Monkey + from 
gcloud.bigtable.happybase import batch as MUT + + warned = [] + + def mock_warn(message): + warned.append(message) + # Raise an exception so we don't have to mock the entire + # environment needed for delete(). + raise RuntimeError('No need to execute the rest.') + + table = object() + batch = self._makeOne(table) + + row = 'row-key' + columns = [] + wal = None + + self.assertNotEqual(wal, MUT._WAL_SENTINEL) + with _Monkey(MUT, _WARN=mock_warn): + with self.assertRaises(RuntimeError): + batch.delete(row, columns=columns, wal=wal) + + self.assertEqual(warned, [MUT._WAL_WARNING]) + + def test_delete_entire_row(self): + table = object() + batch = self._makeOne(table) + + row_key = 'row-key' + batch._row_map[row_key] = row = _MockRow() + + self.assertEqual(row.deletes, 0) + self.assertEqual(batch._mutation_count, 0) + batch.delete(row_key, columns=None) + self.assertEqual(row.deletes, 1) + self.assertEqual(batch._mutation_count, 1) + + def test_delete_entire_row_with_ts(self): + table = object() + batch = self._makeOne(table) + batch._delete_range = object() + + row_key = 'row-key' + batch._row_map[row_key] = row = _MockRow() + + self.assertEqual(row.deletes, 0) + self.assertEqual(batch._mutation_count, 0) + with self.assertRaises(ValueError): + batch.delete(row_key, columns=None) + self.assertEqual(row.deletes, 0) + self.assertEqual(batch._mutation_count, 0) + + def test_delete_call_try_send(self): + klass = self._getTargetClass() + + class CallTrySend(klass): + + try_send_calls = 0 + + def _try_send(self): + self.try_send_calls += 1 + + table = object() + batch = CallTrySend(table) + + row_key = 'row-key' + batch._row_map[row_key] = _MockRow() + + self.assertEqual(batch._mutation_count, 0) + self.assertEqual(batch.try_send_calls, 0) + # No columns so that nothing happens + batch.delete(row_key, columns=[]) + self.assertEqual(batch._mutation_count, 0) + self.assertEqual(batch.try_send_calls, 1) + + def test_delete_some_columns(self): + table = object() + batch = 
self._makeOne(table) + + row_key = 'row-key' + batch._row_map[row_key] = row = _MockRow() + + self.assertEqual(batch._mutation_count, 0) + + col1_fam = 'cf1' + col2_fam = 'cf2' + col2_qual = 'col-name' + columns = [col1_fam + ':', col2_fam + ':' + col2_qual] + batch.delete(row_key, columns=columns) + + self.assertEqual(batch._mutation_count, 2) + cell_deleted_args = (col2_fam, col2_qual) + cell_deleted_kwargs = {'time_range': None} + self.assertEqual(row.delete_cell_calls, + [(cell_deleted_args, cell_deleted_kwargs)]) + fam_deleted_args = (col1_fam,) + fam_deleted_kwargs = {'columns': row.ALL_COLUMNS} + self.assertEqual(row.delete_cells_calls, + [(fam_deleted_args, fam_deleted_kwargs)]) + + def test_context_manager(self): + klass = self._getTargetClass() + + class BatchWithSend(_SendMixin, klass): + pass + + table = object() + batch = BatchWithSend(table) + self.assertFalse(batch._send_called) + + with batch: + pass + + self.assertTrue(batch._send_called) + + def test_context_manager_with_exception_non_transactional(self): + klass = self._getTargetClass() + + class BatchWithSend(_SendMixin, klass): + pass + + table = object() + batch = BatchWithSend(table) + self.assertFalse(batch._send_called) + + with self.assertRaises(ValueError): + with batch: + raise ValueError('Something bad happened') + + self.assertTrue(batch._send_called) + + def test_context_manager_with_exception_transactional(self): + klass = self._getTargetClass() + + class BatchWithSend(_SendMixin, klass): + pass + + table = object() + batch = BatchWithSend(table, transaction=True) + self.assertFalse(batch._send_called) + + with self.assertRaises(ValueError): + with batch: + raise ValueError('Something bad happened') + + self.assertFalse(batch._send_called) + + # Just to make sure send() actually works (and to make cover happy). 
+ batch.send() + self.assertTrue(batch._send_called) + + +class Test__get_column_pairs(unittest2.TestCase): + + def _callFUT(self, *args, **kwargs): + from gcloud.bigtable.happybase.batch import _get_column_pairs + return _get_column_pairs(*args, **kwargs) + + def test_it(self): + columns = [b'cf1', u'cf2:', 'cf3::', 'cf3:name1', 'cf3:name2'] + result = self._callFUT(columns) + expected_result = [ + ['cf1', None], + ['cf2', None], + ['cf3', ''], + ['cf3', 'name1'], + ['cf3', 'name2'], + ] + self.assertEqual(result, expected_result) + + def test_bad_column(self): + columns = ['a:b:c'] + with self.assertRaises(ValueError): + self._callFUT(columns) + + def test_bad_column_type(self): + columns = [None] + with self.assertRaises(AttributeError): + self._callFUT(columns) + + def test_bad_columns_var(self): + columns = None + with self.assertRaises(TypeError): + self._callFUT(columns) + + def test_column_family_with_require_qualifier(self): + columns = ['a:'] + with self.assertRaises(ValueError): + self._callFUT(columns, require_qualifier=True) + + +class _MockRowMap(dict): + + clear_count = 0 + + def clear(self): + self.clear_count += 1 + super(_MockRowMap, self).clear() + + +class _MockRow(object): + + ALL_COLUMNS = object() + + def __init__(self): + self.commits = 0 + self.deletes = 0 + self.set_cell_calls = [] + self.delete_cell_calls = [] + self.delete_cells_calls = [] + + def commit(self): + self.commits += 1 + + def delete(self): + self.deletes += 1 + + def set_cell(self, *args, **kwargs): + self.set_cell_calls.append((args, kwargs)) + + def delete_cell(self, *args, **kwargs): + self.delete_cell_calls.append((args, kwargs)) + + def delete_cells(self, *args, **kwargs): + self.delete_cells_calls.append((args, kwargs)) + + +class _MockTable(object): + + def __init__(self, low_level_table): + self._low_level_table = low_level_table + + +class _MockLowLevelTable(object): + + def __init__(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + self.rows_made 
= [] + self.mock_row = None + + def row(self, row_key): + self.rows_made.append(row_key) + return self.mock_row diff --git a/env/Lib/site-packages/gcloud/bigtable/happybase/test_connection.py b/env/Lib/site-packages/gcloud/bigtable/happybase/test_connection.py new file mode 100644 index 0000000..6236539 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/happybase/test_connection.py @@ -0,0 +1,682 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import sys + +import unittest2 + + +class Test__get_instance(unittest2.TestCase): + + def _callFUT(self, timeout=None): + from gcloud.bigtable.happybase.connection import _get_instance + return _get_instance(timeout=timeout) + + def _helper(self, timeout=None, instances=(), failed_locations=()): + from functools import partial + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import connection as MUT + + client_with_instances = partial( + _Client, instances=instances, failed_locations=failed_locations) + with _Monkey(MUT, Client=client_with_instances): + result = self._callFUT(timeout=timeout) + + # If we've reached this point, then _callFUT didn't fail, so we know + # there is exactly one instance. 
+ instance, = instances + self.assertEqual(result, instance) + client = instance.client + self.assertEqual(client.args, ()) + expected_kwargs = {'admin': True} + if timeout is not None: + expected_kwargs['timeout_seconds'] = timeout / 1000.0 + self.assertEqual(client.kwargs, expected_kwargs) + self.assertEqual(client.start_calls, 1) + self.assertEqual(client.stop_calls, 1) + + def test_default(self): + instance = _Instance() + self._helper(instances=[instance]) + + def test_with_timeout(self): + instance = _Instance() + self._helper(timeout=2103, instances=[instance]) + + def test_with_no_instances(self): + with self.assertRaises(ValueError): + self._helper() + + def test_with_too_many_instances(self): + instances = [_Instance(), _Instance()] + with self.assertRaises(ValueError): + self._helper(instances=instances) + + def test_with_failed_locations(self): + instance = _Instance() + failed_location = 'us-central1-c' + with self.assertRaises(ValueError): + self._helper(instances=[instance], + failed_locations=[failed_location]) + + +class TestConnection(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.happybase.connection import Connection + return Connection + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor_defaults(self): + instance = _Instance() # Avoid implicit environ check. + self.assertEqual(instance._client.start_calls, 0) + connection = self._makeOne(instance=instance) + self.assertEqual(instance._client.start_calls, 1) + self.assertEqual(instance._client.stop_calls, 0) + + self.assertEqual(connection._instance, instance) + self.assertEqual(connection.table_prefix, None) + self.assertEqual(connection.table_prefix_separator, '_') + + def test_constructor_no_autoconnect(self): + instance = _Instance() # Avoid implicit environ check. 
+ connection = self._makeOne(autoconnect=False, instance=instance) + self.assertEqual(instance._client.start_calls, 0) + self.assertEqual(instance._client.stop_calls, 0) + self.assertEqual(connection.table_prefix, None) + self.assertEqual(connection.table_prefix_separator, '_') + + def test_constructor_missing_instance(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import connection as MUT + + instance = _Instance() + timeout = object() + get_instance_called = [] + + def mock_get_instance(timeout): + get_instance_called.append(timeout) + return instance + + with _Monkey(MUT, _get_instance=mock_get_instance): + connection = self._makeOne(autoconnect=False, instance=None, + timeout=timeout) + self.assertEqual(connection.table_prefix, None) + self.assertEqual(connection.table_prefix_separator, '_') + self.assertEqual(connection._instance, instance) + + self.assertEqual(get_instance_called, [timeout]) + + def test_constructor_explicit(self): + autoconnect = False + table_prefix = 'table-prefix' + table_prefix_separator = 'sep' + instance_copy = _Instance() + instance = _Instance(copies=[instance_copy]) + + connection = self._makeOne( + autoconnect=autoconnect, + table_prefix=table_prefix, + table_prefix_separator=table_prefix_separator, + instance=instance) + self.assertEqual(connection.table_prefix, table_prefix) + self.assertEqual(connection.table_prefix_separator, + table_prefix_separator) + + def test_constructor_with_unknown_argument(self): + instance = _Instance() + with self.assertRaises(TypeError): + self._makeOne(instance=instance, unknown='foo') + + def test_constructor_with_legacy_args(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import connection as MUT + + warned = [] + + def mock_warn(msg): + warned.append(msg) + + instance = _Instance() + with _Monkey(MUT, _WARN=mock_warn): + self._makeOne(instance=instance, host=object(), + port=object(), compat=object(), + transport=object(), 
protocol=object()) + + self.assertEqual(len(warned), 1) + self.assertIn('host', warned[0]) + self.assertIn('port', warned[0]) + self.assertIn('compat', warned[0]) + self.assertIn('transport', warned[0]) + self.assertIn('protocol', warned[0]) + + def test_constructor_with_timeout_and_instance(self): + instance = _Instance() + with self.assertRaises(ValueError): + self._makeOne(instance=instance, timeout=object()) + + def test_constructor_non_string_prefix(self): + table_prefix = object() + + with self.assertRaises(TypeError): + self._makeOne(autoconnect=False, + table_prefix=table_prefix) + + def test_constructor_non_string_prefix_separator(self): + table_prefix_separator = object() + + with self.assertRaises(TypeError): + self._makeOne(autoconnect=False, + table_prefix_separator=table_prefix_separator) + + def test_open(self): + instance = _Instance() # Avoid implicit environ check. + connection = self._makeOne(autoconnect=False, instance=instance) + self.assertEqual(instance._client.start_calls, 0) + connection.open() + self.assertEqual(instance._client.start_calls, 1) + self.assertEqual(instance._client.stop_calls, 0) + + def test_close(self): + instance = _Instance() # Avoid implicit environ check. + connection = self._makeOne(autoconnect=False, instance=instance) + self.assertEqual(instance._client.stop_calls, 0) + connection.close() + self.assertEqual(instance._client.stop_calls, 1) + self.assertEqual(instance._client.start_calls, 0) + + def test___del__with_instance(self): + instance = _Instance() # Avoid implicit environ check. + connection = self._makeOne(autoconnect=False, instance=instance) + self.assertEqual(instance._client.stop_calls, 0) + connection.__del__() + self.assertEqual(instance._client.stop_calls, 1) + + def test___del__no_instance(self): + instance = _Instance() # Avoid implicit environ check. 
+ connection = self._makeOne(autoconnect=False, instance=instance) + self.assertEqual(instance._client.stop_calls, 0) + del connection._instance + connection.__del__() + self.assertEqual(instance._client.stop_calls, 0) + + def test__table_name_with_prefix_set(self): + table_prefix = 'table-prefix' + table_prefix_separator = '<>' + instance = _Instance() + + connection = self._makeOne( + autoconnect=False, + table_prefix=table_prefix, + table_prefix_separator=table_prefix_separator, + instance=instance) + + name = 'some-name' + prefixed = connection._table_name(name) + self.assertEqual(prefixed, + table_prefix + table_prefix_separator + name) + + def test__table_name_with_no_prefix_set(self): + instance = _Instance() + connection = self._makeOne(autoconnect=False, + instance=instance) + + name = 'some-name' + prefixed = connection._table_name(name) + self.assertEqual(prefixed, name) + + def test_table_factory(self): + from gcloud.bigtable.happybase.table import Table + + instance = _Instance() # Avoid implicit environ check. + connection = self._makeOne(autoconnect=False, instance=instance) + + name = 'table-name' + table = connection.table(name) + + self.assertTrue(isinstance(table, Table)) + self.assertEqual(table.name, name) + self.assertEqual(table.connection, connection) + + def _table_factory_prefix_helper(self, use_prefix=True): + from gcloud.bigtable.happybase.table import Table + + instance = _Instance() # Avoid implicit environ check. 
+ table_prefix = 'table-prefix' + table_prefix_separator = '<>' + connection = self._makeOne( + autoconnect=False, table_prefix=table_prefix, + table_prefix_separator=table_prefix_separator, + instance=instance) + + name = 'table-name' + table = connection.table(name, use_prefix=use_prefix) + + self.assertTrue(isinstance(table, Table)) + prefixed_name = table_prefix + table_prefix_separator + name + if use_prefix: + self.assertEqual(table.name, prefixed_name) + else: + self.assertEqual(table.name, name) + self.assertEqual(table.connection, connection) + + def test_table_factory_with_prefix(self): + self._table_factory_prefix_helper(use_prefix=True) + + def test_table_factory_with_ignored_prefix(self): + self._table_factory_prefix_helper(use_prefix=False) + + def test_tables(self): + from gcloud.bigtable.table import Table + + table_name1 = 'table-name1' + table_name2 = 'table-name2' + instance = _Instance(list_tables_result=[ + Table(table_name1, None), + Table(table_name2, None), + ]) + connection = self._makeOne(autoconnect=False, instance=instance) + result = connection.tables() + self.assertEqual(result, [table_name1, table_name2]) + + def test_tables_with_prefix(self): + from gcloud.bigtable.table import Table + + table_prefix = 'prefix' + table_prefix_separator = '<>' + unprefixed_table_name1 = 'table-name1' + + table_name1 = (table_prefix + table_prefix_separator + + unprefixed_table_name1) + table_name2 = 'table-name2' + instance = _Instance(list_tables_result=[ + Table(table_name1, None), + Table(table_name2, None), + ]) + connection = self._makeOne( + autoconnect=False, instance=instance, table_prefix=table_prefix, + table_prefix_separator=table_prefix_separator) + result = connection.tables() + self.assertEqual(result, [unprefixed_table_name1]) + + def test_create_table(self): + import operator + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import connection as MUT + + instance = _Instance() # Avoid implicit environ check. 
+ connection = self._makeOne(autoconnect=False, instance=instance) + mock_gc_rule = object() + called_options = [] + + def mock_parse_family_option(option): + called_options.append(option) + return mock_gc_rule + + name = 'table-name' + col_fam1 = 'cf1' + col_fam_option1 = object() + col_fam2 = u'cf2' + col_fam_option2 = object() + col_fam3 = b'cf3' + col_fam_option3 = object() + families = { + col_fam1: col_fam_option1, + # A trailing colon is also allowed. + col_fam2 + ':': col_fam_option2, + col_fam3 + b':': col_fam_option3, + } + + tables_created = [] + + def make_table(*args, **kwargs): + result = _MockLowLevelTable(*args, **kwargs) + tables_created.append(result) + return result + + with _Monkey(MUT, _LowLevelTable=make_table, + _parse_family_option=mock_parse_family_option): + connection.create_table(name, families) + + # Just one table would have been created. + table_instance, = tables_created + self.assertEqual(table_instance.args, (name, instance)) + self.assertEqual(table_instance.kwargs, {}) + self.assertEqual(table_instance.create_calls, 1) + + # Check if our mock was called twice, but we don't know the order. + self.assertEqual( + set(called_options), + set([col_fam_option1, col_fam_option2, col_fam_option3])) + + # We expect three column family instances created, but don't know the + # order due to non-deterministic dict.items(). 
+ col_fam_created = table_instance.col_fam_created + self.assertEqual(len(col_fam_created), 3) + col_fam_created.sort(key=operator.attrgetter('column_family_id')) + self.assertEqual(col_fam_created[0].column_family_id, col_fam1) + self.assertEqual(col_fam_created[0].gc_rule, mock_gc_rule) + self.assertEqual(col_fam_created[0].create_calls, 1) + self.assertEqual(col_fam_created[1].column_family_id, col_fam2) + self.assertEqual(col_fam_created[1].gc_rule, mock_gc_rule) + self.assertEqual(col_fam_created[1].create_calls, 1) + self.assertEqual(col_fam_created[2].column_family_id, + col_fam3.decode('utf-8')) + self.assertEqual(col_fam_created[2].gc_rule, mock_gc_rule) + self.assertEqual(col_fam_created[2].create_calls, 1) + + def test_create_table_bad_type(self): + instance = _Instance() # Avoid implicit environ check. + connection = self._makeOne(autoconnect=False, instance=instance) + + name = 'table-name' + families = None + with self.assertRaises(TypeError): + connection.create_table(name, families) + + def test_create_table_bad_value(self): + instance = _Instance() # Avoid implicit environ check. + connection = self._makeOne(autoconnect=False, instance=instance) + + name = 'table-name' + families = {} + with self.assertRaises(ValueError): + connection.create_table(name, families) + + def _create_table_error_helper(self, err_val, err_type): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import connection as MUT + + instance = _Instance() # Avoid implicit environ check. 
+ connection = self._makeOne(autoconnect=False, instance=instance) + + tables_created = [] + + def make_table(*args, **kwargs): + kwargs['create_error'] = err_val + result = _MockLowLevelTable(*args, **kwargs) + tables_created.append(result) + return result + + name = 'table-name' + families = {'foo': {}} + with _Monkey(MUT, _LowLevelTable=make_table): + with self.assertRaises(err_type): + connection.create_table(name, families) + + self.assertEqual(len(tables_created), 1) + self.assertEqual(tables_created[0].create_calls, 1) + + @unittest2.skipUnless(sys.version_info[:2] == (2, 7), + 'gRPC only in Python 2.7') + def test_create_table_already_exists(self): + from grpc.beta import interfaces + from grpc.framework.interfaces.face import face + from gcloud.bigtable.happybase.connection import AlreadyExists + + err_val = face.NetworkError(None, None, + interfaces.StatusCode.ALREADY_EXISTS, None) + self._create_table_error_helper(err_val, AlreadyExists) + + @unittest2.skipUnless(sys.version_info[:2] == (2, 7), + 'gRPC only in Python 2.7') + def test_create_table_connection_error(self): + from grpc.beta import interfaces + from grpc.framework.interfaces.face import face + err_val = face.NetworkError(None, None, + interfaces.StatusCode.INTERNAL, None) + self._create_table_error_helper(err_val, face.NetworkError) + + @unittest2.skipUnless(sys.version_info[:2] == (2, 7), + 'gRPC only in Python 2.7') + def test_create_table_other_error(self): + self._create_table_error_helper(RuntimeError, RuntimeError) + + def _delete_table_helper(self, disable=False): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import connection as MUT + + instance = _Instance() # Avoid implicit environ check. 
+ connection = self._makeOne(autoconnect=False, instance=instance) + + tables_created = [] + + def make_table(*args, **kwargs): + result = _MockLowLevelTable(*args, **kwargs) + tables_created.append(result) + return result + + name = 'table-name' + with _Monkey(MUT, _LowLevelTable=make_table): + connection.delete_table(name, disable=disable) + + # Just one table would have been created. + table_instance, = tables_created + self.assertEqual(table_instance.args, (name, instance)) + self.assertEqual(table_instance.kwargs, {}) + self.assertEqual(table_instance.delete_calls, 1) + + def test_delete_table(self): + self._delete_table_helper() + + def test_delete_table_disable(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import connection as MUT + + warned = [] + + def mock_warn(msg): + warned.append(msg) + + with _Monkey(MUT, _WARN=mock_warn): + self._delete_table_helper(disable=True) + + self.assertEqual(warned, [MUT._DISABLE_DELETE_MSG]) + + def test_enable_table(self): + instance = _Instance() # Avoid implicit environ check. + connection = self._makeOne(autoconnect=False, instance=instance) + + name = 'table-name' + with self.assertRaises(NotImplementedError): + connection.enable_table(name) + + def test_disable_table(self): + instance = _Instance() # Avoid implicit environ check. + connection = self._makeOne(autoconnect=False, instance=instance) + + name = 'table-name' + with self.assertRaises(NotImplementedError): + connection.disable_table(name) + + def test_is_table_enabled(self): + instance = _Instance() # Avoid implicit environ check. + connection = self._makeOne(autoconnect=False, instance=instance) + + name = 'table-name' + with self.assertRaises(NotImplementedError): + connection.is_table_enabled(name) + + def test_compact_table(self): + instance = _Instance() # Avoid implicit environ check. 
+ connection = self._makeOne(autoconnect=False, instance=instance) + + name = 'table-name' + major = True + with self.assertRaises(NotImplementedError): + connection.compact_table(name, major=major) + + +class Test__parse_family_option(unittest2.TestCase): + + def _callFUT(self, option): + from gcloud.bigtable.happybase.connection import _parse_family_option + return _parse_family_option(option) + + def test_dictionary_no_keys(self): + option = {} + result = self._callFUT(option) + self.assertEqual(result, None) + + def test_null(self): + option = None + result = self._callFUT(option) + self.assertEqual(result, None) + + def test_dictionary_bad_key(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import connection as MUT + + warned = [] + + def mock_warn(msg): + warned.append(msg) + + option = {'badkey': None} + with _Monkey(MUT, _WARN=mock_warn): + result = self._callFUT(option) + + self.assertEqual(result, None) + self.assertEqual(len(warned), 1) + self.assertIn('badkey', warned[0]) + + def test_dictionary_versions_key(self): + from gcloud.bigtable.column_family import MaxVersionsGCRule + + versions = 42 + option = {'max_versions': versions} + result = self._callFUT(option) + + gc_rule = MaxVersionsGCRule(versions) + self.assertEqual(result, gc_rule) + + def test_dictionary_ttl_key(self): + import datetime + from gcloud.bigtable.column_family import MaxAgeGCRule + + time_to_live = 24 * 60 * 60 + max_age = datetime.timedelta(days=1) + option = {'time_to_live': time_to_live} + result = self._callFUT(option) + + gc_rule = MaxAgeGCRule(max_age) + self.assertEqual(result, gc_rule) + + def test_dictionary_both_keys(self): + import datetime + from gcloud.bigtable.column_family import GCRuleIntersection + from gcloud.bigtable.column_family import MaxAgeGCRule + from gcloud.bigtable.column_family import MaxVersionsGCRule + + versions = 42 + time_to_live = 24 * 60 * 60 + option = { + 'max_versions': versions, + 'time_to_live': time_to_live, + } 
+ result = self._callFUT(option) + + max_age = datetime.timedelta(days=1) + # NOTE: This relies on the order of the rules in the method we are + # calling matching this order here. + gc_rule1 = MaxAgeGCRule(max_age) + gc_rule2 = MaxVersionsGCRule(versions) + gc_rule = GCRuleIntersection(rules=[gc_rule1, gc_rule2]) + self.assertEqual(result, gc_rule) + + def test_non_dictionary(self): + option = object() + self.assertFalse(isinstance(option, dict)) + result = self._callFUT(option) + self.assertEqual(result, option) + + +class _Client(object): + + def __init__(self, *args, **kwargs): + self.instances = kwargs.pop('instances', []) + for instance in self.instances: + instance.client = self + self.failed_locations = kwargs.pop('failed_locations', []) + self.args = args + self.kwargs = kwargs + self.start_calls = 0 + self.stop_calls = 0 + + def start(self): + self.start_calls += 1 + + def stop(self): + self.stop_calls += 1 + + def list_instances(self): + return self.instances, self.failed_locations + + +class _Instance(object): + + def __init__(self, copies=(), list_tables_result=()): + self.copies = list(copies) + # Included to support Connection.__del__ + self._client = _Client() + self.list_tables_result = list_tables_result + + def copy(self): + if self.copies: + result = self.copies[0] + self.copies[:] = self.copies[1:] + return result + else: + return self + + def list_tables(self): + return self.list_tables_result + + +class _MockLowLevelColumnFamily(object): + + def __init__(self, column_family_id, gc_rule=None): + self.column_family_id = column_family_id + self.gc_rule = gc_rule + self.create_calls = 0 + + def create(self): + self.create_calls += 1 + + +class _MockLowLevelTable(object): + + def __init__(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + self.create_error = kwargs.get('create_error') + self.delete_calls = 0 + self.create_calls = 0 + self.col_fam_created = [] + + def delete(self): + self.delete_calls += 1 + + def create(self): + 
self.create_calls += 1 + if self.create_error: + raise self.create_error + + def column_family(self, column_family_id, gc_rule=None): + result = _MockLowLevelColumnFamily(column_family_id, gc_rule=gc_rule) + self.col_fam_created.append(result) + return result diff --git a/env/Lib/site-packages/gcloud/bigtable/happybase/test_pool.py b/env/Lib/site-packages/gcloud/bigtable/happybase/test_pool.py new file mode 100644 index 0000000..5021292 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/happybase/test_pool.py @@ -0,0 +1,264 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import unittest2 + + +class TestConnectionPool(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.happybase.pool import ConnectionPool + return ConnectionPool + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor_defaults(self): + import six + import threading + from gcloud.bigtable.happybase.connection import Connection + + size = 11 + instance_copy = _Instance() + all_copies = [instance_copy] * size + instance = _Instance(all_copies) # Avoid implicit environ check. 
+ pool = self._makeOne(size, instance=instance) + + self.assertTrue(isinstance(pool._lock, type(threading.Lock()))) + self.assertTrue(isinstance(pool._thread_connections, threading.local)) + self.assertEqual(pool._thread_connections.__dict__, {}) + + queue = pool._queue + self.assertTrue(isinstance(queue, six.moves.queue.LifoQueue)) + self.assertTrue(queue.full()) + self.assertEqual(queue.maxsize, size) + for connection in queue.queue: + self.assertTrue(isinstance(connection, Connection)) + self.assertTrue(connection._instance is instance_copy) + + def test_constructor_passes_kwargs(self): + table_prefix = 'foo' + table_prefix_separator = '<>' + instance = _Instance() # Avoid implicit environ check. + + size = 1 + pool = self._makeOne(size, table_prefix=table_prefix, + table_prefix_separator=table_prefix_separator, + instance=instance) + + for connection in pool._queue.queue: + self.assertEqual(connection.table_prefix, table_prefix) + self.assertEqual(connection.table_prefix_separator, + table_prefix_separator) + + def test_constructor_ignores_autoconnect(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase.connection import Connection + from gcloud.bigtable.happybase import pool as MUT + + class ConnectionWithOpen(Connection): + + _open_called = False + + def open(self): + self._open_called = True + + # First make sure the custom Connection class does as expected. + instance_copy1 = _Instance() + instance_copy2 = _Instance() + instance_copy3 = _Instance() + instance = _Instance([instance_copy1, instance_copy2, instance_copy3]) + connection = ConnectionWithOpen(autoconnect=False, instance=instance) + self.assertFalse(connection._open_called) + self.assertTrue(connection._instance is instance_copy1) + connection = ConnectionWithOpen(autoconnect=True, instance=instance) + self.assertTrue(connection._open_called) + self.assertTrue(connection._instance is instance_copy2) + + # Then make sure autoconnect=True is ignored in a pool. 
+ size = 1 + with _Monkey(MUT, Connection=ConnectionWithOpen): + pool = self._makeOne(size, autoconnect=True, instance=instance) + + for connection in pool._queue.queue: + self.assertTrue(isinstance(connection, ConnectionWithOpen)) + self.assertTrue(connection._instance is instance_copy3) + self.assertFalse(connection._open_called) + + def test_constructor_infers_instance(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase.connection import Connection + from gcloud.bigtable.happybase import pool as MUT + + size = 1 + instance_copy = _Instance() + all_copies = [instance_copy] * size + instance = _Instance(all_copies) + get_instance_calls = [] + + def mock_get_instance(timeout=None): + get_instance_calls.append(timeout) + return instance + + with _Monkey(MUT, _get_instance=mock_get_instance): + pool = self._makeOne(size) + + for connection in pool._queue.queue: + self.assertTrue(isinstance(connection, Connection)) + # We know that the Connection() constructor will + # call instance.copy(). + self.assertTrue(connection._instance is instance_copy) + + self.assertEqual(get_instance_calls, [None]) + + def test_constructor_non_integer_size(self): + size = None + with self.assertRaises(TypeError): + self._makeOne(size) + + def test_constructor_non_positive_size(self): + size = -10 + with self.assertRaises(ValueError): + self._makeOne(size) + size = 0 + with self.assertRaises(ValueError): + self._makeOne(size) + + def _makeOneWithMockQueue(self, queue_return): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import pool as MUT + + # We are going to use a fake queue, so we don't want any connections + # or instances to be created in the constructor. 
+ size = -1 + instance = object() + with _Monkey(MUT, _MIN_POOL_SIZE=size): + pool = self._makeOne(size, instance=instance) + + pool._queue = _Queue(queue_return) + return pool + + def test__acquire_connection(self): + queue_return = object() + pool = self._makeOneWithMockQueue(queue_return) + + timeout = 432 + connection = pool._acquire_connection(timeout=timeout) + self.assertTrue(connection is queue_return) + self.assertEqual(pool._queue._get_calls, [(True, timeout)]) + self.assertEqual(pool._queue._put_calls, []) + + def test__acquire_connection_failure(self): + from gcloud.bigtable.happybase.pool import NoConnectionsAvailable + + pool = self._makeOneWithMockQueue(None) + timeout = 1027 + with self.assertRaises(NoConnectionsAvailable): + pool._acquire_connection(timeout=timeout) + self.assertEqual(pool._queue._get_calls, [(True, timeout)]) + self.assertEqual(pool._queue._put_calls, []) + + def test_connection_is_context_manager(self): + import contextlib + import six + + queue_return = _Connection() + pool = self._makeOneWithMockQueue(queue_return) + cnxn_context = pool.connection() + if six.PY3: # pragma: NO COVER Python 3 + self.assertTrue(isinstance(cnxn_context, + contextlib._GeneratorContextManager)) + else: + self.assertTrue(isinstance(cnxn_context, + contextlib.GeneratorContextManager)) + + def test_connection_no_current_cnxn(self): + queue_return = _Connection() + pool = self._makeOneWithMockQueue(queue_return) + timeout = 55 + + self.assertFalse(hasattr(pool._thread_connections, 'current')) + with pool.connection(timeout=timeout) as connection: + self.assertEqual(pool._thread_connections.current, queue_return) + self.assertTrue(connection is queue_return) + self.assertFalse(hasattr(pool._thread_connections, 'current')) + + self.assertEqual(pool._queue._get_calls, [(True, timeout)]) + self.assertEqual(pool._queue._put_calls, + [(queue_return, None, None)]) + + def test_connection_with_current_cnxn(self): + current_cnxn = _Connection() + queue_return = 
_Connection() + pool = self._makeOneWithMockQueue(queue_return) + pool._thread_connections.current = current_cnxn + timeout = 8001 + + with pool.connection(timeout=timeout) as connection: + self.assertTrue(connection is current_cnxn) + + self.assertEqual(pool._queue._get_calls, []) + self.assertEqual(pool._queue._put_calls, []) + self.assertEqual(pool._thread_connections.current, current_cnxn) + + +class _Client(object): + + def __init__(self): + self.stop_calls = 0 + + def stop(self): + self.stop_calls += 1 + + +class _Connection(object): + + def open(self): + pass + + +class _Instance(object): + + def __init__(self, copies=()): + self.copies = list(copies) + # Included to support Connection.__del__ + self._client = _Client() + + def copy(self): + if self.copies: + result = self.copies[0] + self.copies[:] = self.copies[1:] + return result + else: + return self + + +class _Queue(object): + + def __init__(self, result=None): + self.result = result + self._get_calls = [] + self._put_calls = [] + + def get(self, block=None, timeout=None): + self._get_calls.append((block, timeout)) + if self.result is None: + import six + raise six.moves.queue.Empty + else: + return self.result + + def put(self, item, block=None, timeout=None): + self._put_calls.append((item, block, timeout)) diff --git a/env/Lib/site-packages/gcloud/bigtable/happybase/test_table.py b/env/Lib/site-packages/gcloud/bigtable/happybase/test_table.py new file mode 100644 index 0000000..7efa186 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/happybase/test_table.py @@ -0,0 +1,1505 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class Test_make_row(unittest2.TestCase): + + def _callFUT(self, *args, **kwargs): + from gcloud.bigtable.happybase.table import make_row + return make_row(*args, **kwargs) + + def test_it(self): + with self.assertRaises(NotImplementedError): + self._callFUT({}, False) + + +class Test_make_ordered_row(unittest2.TestCase): + + def _callFUT(self, *args, **kwargs): + from gcloud.bigtable.happybase.table import make_ordered_row + return make_ordered_row(*args, **kwargs) + + def test_it(self): + with self.assertRaises(NotImplementedError): + self._callFUT([], False) + + +class TestTable(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.happybase.table import Table + return Table + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import table as MUT + + name = 'table-name' + instance = object() + connection = _Connection(instance) + tables_constructed = [] + + def make_low_level_table(*args, **kwargs): + result = _MockLowLevelTable(*args, **kwargs) + tables_constructed.append(result) + return result + + with _Monkey(MUT, _LowLevelTable=make_low_level_table): + table = self._makeOne(name, connection) + self.assertEqual(table.name, name) + self.assertEqual(table.connection, connection) + + table_instance, = tables_constructed + self.assertEqual(table._low_level_table, table_instance) + self.assertEqual(table_instance.args, (name, instance)) + 
self.assertEqual(table_instance.kwargs, {})
+
+ def test_constructor_null_connection(self):
+ name = 'table-name'
+ connection = None
+ table = self._makeOne(name, connection)
+ self.assertEqual(table.name, name)
+ self.assertEqual(table.connection, connection)
+ self.assertEqual(table._low_level_table, None)
+
+ def test_families(self):
+ from gcloud._testing import _Monkey
+ from gcloud.bigtable.happybase import table as MUT
+
+ name = 'table-name'
+ connection = None
+ table = self._makeOne(name, connection)
+ table._low_level_table = _MockLowLevelTable()
+
+ # Mock the column families to be returned.
+ col_fam_name = 'fam'
+ gc_rule = object()
+ col_fam = _MockLowLevelColumnFamily(col_fam_name, gc_rule=gc_rule)
+ col_fams = {col_fam_name: col_fam}
+ table._low_level_table.column_families = col_fams
+
+ to_dict_result = object()
+ to_dict_calls = []
+
+ def mock_gc_rule_to_dict(gc_rule):
+ to_dict_calls.append(gc_rule)
+ return to_dict_result
+
+ with _Monkey(MUT, _gc_rule_to_dict=mock_gc_rule_to_dict):
+ result = table.families()
+
+ self.assertEqual(result, {col_fam_name: to_dict_result})
+ self.assertEqual(table._low_level_table.list_column_families_calls, 1)
+ self.assertEqual(to_dict_calls, [gc_rule])
+
+ def test___repr__(self):
+ name = 'table-name'
+ table = self._makeOne(name, None)
+ self.assertEqual(repr(table), '<table name=table-name>')
+
+ def test_regions(self):
+ name = 'table-name'
+ connection = None
+ table = self._makeOne(name, connection)
+
+ with self.assertRaises(NotImplementedError):
+ table.regions()
+
+ def test_row_empty_row(self):
+ from gcloud._testing import _Monkey
+ from gcloud.bigtable.happybase import table as MUT
+
+ name = 'table-name'
+ connection = None
+ table = self._makeOne(name, connection)
+ table._low_level_table = _MockLowLevelTable()
+ table._low_level_table.read_row_result = None
+
+ # Set-up mocks.
+ fake_filter = object() + mock_filters = [] + + def mock_filter_chain_helper(**kwargs): + mock_filters.append(kwargs) + return fake_filter + + row_key = 'row-key' + timestamp = object() + with _Monkey(MUT, _filter_chain_helper=mock_filter_chain_helper): + result = table.row(row_key, timestamp=timestamp) + + # read_row_result == None --> No results. + self.assertEqual(result, {}) + + read_row_args = (row_key,) + read_row_kwargs = {'filter_': fake_filter} + self.assertEqual(table._low_level_table.read_row_calls, [ + (read_row_args, read_row_kwargs), + ]) + + expected_kwargs = { + 'filters': [], + 'versions': 1, + 'timestamp': timestamp, + } + self.assertEqual(mock_filters, [expected_kwargs]) + + def test_row_with_columns(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import table as MUT + + name = 'table-name' + connection = None + table = self._makeOne(name, connection) + table._low_level_table = _MockLowLevelTable() + table._low_level_table.read_row_result = None + + # Set-up mocks. + fake_col_filter = object() + mock_columns = [] + + def mock_columns_filter_helper(*args): + mock_columns.append(args) + return fake_col_filter + + fake_filter = object() + mock_filters = [] + + def mock_filter_chain_helper(**kwargs): + mock_filters.append(kwargs) + return fake_filter + + row_key = 'row-key' + columns = object() + with _Monkey(MUT, _filter_chain_helper=mock_filter_chain_helper, + _columns_filter_helper=mock_columns_filter_helper): + result = table.row(row_key, columns=columns) + + # read_row_result == None --> No results. 
+ self.assertEqual(result, {}) + + read_row_args = (row_key,) + read_row_kwargs = {'filter_': fake_filter} + self.assertEqual(table._low_level_table.read_row_calls, [ + (read_row_args, read_row_kwargs), + ]) + + self.assertEqual(mock_columns, [(columns,)]) + expected_kwargs = { + 'filters': [fake_col_filter], + 'versions': 1, + 'timestamp': None, + } + self.assertEqual(mock_filters, [expected_kwargs]) + + def test_row_with_results(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import table as MUT + from gcloud.bigtable.row_data import PartialRowData + + row_key = 'row-key' + name = 'table-name' + connection = None + table = self._makeOne(name, connection) + table._low_level_table = _MockLowLevelTable() + partial_row = PartialRowData(row_key) + table._low_level_table.read_row_result = partial_row + + # Set-up mocks. + fake_filter = object() + mock_filters = [] + + def mock_filter_chain_helper(**kwargs): + mock_filters.append(kwargs) + return fake_filter + + fake_pair = object() + mock_cells = [] + + def mock_cells_to_pairs(*args, **kwargs): + mock_cells.append((args, kwargs)) + return [fake_pair] + + col_fam = u'cf1' + qual = b'qual' + fake_cells = object() + partial_row._cells = {col_fam: {qual: fake_cells}} + include_timestamp = object() + with _Monkey(MUT, _filter_chain_helper=mock_filter_chain_helper, + _cells_to_pairs=mock_cells_to_pairs): + result = table.row(row_key, include_timestamp=include_timestamp) + + # The results come from _cells_to_pairs. 
+ expected_result = {col_fam.encode('ascii') + b':' + qual: fake_pair} + self.assertEqual(result, expected_result) + + read_row_args = (row_key,) + read_row_kwargs = {'filter_': fake_filter} + self.assertEqual(table._low_level_table.read_row_calls, [ + (read_row_args, read_row_kwargs), + ]) + + expected_kwargs = { + 'filters': [], + 'versions': 1, + 'timestamp': None, + } + self.assertEqual(mock_filters, [expected_kwargs]) + to_pairs_kwargs = {'include_timestamp': include_timestamp} + self.assertEqual(mock_cells, + [((fake_cells,), to_pairs_kwargs)]) + + def test_rows_empty_row(self): + name = 'table-name' + connection = None + table = self._makeOne(name, connection) + + result = table.rows([]) + self.assertEqual(result, []) + + def test_rows_with_columns(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import table as MUT + + name = 'table-name' + connection = None + table = self._makeOne(name, connection) + table._low_level_table = _MockLowLevelTable() + rr_result = _MockPartialRowsData() + table._low_level_table.read_rows_result = rr_result + self.assertEqual(rr_result.consume_all_calls, 0) + + # Set-up mocks. + fake_col_filter = object() + mock_cols = [] + + def mock_columns_filter_helper(*args): + mock_cols.append(args) + return fake_col_filter + + fake_rows_filter = object() + mock_rows = [] + + def mock_row_keys_filter_helper(*args): + mock_rows.append(args) + return fake_rows_filter + + fake_filter = object() + mock_filters = [] + + def mock_filter_chain_helper(**kwargs): + mock_filters.append(kwargs) + return fake_filter + + rows = ['row-key'] + columns = object() + with _Monkey(MUT, _filter_chain_helper=mock_filter_chain_helper, + _row_keys_filter_helper=mock_row_keys_filter_helper, + _columns_filter_helper=mock_columns_filter_helper): + result = table.rows(rows, columns=columns) + + # read_rows_result == Empty PartialRowsData --> No results. 
+ self.assertEqual(result, []) + + read_rows_args = () + read_rows_kwargs = {'filter_': fake_filter} + self.assertEqual(table._low_level_table.read_rows_calls, [ + (read_rows_args, read_rows_kwargs), + ]) + self.assertEqual(rr_result.consume_all_calls, 1) + + self.assertEqual(mock_cols, [(columns,)]) + self.assertEqual(mock_rows, [(rows,)]) + expected_kwargs = { + 'filters': [fake_col_filter, fake_rows_filter], + 'versions': 1, + 'timestamp': None, + } + self.assertEqual(mock_filters, [expected_kwargs]) + + def test_rows_with_results(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import table as MUT + from gcloud.bigtable.row_data import PartialRowData + + row_key1 = 'row-key1' + row_key2 = 'row-key2' + rows = [row_key1, row_key2] + name = 'table-name' + connection = None + table = self._makeOne(name, connection) + table._low_level_table = _MockLowLevelTable() + + row1 = PartialRowData(row_key1) + # Return row1 but not row2 + rr_result = _MockPartialRowsData(rows={row_key1: row1}) + table._low_level_table.read_rows_result = rr_result + self.assertEqual(rr_result.consume_all_calls, 0) + + # Set-up mocks. 
+ fake_rows_filter = object() + mock_rows = [] + + def mock_row_keys_filter_helper(*args): + mock_rows.append(args) + return fake_rows_filter + + fake_filter = object() + mock_filters = [] + + def mock_filter_chain_helper(**kwargs): + mock_filters.append(kwargs) + return fake_filter + + fake_pair = object() + mock_cells = [] + + def mock_cells_to_pairs(*args, **kwargs): + mock_cells.append((args, kwargs)) + return [fake_pair] + + col_fam = u'cf1' + qual = b'qual' + fake_cells = object() + row1._cells = {col_fam: {qual: fake_cells}} + include_timestamp = object() + with _Monkey(MUT, _row_keys_filter_helper=mock_row_keys_filter_helper, + _filter_chain_helper=mock_filter_chain_helper, + _cells_to_pairs=mock_cells_to_pairs): + result = table.rows(rows, include_timestamp=include_timestamp) + + # read_rows_result == PartialRowsData with row_key1 + expected_result = {col_fam.encode('ascii') + b':' + qual: fake_pair} + self.assertEqual(result, [(row_key1, expected_result)]) + + read_rows_args = () + read_rows_kwargs = {'filter_': fake_filter} + self.assertEqual(table._low_level_table.read_rows_calls, [ + (read_rows_args, read_rows_kwargs), + ]) + self.assertEqual(rr_result.consume_all_calls, 1) + + self.assertEqual(mock_rows, [(rows,)]) + expected_kwargs = { + 'filters': [fake_rows_filter], + 'versions': 1, + 'timestamp': None, + } + self.assertEqual(mock_filters, [expected_kwargs]) + to_pairs_kwargs = {'include_timestamp': include_timestamp} + self.assertEqual(mock_cells, + [((fake_cells,), to_pairs_kwargs)]) + + def test_cells_empty_row(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import table as MUT + + name = 'table-name' + connection = None + table = self._makeOne(name, connection) + table._low_level_table = _MockLowLevelTable() + table._low_level_table.read_row_result = None + + # Set-up mocks. 
+ fake_filter = object() + mock_filters = [] + + def mock_filter_chain_helper(**kwargs): + mock_filters.append(kwargs) + return fake_filter + + row_key = 'row-key' + column = 'fam:col1' + with _Monkey(MUT, _filter_chain_helper=mock_filter_chain_helper): + result = table.cells(row_key, column) + + # read_row_result == None --> No results. + self.assertEqual(result, []) + + read_row_args = (row_key,) + read_row_kwargs = {'filter_': fake_filter} + self.assertEqual(table._low_level_table.read_row_calls, [ + (read_row_args, read_row_kwargs), + ]) + + expected_kwargs = { + 'column': column, + 'versions': None, + 'timestamp': None, + } + self.assertEqual(mock_filters, [expected_kwargs]) + + def test_cells_with_results(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import table as MUT + from gcloud.bigtable.row_data import PartialRowData + + row_key = 'row-key' + name = 'table-name' + connection = None + table = self._makeOne(name, connection) + table._low_level_table = _MockLowLevelTable() + partial_row = PartialRowData(row_key) + table._low_level_table.read_row_result = partial_row + + # These are all passed to mocks. + versions = object() + timestamp = object() + include_timestamp = object() + + # Set-up mocks. 
+ fake_filter = object() + mock_filters = [] + + def mock_filter_chain_helper(**kwargs): + mock_filters.append(kwargs) + return fake_filter + + fake_result = object() + mock_cells = [] + + def mock_cells_to_pairs(*args, **kwargs): + mock_cells.append((args, kwargs)) + return fake_result + + col_fam = 'cf1' + qual = 'qual' + fake_cells = object() + partial_row._cells = {col_fam: {qual: fake_cells}} + column = col_fam + ':' + qual + with _Monkey(MUT, _filter_chain_helper=mock_filter_chain_helper, + _cells_to_pairs=mock_cells_to_pairs): + result = table.cells(row_key, column, versions=versions, + timestamp=timestamp, + include_timestamp=include_timestamp) + + self.assertEqual(result, fake_result) + + read_row_args = (row_key,) + read_row_kwargs = {'filter_': fake_filter} + self.assertEqual(table._low_level_table.read_row_calls, [ + (read_row_args, read_row_kwargs), + ]) + + filter_kwargs = { + 'column': column, + 'versions': versions, + 'timestamp': timestamp, + } + self.assertEqual(mock_filters, [filter_kwargs]) + to_pairs_kwargs = {'include_timestamp': include_timestamp} + self.assertEqual(mock_cells, + [((fake_cells,), to_pairs_kwargs)]) + + def test_scan_with_batch_size(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import table as MUT + + warned = [] + + def mock_warn(msg): + warned.append(msg) + + name = 'table-name' + connection = None + table = self._makeOne(name, connection) + # Use unknown to force a TypeError, so we don't need to + # stub out the rest of the method. 
+ with self.assertRaises(TypeError): + with _Monkey(MUT, _WARN=mock_warn): + list(table.scan(batch_size=object(), unknown=None)) + + self.assertEqual(len(warned), 1) + self.assertIn('batch_size', warned[0]) + + def test_scan_with_scan_batching(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import table as MUT + + warned = [] + + def mock_warn(msg): + warned.append(msg) + + name = 'table-name' + connection = None + table = self._makeOne(name, connection) + # Use unknown to force a TypeError, so we don't need to + # stub out the rest of the method. + with self.assertRaises(TypeError): + with _Monkey(MUT, _WARN=mock_warn): + list(table.scan(scan_batching=object(), unknown=None)) + + self.assertEqual(len(warned), 1) + self.assertIn('scan_batching', warned[0]) + + def test_scan_with_sorted_columns(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import table as MUT + + warned = [] + + def mock_warn(msg): + warned.append(msg) + + name = 'table-name' + connection = None + table = self._makeOne(name, connection) + # Use unknown to force a TypeError, so we don't need to + # stub out the rest of the method. 
+ with self.assertRaises(TypeError): + with _Monkey(MUT, _WARN=mock_warn): + list(table.scan(sorted_columns=object(), unknown=None)) + + self.assertEqual(len(warned), 1) + self.assertIn('sorted_columns', warned[0]) + + def test_scan_with_invalid_limit(self): + name = 'table-name' + connection = None + table = self._makeOne(name, connection) + with self.assertRaises(ValueError): + list(table.scan(limit=-10)) + + def test_scan_with_row_prefix_and_row_start(self): + name = 'table-name' + connection = None + table = self._makeOne(name, connection) + with self.assertRaises(ValueError): + list(table.scan(row_prefix='a', row_stop='abc')) + + def test_scan_with_string_filter(self): + name = 'table-name' + connection = None + table = self._makeOne(name, connection) + with self.assertRaises(TypeError): + list(table.scan(filter='some-string')) + + def _scan_test_helper(self, row_limits=(None, None), row_prefix=None, + columns=None, filter_=None, timestamp=None, + include_timestamp=False, limit=None, rr_result=None, + expected_result=None): + import types + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import table as MUT + + name = 'table-name' + row_start, row_stop = row_limits + connection = None + table = self._makeOne(name, connection) + table._low_level_table = _MockLowLevelTable() + rr_result = rr_result or _MockPartialRowsData() + table._low_level_table.read_rows_result = rr_result + self.assertEqual(rr_result.consume_next_calls, 0) + + # Set-up mocks. 
+ fake_col_filter = object() + mock_columns = [] + + def mock_columns_filter_helper(*args): + mock_columns.append(args) + return fake_col_filter + + fake_filter = object() + mock_filters = [] + + def mock_filter_chain_helper(**kwargs): + mock_filters.append(kwargs) + return fake_filter + + with _Monkey(MUT, _filter_chain_helper=mock_filter_chain_helper, + _columns_filter_helper=mock_columns_filter_helper): + result = table.scan(row_start=row_start, row_stop=row_stop, + row_prefix=row_prefix, columns=columns, + filter=filter_, timestamp=timestamp, + include_timestamp=include_timestamp, + limit=limit) + self.assertTrue(isinstance(result, types.GeneratorType)) + # Need to consume the result while the monkey patch is applied. + # read_rows_result == Empty PartialRowsData --> No results. + expected_result = expected_result or [] + self.assertEqual(list(result), expected_result) + + read_rows_args = () + if row_prefix: + row_start = row_prefix + row_stop = MUT._string_successor(row_prefix) + read_rows_kwargs = { + 'end_key': row_stop, + 'filter_': fake_filter, + 'limit': limit, + 'start_key': row_start, + } + self.assertEqual(table._low_level_table.read_rows_calls, [ + (read_rows_args, read_rows_kwargs), + ]) + self.assertEqual(rr_result.consume_next_calls, + rr_result.iterations + 1) + + if columns is not None: + self.assertEqual(mock_columns, [(columns,)]) + else: + self.assertEqual(mock_columns, []) + + filters = [] + if filter_ is not None: + filters.append(filter_) + if columns: + filters.append(fake_col_filter) + expected_kwargs = { + 'filters': filters, + 'versions': 1, + 'timestamp': timestamp, + } + self.assertEqual(mock_filters, [expected_kwargs]) + + def test_scan_with_columns(self): + columns = object() + self._scan_test_helper(columns=columns) + + def test_scan_with_row_start_and_stop(self): + row_start = 'bar' + row_stop = 'foo' + row_limits = (row_start, row_stop) + self._scan_test_helper(row_limits=row_limits) + + def test_scan_with_row_prefix(self): + 
row_prefix = 'row-prefi' + self._scan_test_helper(row_prefix=row_prefix) + + def test_scan_with_filter(self): + mock_filter = object() + self._scan_test_helper(filter_=mock_filter) + + def test_scan_with_no_results(self): + limit = 1337 + timestamp = object() + self._scan_test_helper(timestamp=timestamp, limit=limit) + + def test_scan_with_results(self): + from gcloud.bigtable.row_data import PartialRowData + + row_key1 = 'row-key1' + row1 = PartialRowData(row_key1) + rr_result = _MockPartialRowsData(rows={row_key1: row1}, iterations=1) + + include_timestamp = object() + expected_result = [(row_key1, {})] + self._scan_test_helper(include_timestamp=include_timestamp, + rr_result=rr_result, + expected_result=expected_result) + + def test_put(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import table as MUT + from gcloud.bigtable.happybase.table import _WAL_SENTINEL + + name = 'table-name' + connection = None + table = self._makeOne(name, connection) + batches_created = [] + + def make_batch(*args, **kwargs): + result = _MockBatch(*args, **kwargs) + batches_created.append(result) + return result + + row = 'row-key' + data = {'fam:col': 'foo'} + timestamp = None + with _Monkey(MUT, Batch=make_batch): + result = table.put(row, data, timestamp=timestamp) + + # There is no return value. + self.assertEqual(result, None) + + # Check how the batch was created and used. 
+ batch, = batches_created + self.assertTrue(isinstance(batch, _MockBatch)) + self.assertEqual(batch.args, (table,)) + expected_kwargs = { + 'timestamp': timestamp, + 'batch_size': None, + 'transaction': False, + 'wal': _WAL_SENTINEL, + } + self.assertEqual(batch.kwargs, expected_kwargs) + # Make sure it was a successful context manager + self.assertEqual(batch.exit_vals, [(None, None, None)]) + self.assertEqual(batch.put_args, [(row, data)]) + self.assertEqual(batch.delete_args, []) + + def test_delete(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import table as MUT + from gcloud.bigtable.happybase.table import _WAL_SENTINEL + + name = 'table-name' + connection = None + table = self._makeOne(name, connection) + batches_created = [] + + def make_batch(*args, **kwargs): + result = _MockBatch(*args, **kwargs) + batches_created.append(result) + return result + + row = 'row-key' + columns = ['fam:col1', 'fam:col2'] + timestamp = None + with _Monkey(MUT, Batch=make_batch): + result = table.delete(row, columns=columns, timestamp=timestamp) + + # There is no return value. + self.assertEqual(result, None) + + # Check how the batch was created and used. 
+ batch, = batches_created + self.assertTrue(isinstance(batch, _MockBatch)) + self.assertEqual(batch.args, (table,)) + expected_kwargs = { + 'timestamp': timestamp, + 'batch_size': None, + 'transaction': False, + 'wal': _WAL_SENTINEL, + } + self.assertEqual(batch.kwargs, expected_kwargs) + # Make sure it was a successful context manager + self.assertEqual(batch.exit_vals, [(None, None, None)]) + self.assertEqual(batch.put_args, []) + self.assertEqual(batch.delete_args, [(row, columns)]) + + def test_batch(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import table as MUT + + name = 'table-name' + connection = None + table = self._makeOne(name, connection) + + timestamp = object() + batch_size = 42 + transaction = False # Must be False when batch_size is non-null + wal = object() + + with _Monkey(MUT, Batch=_MockBatch): + result = table.batch(timestamp=timestamp, batch_size=batch_size, + transaction=transaction, wal=wal) + + self.assertTrue(isinstance(result, _MockBatch)) + self.assertEqual(result.args, (table,)) + expected_kwargs = { + 'timestamp': timestamp, + 'batch_size': batch_size, + 'transaction': transaction, + 'wal': wal, + } + self.assertEqual(result.kwargs, expected_kwargs) + + def test_counter_get(self): + klass = self._getTargetClass() + counter_value = 1337 + + class TableWithInc(klass): + + incremented = [] + value = counter_value + + def counter_inc(self, row, column, value=1): + self.incremented.append((row, column, value)) + self.value += value + return self.value + + name = 'table-name' + connection = None + table = TableWithInc(name, connection) + + row = 'row-key' + column = 'fam:col1' + self.assertEqual(TableWithInc.incremented, []) + result = table.counter_get(row, column) + self.assertEqual(result, counter_value) + self.assertEqual(TableWithInc.incremented, [(row, column, 0)]) + + def test_counter_dec(self): + klass = self._getTargetClass() + counter_value = 42 + + class TableWithInc(klass): + + incremented = [] 
+ value = counter_value + + def counter_inc(self, row, column, value=1): + self.incremented.append((row, column, value)) + self.value += value + return self.value + + name = 'table-name' + connection = None + table = TableWithInc(name, connection) + + row = 'row-key' + column = 'fam:col1' + dec_value = 987 + self.assertEqual(TableWithInc.incremented, []) + result = table.counter_dec(row, column, value=dec_value) + self.assertEqual(result, counter_value - dec_value) + self.assertEqual(TableWithInc.incremented, [(row, column, -dec_value)]) + + def _counter_inc_helper(self, row, column, value, commit_result): + import six + + name = 'table-name' + connection = None + table = self._makeOne(name, connection) + # Mock the return values. + table._low_level_table = _MockLowLevelTable() + table._low_level_table.row_values[row] = row_obj = _MockLowLevelRow( + row, commit_result=commit_result) + + self.assertFalse(row_obj._append) + result = table.counter_inc(row, column, value=value) + self.assertTrue(row_obj._append) + + incremented_value = value + _MockLowLevelRow.COUNTER_DEFAULT + self.assertEqual(result, incremented_value) + + # Check the row values returned. 
+ row_obj = table._low_level_table.row_values[row] + if isinstance(column, six.binary_type): + column = column.decode('utf-8') + self.assertEqual(row_obj.counts, + {tuple(column.split(':')): incremented_value}) + + def test_counter_set(self): + name = 'table-name' + connection = None + table = self._makeOne(name, connection) + + row = 'row-key' + column = 'fam:col1' + value = 42 + with self.assertRaises(NotImplementedError): + table.counter_set(row, column, value=value) + + def test_counter_inc(self): + import struct + + row = 'row-key' + col_fam = u'fam' + col_qual = u'col1' + column = col_fam + u':' + col_qual + value = 42 + packed_value = struct.pack('>q', value) + fake_timestamp = None + commit_result = { + col_fam: { + col_qual: [(packed_value, fake_timestamp)], + } + } + self._counter_inc_helper(row, column, value, commit_result) + + def test_counter_inc_column_bytes(self): + import struct + + row = 'row-key' + col_fam = b'fam' + col_qual = b'col1' + column = col_fam + b':' + col_qual + value = 42 + packed_value = struct.pack('>q', value) + fake_timestamp = None + commit_result = { + col_fam.decode('utf-8'): { + col_qual.decode('utf-8'): [(packed_value, fake_timestamp)], + } + } + self._counter_inc_helper(row, column, value, commit_result) + + def test_counter_inc_bad_result(self): + row = 'row-key' + col_fam = 'fam' + col_qual = 'col1' + column = col_fam + ':' + col_qual + value = 42 + commit_result = None + with self.assertRaises(TypeError): + self._counter_inc_helper(row, column, value, commit_result) + + def test_counter_inc_result_key_error(self): + row = 'row-key' + col_fam = 'fam' + col_qual = 'col1' + column = col_fam + ':' + col_qual + value = 42 + commit_result = {} + with self.assertRaises(KeyError): + self._counter_inc_helper(row, column, value, commit_result) + + def test_counter_inc_result_nested_key_error(self): + row = 'row-key' + col_fam = 'fam' + col_qual = 'col1' + column = col_fam + ':' + col_qual + value = 42 + commit_result = {col_fam: 
{}} + with self.assertRaises(KeyError): + self._counter_inc_helper(row, column, value, commit_result) + + def test_counter_inc_result_non_unique_cell(self): + row = 'row-key' + col_fam = 'fam' + col_qual = 'col1' + column = col_fam + ':' + col_qual + value = 42 + fake_timestamp = None + packed_value = None + commit_result = { + col_fam: { + col_qual: [ + (packed_value, fake_timestamp), + (packed_value, fake_timestamp), + ], + } + } + with self.assertRaises(ValueError): + self._counter_inc_helper(row, column, value, commit_result) + + +class Test__gc_rule_to_dict(unittest2.TestCase): + + def _callFUT(self, *args, **kwargs): + from gcloud.bigtable.happybase.table import _gc_rule_to_dict + return _gc_rule_to_dict(*args, **kwargs) + + def test_with_null(self): + gc_rule = None + result = self._callFUT(gc_rule) + self.assertEqual(result, {}) + + def test_with_max_versions(self): + from gcloud.bigtable.column_family import MaxVersionsGCRule + + max_versions = 2 + gc_rule = MaxVersionsGCRule(max_versions) + result = self._callFUT(gc_rule) + expected_result = {'max_versions': max_versions} + self.assertEqual(result, expected_result) + + def test_with_max_age(self): + import datetime + from gcloud.bigtable.column_family import MaxAgeGCRule + + time_to_live = 101 + max_age = datetime.timedelta(seconds=time_to_live) + gc_rule = MaxAgeGCRule(max_age) + result = self._callFUT(gc_rule) + expected_result = {'time_to_live': time_to_live} + self.assertEqual(result, expected_result) + + def test_with_non_gc_rule(self): + gc_rule = object() + result = self._callFUT(gc_rule) + self.assertTrue(result is gc_rule) + + def test_with_gc_rule_union(self): + from gcloud.bigtable.column_family import GCRuleUnion + + gc_rule = GCRuleUnion(rules=[]) + result = self._callFUT(gc_rule) + self.assertTrue(result is gc_rule) + + def test_with_intersection_other_than_two(self): + from gcloud.bigtable.column_family import GCRuleIntersection + + gc_rule = GCRuleIntersection(rules=[]) + result = 
self._callFUT(gc_rule) + self.assertTrue(result is gc_rule) + + def test_with_intersection_two_max_num_versions(self): + from gcloud.bigtable.column_family import GCRuleIntersection + from gcloud.bigtable.column_family import MaxVersionsGCRule + + rule1 = MaxVersionsGCRule(1) + rule2 = MaxVersionsGCRule(2) + gc_rule = GCRuleIntersection(rules=[rule1, rule2]) + result = self._callFUT(gc_rule) + self.assertTrue(result is gc_rule) + + def test_with_intersection_two_rules(self): + import datetime + from gcloud.bigtable.column_family import GCRuleIntersection + from gcloud.bigtable.column_family import MaxAgeGCRule + from gcloud.bigtable.column_family import MaxVersionsGCRule + + time_to_live = 101 + max_age = datetime.timedelta(seconds=time_to_live) + rule1 = MaxAgeGCRule(max_age) + max_versions = 2 + rule2 = MaxVersionsGCRule(max_versions) + gc_rule = GCRuleIntersection(rules=[rule1, rule2]) + result = self._callFUT(gc_rule) + expected_result = { + 'max_versions': max_versions, + 'time_to_live': time_to_live, + } + self.assertEqual(result, expected_result) + + def test_with_intersection_two_nested_rules(self): + from gcloud.bigtable.column_family import GCRuleIntersection + + rule1 = GCRuleIntersection(rules=[]) + rule2 = GCRuleIntersection(rules=[]) + gc_rule = GCRuleIntersection(rules=[rule1, rule2]) + result = self._callFUT(gc_rule) + self.assertTrue(result is gc_rule) + + +class Test__string_successor(unittest2.TestCase): + + def _callFUT(self, *args, **kwargs): + from gcloud.bigtable.happybase.table import _string_successor + return _string_successor(*args, **kwargs) + + def test_with_alphanumeric(self): + self.assertEqual(self._callFUT(b'boa'), b'bob') + self.assertEqual(self._callFUT(b'abc1'), b'abc2') + + def test_with_last_byte(self): + self.assertEqual(self._callFUT(b'boa\xff'), b'bob') + + def test_with_empty_string(self): + self.assertEqual(self._callFUT(b''), b'') + + def test_with_all_last_bytes(self): + self.assertEqual(self._callFUT(b'\xff\xff\xff'), 
b'') + + def test_with_unicode_input(self): + self.assertEqual(self._callFUT(u'boa'), b'bob') + + +class Test__convert_to_time_range(unittest2.TestCase): + + def _callFUT(self, timestamp=None): + from gcloud.bigtable.happybase.table import _convert_to_time_range + return _convert_to_time_range(timestamp=timestamp) + + def test_null(self): + timestamp = None + result = self._callFUT(timestamp=timestamp) + self.assertEqual(result, None) + + def test_invalid_type(self): + timestamp = object() + with self.assertRaises(TypeError): + self._callFUT(timestamp=timestamp) + + def test_success(self): + from gcloud._helpers import _datetime_from_microseconds + from gcloud.bigtable.row_filters import TimestampRange + + timestamp = 1441928298571 + ts_dt = _datetime_from_microseconds(1000 * timestamp) + result = self._callFUT(timestamp=timestamp) + self.assertTrue(isinstance(result, TimestampRange)) + self.assertEqual(result.start, None) + self.assertEqual(result.end, ts_dt) + + +class Test__cells_to_pairs(unittest2.TestCase): + + def _callFUT(self, *args, **kwargs): + from gcloud.bigtable.happybase.table import _cells_to_pairs + return _cells_to_pairs(*args, **kwargs) + + def test_without_timestamp(self): + from gcloud.bigtable.row_data import Cell + + value1 = 'foo' + cell1 = Cell(value=value1, timestamp=None) + value2 = 'bar' + cell2 = Cell(value=value2, timestamp=None) + + result = self._callFUT([cell1, cell2]) + self.assertEqual(result, [value1, value2]) + + def test_with_timestamp(self): + from gcloud._helpers import _datetime_from_microseconds + from gcloud.bigtable.row_data import Cell + + value1 = 'foo' + ts1_millis = 1221934570148 + ts1 = _datetime_from_microseconds(ts1_millis * 1000) + cell1 = Cell(value=value1, timestamp=ts1) + + value2 = 'bar' + ts2_millis = 1221955575548 + ts2 = _datetime_from_microseconds(ts2_millis * 1000) + cell2 = Cell(value=value2, timestamp=ts2) + + result = self._callFUT([cell1, cell2], include_timestamp=True) + self.assertEqual(result, + 
[(value1, ts1_millis), (value2, ts2_millis)]) + + +class Test__partial_row_to_dict(unittest2.TestCase): + + def _callFUT(self, partial_row_data, include_timestamp=False): + from gcloud.bigtable.happybase.table import _partial_row_to_dict + return _partial_row_to_dict(partial_row_data, + include_timestamp=include_timestamp) + + def test_without_timestamp(self): + from gcloud.bigtable.row_data import Cell + from gcloud.bigtable.row_data import PartialRowData + + row_data = PartialRowData(b'row-key') + val1 = b'hi-im-bytes' + val2 = b'bi-im-hytes' + row_data._cells[u'fam1'] = { + b'col1': [Cell(val1, None)], + b'col2': [Cell(val2, None)], + } + result = self._callFUT(row_data) + expected_result = { + b'fam1:col1': val1, + b'fam1:col2': val2, + } + self.assertEqual(result, expected_result) + + def test_with_timestamp(self): + from gcloud._helpers import _datetime_from_microseconds + from gcloud.bigtable.row_data import Cell + from gcloud.bigtable.row_data import PartialRowData + + row_data = PartialRowData(b'row-key') + val1 = b'hi-im-bytes' + ts1_millis = 1221934570148 + ts1 = _datetime_from_microseconds(ts1_millis * 1000) + val2 = b'bi-im-hytes' + ts2_millis = 1331934880000 + ts2 = _datetime_from_microseconds(ts2_millis * 1000) + row_data._cells[u'fam1'] = { + b'col1': [Cell(val1, ts1)], + b'col2': [Cell(val2, ts2)], + } + result = self._callFUT(row_data, include_timestamp=True) + expected_result = { + b'fam1:col1': (val1, ts1_millis), + b'fam1:col2': (val2, ts2_millis), + } + self.assertEqual(result, expected_result) + + +class Test__filter_chain_helper(unittest2.TestCase): + + def _callFUT(self, *args, **kwargs): + from gcloud.bigtable.happybase.table import _filter_chain_helper + return _filter_chain_helper(*args, **kwargs) + + def test_no_filters(self): + with self.assertRaises(ValueError): + self._callFUT() + + def test_single_filter(self): + from gcloud.bigtable.row_filters import CellsColumnLimitFilter + + versions = 1337 + result = 
self._callFUT(versions=versions) + self.assertTrue(isinstance(result, CellsColumnLimitFilter)) + # Relies on the fact that RowFilter instances can + # only have one value set. + self.assertEqual(result.num_cells, versions) + + def test_existing_filters(self): + from gcloud.bigtable.row_filters import CellsColumnLimitFilter + + filters = [] + versions = 1337 + result = self._callFUT(versions=versions, filters=filters) + # Make sure filters has grown. + self.assertEqual(filters, [result]) + + self.assertTrue(isinstance(result, CellsColumnLimitFilter)) + # Relies on the fact that RowFilter instances can + # only have one value set. + self.assertEqual(result.num_cells, versions) + + def _column_helper(self, num_filters, versions=None, timestamp=None, + column=None, col_fam=None, qual=None): + from gcloud.bigtable.row_filters import ColumnQualifierRegexFilter + from gcloud.bigtable.row_filters import FamilyNameRegexFilter + from gcloud.bigtable.row_filters import RowFilterChain + + if col_fam is None: + col_fam = 'cf1' + if qual is None: + qual = 'qual' + if column is None: + column = col_fam + ':' + qual + result = self._callFUT(column, versions=versions, timestamp=timestamp) + self.assertTrue(isinstance(result, RowFilterChain)) + + self.assertEqual(len(result.filters), num_filters) + fam_filter = result.filters[0] + qual_filter = result.filters[1] + self.assertTrue(isinstance(fam_filter, FamilyNameRegexFilter)) + self.assertTrue(isinstance(qual_filter, ColumnQualifierRegexFilter)) + + # Relies on the fact that RowFilter instances can + # only have one value set. 
+ self.assertEqual(fam_filter.regex, col_fam.encode('utf-8')) + self.assertEqual(qual_filter.regex, qual.encode('utf-8')) + + return result + + def test_column_only(self): + self._column_helper(num_filters=2) + + def test_column_bytes(self): + self._column_helper(num_filters=2, column=b'cfB:qualY', + col_fam=u'cfB', qual=u'qualY') + + def test_column_unicode(self): + self._column_helper(num_filters=2, column=u'cfU:qualN', + col_fam=u'cfU', qual=u'qualN') + + def test_with_versions(self): + from gcloud.bigtable.row_filters import CellsColumnLimitFilter + + versions = 11 + result = self._column_helper(num_filters=3, versions=versions) + + version_filter = result.filters[2] + self.assertTrue(isinstance(version_filter, CellsColumnLimitFilter)) + # Relies on the fact that RowFilter instances can + # only have one value set. + self.assertEqual(version_filter.num_cells, versions) + + def test_with_timestamp(self): + from gcloud._helpers import _datetime_from_microseconds + from gcloud.bigtable.row_filters import TimestampRange + from gcloud.bigtable.row_filters import TimestampRangeFilter + + timestamp = 1441928298571 + result = self._column_helper(num_filters=3, timestamp=timestamp) + + range_filter = result.filters[2] + self.assertTrue(isinstance(range_filter, TimestampRangeFilter)) + # Relies on the fact that RowFilter instances can + # only have one value set. 
+ time_range = range_filter.range_ + self.assertTrue(isinstance(time_range, TimestampRange)) + self.assertEqual(time_range.start, None) + ts_dt = _datetime_from_microseconds(1000 * timestamp) + self.assertEqual(time_range.end, ts_dt) + + def test_with_all_options(self): + versions = 11 + timestamp = 1441928298571 + self._column_helper(num_filters=4, versions=versions, + timestamp=timestamp) + + +class Test__columns_filter_helper(unittest2.TestCase): + + def _callFUT(self, *args, **kwargs): + from gcloud.bigtable.happybase.table import _columns_filter_helper + return _columns_filter_helper(*args, **kwargs) + + def test_no_columns(self): + columns = [] + with self.assertRaises(ValueError): + self._callFUT(columns) + + def test_single_column(self): + from gcloud.bigtable.row_filters import FamilyNameRegexFilter + + col_fam = 'cf1' + columns = [col_fam] + result = self._callFUT(columns) + expected_result = FamilyNameRegexFilter(col_fam) + self.assertEqual(result, expected_result) + + def test_column_and_column_families(self): + from gcloud.bigtable.row_filters import ColumnQualifierRegexFilter + from gcloud.bigtable.row_filters import FamilyNameRegexFilter + from gcloud.bigtable.row_filters import RowFilterChain + from gcloud.bigtable.row_filters import RowFilterUnion + + col_fam1 = 'cf1' + col_fam2 = 'cf2' + col_qual2 = 'qual2' + columns = [col_fam1, col_fam2 + ':' + col_qual2] + result = self._callFUT(columns) + + self.assertTrue(isinstance(result, RowFilterUnion)) + self.assertEqual(len(result.filters), 2) + filter1 = result.filters[0] + filter2 = result.filters[1] + + self.assertTrue(isinstance(filter1, FamilyNameRegexFilter)) + self.assertEqual(filter1.regex, col_fam1.encode('utf-8')) + + self.assertTrue(isinstance(filter2, RowFilterChain)) + filter2a, filter2b = filter2.filters + self.assertTrue(isinstance(filter2a, FamilyNameRegexFilter)) + self.assertEqual(filter2a.regex, col_fam2.encode('utf-8')) + self.assertTrue(isinstance(filter2b, 
ColumnQualifierRegexFilter)) + self.assertEqual(filter2b.regex, col_qual2.encode('utf-8')) + + +class Test__row_keys_filter_helper(unittest2.TestCase): + + def _callFUT(self, *args, **kwargs): + from gcloud.bigtable.happybase.table import _row_keys_filter_helper + return _row_keys_filter_helper(*args, **kwargs) + + def test_no_rows(self): + row_keys = [] + with self.assertRaises(ValueError): + self._callFUT(row_keys) + + def test_single_row(self): + from gcloud.bigtable.row_filters import RowKeyRegexFilter + + row_key = b'row-key' + row_keys = [row_key] + result = self._callFUT(row_keys) + expected_result = RowKeyRegexFilter(row_key) + self.assertEqual(result, expected_result) + + def test_many_rows(self): + from gcloud.bigtable.row_filters import RowFilterUnion + from gcloud.bigtable.row_filters import RowKeyRegexFilter + + row_key1 = b'row-key1' + row_key2 = b'row-key2' + row_key3 = b'row-key3' + row_keys = [row_key1, row_key2, row_key3] + result = self._callFUT(row_keys) + + filter1 = RowKeyRegexFilter(row_key1) + filter2 = RowKeyRegexFilter(row_key2) + filter3 = RowKeyRegexFilter(row_key3) + expected_result = RowFilterUnion(filters=[filter1, filter2, filter3]) + self.assertEqual(result, expected_result) + + +class _Connection(object): + + def __init__(self, instance): + self._instance = instance + + +class _MockLowLevelColumnFamily(object): + + def __init__(self, column_family_id, gc_rule=None): + self.column_family_id = column_family_id + self.gc_rule = gc_rule + + +class _MockLowLevelTable(object): + + def __init__(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + self.list_column_families_calls = 0 + self.column_families = {} + self.row_values = {} + self.read_row_calls = [] + self.read_row_result = None + self.read_rows_calls = [] + self.read_rows_result = None + + def list_column_families(self): + self.list_column_families_calls += 1 + return self.column_families + + def row(self, row_key, append=None): + result = self.row_values[row_key] 
+ result._append = append + return result + + def read_row(self, *args, **kwargs): + self.read_row_calls.append((args, kwargs)) + return self.read_row_result + + def read_rows(self, *args, **kwargs): + self.read_rows_calls.append((args, kwargs)) + return self.read_rows_result + + +class _MockLowLevelRow(object): + + COUNTER_DEFAULT = 0 + + def __init__(self, row_key, commit_result=None): + self.row_key = row_key + self._append = False + self.counts = {} + self.commit_result = commit_result + + def increment_cell_value(self, column_family_id, column, int_value): + count = self.counts.setdefault((column_family_id, column), + self.COUNTER_DEFAULT) + self.counts[(column_family_id, column)] = count + int_value + + def commit(self): + return self.commit_result + + +class _MockBatch(object): + + def __init__(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + self.exit_vals = [] + self.put_args = [] + self.delete_args = [] + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.exit_vals.append((exc_type, exc_value, traceback)) + + def put(self, *args): + self.put_args.append(args) + + def delete(self, *args): + self.delete_args.append(args) + + +class _MockPartialRowsData(object): + + def __init__(self, rows=None, iterations=0): + self.rows = rows or {} + self.consume_all_calls = 0 + self.consume_next_calls = 0 + self.iterations = iterations + + def consume_all(self): + self.consume_all_calls += 1 + + def consume_next(self): + self.consume_next_calls += 1 + if self.consume_next_calls > self.iterations: + raise StopIteration diff --git a/env/Lib/site-packages/gcloud/bigtable/instance.py b/env/Lib/site-packages/gcloud/bigtable/instance.py new file mode 100644 index 0000000..dec6c90 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/instance.py @@ -0,0 +1,488 @@ +# Copyright 2015 Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""User friendly container for Google Cloud Bigtable Instance.""" + + +import re + +from google.longrunning import operations_pb2 + +from gcloud._helpers import _pb_timestamp_to_datetime +from gcloud.bigtable._generated_v2 import ( + instance_pb2 as data_v2_pb2) +from gcloud.bigtable._generated_v2 import ( + bigtable_instance_admin_pb2 as messages_v2_pb2) +from gcloud.bigtable._generated_v2 import ( + bigtable_table_admin_pb2 as table_messages_v2_pb2) +from gcloud.bigtable.cluster import Cluster +from gcloud.bigtable.cluster import DEFAULT_SERVE_NODES +from gcloud.bigtable.table import Table + + +_EXISTING_INSTANCE_LOCATION_ID = 'see-existing-cluster' +_INSTANCE_NAME_RE = re.compile(r'^projects/(?P[^/]+)/' + r'instances/(?P[a-z][-a-z0-9]*)$') +_OPERATION_NAME_RE = re.compile(r'^operations/projects/([^/]+)/' + r'instances/([a-z][-a-z0-9]*)/' + r'locations/(?P[a-z][-a-z0-9]*)/' + r'operations/(?P\d+)$') +_TYPE_URL_BASE = 'type.googleapis.com/google.bigtable.' +_ADMIN_TYPE_URL_BASE = _TYPE_URL_BASE + 'admin.v2.' +_INSTANCE_CREATE_METADATA = _ADMIN_TYPE_URL_BASE + 'CreateInstanceMetadata' +_TYPE_URL_MAP = { + _INSTANCE_CREATE_METADATA: messages_v2_pb2.CreateInstanceMetadata, +} + + +def _prepare_create_request(instance): + """Creates a protobuf request for a CreateInstance request. + + :type instance: :class:`Instance` + :param instance: The instance to be created. 
+ + :rtype: :class:`.messages_v2_pb2.CreateInstanceRequest` + :returns: The CreateInstance request object containing the instance info. + """ + parent_name = ('projects/' + instance._client.project) + message = messages_v2_pb2.CreateInstanceRequest( + parent=parent_name, + instance_id=instance.instance_id, + instance=data_v2_pb2.Instance( + display_name=instance.display_name, + ), + ) + cluster = message.clusters[instance.instance_id] + cluster.name = instance.name + '/clusters/' + instance.instance_id + cluster.location = ( + parent_name + '/locations/' + instance._cluster_location_id) + cluster.serve_nodes = instance._cluster_serve_nodes + return message + + +def _parse_pb_any_to_native(any_val, expected_type=None): + """Convert a serialized "google.protobuf.Any" value to actual type. + + :type any_val: :class:`google.protobuf.any_pb2.Any` + :param any_val: A serialized protobuf value container. + + :type expected_type: str + :param expected_type: (Optional) The type URL we expect ``any_val`` + to have. + + :rtype: object + :returns: The de-serialized object. + :raises: :class:`ValueError ` if the + ``expected_type`` does not match the ``type_url`` on the input. + """ + if expected_type is not None and expected_type != any_val.type_url: + raise ValueError('Expected type: %s, Received: %s' % ( + expected_type, any_val.type_url)) + container_class = _TYPE_URL_MAP[any_val.type_url] + return container_class.FromString(any_val.value) + + +def _process_operation(operation_pb): + """Processes a create protobuf response. + + :type operation_pb: :class:`google.longrunning.operations_pb2.Operation` + :param operation_pb: The long-running operation response from a + Create/Update/Undelete instance request. + + :rtype: (int, str, datetime) + :returns: (operation_id, location_id, operation_begin). + :raises: :class:`ValueError ` if the operation name + doesn't match the :data:`_OPERATION_NAME_RE` regex. 
+ """ + match = _OPERATION_NAME_RE.match(operation_pb.name) + if match is None: + raise ValueError('Operation name was not in the expected ' + 'format after instance creation.', + operation_pb.name) + location_id = match.group('location_id') + operation_id = int(match.group('operation_id')) + + request_metadata = _parse_pb_any_to_native(operation_pb.metadata) + operation_begin = _pb_timestamp_to_datetime( + request_metadata.request_time) + + return operation_id, location_id, operation_begin + + +class Operation(object): + """Representation of a Google API Long-Running Operation. + + In particular, these will be the result of operations on + instances using the Cloud Bigtable API. + + :type op_type: str + :param op_type: The type of operation being performed. Expect + ``create``, ``update`` or ``undelete``. + + :type op_id: int + :param op_id: The ID of the operation. + + :type begin: :class:`datetime.datetime` + :param begin: The time when the operation was started. + + :type location_id: str + :param location_id: ID of the location in which the operation is running + + :type instance: :class:`Instance` + :param instance: The instance that created the operation. + """ + + def __init__(self, op_type, op_id, begin, location_id, instance=None): + self.op_type = op_type + self.op_id = op_id + self.begin = begin + self.location_id = location_id + self._instance = instance + self._complete = False + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return (other.op_type == self.op_type and + other.op_id == self.op_id and + other.begin == self.begin and + other.location_id == self.location_id and + other._instance == self._instance and + other._complete == self._complete) + + def __ne__(self, other): + return not self.__eq__(other) + + def finished(self): + """Check if the operation has finished. + + :rtype: bool + :returns: A boolean indicating if the current operation has completed. 
+ :raises: :class:`ValueError ` if the operation + has already completed. + """ + if self._complete: + raise ValueError('The operation has completed.') + + operation_name = ( + 'operations/%s/locations/%s/operations/%d' % + (self._instance.name, self.location_id, self.op_id)) + request_pb = operations_pb2.GetOperationRequest(name=operation_name) + # We expect a `google.longrunning.operations_pb2.Operation`. + operation_pb = self._instance._client._operations_stub.GetOperation( + request_pb, self._instance._client.timeout_seconds) + + if operation_pb.done: + self._complete = True + return True + else: + return False + + +class Instance(object): + """Representation of a Google Cloud Bigtable Instance. + + We can use a :class:`Instance` to: + + * :meth:`reload` itself + * :meth:`create` itself + * :meth:`update` itself + * :meth:`delete` itself + * :meth:`undelete` itself + + .. note:: + + For now, we leave out the ``default_storage_type`` (an enum) + which if not sent will end up as :data:`.data_v2_pb2.STORAGE_SSD`. + + :type instance_id: str + :param instance_id: The ID of the instance. + + :type client: :class:`Client ` + :param client: The client that owns the instance. Provides + authorization and a project ID. + + :type location_id: str + :param location_id: ID of the location in which the instance will be + created. Required for instances which do not yet + exist. + + :type display_name: str + :param display_name: (Optional) The display name for the instance in the + Cloud Console UI. (Must be between 4 and 30 + characters.) If this value is not set in the + constructor, will fall back to the instance ID. + + :type serve_nodes: int + :param serve_nodes: (Optional) The number of nodes in the instance's + cluster; used to set up the instance's cluster. 
+ """ + + def __init__(self, instance_id, client, + location_id=_EXISTING_INSTANCE_LOCATION_ID, + display_name=None, + serve_nodes=DEFAULT_SERVE_NODES): + self.instance_id = instance_id + self.display_name = display_name or instance_id + self._cluster_location_id = location_id + self._cluster_serve_nodes = serve_nodes + self._client = client + + def _update_from_pb(self, instance_pb): + """Refresh self from the server-provided protobuf. + + Helper for :meth:`from_pb` and :meth:`reload`. + """ + if not instance_pb.display_name: # Simple field (string) + raise ValueError('Instance protobuf does not contain display_name') + self.display_name = instance_pb.display_name + + @classmethod + def from_pb(cls, instance_pb, client): + """Creates a instance instance from a protobuf. + + :type instance_pb: :class:`instance_pb2.Instance` + :param instance_pb: A instance protobuf object. + + :type client: :class:`Client ` + :param client: The client that owns the instance. + + :rtype: :class:`Instance` + :returns: The instance parsed from the protobuf response. + :raises: :class:`ValueError ` if the instance + name does not match + ``projects/{project}/instances/{instance_id}`` + or if the parsed project ID does not match the project ID + on the client. + """ + match = _INSTANCE_NAME_RE.match(instance_pb.name) + if match is None: + raise ValueError('Instance protobuf name was not in the ' + 'expected format.', instance_pb.name) + if match.group('project') != client.project: + raise ValueError('Project ID on instance does not match the ' + 'project ID on the client') + instance_id = match.group('instance_id') + + result = cls(instance_id, client, _EXISTING_INSTANCE_LOCATION_ID) + result._update_from_pb(instance_pb) + return result + + def copy(self): + """Make a copy of this instance. + + Copies the local data stored as simple types and copies the client + attached to this instance. + + :rtype: :class:`.Instance` + :returns: A copy of the current instance. 
+ """ + new_client = self._client.copy() + return self.__class__(self.instance_id, new_client, + self._cluster_location_id, + display_name=self.display_name) + + @property + def name(self): + """Instance name used in requests. + + .. note:: + This property will not change if ``instance_id`` does not, + but the return value is not cached. + + The instance name is of the form + + ``"projects/{project}/instances/{instance_id}"`` + + :rtype: str + :returns: The instance name. + """ + return self._client.project_name + '/instances/' + self.instance_id + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + # NOTE: This does not compare the configuration values, such as + # the display_name. Instead, it only compares + # identifying values instance ID and client. This is + # intentional, since the same instance can be in different states + # if not synchronized. Instances with similar instance + # settings but different clients can't be used in the same way. + return (other.instance_id == self.instance_id and + other._client == self._client) + + def __ne__(self, other): + return not self.__eq__(other) + + def reload(self): + """Reload the metadata for this instance.""" + request_pb = messages_v2_pb2.GetInstanceRequest(name=self.name) + # We expect `data_v2_pb2.Instance`. + instance_pb = self._client._instance_stub.GetInstance( + request_pb, self._client.timeout_seconds) + + # NOTE: _update_from_pb does not check that the project and + # instance ID on the response match the request. + self._update_from_pb(instance_pb) + + def create(self): + """Create this instance. + + .. note:: + + Uses the ``project`` and ``instance_id`` on the current + :class:`Instance` in addition to the ``display_name``. + To change them before creating, reset the values via + + .. code:: python + + instance.display_name = 'New display name' + instance.instance_id = 'i-changed-my-mind' + + before calling :meth:`create`. 
+ + :rtype: :class:`Operation` + :returns: The long-running operation corresponding to the + create operation. + """ + request_pb = _prepare_create_request(self) + # We expect a `google.longrunning.operations_pb2.Operation`. + operation_pb = self._client._instance_stub.CreateInstance( + request_pb, self._client.timeout_seconds) + + op_id, loc_id, op_begin = _process_operation(operation_pb) + return Operation('create', op_id, op_begin, loc_id, instance=self) + + def update(self): + """Update this instance. + + .. note:: + + Updates the ``display_name``. To change that value before + updating, reset its values via + + .. code:: python + + instance.display_name = 'New display name' + + before calling :meth:`update`. + """ + request_pb = data_v2_pb2.Instance( + name=self.name, + display_name=self.display_name, + ) + # Ignore the expected `data_v2_pb2.Instance`. + self._client._instance_stub.UpdateInstance( + request_pb, self._client.timeout_seconds) + + def delete(self): + """Delete this instance. + + Marks a instance and all of its tables for permanent deletion + in 7 days. + + Immediately upon completion of the request: + + * Billing will cease for all of the instance's reserved resources. + * The instance's ``delete_time`` field will be set 7 days in + the future. + + Soon afterward: + + * All tables within the instance will become unavailable. + + Prior to the instance's ``delete_time``: + + * The instance can be recovered with a call to ``UndeleteInstance``. + * All other attempts to modify or delete the instance will be rejected. + + At the instance's ``delete_time``: + + * The instance and **all of its tables** will immediately and + irrevocably disappear from the API, and their data will be + permanently deleted. 
+ """ + request_pb = messages_v2_pb2.DeleteInstanceRequest(name=self.name) + # We expect a `google.protobuf.empty_pb2.Empty` + self._client._instance_stub.DeleteInstance( + request_pb, self._client.timeout_seconds) + + def cluster(self, cluster_id, serve_nodes=3): + """Factory to create a cluster associated with this client. + + :type cluster_id: str + :param cluster_id: The ID of the cluster. + + :type serve_nodes: int + :param serve_nodes: (Optional) The number of nodes in the cluster. + Defaults to 3. + + :rtype: :class:`.Cluster` + :returns: The cluster owned by this client. + """ + return Cluster(cluster_id, self, serve_nodes=serve_nodes) + + def list_clusters(self): + """Lists clusters in this instance. + + :rtype: tuple + :returns: A pair of results, the first is a list of :class:`.Cluster` s + returned and the second is a list of strings (the failed + locations in the request). + """ + request_pb = messages_v2_pb2.ListClustersRequest(parent=self.name) + # We expect a `.cluster_messages_v1_pb2.ListClustersResponse` + list_clusters_response = self._client._instance_stub.ListClusters( + request_pb, self._client.timeout_seconds) + + failed_locations = [ + location for location in list_clusters_response.failed_locations] + clusters = [Cluster.from_pb(cluster_pb, self) + for cluster_pb in list_clusters_response.clusters] + return clusters, failed_locations + + def table(self, table_id): + """Factory to create a table associated with this instance. + + :type table_id: str + :param table_id: The ID of the table. + + :rtype: :class:`Table ` + :returns: The table owned by this instance. + """ + return Table(table_id, self) + + def list_tables(self): + """List the tables in this instance. + + :rtype: list of :class:`Table ` + :returns: The list of tables owned by the instance. + :raises: :class:`ValueError ` if one of the + returned tables has a name that is not of the expected format. 
+ """ + request_pb = table_messages_v2_pb2.ListTablesRequest(parent=self.name) + # We expect a `table_messages_v2_pb2.ListTablesResponse` + table_list_pb = self._client._table_stub.ListTables( + request_pb, self._client.timeout_seconds) + + result = [] + for table_pb in table_list_pb.tables: + table_prefix = self.name + '/tables/' + if not table_pb.name.startswith(table_prefix): + raise ValueError('Table name %s not of expected format' % ( + table_pb.name,)) + table_id = table_pb.name[len(table_prefix):] + result.append(self.table(table_id)) + + return result diff --git a/env/Lib/site-packages/gcloud/bigtable/read-rows-acceptance-test.json b/env/Lib/site-packages/gcloud/bigtable/read-rows-acceptance-test.json new file mode 100644 index 0000000..4973831 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/read-rows-acceptance-test.json @@ -0,0 +1,1178 @@ +{ + "tests": [ + { + "name": "invalid - no commit", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: false\n" + ], + "results": [ + { + "rk": "", + "fm": "", + "qual": "", + "ts": 0, + "value": "", + "label": "", + "error": true + } + ] + }, + { + "name": "invalid - no cell key before commit", + "chunks": [ + "commit_row: true\n" + ], + "results": [ + { + "rk": "", + "fm": "", + "qual": "", + "ts": 0, + "value": "", + "label": "", + "error": true + } + ] + }, + { + "name": "invalid - no cell key before value", + "chunks": [ + "timestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: false\n" + ], + "results": [ + { + "rk": "", + "fm": "", + "qual": "", + "ts": 0, + "value": "", + "label": "", + "error": true + } + ] + }, + { + "name": "invalid - new col family must specify qualifier", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 101\nvalue: \"value-VAL_1\"\ncommit_row: false\n", + 
"family_name: \u003c\n value: \"B\"\n\u003e\ntimestamp_micros: 102\nvalue: \"value-VAL_2\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "", + "fm": "", + "qual": "", + "ts": 0, + "value": "", + "label": "", + "error": true + } + ] + }, + { + "name": "bare commit implies ts=0", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: false\n", + "commit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL", + "label": "", + "error": false + }, + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 0, + "value": "", + "label": "", + "error": false + } + ] + }, + { + "name": "simple row with timestamp", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL", + "label": "", + "error": false + } + ] + }, + { + "name": "missing timestamp, implied ts=0", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\nvalue: \"value-VAL\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 0, + "value": "value-VAL", + "label": "", + "error": false + } + ] + }, + { + "name": "empty cell value", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 0, + "value": "", + "label": "", + "error": false + } + ] + }, + { + "name": "two unsplit cells", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 101\nvalue: 
\"value-VAL_1\"\ncommit_row: false\n", + "timestamp_micros: 102\nvalue: \"value-VAL_2\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 101, + "value": "value-VAL_1", + "label": "", + "error": false + }, + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 102, + "value": "value-VAL_2", + "label": "", + "error": false + } + ] + }, + { + "name": "two qualifiers", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 101\nvalue: \"value-VAL_1\"\ncommit_row: false\n", + "qualifier: \u003c\n value: \"D\"\n\u003e\ntimestamp_micros: 102\nvalue: \"value-VAL_2\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 101, + "value": "value-VAL_1", + "label": "", + "error": false + }, + { + "rk": "RK", + "fm": "A", + "qual": "D", + "ts": 102, + "value": "value-VAL_2", + "label": "", + "error": false + } + ] + }, + { + "name": "two families", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 101\nvalue: \"value-VAL_1\"\ncommit_row: false\n", + "family_name: \u003c\n value: \"B\"\n\u003e\nqualifier: \u003c\n value: \"E\"\n\u003e\ntimestamp_micros: 102\nvalue: \"value-VAL_2\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 101, + "value": "value-VAL_1", + "label": "", + "error": false + }, + { + "rk": "RK", + "fm": "B", + "qual": "E", + "ts": 102, + "value": "value-VAL_2", + "label": "", + "error": false + } + ] + }, + { + "name": "with labels", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 101\nlabels: \"L_1\"\nvalue: \"value-VAL_1\"\ncommit_row: false\n", + "timestamp_micros: 102\nlabels: \"L_2\"\nvalue: \"value-VAL_2\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + 
"fm": "A", + "qual": "C", + "ts": 101, + "value": "value-VAL_1", + "label": "L_1", + "error": false + }, + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 102, + "value": "value-VAL_2", + "label": "L_2", + "error": false + } + ] + }, + { + "name": "split cell, bare commit", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"v\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"alue-VAL\"\ncommit_row: false\n", + "commit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL", + "label": "", + "error": false + }, + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 0, + "value": "", + "label": "", + "error": false + } + ] + }, + { + "name": "split cell", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"v\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"alue-VAL\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL", + "label": "", + "error": false + } + ] + }, + { + "name": "split four ways", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nlabels: \"L\"\nvalue: \"v\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"a\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"l\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"ue-VAL\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL", + "label": "L", + "error": false + } + ] + }, + { + "name": "two split cells", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 101\nvalue: \"v\"\nvalue_size: 10\ncommit_row: false\n", + "value: 
\"alue-VAL_1\"\ncommit_row: false\n", + "timestamp_micros: 102\nvalue: \"v\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"alue-VAL_2\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 101, + "value": "value-VAL_1", + "label": "", + "error": false + }, + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 102, + "value": "value-VAL_2", + "label": "", + "error": false + } + ] + }, + { + "name": "multi-qualifier splits", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 101\nvalue: \"v\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"alue-VAL_1\"\ncommit_row: false\n", + "qualifier: \u003c\n value: \"D\"\n\u003e\ntimestamp_micros: 102\nvalue: \"v\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"alue-VAL_2\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 101, + "value": "value-VAL_1", + "label": "", + "error": false + }, + { + "rk": "RK", + "fm": "A", + "qual": "D", + "ts": 102, + "value": "value-VAL_2", + "label": "", + "error": false + } + ] + }, + { + "name": "multi-qualifier multi-split", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 101\nvalue: \"v\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"a\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"lue-VAL_1\"\ncommit_row: false\n", + "qualifier: \u003c\n value: \"D\"\n\u003e\ntimestamp_micros: 102\nvalue: \"v\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"a\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"lue-VAL_2\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 101, + "value": "value-VAL_1", + "label": "", + "error": false + }, + { + "rk": "RK", + "fm": "A", + "qual": "D", + "ts": 102, + "value": "value-VAL_2", + "label": "", + "error": false + } + ] + }, + { + 
"name": "multi-family split", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 101\nvalue: \"v\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"alue-VAL_1\"\ncommit_row: false\n", + "family_name: \u003c\n value: \"B\"\n\u003e\nqualifier: \u003c\n value: \"E\"\n\u003e\ntimestamp_micros: 102\nvalue: \"v\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"alue-VAL_2\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 101, + "value": "value-VAL_1", + "label": "", + "error": false + }, + { + "rk": "RK", + "fm": "B", + "qual": "E", + "ts": 102, + "value": "value-VAL_2", + "label": "", + "error": false + } + ] + }, + { + "name": "invalid - no commit between rows", + "chunks": [ + "row_key: \"RK_1\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: false\n", + "row_key: \"RK_2\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: false\n" + ], + "results": [ + { + "rk": "", + "fm": "", + "qual": "", + "ts": 0, + "value": "", + "label": "", + "error": true + } + ] + }, + { + "name": "invalid - no commit after first row", + "chunks": [ + "row_key: \"RK_1\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: false\n", + "row_key: \"RK_2\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "", + "fm": "", + "qual": "", + "ts": 0, + "value": "", + "label": "", + "error": true + } + ] + }, + { + "name": "invalid - last row missing commit", + "chunks": [ + "row_key: \"RK_1\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n 
value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: true\n", + "row_key: \"RK_2\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: false\n" + ], + "results": [ + { + "rk": "RK_1", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL", + "label": "", + "error": false + }, + { + "rk": "", + "fm": "", + "qual": "", + "ts": 0, + "value": "", + "label": "", + "error": true + } + ] + }, + { + "name": "invalid - duplicate row key", + "chunks": [ + "row_key: \"RK_1\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: true\n", + "row_key: \"RK_1\"\nfamily_name: \u003c\n value: \"B\"\n\u003e\nqualifier: \u003c\n value: \"D\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK_1", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL", + "label": "", + "error": false + }, + { + "rk": "", + "fm": "", + "qual": "", + "ts": 0, + "value": "", + "label": "", + "error": true + } + ] + }, + { + "name": "invalid - new row missing row key", + "chunks": [ + "row_key: \"RK_1\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: true\n", + "timestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK_1", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL", + "label": "", + "error": false + }, + { + "rk": "", + "fm": "", + "qual": "", + "ts": 0, + "value": "", + "label": "", + "error": true + } + ] + }, + { + "name": "two rows", + "chunks": [ + "row_key: \"RK_1\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: true\n", + "row_key: \"RK_2\"\nfamily_name: \u003c\n 
value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK_1", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL", + "label": "", + "error": false + }, + { + "rk": "RK_2", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL", + "label": "", + "error": false + } + ] + }, + { + "name": "two rows implicit timestamp", + "chunks": [ + "row_key: \"RK_1\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\nvalue: \"value-VAL\"\ncommit_row: true\n", + "row_key: \"RK_2\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK_1", + "fm": "A", + "qual": "C", + "ts": 0, + "value": "value-VAL", + "label": "", + "error": false + }, + { + "rk": "RK_2", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL", + "label": "", + "error": false + } + ] + }, + { + "name": "two rows empty value", + "chunks": [ + "row_key: \"RK_1\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ncommit_row: true\n", + "row_key: \"RK_2\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK_1", + "fm": "A", + "qual": "C", + "ts": 0, + "value": "", + "label": "", + "error": false + }, + { + "rk": "RK_2", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL", + "label": "", + "error": false + } + ] + }, + { + "name": "two rows, one with multiple cells", + "chunks": [ + "row_key: \"RK_1\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 101\nvalue: \"value-VAL_1\"\ncommit_row: false\n", + "timestamp_micros: 102\nvalue: \"value-VAL_2\"\ncommit_row: true\n", + "row_key: 
\"RK_2\"\nfamily_name: \u003c\n value: \"B\"\n\u003e\nqualifier: \u003c\n value: \"D\"\n\u003e\ntimestamp_micros: 103\nvalue: \"value-VAL_3\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK_1", + "fm": "A", + "qual": "C", + "ts": 101, + "value": "value-VAL_1", + "label": "", + "error": false + }, + { + "rk": "RK_1", + "fm": "A", + "qual": "C", + "ts": 102, + "value": "value-VAL_2", + "label": "", + "error": false + }, + { + "rk": "RK_2", + "fm": "B", + "qual": "D", + "ts": 103, + "value": "value-VAL_3", + "label": "", + "error": false + } + ] + }, + { + "name": "two rows, multiple cells", + "chunks": [ + "row_key: \"RK_1\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 101\nvalue: \"value-VAL_1\"\ncommit_row: false\n", + "qualifier: \u003c\n value: \"D\"\n\u003e\ntimestamp_micros: 102\nvalue: \"value-VAL_2\"\ncommit_row: true\n", + "row_key: \"RK_2\"\nfamily_name: \u003c\n value: \"B\"\n\u003e\nqualifier: \u003c\n value: \"E\"\n\u003e\ntimestamp_micros: 103\nvalue: \"value-VAL_3\"\ncommit_row: false\n", + "qualifier: \u003c\n value: \"F\"\n\u003e\ntimestamp_micros: 104\nvalue: \"value-VAL_4\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK_1", + "fm": "A", + "qual": "C", + "ts": 101, + "value": "value-VAL_1", + "label": "", + "error": false + }, + { + "rk": "RK_1", + "fm": "A", + "qual": "D", + "ts": 102, + "value": "value-VAL_2", + "label": "", + "error": false + }, + { + "rk": "RK_2", + "fm": "B", + "qual": "E", + "ts": 103, + "value": "value-VAL_3", + "label": "", + "error": false + }, + { + "rk": "RK_2", + "fm": "B", + "qual": "F", + "ts": 104, + "value": "value-VAL_4", + "label": "", + "error": false + } + ] + }, + { + "name": "two rows, multiple cells, multiple families", + "chunks": [ + "row_key: \"RK_1\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 101\nvalue: \"value-VAL_1\"\ncommit_row: false\n", + "family_name: \u003c\n 
value: \"B\"\n\u003e\nqualifier: \u003c\n value: \"E\"\n\u003e\ntimestamp_micros: 102\nvalue: \"value-VAL_2\"\ncommit_row: true\n", + "row_key: \"RK_2\"\nfamily_name: \u003c\n value: \"M\"\n\u003e\nqualifier: \u003c\n value: \"O\"\n\u003e\ntimestamp_micros: 103\nvalue: \"value-VAL_3\"\ncommit_row: false\n", + "family_name: \u003c\n value: \"N\"\n\u003e\nqualifier: \u003c\n value: \"P\"\n\u003e\ntimestamp_micros: 104\nvalue: \"value-VAL_4\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK_1", + "fm": "A", + "qual": "C", + "ts": 101, + "value": "value-VAL_1", + "label": "", + "error": false + }, + { + "rk": "RK_1", + "fm": "B", + "qual": "E", + "ts": 102, + "value": "value-VAL_2", + "label": "", + "error": false + }, + { + "rk": "RK_2", + "fm": "M", + "qual": "O", + "ts": 103, + "value": "value-VAL_3", + "label": "", + "error": false + }, + { + "rk": "RK_2", + "fm": "N", + "qual": "P", + "ts": 104, + "value": "value-VAL_4", + "label": "", + "error": false + } + ] + }, + { + "name": "two rows, four cells, 2 labels", + "chunks": [ + "row_key: \"RK_1\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 101\nlabels: \"L_1\"\nvalue: \"value-VAL_1\"\ncommit_row: false\n", + "timestamp_micros: 102\nvalue: \"value-VAL_2\"\ncommit_row: true\n", + "row_key: \"RK_2\"\nfamily_name: \u003c\n value: \"B\"\n\u003e\nqualifier: \u003c\n value: \"D\"\n\u003e\ntimestamp_micros: 103\nlabels: \"L_3\"\nvalue: \"value-VAL_3\"\ncommit_row: false\n", + "timestamp_micros: 104\nvalue: \"value-VAL_4\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK_1", + "fm": "A", + "qual": "C", + "ts": 101, + "value": "value-VAL_1", + "label": "L_1", + "error": false + }, + { + "rk": "RK_1", + "fm": "A", + "qual": "C", + "ts": 102, + "value": "value-VAL_2", + "label": "", + "error": false + }, + { + "rk": "RK_2", + "fm": "B", + "qual": "D", + "ts": 103, + "value": "value-VAL_3", + "label": "L_3", + "error": false + }, + { + "rk": "RK_2", + 
"fm": "B", + "qual": "D", + "ts": 104, + "value": "value-VAL_4", + "label": "", + "error": false + } + ] + }, + { + "name": "two rows with splits, same timestamp", + "chunks": [ + "row_key: \"RK_1\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"v\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"alue-VAL_1\"\ncommit_row: true\n", + "row_key: \"RK_2\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"v\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"alue-VAL_2\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK_1", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL_1", + "label": "", + "error": false + }, + { + "rk": "RK_2", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL_2", + "label": "", + "error": false + } + ] + }, + { + "name": "invalid - bare reset", + "chunks": [ + "reset_row: true\n" + ], + "results": [ + { + "rk": "", + "fm": "", + "qual": "", + "ts": 0, + "value": "", + "label": "", + "error": true + } + ] + }, + { + "name": "invalid - bad reset, no commit", + "chunks": [ + "reset_row: true\n", + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: false\n" + ], + "results": [ + { + "rk": "", + "fm": "", + "qual": "", + "ts": 0, + "value": "", + "label": "", + "error": true + } + ] + }, + { + "name": "invalid - missing key after reset", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: false\n", + "reset_row: true\n", + "timestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "", + "fm": "", + "qual": "", + "ts": 0, + "value": "", + "label": "", + "error": true + } + ] + }, + { + "name": "no data after 
reset", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: false\n", + "reset_row: true\n" + ], + "results": null + }, + { + "name": "simple reset", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: false\n", + "reset_row: true\n", + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL", + "label": "", + "error": false + } + ] + }, + { + "name": "reset to new val", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL_1\"\ncommit_row: false\n", + "reset_row: true\n", + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL_2\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL_2", + "label": "", + "error": false + } + ] + }, + { + "name": "reset to new qual", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL_1\"\ncommit_row: false\n", + "reset_row: true\n", + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"D\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL_1\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "D", + "ts": 100, + "value": "value-VAL_1", + "label": "", + "error": false + } + ] + }, + { + "name": "reset with splits", + "chunks": [ + 
"row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL_1\"\ncommit_row: false\n", + "timestamp_micros: 102\nvalue: \"value-VAL_2\"\ncommit_row: false\n", + "reset_row: true\n", + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL_2\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL_2", + "label": "", + "error": false + } + ] + }, + { + "name": "reset two cells", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL_1\"\ncommit_row: false\n", + "reset_row: true\n", + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL_2\"\ncommit_row: false\n", + "timestamp_micros: 103\nvalue: \"value-VAL_3\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL_2", + "label": "", + "error": false + }, + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 103, + "value": "value-VAL_3", + "label": "", + "error": false + } + ] + }, + { + "name": "two resets", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL_1\"\ncommit_row: false\n", + "reset_row: true\n", + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL_2\"\ncommit_row: false\n", + "reset_row: true\n", + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL_3\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": 
"RK", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL_3", + "label": "", + "error": false + } + ] + }, + { + "name": "reset then two cells", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL_1\"\ncommit_row: false\n", + "reset_row: true\n", + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"B\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL_2\"\ncommit_row: false\n", + "qualifier: \u003c\n value: \"D\"\n\u003e\ntimestamp_micros: 103\nvalue: \"value-VAL_3\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "B", + "qual": "C", + "ts": 100, + "value": "value-VAL_2", + "label": "", + "error": false + }, + { + "rk": "RK", + "fm": "B", + "qual": "D", + "ts": 103, + "value": "value-VAL_3", + "label": "", + "error": false + } + ] + }, + { + "name": "reset to new row", + "chunks": [ + "row_key: \"RK_1\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL_1\"\ncommit_row: false\n", + "reset_row: true\n", + "row_key: \"RK_2\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL_2\"\ncommit_row: true\n" + ], + "results": [ + { + "rk": "RK_2", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL_2", + "label": "", + "error": false + } + ] + }, + { + "name": "reset in between chunks", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nlabels: \"L\"\nvalue: \"v\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"a\"\nvalue_size: 10\ncommit_row: false\n", + "reset_row: true\n", + "row_key: \"RK_1\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL_1\"\ncommit_row: 
true\n" + ], + "results": [ + { + "rk": "RK_1", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL_1", + "label": "", + "error": false + } + ] + }, + { + "name": "invalid - reset with chunk", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nlabels: \"L\"\nvalue: \"v\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"a\"\nvalue_size: 10\nreset_row: true\n" + ], + "results": [ + { + "rk": "", + "fm": "", + "qual": "", + "ts": 0, + "value": "", + "label": "", + "error": true + } + ] + }, + { + "name": "invalid - commit with chunk", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nlabels: \"L\"\nvalue: \"v\"\nvalue_size: 10\ncommit_row: false\n", + "value: \"a\"\nvalue_size: 10\ncommit_row: true\n" + ], + "results": [ + { + "rk": "", + "fm": "", + "qual": "", + "ts": 0, + "value": "", + "label": "", + "error": true + } + ] + }, + { + "name": "empty cell chunk", + "chunks": [ + "row_key: \"RK\"\nfamily_name: \u003c\n value: \"A\"\n\u003e\nqualifier: \u003c\n value: \"C\"\n\u003e\ntimestamp_micros: 100\nvalue: \"value-VAL\"\ncommit_row: false\n", + "commit_row: false\n", + "commit_row: true\n" + ], + "results": [ + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 100, + "value": "value-VAL", + "label": "", + "error": false + }, + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 0, + "value": "", + "label": "", + "error": false + }, + { + "rk": "RK", + "fm": "A", + "qual": "C", + "ts": 0, + "value": "", + "label": "", + "error": false + } + ] + } + ] +} \ No newline at end of file diff --git a/env/Lib/site-packages/gcloud/bigtable/row.py b/env/Lib/site-packages/gcloud/bigtable/row.py new file mode 100644 index 0000000..aad7dbe --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/row.py @@ -0,0 +1,889 @@ +# Copyright 2015 Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""User friendly container for Google Cloud Bigtable Row.""" + + +import struct + +import six + +from gcloud._helpers import _datetime_from_microseconds +from gcloud._helpers import _microseconds_from_datetime +from gcloud._helpers import _to_bytes +from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) +from gcloud.bigtable._generated_v2 import ( + bigtable_pb2 as messages_v2_pb2) + + +_PACK_I64 = struct.Struct('>q').pack + +MAX_MUTATIONS = 100000 +"""The maximum number of mutations that a row can accumulate.""" + + +class Row(object): + """Base representation of a Google Cloud Bigtable Row. + + This class has three subclasses corresponding to the three + RPC methods for sending row mutations: + + * :class:`DirectRow` for ``MutateRow`` + * :class:`ConditionalRow` for ``CheckAndMutateRow`` + * :class:`AppendRow` for ``ReadModifyWriteRow`` + + :type row_key: bytes + :param row_key: The key for the current row. + + :type table: :class:`Table ` + :param table: The table that owns the row. + """ + + def __init__(self, row_key, table): + self._row_key = _to_bytes(row_key) + self._table = table + + +class _SetDeleteRow(Row): + """Row helper for setting or deleting cell values. 
+ + Implements helper methods to add mutations to set or delete cell contents: + + * :meth:`set_cell` + * :meth:`delete` + * :meth:`delete_cell` + * :meth:`delete_cells` + + :type row_key: bytes + :param row_key: The key for the current row. + + :type table: :class:`Table ` + :param table: The table that owns the row. + """ + + ALL_COLUMNS = object() + """Sentinel value used to indicate all columns in a column family.""" + + def _get_mutations(self, state): + """Gets the list of mutations for a given state. + + This method intended to be implemented by subclasses. + + ``state`` may not need to be used by all subclasses. + + :type state: bool + :param state: The state that the mutation should be + applied in. + + :raises: :class:`NotImplementedError ` + always. + """ + raise NotImplementedError + + def _set_cell(self, column_family_id, column, value, timestamp=None, + state=None): + """Helper for :meth:`set_cell` + + Adds a mutation to set the value in a specific cell. + + ``state`` is unused by :class:`DirectRow` but is used by + subclasses. + + :type column_family_id: str + :param column_family_id: The column family that contains the column. + Must be of the form + ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. + + :type column: bytes + :param column: The column within the column family where the cell + is located. + + :type value: bytes or :class:`int` + :param value: The value to set in the cell. If an integer is used, + will be interpreted as a 64-bit big-endian signed + integer (8 bytes). + + :type timestamp: :class:`datetime.datetime` + :param timestamp: (Optional) The timestamp of the operation. + + :type state: bool + :param state: (Optional) The state that is passed along to + :meth:`_get_mutations`. + """ + column = _to_bytes(column) + if isinstance(value, six.integer_types): + value = _PACK_I64(value) + value = _to_bytes(value) + if timestamp is None: + # Use -1 for current Bigtable server time. 
+ timestamp_micros = -1 + else: + timestamp_micros = _microseconds_from_datetime(timestamp) + # Truncate to millisecond granularity. + timestamp_micros -= (timestamp_micros % 1000) + + mutation_val = data_v2_pb2.Mutation.SetCell( + family_name=column_family_id, + column_qualifier=column, + timestamp_micros=timestamp_micros, + value=value, + ) + mutation_pb = data_v2_pb2.Mutation(set_cell=mutation_val) + self._get_mutations(state).append(mutation_pb) + + def _delete(self, state=None): + """Helper for :meth:`delete` + + Adds a delete mutation (for the entire row) to the accumulated + mutations. + + ``state`` is unused by :class:`DirectRow` but is used by + subclasses. + + :type state: bool + :param state: (Optional) The state that is passed along to + :meth:`_get_mutations`. + """ + mutation_val = data_v2_pb2.Mutation.DeleteFromRow() + mutation_pb = data_v2_pb2.Mutation(delete_from_row=mutation_val) + self._get_mutations(state).append(mutation_pb) + + def _delete_cells(self, column_family_id, columns, time_range=None, + state=None): + """Helper for :meth:`delete_cell` and :meth:`delete_cells`. + + ``state`` is unused by :class:`DirectRow` but is used by + subclasses. + + :type column_family_id: str + :param column_family_id: The column family that contains the column + or columns with cells being deleted. Must be + of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. + + :type columns: :class:`list` of :class:`str` / + :func:`unicode `, or :class:`object` + :param columns: The columns within the column family that will have + cells deleted. If :attr:`ALL_COLUMNS` is used then + the entire column family will be deleted from the row. + + :type time_range: :class:`TimestampRange` + :param time_range: (Optional) The range of time within which cells + should be deleted. + + :type state: bool + :param state: (Optional) The state that is passed along to + :meth:`_get_mutations`. 
+ """ + mutations_list = self._get_mutations(state) + if columns is self.ALL_COLUMNS: + mutation_val = data_v2_pb2.Mutation.DeleteFromFamily( + family_name=column_family_id, + ) + mutation_pb = data_v2_pb2.Mutation(delete_from_family=mutation_val) + mutations_list.append(mutation_pb) + else: + delete_kwargs = {} + if time_range is not None: + delete_kwargs['time_range'] = time_range.to_pb() + + to_append = [] + for column in columns: + column = _to_bytes(column) + # time_range will never change if present, but the rest of + # delete_kwargs will + delete_kwargs.update( + family_name=column_family_id, + column_qualifier=column, + ) + mutation_val = data_v2_pb2.Mutation.DeleteFromColumn( + **delete_kwargs) + mutation_pb = data_v2_pb2.Mutation( + delete_from_column=mutation_val) + to_append.append(mutation_pb) + + # We don't add the mutations until all columns have been + # processed without error. + mutations_list.extend(to_append) + + +class DirectRow(_SetDeleteRow): + """Google Cloud Bigtable Row for sending "direct" mutations. + + These mutations directly set or delete cell contents: + + * :meth:`set_cell` + * :meth:`delete` + * :meth:`delete_cell` + * :meth:`delete_cells` + + These methods can be used directly:: + + >>> row = table.row(b'row-key1') + >>> row.set_cell(u'fam', b'col1', b'cell-val') + >>> row.delete_cell(u'fam', b'col2') + + .. note:: + + A :class:`DirectRow` accumulates mutations locally via the + :meth:`set_cell`, :meth:`delete`, :meth:`delete_cell` and + :meth:`delete_cells` methods. To actually send these mutations to the + Google Cloud Bigtable API, you must call :meth:`commit`. + + :type row_key: bytes + :param row_key: The key for the current row. + + :type table: :class:`Table ` + :param table: The table that owns the row. 
+ """ + + def __init__(self, row_key, table): + super(DirectRow, self).__init__(row_key, table) + self._pb_mutations = [] + + def _get_mutations(self, state): # pylint: disable=unused-argument + """Gets the list of mutations for a given state. + + ``state`` is unused by :class:`DirectRow` but is used by + subclasses. + + :type state: bool + :param state: The state that the mutation should be + applied in. + + :rtype: list + :returns: The list to add new mutations to (for the current state). + """ + return self._pb_mutations + + def set_cell(self, column_family_id, column, value, timestamp=None): + """Sets a value in this row. + + The cell is determined by the ``row_key`` of this :class:`DirectRow` + and the ``column``. The ``column`` must be in an existing + :class:`.ColumnFamily` (as determined by ``column_family_id``). + + .. note:: + + This method adds a mutation to the accumulated mutations on this + row, but does not make an API request. To actually + send an API request (with the mutations) to the Google Cloud + Bigtable API, call :meth:`commit`. + + :type column_family_id: str + :param column_family_id: The column family that contains the column. + Must be of the form + ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. + + :type column: bytes + :param column: The column within the column family where the cell + is located. + + :type value: bytes or :class:`int` + :param value: The value to set in the cell. If an integer is used, + will be interpreted as a 64-bit big-endian signed + integer (8 bytes). + + :type timestamp: :class:`datetime.datetime` + :param timestamp: (Optional) The timestamp of the operation. + """ + self._set_cell(column_family_id, column, value, timestamp=timestamp, + state=None) + + def delete(self): + """Deletes this row from the table. + + .. note:: + + This method adds a mutation to the accumulated mutations on this + row, but does not make an API request. 
To actually + send an API request (with the mutations) to the Google Cloud + Bigtable API, call :meth:`commit`. + """ + self._delete(state=None) + + def delete_cell(self, column_family_id, column, time_range=None): + """Deletes cell in this row. + + .. note:: + + This method adds a mutation to the accumulated mutations on this + row, but does not make an API request. To actually + send an API request (with the mutations) to the Google Cloud + Bigtable API, call :meth:`commit`. + + :type column_family_id: str + :param column_family_id: The column family that contains the column + or columns with cells being deleted. Must be + of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. + + :type column: bytes + :param column: The column within the column family that will have a + cell deleted. + + :type time_range: :class:`TimestampRange` + :param time_range: (Optional) The range of time within which cells + should be deleted. + """ + self._delete_cells(column_family_id, [column], time_range=time_range, + state=None) + + def delete_cells(self, column_family_id, columns, time_range=None): + """Deletes cells in this row. + + .. note:: + + This method adds a mutation to the accumulated mutations on this + row, but does not make an API request. To actually + send an API request (with the mutations) to the Google Cloud + Bigtable API, call :meth:`commit`. + + :type column_family_id: str + :param column_family_id: The column family that contains the column + or columns with cells being deleted. Must be + of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. + + :type columns: :class:`list` of :class:`str` / + :func:`unicode `, or :class:`object` + :param columns: The columns within the column family that will have + cells deleted. If :attr:`ALL_COLUMNS` is used then + the entire column family will be deleted from the row. + + :type time_range: :class:`TimestampRange` + :param time_range: (Optional) The range of time within which cells + should be deleted. 
+ """ + self._delete_cells(column_family_id, columns, time_range=time_range, + state=None) + + def commit(self): + """Makes a ``MutateRow`` API request. + + If no mutations have been created in the row, no request is made. + + Mutations are applied atomically and in order, meaning that earlier + mutations can be masked / negated by later ones. Cells already present + in the row are left unchanged unless explicitly changed by a mutation. + + After committing the accumulated mutations, resets the local + mutations to an empty list. + + :raises: :class:`ValueError ` if the number of + mutations exceeds the :data:`MAX_MUTATIONS`. + """ + mutations_list = self._get_mutations(None) + num_mutations = len(mutations_list) + if num_mutations == 0: + return + if num_mutations > MAX_MUTATIONS: + raise ValueError('%d total mutations exceed the maximum allowable ' + '%d.' % (num_mutations, MAX_MUTATIONS)) + request_pb = messages_v2_pb2.MutateRowRequest( + table_name=self._table.name, + row_key=self._row_key, + mutations=mutations_list, + ) + # We expect a `google.protobuf.empty_pb2.Empty` + client = self._table._instance._client + client._data_stub.MutateRow(request_pb, client.timeout_seconds) + self.clear() + + def clear(self): + """Removes all currently accumulated mutations on the current row.""" + del self._pb_mutations[:] + + +class ConditionalRow(_SetDeleteRow): + """Google Cloud Bigtable Row for sending mutations conditionally. + + Each mutation has an associated state: :data:`True` or :data:`False`. + When :meth:`commit`-ed, the mutations for the :data:`True` + state will be applied if the filter matches any cells in + the row, otherwise the :data:`False` state will be applied. 
+ + A :class:`ConditionalRow` accumulates mutations in the same way a + :class:`DirectRow` does: + + * :meth:`set_cell` + * :meth:`delete` + * :meth:`delete_cell` + * :meth:`delete_cells` + + with the only change the extra ``state`` parameter:: + + >>> row_cond = table.row(b'row-key2', filter_=row_filter) + >>> row_cond.set_cell(u'fam', b'col', b'cell-val', state=True) + >>> row_cond.delete_cell(u'fam', b'col', state=False) + + .. note:: + + As with :class:`DirectRow`, to actually send these mutations to the + Google Cloud Bigtable API, you must call :meth:`commit`. + + :type row_key: bytes + :param row_key: The key for the current row. + + :type table: :class:`Table ` + :param table: The table that owns the row. + + :type filter_: :class:`.RowFilter` + :param filter_: Filter to be used for conditional mutations. + """ + def __init__(self, row_key, table, filter_): + super(ConditionalRow, self).__init__(row_key, table) + self._filter = filter_ + self._true_pb_mutations = [] + self._false_pb_mutations = [] + + def _get_mutations(self, state): + """Gets the list of mutations for a given state. + + Over-ridden so that the state can be used in: + + * :meth:`set_cell` + * :meth:`delete` + * :meth:`delete_cell` + * :meth:`delete_cells` + + :type state: bool + :param state: The state that the mutation should be + applied in. + + :rtype: list + :returns: The list to add new mutations to (for the current state). + """ + if state: + return self._true_pb_mutations + else: + return self._false_pb_mutations + + def commit(self): + """Makes a ``CheckAndMutateRow`` API request. + + If no mutations have been created in the row, no request is made. + + The mutations will be applied conditionally, based on whether the + filter matches any cells in the :class:`ConditionalRow` or not. (Each + method which adds a mutation has a ``state`` parameter for this + purpose.) 
+ + Mutations are applied atomically and in order, meaning that earlier + mutations can be masked / negated by later ones. Cells already present + in the row are left unchanged unless explicitly changed by a mutation. + + After committing the accumulated mutations, resets the local + mutations. + + :rtype: bool + :returns: Flag indicating if the filter was matched (which also + indicates which set of mutations were applied by the server). + :raises: :class:`ValueError ` if the number of + mutations exceeds the :data:`MAX_MUTATIONS`. + """ + true_mutations = self._get_mutations(state=True) + false_mutations = self._get_mutations(state=False) + num_true_mutations = len(true_mutations) + num_false_mutations = len(false_mutations) + if num_true_mutations == 0 and num_false_mutations == 0: + return + if (num_true_mutations > MAX_MUTATIONS or + num_false_mutations > MAX_MUTATIONS): + raise ValueError( + 'Exceed the maximum allowable mutations (%d). Had %s true ' + 'mutations and %d false mutations.' % ( + MAX_MUTATIONS, num_true_mutations, num_false_mutations)) + + request_pb = messages_v2_pb2.CheckAndMutateRowRequest( + table_name=self._table.name, + row_key=self._row_key, + predicate_filter=self._filter.to_pb(), + true_mutations=true_mutations, + false_mutations=false_mutations, + ) + # We expect a `.messages_v2_pb2.CheckAndMutateRowResponse` + client = self._table._instance._client + resp = client._data_stub.CheckAndMutateRow( + request_pb, client.timeout_seconds) + self.clear() + return resp.predicate_matched + + # pylint: disable=arguments-differ + def set_cell(self, column_family_id, column, value, timestamp=None, + state=True): + """Sets a value in this row. + + The cell is determined by the ``row_key`` of this + :class:`ConditionalRow` and the ``column``. The ``column`` must be in + an existing :class:`.ColumnFamily` (as determined by + ``column_family_id``). + + .. 
note:: + + This method adds a mutation to the accumulated mutations on this + row, but does not make an API request. To actually + send an API request (with the mutations) to the Google Cloud + Bigtable API, call :meth:`commit`. + + :type column_family_id: str + :param column_family_id: The column family that contains the column. + Must be of the form + ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. + + :type column: bytes + :param column: The column within the column family where the cell + is located. + + :type value: bytes or :class:`int` + :param value: The value to set in the cell. If an integer is used, + will be interpreted as a 64-bit big-endian signed + integer (8 bytes). + + :type timestamp: :class:`datetime.datetime` + :param timestamp: (Optional) The timestamp of the operation. + + :type state: bool + :param state: (Optional) The state that the mutation should be + applied in. Defaults to :data:`True`. + """ + self._set_cell(column_family_id, column, value, timestamp=timestamp, + state=state) + + def delete(self, state=True): + """Deletes this row from the table. + + .. note:: + + This method adds a mutation to the accumulated mutations on this + row, but does not make an API request. To actually + send an API request (with the mutations) to the Google Cloud + Bigtable API, call :meth:`commit`. + + :type state: bool + :param state: (Optional) The state that the mutation should be + applied in. Defaults to :data:`True`. + """ + self._delete(state=state) + + def delete_cell(self, column_family_id, column, time_range=None, + state=True): + """Deletes cell in this row. + + .. note:: + + This method adds a mutation to the accumulated mutations on this + row, but does not make an API request. To actually + send an API request (with the mutations) to the Google Cloud + Bigtable API, call :meth:`commit`. + + :type column_family_id: str + :param column_family_id: The column family that contains the column + or columns with cells being deleted. 
Must be + of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. + + :type column: bytes + :param column: The column within the column family that will have a + cell deleted. + + :type time_range: :class:`TimestampRange` + :param time_range: (Optional) The range of time within which cells + should be deleted. + + :type state: bool + :param state: (Optional) The state that the mutation should be + applied in. Defaults to :data:`True`. + """ + self._delete_cells(column_family_id, [column], time_range=time_range, + state=state) + + def delete_cells(self, column_family_id, columns, time_range=None, + state=True): + """Deletes cells in this row. + + .. note:: + + This method adds a mutation to the accumulated mutations on this + row, but does not make an API request. To actually + send an API request (with the mutations) to the Google Cloud + Bigtable API, call :meth:`commit`. + + :type column_family_id: str + :param column_family_id: The column family that contains the column + or columns with cells being deleted. Must be + of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. + + :type columns: :class:`list` of :class:`str` / + :func:`unicode `, or :class:`object` + :param columns: The columns within the column family that will have + cells deleted. If :attr:`ALL_COLUMNS` is used then the + entire column family will be deleted from the row. + + :type time_range: :class:`TimestampRange` + :param time_range: (Optional) The range of time within which cells + should be deleted. + + :type state: bool + :param state: (Optional) The state that the mutation should be + applied in. Defaults to :data:`True`. + """ + self._delete_cells(column_family_id, columns, time_range=time_range, + state=state) + # pylint: enable=arguments-differ + + def clear(self): + """Removes all currently accumulated mutations on the current row.""" + del self._true_pb_mutations[:] + del self._false_pb_mutations[:] + + +class AppendRow(Row): + """Google Cloud Bigtable Row for sending append mutations. 
+ + These mutations are intended to augment the value of an existing cell + and uses the methods: + + * :meth:`append_cell_value` + * :meth:`increment_cell_value` + + The first works by appending bytes and the second by incrementing an + integer (stored in the cell as 8 bytes). In either case, if the + cell is empty, assumes the default empty value (empty string for + bytes or and 0 for integer). + + :type row_key: bytes + :param row_key: The key for the current row. + + :type table: :class:`Table ` + :param table: The table that owns the row. + """ + + def __init__(self, row_key, table): + super(AppendRow, self).__init__(row_key, table) + self._rule_pb_list = [] + + def clear(self): + """Removes all currently accumulated modifications on current row.""" + del self._rule_pb_list[:] + + def append_cell_value(self, column_family_id, column, value): + """Appends a value to an existing cell. + + .. note:: + + This method adds a read-modify rule protobuf to the accumulated + read-modify rules on this row, but does not make an API + request. To actually send an API request (with the rules) to the + Google Cloud Bigtable API, call :meth:`commit`. + + :type column_family_id: str + :param column_family_id: The column family that contains the column. + Must be of the form + ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. + + :type column: bytes + :param column: The column within the column family where the cell + is located. + + :type value: bytes + :param value: The value to append to the existing value in the cell. If + the targeted cell is unset, it will be treated as + containing the empty string. + """ + column = _to_bytes(column) + value = _to_bytes(value) + rule_pb = data_v2_pb2.ReadModifyWriteRule( + family_name=column_family_id, + column_qualifier=column, + append_value=value) + self._rule_pb_list.append(rule_pb) + + def increment_cell_value(self, column_family_id, column, int_value): + """Increments a value in an existing cell. 
+ + Assumes the value in the cell is stored as a 64 bit integer + serialized to bytes. + + .. note:: + + This method adds a read-modify rule protobuf to the accumulated + read-modify rules on this row, but does not make an API + request. To actually send an API request (with the rules) to the + Google Cloud Bigtable API, call :meth:`commit`. + + :type column_family_id: str + :param column_family_id: The column family that contains the column. + Must be of the form + ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. + + :type column: bytes + :param column: The column within the column family where the cell + is located. + + :type int_value: int + :param int_value: The value to increment the existing value in the cell + by. If the targeted cell is unset, it will be treated + as containing a zero. Otherwise, the targeted cell + must contain an 8-byte value (interpreted as a 64-bit + big-endian signed integer), or the entire request + will fail. + """ + column = _to_bytes(column) + rule_pb = data_v2_pb2.ReadModifyWriteRule( + family_name=column_family_id, + column_qualifier=column, + increment_amount=int_value) + self._rule_pb_list.append(rule_pb) + + def commit(self): + """Makes a ``ReadModifyWriteRow`` API request. + + This commits modifications made by :meth:`append_cell_value` and + :meth:`increment_cell_value`. If no modifications were made, makes + no API request and just returns ``{}``. + + Modifies a row atomically, reading the latest existing + timestamp / value from the specified columns and writing a new value by + appending / incrementing. The new cell created uses either the current + server time or the highest timestamp of a cell in that column (if it + exceeds the server time). + + After committing the accumulated mutations, resets the local mutations. + + .. 
code:: python + + >>> append_row.commit() + { + u'col-fam-id': { + b'col-name1': [ + (b'cell-val', datetime.datetime(...)), + (b'cell-val-newer', datetime.datetime(...)), + ], + b'col-name2': [ + (b'altcol-cell-val', datetime.datetime(...)), + ], + }, + u'col-fam-id2': { + b'col-name3-but-other-fam': [ + (b'foo', datetime.datetime(...)), + ], + }, + } + + :rtype: dict + :returns: The new contents of all modified cells. Returned as a + dictionary of column families, each of which holds a + dictionary of columns. Each column contains a list of cells + modified. Each cell is represented with a two-tuple with the + value (in bytes) and the timestamp for the cell. + :raises: :class:`ValueError ` if the number of + mutations exceeds the :data:`MAX_MUTATIONS`. + """ + num_mutations = len(self._rule_pb_list) + if num_mutations == 0: + return {} + if num_mutations > MAX_MUTATIONS: + raise ValueError('%d total append mutations exceed the maximum ' + 'allowable %d.' % (num_mutations, MAX_MUTATIONS)) + request_pb = messages_v2_pb2.ReadModifyWriteRowRequest( + table_name=self._table.name, + row_key=self._row_key, + rules=self._rule_pb_list, + ) + # We expect a `.data_v2_pb2.Row` + client = self._table._instance._client + row_response = client._data_stub.ReadModifyWriteRow( + request_pb, client.timeout_seconds) + + # Reset modifications after commit-ing request. + self.clear() + + # NOTE: We expect row_response.key == self._row_key but don't check. + return _parse_rmw_row_response(row_response) + + +def _parse_rmw_row_response(row_response): + """Parses the response to a ``ReadModifyWriteRow`` request. + + :type row_response: :class:`.data_v2_pb2.Row` + :param row_response: The response row (with only modified cells) from a + ``ReadModifyWriteRow`` request. + + :rtype: dict + :returns: The new contents of all modified cells. Returned as a + dictionary of column families, each of which holds a + dictionary of columns. Each column contains a list of cells + modified. 
Each cell is represented with a two-tuple with the + value (in bytes) and the timestamp for the cell. For example: + + .. code:: python + + { + u'col-fam-id': { + b'col-name1': [ + (b'cell-val', datetime.datetime(...)), + (b'cell-val-newer', datetime.datetime(...)), + ], + b'col-name2': [ + (b'altcol-cell-val', datetime.datetime(...)), + ], + }, + u'col-fam-id2': { + b'col-name3-but-other-fam': [ + (b'foo', datetime.datetime(...)), + ], + }, + } + """ + result = {} + for column_family in row_response.row.families: + column_family_id, curr_family = _parse_family_pb(column_family) + result[column_family_id] = curr_family + return result + + +def _parse_family_pb(family_pb): + """Parses a Family protobuf into a dictionary. + + :type family_pb: :class:`._generated_v2.data_pb2.Family` + :param family_pb: A protobuf + + :rtype: tuple + :returns: A string and dictionary. The string is the name of the + column family and the dictionary has column names (within the + family) as keys and cell lists as values. Each cell is + represented with a two-tuple with the value (in bytes) and the + timestamp for the cell. For example: + + .. code:: python + + { + b'col-name1': [ + (b'cell-val', datetime.datetime(...)), + (b'cell-val-newer', datetime.datetime(...)), + ], + b'col-name2': [ + (b'altcol-cell-val', datetime.datetime(...)), + ], + } + """ + result = {} + for column in family_pb.columns: + result[column.qualifier] = cells = [] + for cell in column.cells: + val_pair = ( + cell.value, + _datetime_from_microseconds(cell.timestamp_micros), + ) + cells.append(val_pair) + + return family_pb.name, result diff --git a/env/Lib/site-packages/gcloud/bigtable/row_data.py b/env/Lib/site-packages/gcloud/bigtable/row_data.py new file mode 100644 index 0000000..3f44900 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/row_data.py @@ -0,0 +1,442 @@ +# Copyright 2016 Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Container for Google Cloud Bigtable Cells and Streaming Row Contents.""" + + +import copy +import six + +from gcloud._helpers import _datetime_from_microseconds +from gcloud._helpers import _to_bytes + + +class Cell(object): + """Representation of a Google Cloud Bigtable Cell. + + :type value: bytes + :param value: The value stored in the cell. + + :type timestamp: :class:`datetime.datetime` + :param timestamp: The timestamp when the cell was stored. + + :type labels: list + :param labels: (Optional) List of strings. Labels applied to the cell. + """ + + def __init__(self, value, timestamp, labels=()): + self.value = value + self.timestamp = timestamp + self.labels = list(labels) + + @classmethod + def from_pb(cls, cell_pb): + """Create a new cell from a Cell protobuf. + + :type cell_pb: :class:`._generated_v2.data_pb2.Cell` + :param cell_pb: The protobuf to convert. + + :rtype: :class:`Cell` + :returns: The cell corresponding to the protobuf. 
+ """ + timestamp = _datetime_from_microseconds(cell_pb.timestamp_micros) + if cell_pb.labels: + return cls(cell_pb.value, timestamp, labels=cell_pb.labels) + else: + return cls(cell_pb.value, timestamp) + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return (other.value == self.value and + other.timestamp == self.timestamp and + other.labels == self.labels) + + def __ne__(self, other): + return not self.__eq__(other) + + +class PartialCellData(object): + """Representation of partial cell in a Google Cloud Bigtable Table. + + These are expected to be updated directly from a + :class:`._generated.bigtable_service_messages_pb2.ReadRowsResponse` + + :type row_key: bytes + :param row_key: The key for the row holding the (partial) cell. + + :type family_name: str + :param family_name: The family name of the (partial) cell. + + :type qualifier: bytes + :param qualifier: The column qualifier of the (partial) cell. + + :type timestamp_micros: int + :param timestamp_micros: The timestamp (in microsecods) of the + (partial) cell. + + :type labels: list of str + :param labels: labels assigned to the (partial) cell + + :type value: bytes + :param value: The (accumulated) value of the (partial) cell. + """ + def __init__(self, row_key, family_name, qualifier, timestamp_micros, + labels=(), value=b''): + self.row_key = row_key + self.family_name = family_name + self.qualifier = qualifier + self.timestamp_micros = timestamp_micros + self.labels = labels + self.value = value + + def append_value(self, value): + """Append bytes from a new chunk to value. + + :type value: bytes + :param value: bytes to append + """ + self.value += value + + +class PartialRowData(object): + """Representation of partial row in a Google Cloud Bigtable Table. 
+ + These are expected to be updated directly from a + :class:`._generated.bigtable_service_messages_pb2.ReadRowsResponse` + + :type row_key: bytes + :param row_key: The key for the row holding the (partial) data. + """ + + def __init__(self, row_key): + self._row_key = row_key + self._cells = {} + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return (other._row_key == self._row_key and + other._cells == self._cells) + + def __ne__(self, other): + return not self.__eq__(other) + + def to_dict(self): + """Convert the cells to a dictionary. + + This is intended to be used with HappyBase, so the column family and + column qualiers are combined (with ``:``). + + :rtype: dict + :returns: Dictionary containing all the data in the cells of this row. + """ + result = {} + for column_family_id, columns in six.iteritems(self._cells): + for column_qual, cells in six.iteritems(columns): + key = (_to_bytes(column_family_id) + b':' + + _to_bytes(column_qual)) + result[key] = cells + return result + + @property + def cells(self): + """Property returning all the cells accumulated on this partial row. + + :rtype: dict + :returns: Dictionary of the :class:`Cell` objects accumulated. This + dictionary has two-levels of keys (first for column families + and second for column names/qualifiers within a family). For + a given column, a list of :class:`Cell` objects is stored. + """ + return copy.deepcopy(self._cells) + + @property + def row_key(self): + """Getter for the current (partial) row's key. + + :rtype: bytes + :returns: The current (partial) row's key. + """ + return self._row_key + + +class InvalidReadRowsResponse(RuntimeError): + """Exception raised to to invalid response data from back-end.""" + + +class InvalidChunk(RuntimeError): + """Exception raised to to invalid chunk data from back-end.""" + + +class PartialRowsData(object): + """Convenience wrapper for consuming a ``ReadRows`` streaming response. 
+ + :type response_iterator: + :class:`grpc.framework.alpha._reexport._CancellableIterator` + :param response_iterator: A streaming iterator returned from a + ``ReadRows`` request. + """ + START = "Start" # No responses yet processed. + NEW_ROW = "New row" # No cells yet complete for row + ROW_IN_PROGRESS = "Row in progress" # Some cells complete for row + CELL_IN_PROGRESS = "Cell in progress" # Incomplete cell for row + + def __init__(self, response_iterator): + self._response_iterator = response_iterator + # Fully-processed rows, keyed by `row_key` + self._rows = {} + # Counter for responses pulled from iterator + self._counter = 0 + # Maybe cached from previous response + self._last_scanned_row_key = None + # In-progress row, unset until first response, after commit/reset + self._row = None + # Last complete row, unset until first commit + self._previous_row = None + # In-progress cell, unset until first response, after completion + self._cell = None + # Last complete cell, unset until first completion, after new row + self._previous_cell = None + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return other._response_iterator == self._response_iterator + + def __ne__(self, other): + return not self.__eq__(other) + + @property + def state(self): + """State machine state. + + :rtype: str + :returns: name of state corresponding to currrent row / chunk + processing. + """ + if self._last_scanned_row_key is None: + return self.START + if self._row is None: + assert self._cell is None + assert self._previous_cell is None + return self.NEW_ROW + if self._cell is not None: + return self.CELL_IN_PROGRESS + if self._previous_cell is not None: + return self.ROW_IN_PROGRESS + return self.NEW_ROW # row added, no chunk yet processed + + @property + def rows(self): + """Property returning all rows accumulated from the stream. + + :rtype: dict + :returns: row_key -> :class:`PartialRowData`. 
+ """ + # NOTE: To avoid duplicating large objects, this is just the + # mutable private data. + return self._rows + + def cancel(self): + """Cancels the iterator, closing the stream.""" + self._response_iterator.cancel() + + def consume_next(self): + """Consume the next ``ReadRowsResponse`` from the stream. + + Parse the response and its chunks into a new/existing row in + :attr:`_rows` + """ + response = six.next(self._response_iterator) + self._counter += 1 + + if self._last_scanned_row_key is None: # first response + if response.last_scanned_row_key: + raise InvalidReadRowsResponse() + + self._last_scanned_row_key = response.last_scanned_row_key + + row = self._row + cell = self._cell + + for chunk in response.chunks: + + self._validate_chunk(chunk) + + if chunk.reset_row: + row = self._row = None + cell = self._cell = self._previous_cell = None + continue + + if row is None: + row = self._row = PartialRowData(chunk.row_key) + + if cell is None: + cell = self._cell = PartialCellData( + chunk.row_key, + chunk.family_name.value, + chunk.qualifier.value, + chunk.timestamp_micros, + chunk.labels, + chunk.value) + self._copy_from_previous(cell) + else: + cell.append_value(chunk.value) + + if chunk.commit_row: + self._save_current_row() + row = cell = None + continue + + if chunk.value_size == 0: + self._save_current_cell() + cell = None + + def consume_all(self, max_loops=None): + """Consume the streamed responses until there are no more. + + This simply calls :meth:`consume_next` until there are no + more to consume. + + :type max_loops: int + :param max_loops: (Optional) Maximum number of times to try to consume + an additional ``ReadRowsResponse``. You can use this + to avoid long wait times. 
+ """ + curr_loop = 0 + if max_loops is None: + max_loops = float('inf') + while curr_loop < max_loops: + curr_loop += 1 + try: + self.consume_next() + except StopIteration: + break + + @staticmethod + def _validate_chunk_status(chunk): + """Helper for :meth:`_validate_chunk_row_in_progress`, etc.""" + # No reseet with other keys + if chunk.reset_row: + _raise_if(chunk.row_key) + _raise_if(chunk.HasField('family_name')) + _raise_if(chunk.HasField('qualifier')) + _raise_if(chunk.timestamp_micros) + _raise_if(chunk.labels) + _raise_if(chunk.value_size) + _raise_if(chunk.value) + # No commit with value size + _raise_if(chunk.commit_row and chunk.value_size > 0) + # No negative value_size (inferred as a general constraint). + _raise_if(chunk.value_size < 0) + + def _validate_chunk_new_row(self, chunk): + """Helper for :meth:`_validate_chunk`.""" + assert self.state == self.NEW_ROW + _raise_if(chunk.reset_row) + _raise_if(not chunk.row_key) + _raise_if(not chunk.family_name) + _raise_if(not chunk.qualifier) + # This constraint is not enforced in the Go example. + _raise_if(chunk.value_size > 0 and chunk.commit_row is not False) + # This constraint is from the Go example, not the spec. 
+ _raise_if(self._previous_row is not None and + chunk.row_key <= self._previous_row.row_key) + + def _same_as_previous(self, chunk): + """Helper for :meth:`_validate_chunk_row_in_progress`""" + previous = self._previous_cell + return (chunk.row_key == previous.row_key and + chunk.family_name == previous.family_name and + chunk.qualifier == previous.qualifier and + chunk.labels == previous.labels) + + def _validate_chunk_row_in_progress(self, chunk): + """Helper for :meth:`_validate_chunk`""" + assert self.state == self.ROW_IN_PROGRESS + self._validate_chunk_status(chunk) + if not chunk.HasField('commit_row') and not chunk.reset_row: + _raise_if(not chunk.timestamp_micros or not chunk.value) + _raise_if(chunk.row_key and + chunk.row_key != self._row.row_key) + _raise_if(chunk.HasField('family_name') and + not chunk.HasField('qualifier')) + previous = self._previous_cell + _raise_if(self._same_as_previous(chunk) and + chunk.timestamp_micros <= previous.timestamp_micros) + + def _validate_chunk_cell_in_progress(self, chunk): + """Helper for :meth:`_validate_chunk`""" + assert self.state == self.CELL_IN_PROGRESS + self._validate_chunk_status(chunk) + self._copy_from_current(chunk) + + def _validate_chunk(self, chunk): + """Helper for :meth:`consume_next`.""" + if self.state == self.NEW_ROW: + self._validate_chunk_new_row(chunk) + if self.state == self.ROW_IN_PROGRESS: + self._validate_chunk_row_in_progress(chunk) + if self.state == self.CELL_IN_PROGRESS: + self._validate_chunk_cell_in_progress(chunk) + + def _save_current_cell(self): + """Helper for :meth:`consume_next`.""" + row, cell = self._row, self._cell + family = row._cells.setdefault(cell.family_name, {}) + qualified = family.setdefault(cell.qualifier, []) + complete = Cell.from_pb(self._cell) + qualified.append(complete) + self._cell, self._previous_cell = None, cell + + def _copy_from_current(self, chunk): + """Helper for :meth:`consume_next`.""" + current = self._cell + if current is not None: + if not 
chunk.row_key: + chunk.row_key = current.row_key + if not chunk.HasField('family_name'): + chunk.family_name.value = current.family_name + if not chunk.HasField('qualifier'): + chunk.qualifier.value = current.qualifier + if not chunk.timestamp_micros: + chunk.timestamp_micros = current.timestamp_micros + if not chunk.labels: + chunk.labels.extend(current.labels) + + def _copy_from_previous(self, cell): + """Helper for :meth:`consume_next`.""" + previous = self._previous_cell + if previous is not None: + if not cell.row_key: + cell.row_key = previous.row_key + if not cell.family_name: + cell.family_name = previous.family_name + if not cell.qualifier: + cell.qualifier = previous.qualifier + + def _save_current_row(self): + """Helper for :meth:`consume_next`.""" + if self._cell: + self._save_current_cell() + self._rows[self._row.row_key] = self._row + self._row, self._previous_row = None, self._row + self._previous_cell = None + + +def _raise_if(predicate, *args): + """Helper for validation methods.""" + if predicate: + raise InvalidChunk(*args) diff --git a/env/Lib/site-packages/gcloud/bigtable/row_filters.py b/env/Lib/site-packages/gcloud/bigtable/row_filters.py new file mode 100644 index 0000000..f76615b --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/row_filters.py @@ -0,0 +1,768 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Filters for Google Cloud Bigtable Row classes.""" + + +from gcloud._helpers import _microseconds_from_datetime +from gcloud._helpers import _to_bytes +from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + + +class RowFilter(object): + """Basic filter to apply to cells in a row. + + These values can be combined via :class:`RowFilterChain`, + :class:`RowFilterUnion` and :class:`ConditionalRowFilter`. + + .. note:: + + This class is a do-nothing base class for all row filters. + """ + + def __ne__(self, other): + return not self.__eq__(other) + + +class _BoolFilter(RowFilter): + """Row filter that uses a boolean flag. + + :type flag: bool + :param flag: An indicator if a setting is turned on or off. + """ + + def __init__(self, flag): + self.flag = flag + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return other.flag == self.flag + + +class SinkFilter(_BoolFilter): + """Advanced row filter to skip parent filters. + + :type flag: bool + :param flag: ADVANCED USE ONLY. Hook for introspection into the row filter. + Outputs all cells directly to the output of the read rather + than to any parent filter. Cannot be used within the + ``predicate_filter``, ``true_filter``, or ``false_filter`` + of a :class:`ConditionalRowFilter`. + """ + + def to_pb(self): + """Converts the row filter to a protobuf. + + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. + """ + return data_v2_pb2.RowFilter(sink=self.flag) + + +class PassAllFilter(_BoolFilter): + """Row filter equivalent to not filtering at all. + + :type flag: bool + :param flag: Matches all cells, regardless of input. Functionally + equivalent to leaving ``filter`` unset, but included for + completeness. + """ + + def to_pb(self): + """Converts the row filter to a protobuf. + + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. 
+ """ + return data_v2_pb2.RowFilter(pass_all_filter=self.flag) + + +class BlockAllFilter(_BoolFilter): + """Row filter that doesn't match any cells. + + :type flag: bool + :param flag: Does not match any cells, regardless of input. Useful for + temporarily disabling just part of a filter. + """ + + def to_pb(self): + """Converts the row filter to a protobuf. + + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. + """ + return data_v2_pb2.RowFilter(block_all_filter=self.flag) + + +class _RegexFilter(RowFilter): + """Row filter that uses a regular expression. + + The ``regex`` must be valid RE2 patterns. See Google's + `RE2 reference`_ for the accepted syntax. + + .. _RE2 reference: https://github.com/google/re2/wiki/Syntax + + :type regex: bytes or str + :param regex: A regular expression (RE2) for some row filter. + """ + + def __init__(self, regex): + self.regex = _to_bytes(regex) + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return other.regex == self.regex + + +class RowKeyRegexFilter(_RegexFilter): + """Row filter for a row key regular expression. + + The ``regex`` must be valid RE2 patterns. See Google's + `RE2 reference`_ for the accepted syntax. + + .. _RE2 reference: https://github.com/google/re2/wiki/Syntax + + .. note:: + + Special care need be used with the expression used. Since + each of these properties can contain arbitrary bytes, the ``\\C`` + escape sequence must be used if a true wildcard is desired. The ``.`` + character will not match the new line character ``\\n``, which may be + present in a binary value. + + :type regex: bytes + :param regex: A regular expression (RE2) to match cells from rows with row + keys that satisfy this regex. For a + ``CheckAndMutateRowRequest``, this filter is unnecessary + since the row key is already specified. + """ + + def to_pb(self): + """Converts the row filter to a protobuf. 
+ + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. + """ + return data_v2_pb2.RowFilter(row_key_regex_filter=self.regex) + + +class RowSampleFilter(RowFilter): + """Matches all cells from a row with probability p. + + :type sample: float + :param sample: The probability of matching a cell (must be in the + interval ``[0, 1]``). + """ + + def __init__(self, sample): + self.sample = sample + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return other.sample == self.sample + + def to_pb(self): + """Converts the row filter to a protobuf. + + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. + """ + return data_v2_pb2.RowFilter(row_sample_filter=self.sample) + + +class FamilyNameRegexFilter(_RegexFilter): + """Row filter for a family name regular expression. + + The ``regex`` must be valid RE2 patterns. See Google's + `RE2 reference`_ for the accepted syntax. + + .. _RE2 reference: https://github.com/google/re2/wiki/Syntax + + :type regex: str + :param regex: A regular expression (RE2) to match cells from columns in a + given column family. For technical reasons, the regex must + not contain the ``':'`` character, even if it is not being + used as a literal. + """ + + def to_pb(self): + """Converts the row filter to a protobuf. + + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. + """ + return data_v2_pb2.RowFilter(family_name_regex_filter=self.regex) + + +class ColumnQualifierRegexFilter(_RegexFilter): + """Row filter for a column qualifier regular expression. + + The ``regex`` must be valid RE2 patterns. See Google's + `RE2 reference`_ for the accepted syntax. + + .. _RE2 reference: https://github.com/google/re2/wiki/Syntax + + .. note:: + + Special care need be used with the expression used. Since + each of these properties can contain arbitrary bytes, the ``\\C`` + escape sequence must be used if a true wildcard is desired. 
The ``.`` + character will not match the new line character ``\\n``, which may be + present in a binary value. + + :type regex: bytes + :param regex: A regular expression (RE2) to match cells from column that + match this regex (irrespective of column family). + """ + + def to_pb(self): + """Converts the row filter to a protobuf. + + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. + """ + return data_v2_pb2.RowFilter(column_qualifier_regex_filter=self.regex) + + +class TimestampRange(object): + """Range of time with inclusive lower and exclusive upper bounds. + + :type start: :class:`datetime.datetime` + :param start: (Optional) The (inclusive) lower bound of the timestamp + range. If omitted, defaults to Unix epoch. + + :type end: :class:`datetime.datetime` + :param end: (Optional) The (exclusive) upper bound of the timestamp + range. If omitted, no upper bound is used. + """ + + def __init__(self, start=None, end=None): + self.start = start + self.end = end + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return (other.start == self.start and + other.end == self.end) + + def __ne__(self, other): + return not self.__eq__(other) + + def to_pb(self): + """Converts the :class:`TimestampRange` to a protobuf. + + :rtype: :class:`.data_v2_pb2.TimestampRange` + :returns: The converted current object. + """ + timestamp_range_kwargs = {} + if self.start is not None: + timestamp_range_kwargs['start_timestamp_micros'] = ( + _microseconds_from_datetime(self.start)) + if self.end is not None: + timestamp_range_kwargs['end_timestamp_micros'] = ( + _microseconds_from_datetime(self.end)) + return data_v2_pb2.TimestampRange(**timestamp_range_kwargs) + + +class TimestampRangeFilter(RowFilter): + """Row filter that limits cells to a range of time. + + :type range_: :class:`TimestampRange` + :param range_: Range of time that cells should match against. 
+ """ + + def __init__(self, range_): + self.range_ = range_ + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return other.range_ == self.range_ + + def to_pb(self): + """Converts the row filter to a protobuf. + + First converts the ``range_`` on the current object to a protobuf and + then uses it in the ``timestamp_range_filter`` field. + + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. + """ + return data_v2_pb2.RowFilter( + timestamp_range_filter=self.range_.to_pb()) + + +class ColumnRangeFilter(RowFilter): + """A row filter to restrict to a range of columns. + + Both the start and end column can be included or excluded in the range. + By default, we include them both, but this can be changed with optional + flags. + + :type column_family_id: str + :param column_family_id: The column family that contains the columns. Must + be of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. + + :type start_column: bytes + :param start_column: The start of the range of columns. If no value is + used, the backend applies no upper bound to the + values. + + :type end_column: bytes + :param end_column: The end of the range of columns. If no value is used, + the backend applies no upper bound to the values. + + :type inclusive_start: bool + :param inclusive_start: Boolean indicating if the start column should be + included in the range (or excluded). Defaults + to :data:`True` if ``start_column`` is passed and + no ``inclusive_start`` was given. + + :type inclusive_end: bool + :param inclusive_end: Boolean indicating if the end column should be + included in the range (or excluded). Defaults + to :data:`True` if ``end_column`` is passed and + no ``inclusive_end`` was given. 
+ + :raises: :class:`ValueError ` if ``inclusive_start`` + is set but no ``start_column`` is given or if ``inclusive_end`` + is set but no ``end_column`` is given + """ + + def __init__(self, column_family_id, start_column=None, end_column=None, + inclusive_start=None, inclusive_end=None): + self.column_family_id = column_family_id + + if inclusive_start is None: + inclusive_start = True + elif start_column is None: + raise ValueError('Inclusive start was specified but no ' + 'start column was given.') + self.start_column = start_column + self.inclusive_start = inclusive_start + + if inclusive_end is None: + inclusive_end = True + elif end_column is None: + raise ValueError('Inclusive end was specified but no ' + 'end column was given.') + self.end_column = end_column + self.inclusive_end = inclusive_end + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return (other.column_family_id == self.column_family_id and + other.start_column == self.start_column and + other.end_column == self.end_column and + other.inclusive_start == self.inclusive_start and + other.inclusive_end == self.inclusive_end) + + def to_pb(self): + """Converts the row filter to a protobuf. + + First converts to a :class:`.data_v2_pb2.ColumnRange` and then uses it + in the ``column_range_filter`` field. + + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. 
+ """ + column_range_kwargs = {'family_name': self.column_family_id} + if self.start_column is not None: + if self.inclusive_start: + key = 'start_qualifier_closed' + else: + key = 'start_qualifier_open' + column_range_kwargs[key] = _to_bytes(self.start_column) + if self.end_column is not None: + if self.inclusive_end: + key = 'end_qualifier_closed' + else: + key = 'end_qualifier_open' + column_range_kwargs[key] = _to_bytes(self.end_column) + + column_range = data_v2_pb2.ColumnRange(**column_range_kwargs) + return data_v2_pb2.RowFilter(column_range_filter=column_range) + + +class ValueRegexFilter(_RegexFilter): + """Row filter for a value regular expression. + + The ``regex`` must be valid RE2 patterns. See Google's + `RE2 reference`_ for the accepted syntax. + + .. _RE2 reference: https://github.com/google/re2/wiki/Syntax + + .. note:: + + Special care need be used with the expression used. Since + each of these properties can contain arbitrary bytes, the ``\\C`` + escape sequence must be used if a true wildcard is desired. The ``.`` + character will not match the new line character ``\\n``, which may be + present in a binary value. + + :type regex: bytes + :param regex: A regular expression (RE2) to match cells with values that + match this regex. + """ + + def to_pb(self): + """Converts the row filter to a protobuf. + + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. + """ + return data_v2_pb2.RowFilter(value_regex_filter=self.regex) + + +class ValueRangeFilter(RowFilter): + """A range of values to restrict to in a row filter. + + Will only match cells that have values in this range. + + Both the start and end value can be included or excluded in the range. + By default, we include them both, but this can be changed with optional + flags. + + :type start_value: bytes + :param start_value: The start of the range of values. If no value is used, + the backend applies no lower bound to the values. 
+ + :type end_value: bytes + :param end_value: The end of the range of values. If no value is used, + the backend applies no upper bound to the values. + + :type inclusive_start: bool + :param inclusive_start: Boolean indicating if the start value should be + included in the range (or excluded). Defaults + to :data:`True` if ``start_value`` is passed and + no ``inclusive_start`` was given. + + :type inclusive_end: bool + :param inclusive_end: Boolean indicating if the end value should be + included in the range (or excluded). Defaults + to :data:`True` if ``end_value`` is passed and + no ``inclusive_end`` was given. + + :raises: :class:`ValueError ` if ``inclusive_start`` + is set but no ``start_value`` is given or if ``inclusive_end`` + is set but no ``end_value`` is given + """ + + def __init__(self, start_value=None, end_value=None, + inclusive_start=None, inclusive_end=None): + if inclusive_start is None: + inclusive_start = True + elif start_value is None: + raise ValueError('Inclusive start was specified but no ' + 'start value was given.') + self.start_value = start_value + self.inclusive_start = inclusive_start + + if inclusive_end is None: + inclusive_end = True + elif end_value is None: + raise ValueError('Inclusive end was specified but no ' + 'end value was given.') + self.end_value = end_value + self.inclusive_end = inclusive_end + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return (other.start_value == self.start_value and + other.end_value == self.end_value and + other.inclusive_start == self.inclusive_start and + other.inclusive_end == self.inclusive_end) + + def to_pb(self): + """Converts the row filter to a protobuf. + + First converts to a :class:`.data_v2_pb2.ValueRange` and then uses + it to create a row filter protobuf. + + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. 
+ """ + value_range_kwargs = {} + if self.start_value is not None: + if self.inclusive_start: + key = 'start_value_closed' + else: + key = 'start_value_open' + value_range_kwargs[key] = _to_bytes(self.start_value) + if self.end_value is not None: + if self.inclusive_end: + key = 'end_value_closed' + else: + key = 'end_value_open' + value_range_kwargs[key] = _to_bytes(self.end_value) + + value_range = data_v2_pb2.ValueRange(**value_range_kwargs) + return data_v2_pb2.RowFilter(value_range_filter=value_range) + + +class _CellCountFilter(RowFilter): + """Row filter that uses an integer count of cells. + + The cell count is used as an offset or a limit for the number + of results returned. + + :type num_cells: int + :param num_cells: An integer count / offset / limit. + """ + + def __init__(self, num_cells): + self.num_cells = num_cells + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return other.num_cells == self.num_cells + + +class CellsRowOffsetFilter(_CellCountFilter): + """Row filter to skip cells in a row. + + :type num_cells: int + :param num_cells: Skips the first N cells of the row. + """ + + def to_pb(self): + """Converts the row filter to a protobuf. + + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. + """ + return data_v2_pb2.RowFilter( + cells_per_row_offset_filter=self.num_cells) + + +class CellsRowLimitFilter(_CellCountFilter): + """Row filter to limit cells in a row. + + :type num_cells: int + :param num_cells: Matches only the first N cells of the row. + """ + + def to_pb(self): + """Converts the row filter to a protobuf. + + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. + """ + return data_v2_pb2.RowFilter(cells_per_row_limit_filter=self.num_cells) + + +class CellsColumnLimitFilter(_CellCountFilter): + """Row filter to limit cells in a column. 
+ + :type num_cells: int + :param num_cells: Matches only the most recent N cells within each column. + This filters a (family name, column) pair, based on + timestamps of each cell. + """ + + def to_pb(self): + """Converts the row filter to a protobuf. + + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. + """ + return data_v2_pb2.RowFilter( + cells_per_column_limit_filter=self.num_cells) + + +class StripValueTransformerFilter(_BoolFilter): + """Row filter that transforms cells into empty string (0 bytes). + + :type flag: bool + :param flag: If :data:`True`, replaces each cell's value with the empty + string. As the name indicates, this is more useful as a + transformer than a generic query / filter. + """ + + def to_pb(self): + """Converts the row filter to a protobuf. + + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. + """ + return data_v2_pb2.RowFilter(strip_value_transformer=self.flag) + + +class ApplyLabelFilter(RowFilter): + """Filter to apply labels to cells. + + Intended to be used as an intermediate filter on a pre-existing filtered + result set. This way if two sets are combined, the label can tell where + the cell(s) originated.This allows the client to determine which results + were produced from which part of the filter. + + .. note:: + + Due to a technical limitation of the backend, it is not currently + possible to apply multiple labels to a cell. + + :type label: str + :param label: Label to apply to cells in the output row. Values must be + at most 15 characters long, and match the pattern + ``[a-z0-9\\-]+``. + """ + + def __init__(self, label): + self.label = label + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return other.label == self.label + + def to_pb(self): + """Converts the row filter to a protobuf. + + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. 
+ """ + return data_v2_pb2.RowFilter(apply_label_transformer=self.label) + + +class _FilterCombination(RowFilter): + """Chain of row filters. + + Sends rows through several filters in sequence. The filters are "chained" + together to process a row. After the first filter is applied, the second + is applied to the filtered output and so on for subsequent filters. + + :type filters: list + :param filters: List of :class:`RowFilter` + """ + + def __init__(self, filters=None): + if filters is None: + filters = [] + self.filters = filters + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return other.filters == self.filters + + +class RowFilterChain(_FilterCombination): + """Chain of row filters. + + Sends rows through several filters in sequence. The filters are "chained" + together to process a row. After the first filter is applied, the second + is applied to the filtered output and so on for subsequent filters. + + :type filters: list + :param filters: List of :class:`RowFilter` + """ + + def to_pb(self): + """Converts the row filter to a protobuf. + + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. + """ + chain = data_v2_pb2.RowFilter.Chain( + filters=[row_filter.to_pb() for row_filter in self.filters]) + return data_v2_pb2.RowFilter(chain=chain) + + +class RowFilterUnion(_FilterCombination): + """Union of row filters. + + Sends rows through several filters simultaneously, then + merges / interleaves all the filtered results together. + + If multiple cells are produced with the same column and timestamp, + they will all appear in the output row in an unspecified mutual order. + + :type filters: list + :param filters: List of :class:`RowFilter` + """ + + def to_pb(self): + """Converts the row filter to a protobuf. + + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. 
+ """ + interleave = data_v2_pb2.RowFilter.Interleave( + filters=[row_filter.to_pb() for row_filter in self.filters]) + return data_v2_pb2.RowFilter(interleave=interleave) + + +class ConditionalRowFilter(RowFilter): + """Conditional row filter which exhibits ternary behavior. + + Executes one of two filters based on another filter. If the ``base_filter`` + returns any cells in the row, then ``true_filter`` is executed. If not, + then ``false_filter`` is executed. + + .. note:: + + The ``base_filter`` does not execute atomically with the true and false + filters, which may lead to inconsistent or unexpected results. + + Additionally, executing a :class:`ConditionalRowFilter` has poor + performance on the server, especially when ``false_filter`` is set. + + :type base_filter: :class:`RowFilter` + :param base_filter: The filter to condition on before executing the + true/false filters. + + :type true_filter: :class:`RowFilter` + :param true_filter: (Optional) The filter to execute if there are any cells + matching ``base_filter``. If not provided, no results + will be returned in the true case. + + :type false_filter: :class:`RowFilter` + :param false_filter: (Optional) The filter to execute if there are no cells + matching ``base_filter``. If not provided, no results + will be returned in the false case. + """ + + def __init__(self, base_filter, true_filter=None, false_filter=None): + self.base_filter = base_filter + self.true_filter = true_filter + self.false_filter = false_filter + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return (other.base_filter == self.base_filter and + other.true_filter == self.true_filter and + other.false_filter == self.false_filter) + + def to_pb(self): + """Converts the row filter to a protobuf. + + :rtype: :class:`.data_v2_pb2.RowFilter` + :returns: The converted current object. 
+ """ + condition_kwargs = {'predicate_filter': self.base_filter.to_pb()} + if self.true_filter is not None: + condition_kwargs['true_filter'] = self.true_filter.to_pb() + if self.false_filter is not None: + condition_kwargs['false_filter'] = self.false_filter.to_pb() + condition = data_v2_pb2.RowFilter.Condition(**condition_kwargs) + return data_v2_pb2.RowFilter(condition=condition) diff --git a/env/Lib/site-packages/gcloud/bigtable/table.py b/env/Lib/site-packages/gcloud/bigtable/table.py new file mode 100644 index 0000000..3eef6fe --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/table.py @@ -0,0 +1,379 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""User friendly container for Google Cloud Bigtable Table.""" + +from gcloud._helpers import _to_bytes +from gcloud.bigtable._generated_v2 import ( + bigtable_pb2 as data_messages_v2_pb2) +from gcloud.bigtable._generated_v2 import ( + bigtable_table_admin_pb2 as table_admin_messages_v2_pb2) +from gcloud.bigtable.column_family import _gc_rule_from_pb +from gcloud.bigtable.column_family import ColumnFamily +from gcloud.bigtable.row import AppendRow +from gcloud.bigtable.row import ConditionalRow +from gcloud.bigtable.row import DirectRow +from gcloud.bigtable.row_data import PartialRowsData + + +class Table(object): + """Representation of a Google Cloud Bigtable Table. + + .. note:: + + We don't define any properties on a table other than the name. 
As + the proto says, in a request: + + The ``name`` field of the Table and all of its ColumnFamilies must + be left blank, and will be populated in the response. + + This leaves only the ``current_operation`` and ``granularity`` + fields. The ``current_operation`` is only used for responses while + ``granularity`` is an enum with only one value. + + We can use a :class:`Table` to: + + * :meth:`create` the table + * :meth:`rename` the table + * :meth:`delete` the table + * :meth:`list_column_families` in the table + + :type table_id: str + :param table_id: The ID of the table. + + :type instance: :class:`Cluster <.instance.Instance>` + :param instance: The instance that owns the table. + """ + + def __init__(self, table_id, instance): + self.table_id = table_id + self._instance = instance + + @property + def name(self): + """Table name used in requests. + + .. note:: + + This property will not change if ``table_id`` does not, but the + return value is not cached. + + The table name is of the form + + ``"projects/../zones/../clusters/../tables/{table_id}"`` + + :rtype: str + :returns: The table name. + """ + return self._instance.name + '/tables/' + self.table_id + + def column_family(self, column_family_id, gc_rule=None): + """Factory to create a column family associated with this table. + + :type column_family_id: str + :param column_family_id: The ID of the column family. Must be of the + form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. + + :type gc_rule: :class:`.GarbageCollectionRule` + :param gc_rule: (Optional) The garbage collection settings for this + column family. + + :rtype: :class:`.ColumnFamily` + :returns: A column family owned by this table. + """ + return ColumnFamily(column_family_id, self, gc_rule=gc_rule) + + def row(self, row_key, filter_=None, append=False): + """Factory to create a row associated with this table. + + .. warning:: + + At most one of ``filter_`` and ``append`` can be used in a + :class:`Row`. 
+ + :type row_key: bytes + :param row_key: The key for the row being created. + + :type filter_: :class:`.RowFilter` + :param filter_: (Optional) Filter to be used for conditional mutations. + See :class:`.DirectRow` for more details. + + :type append: bool + :param append: (Optional) Flag to determine if the row should be used + for append mutations. + + :rtype: :class:`.DirectRow` + :returns: A row owned by this table. + :raises: :class:`ValueError ` if both + ``filter_`` and ``append`` are used. + """ + if append and filter_ is not None: + raise ValueError('At most one of filter_ and append can be set') + if append: + return AppendRow(row_key, self) + elif filter_ is not None: + return ConditionalRow(row_key, self, filter_=filter_) + else: + return DirectRow(row_key, self) + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return (other.table_id == self.table_id and + other._instance == self._instance) + + def __ne__(self, other): + return not self.__eq__(other) + + def create(self, initial_split_keys=None): + """Creates this table. + + .. note:: + + Though a :class:`._generated_v2.table_pb2.Table` is also + allowed (as the ``table`` property) in a create table request, we + do not support it in this method. As mentioned in the + :class:`Table` docstring, the name is the only useful property in + the table proto. + + .. note:: + + A create request returns a + :class:`._generated_v2.table_pb2.Table` but we don't use + this response. The proto definition allows for the inclusion of a + ``current_operation`` in the response, but it does not appear that + the Cloud Bigtable API returns any operation. + + :type initial_split_keys: list + :param initial_split_keys: (Optional) List of row keys that will be + used to initially split the table into + several tablets (Tablets are similar to + HBase regions). 
Given two split keys, + ``"s1"`` and ``"s2"``, three tablets will be + created, spanning the key ranges: + ``[, s1)``, ``[s1, s2)``, ``[s2, )``. + """ + split_pb = table_admin_messages_v2_pb2.CreateTableRequest.Split + if initial_split_keys is not None: + initial_split_keys = [ + split_pb(key=key) for key in initial_split_keys] + request_pb = table_admin_messages_v2_pb2.CreateTableRequest( + initial_splits=initial_split_keys or [], + parent=self._instance.name, + table_id=self.table_id, + ) + client = self._instance._client + # We expect a `._generated_v2.table_pb2.Table` + client._table_stub.CreateTable(request_pb, client.timeout_seconds) + + def delete(self): + """Delete this table.""" + request_pb = table_admin_messages_v2_pb2.DeleteTableRequest( + name=self.name) + client = self._instance._client + # We expect a `google.protobuf.empty_pb2.Empty` + client._table_stub.DeleteTable(request_pb, client.timeout_seconds) + + def list_column_families(self): + """List the column families owned by this table. + + :rtype: dict + :returns: Dictionary of column families attached to this table. Keys + are strings (column family names) and values are + :class:`.ColumnFamily` instances. + :raises: :class:`ValueError ` if the column + family name from the response does not agree with the computed + name from the column family ID. + """ + request_pb = table_admin_messages_v2_pb2.GetTableRequest( + name=self.name) + client = self._instance._client + # We expect a `._generated_v2.table_pb2.Table` + table_pb = client._table_stub.GetTable(request_pb, + client.timeout_seconds) + + result = {} + for column_family_id, value_pb in table_pb.column_families.items(): + gc_rule = _gc_rule_from_pb(value_pb.gc_rule) + column_family = self.column_family(column_family_id, + gc_rule=gc_rule) + result[column_family_id] = column_family + return result + + def read_row(self, row_key, filter_=None): + """Read a single row from this table. 
+ + :type row_key: bytes + :param row_key: The key of the row to read from. + + :type filter_: :class:`.RowFilter` + :param filter_: (Optional) The filter to apply to the contents of the + row. If unset, returns the entire row. + + :rtype: :class:`.PartialRowData`, :data:`NoneType ` + :returns: The contents of the row if any chunks were returned in + the response, otherwise :data:`None`. + :raises: :class:`ValueError ` if a commit row + chunk is never encountered. + """ + request_pb = _create_row_request(self.name, row_key=row_key, + filter_=filter_) + client = self._instance._client + response_iterator = client._data_stub.ReadRows(request_pb, + client.timeout_seconds) + rows_data = PartialRowsData(response_iterator) + rows_data.consume_all() + if rows_data.state not in (rows_data.NEW_ROW, rows_data.START): + raise ValueError('The row remains partial / is not committed.') + + if len(rows_data.rows) == 0: + return None + + return rows_data.rows[row_key] + + def read_rows(self, start_key=None, end_key=None, limit=None, + filter_=None): + """Read rows from this table. + + :type start_key: bytes + :param start_key: (Optional) The beginning of a range of row keys to + read from. The range will include ``start_key``. If + left empty, will be interpreted as the empty string. + + :type end_key: bytes + :param end_key: (Optional) The end of a range of row keys to read from. + The range will not include ``end_key``. If left empty, + will be interpreted as an infinite string. + + :type limit: int + :param limit: (Optional) The read will terminate after committing to N + rows' worth of results. The default (zero) is to return + all results. + + :type filter_: :class:`.RowFilter` + :param filter_: (Optional) The filter to apply to the contents of the + specified row(s). If unset, reads every column in + each row. + + :rtype: :class:`.PartialRowsData` + :returns: A :class:`.PartialRowsData` convenience wrapper for consuming + the streamed results. 
+ """ + request_pb = _create_row_request( + self.name, start_key=start_key, end_key=end_key, filter_=filter_, + limit=limit) + client = self._instance._client + response_iterator = client._data_stub.ReadRows(request_pb, + client.timeout_seconds) + # We expect an iterator of `data_messages_v2_pb2.ReadRowsResponse` + return PartialRowsData(response_iterator) + + def sample_row_keys(self): + """Read a sample of row keys in the table. + + The returned row keys will delimit contiguous sections of the table of + approximately equal size, which can be used to break up the data for + distributed tasks like mapreduces. + + The elements in the iterator are a SampleRowKeys response and they have + the properties ``offset_bytes`` and ``row_key``. They occur in sorted + order. The table might have contents before the first row key in the + list and after the last one, but a key containing the empty string + indicates "end of table" and will be the last response given, if + present. + + .. note:: + + Row keys in this list may not have ever been written to or read + from, and users should therefore not make any assumptions about the + row key structure that are specific to their use case. + + The ``offset_bytes`` field on a response indicates the approximate + total storage space used by all rows in the table which precede + ``row_key``. Buffering the contents of all rows between two subsequent + samples would require space roughly equal to the difference in their + ``offset_bytes`` fields. + + :rtype: :class:`grpc.framework.alpha._reexport._CancellableIterator` + :returns: A cancel-able iterator. Can be consumed by calling ``next()`` + or by casting to a :class:`list` and can be cancelled by + calling ``cancel()``. 
+ """ + request_pb = data_messages_v2_pb2.SampleRowKeysRequest( + table_name=self.name) + client = self._instance._client + response_iterator = client._data_stub.SampleRowKeys( + request_pb, client.timeout_seconds) + return response_iterator + + +def _create_row_request(table_name, row_key=None, start_key=None, end_key=None, + filter_=None, limit=None): + """Creates a request to read rows in a table. + + :type table_name: str + :param table_name: The name of the table to read from. + + :type row_key: bytes + :param row_key: (Optional) The key of a specific row to read from. + + :type start_key: bytes + :param start_key: (Optional) The beginning of a range of row keys to + read from. The range will include ``start_key``. If + left empty, will be interpreted as the empty string. + + :type end_key: bytes + :param end_key: (Optional) The end of a range of row keys to read from. + The range will not include ``end_key``. If left empty, + will be interpreted as an infinite string. + + :type filter_: :class:`.RowFilter` + :param filter_: (Optional) The filter to apply to the contents of the + specified row(s). If unset, reads the entire table. + + :type limit: int + :param limit: (Optional) The read will terminate after committing to N + rows' worth of results. The default (zero) is to return + all results. + + :rtype: :class:`data_messages_v2_pb2.ReadRowsRequest` + :returns: The ``ReadRowsRequest`` protobuf corresponding to the inputs. 
+ :raises: :class:`ValueError ` if both + ``row_key`` and one of ``start_key`` and ``end_key`` are set + """ + request_kwargs = {'table_name': table_name} + if (row_key is not None and + (start_key is not None or end_key is not None)): + raise ValueError('Row key and row range cannot be ' + 'set simultaneously') + range_kwargs = {} + if start_key is not None or end_key is not None: + if start_key is not None: + range_kwargs['start_key_closed'] = _to_bytes(start_key) + if end_key is not None: + range_kwargs['end_key_open'] = _to_bytes(end_key) + if filter_ is not None: + request_kwargs['filter'] = filter_.to_pb() + if limit is not None: + request_kwargs['rows_limit'] = limit + + message = data_messages_v2_pb2.ReadRowsRequest(**request_kwargs) + + if row_key is not None: + message.rows.row_keys.append(_to_bytes(row_key)) + + if range_kwargs: + message.rows.row_ranges.add(**range_kwargs) + + return message diff --git a/env/Lib/site-packages/gcloud/bigtable/test_client.py b/env/Lib/site-packages/gcloud/bigtable/test_client.py new file mode 100644 index 0000000..435798e --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/test_client.py @@ -0,0 +1,784 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import unittest2 + + +class TestClient(unittest2.TestCase): + + PROJECT = 'PROJECT' + INSTANCE_ID = 'instance-id' + DISPLAY_NAME = 'display-name' + TIMEOUT_SECONDS = 80 + USER_AGENT = 'you-sir-age-int' + + def _getTargetClass(self): + from gcloud.bigtable.client import Client + return Client + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def _constructor_test_helper(self, expected_scopes, creds, + read_only=False, admin=False, + user_agent=None, timeout_seconds=None, + expected_creds=None): + from gcloud.bigtable import client as MUT + + user_agent = user_agent or MUT.DEFAULT_USER_AGENT + timeout_seconds = timeout_seconds or MUT.DEFAULT_TIMEOUT_SECONDS + client = self._makeOne(project=self.PROJECT, credentials=creds, + read_only=read_only, admin=admin, + user_agent=user_agent, + timeout_seconds=timeout_seconds) + + expected_creds = expected_creds or creds + self.assertTrue(client._credentials is expected_creds) + if expected_scopes is not None: + self.assertEqual(client._credentials.scopes, expected_scopes) + + self.assertEqual(client.project, self.PROJECT) + self.assertEqual(client.timeout_seconds, timeout_seconds) + self.assertEqual(client.user_agent, user_agent) + # Check stubs are set (but null) + self.assertEqual(client._data_stub_internal, None) + self.assertEqual(client._instance_stub_internal, None) + self.assertEqual(client._operations_stub_internal, None) + self.assertEqual(client._table_stub_internal, None) + + def test_constructor_default_scopes(self): + from gcloud.bigtable import client as MUT + + expected_scopes = [MUT.DATA_SCOPE] + creds = _Credentials() + self._constructor_test_helper(expected_scopes, creds) + + def test_constructor_custom_user_agent_and_timeout(self): + from gcloud.bigtable import client as MUT + + CUSTOM_TIMEOUT_SECONDS = 1337 + CUSTOM_USER_AGENT = 'custom-application' + expected_scopes = [MUT.DATA_SCOPE] + creds = _Credentials() + self._constructor_test_helper(expected_scopes, 
creds, + user_agent=CUSTOM_USER_AGENT, + timeout_seconds=CUSTOM_TIMEOUT_SECONDS) + + def test_constructor_with_admin(self): + from gcloud.bigtable import client as MUT + + expected_scopes = [MUT.DATA_SCOPE, MUT.ADMIN_SCOPE] + creds = _Credentials() + self._constructor_test_helper(expected_scopes, creds, admin=True) + + def test_constructor_with_read_only(self): + from gcloud.bigtable import client as MUT + + expected_scopes = [MUT.READ_ONLY_SCOPE] + creds = _Credentials() + self._constructor_test_helper(expected_scopes, creds, read_only=True) + + def test_constructor_both_admin_and_read_only(self): + creds = _Credentials() + with self.assertRaises(ValueError): + self._constructor_test_helper([], creds, admin=True, + read_only=True) + + def test_constructor_implicit_credentials(self): + from gcloud._testing import _Monkey + from gcloud.bigtable import client as MUT + + creds = _Credentials() + expected_scopes = [MUT.DATA_SCOPE] + + def mock_get_credentials(): + return creds + + with _Monkey(MUT, get_credentials=mock_get_credentials): + self._constructor_test_helper(expected_scopes, None, + expected_creds=creds) + + def test_constructor_credentials_wo_create_scoped(self): + creds = object() + expected_scopes = None + self._constructor_test_helper(expected_scopes, creds) + + def _context_manager_helper(self): + credentials = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=credentials) + + def mock_start(): + client._data_stub_internal = object() + client.start = mock_start + + def mock_stop(): + client._data_stub_internal = None + client.stop = mock_stop + return client + + def test_context_manager(self): + client = self._context_manager_helper() + self.assertFalse(client.is_started()) + with client: + self.assertTrue(client.is_started()) + self.assertFalse(client.is_started()) + + def test_context_manager_as_keyword(self): + with self._context_manager_helper() as client: + self.assertIsNotNone(client) + + def 
test_context_manager_with_exception(self): + client = self._context_manager_helper() + self.assertFalse(client.is_started()) + + class DummyException(Exception): + pass + try: + with client: + self.assertTrue(client.is_started()) + raise DummyException() + except DummyException: + pass + self.assertFalse(client.is_started()) + + def _copy_test_helper(self, read_only=False, admin=False): + credentials = _Credentials('value') + client = self._makeOne( + project=self.PROJECT, + credentials=credentials, + read_only=read_only, + admin=admin, + timeout_seconds=self.TIMEOUT_SECONDS, + user_agent=self.USER_AGENT) + # Put some fake stubs in place so that we can verify they + # don't get copied. + client._data_stub_internal = object() + client._instance_stub_internal = object() + client._operations_stub_internal = object() + client._table_stub_internal = object() + + new_client = client.copy() + self.assertEqual(new_client._admin, client._admin) + self.assertEqual(new_client._credentials, client._credentials) + self.assertEqual(new_client.project, client.project) + self.assertEqual(new_client.user_agent, client.user_agent) + self.assertEqual(new_client.timeout_seconds, client.timeout_seconds) + # Make sure stubs are not preserved. 
+ self.assertEqual(new_client._data_stub_internal, None) + self.assertEqual(new_client._instance_stub_internal, None) + self.assertEqual(new_client._operations_stub_internal, None) + self.assertEqual(new_client._table_stub_internal, None) + + def test_copy(self): + self._copy_test_helper() + + def test_copy_admin(self): + self._copy_test_helper(admin=True) + + def test_copy_read_only(self): + self._copy_test_helper(read_only=True) + + def test_credentials_getter(self): + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials) + self.assertTrue(client.credentials is credentials) + + def test_project_name_property(self): + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials) + project_name = 'projects/' + project + self.assertEqual(client.project_name, project_name) + + def test_data_stub_getter(self): + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials) + client._data_stub_internal = object() + self.assertTrue(client._data_stub is client._data_stub_internal) + + def test_data_stub_failure(self): + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials) + with self.assertRaises(ValueError): + getattr(client, '_data_stub') + + def test_instance_stub_getter(self): + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials, + admin=True) + client._instance_stub_internal = object() + self.assertTrue( + client._instance_stub is client._instance_stub_internal) + + def test_instance_stub_non_admin_failure(self): + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials, + admin=False) + with self.assertRaises(ValueError): + getattr(client, '_instance_stub') + + def test_instance_stub_unset_failure(self): 
+ credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials, + admin=True) + with self.assertRaises(ValueError): + getattr(client, '_instance_stub') + + def test_operations_stub_getter(self): + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials, + admin=True) + client._operations_stub_internal = object() + self.assertTrue(client._operations_stub is + client._operations_stub_internal) + + def test_operations_stub_non_admin_failure(self): + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials, + admin=False) + with self.assertRaises(ValueError): + getattr(client, '_operations_stub') + + def test_operations_stub_unset_failure(self): + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials, + admin=True) + with self.assertRaises(ValueError): + getattr(client, '_operations_stub') + + def test_table_stub_getter(self): + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials, + admin=True) + client._table_stub_internal = object() + self.assertTrue(client._table_stub is client._table_stub_internal) + + def test_table_stub_non_admin_failure(self): + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials, + admin=False) + with self.assertRaises(ValueError): + getattr(client, '_table_stub') + + def test_table_stub_unset_failure(self): + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials, + admin=True) + with self.assertRaises(ValueError): + getattr(client, '_table_stub') + + def test__make_data_stub(self): + from gcloud._testing import _Monkey + from gcloud.bigtable import client as MUT + from gcloud.bigtable.client import DATA_API_HOST_V2 + 
from gcloud.bigtable.client import DATA_API_PORT_V2 + from gcloud.bigtable.client import DATA_STUB_FACTORY_V2 + + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials) + + fake_stub = object() + make_stub_args = [] + + def mock_make_stub(*args): + make_stub_args.append(args) + return fake_stub + + with _Monkey(MUT, _make_stub=mock_make_stub): + result = client._make_data_stub() + + self.assertTrue(result is fake_stub) + self.assertEqual(make_stub_args, [ + ( + client, + DATA_STUB_FACTORY_V2, + DATA_API_HOST_V2, + DATA_API_PORT_V2, + ), + ]) + + def test__make_instance_stub(self): + from gcloud._testing import _Monkey + from gcloud.bigtable import client as MUT + from gcloud.bigtable.client import INSTANCE_ADMIN_HOST_V2 + from gcloud.bigtable.client import INSTANCE_ADMIN_PORT_V2 + from gcloud.bigtable.client import INSTANCE_STUB_FACTORY_V2 + + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials) + + fake_stub = object() + make_stub_args = [] + + def mock_make_stub(*args): + make_stub_args.append(args) + return fake_stub + + with _Monkey(MUT, _make_stub=mock_make_stub): + result = client._make_instance_stub() + + self.assertTrue(result is fake_stub) + self.assertEqual(make_stub_args, [ + ( + client, + INSTANCE_STUB_FACTORY_V2, + INSTANCE_ADMIN_HOST_V2, + INSTANCE_ADMIN_PORT_V2, + ), + ]) + + def test__make_operations_stub(self): + from gcloud._testing import _Monkey + from gcloud.bigtable import client as MUT + from gcloud.bigtable.client import OPERATIONS_API_HOST_V2 + from gcloud.bigtable.client import OPERATIONS_API_PORT_V2 + from gcloud.bigtable.client import OPERATIONS_STUB_FACTORY_V2 + + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials) + + fake_stub = object() + make_stub_args = [] + + def mock_make_stub(*args): + make_stub_args.append(args) + return fake_stub + + 
with _Monkey(MUT, _make_stub=mock_make_stub): + result = client._make_operations_stub() + + self.assertTrue(result is fake_stub) + self.assertEqual(make_stub_args, [ + ( + client, + OPERATIONS_STUB_FACTORY_V2, + OPERATIONS_API_HOST_V2, + OPERATIONS_API_PORT_V2, + ), + ]) + + def test__make_table_stub(self): + from gcloud._testing import _Monkey + from gcloud.bigtable import client as MUT + from gcloud.bigtable.client import TABLE_ADMIN_HOST_V2 + from gcloud.bigtable.client import TABLE_ADMIN_PORT_V2 + from gcloud.bigtable.client import TABLE_STUB_FACTORY_V2 + + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials) + + fake_stub = object() + make_stub_args = [] + + def mock_make_stub(*args): + make_stub_args.append(args) + return fake_stub + + with _Monkey(MUT, _make_stub=mock_make_stub): + result = client._make_table_stub() + + self.assertTrue(result is fake_stub) + self.assertEqual(make_stub_args, [ + ( + client, + TABLE_STUB_FACTORY_V2, + TABLE_ADMIN_HOST_V2, + TABLE_ADMIN_PORT_V2, + ), + ]) + + def test_is_started(self): + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials) + + self.assertFalse(client.is_started()) + client._data_stub_internal = object() + self.assertTrue(client.is_started()) + client._data_stub_internal = None + self.assertFalse(client.is_started()) + + def _start_method_helper(self, admin): + from gcloud._testing import _Monkey + from gcloud.bigtable._testing import _FakeStub + from gcloud.bigtable import client as MUT + + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials, + admin=admin) + + stub = _FakeStub() + make_stub_args = [] + + def mock_make_stub(*args): + make_stub_args.append(args) + return stub + + with _Monkey(MUT, _make_stub=mock_make_stub): + client.start() + + self.assertTrue(client._data_stub_internal is stub) + if admin: + 
self.assertTrue(client._instance_stub_internal is stub) + self.assertTrue(client._operations_stub_internal is stub) + self.assertTrue(client._table_stub_internal is stub) + self.assertEqual(stub._entered, 4) + self.assertEqual(len(make_stub_args), 4) + else: + self.assertTrue(client._instance_stub_internal is None) + self.assertTrue(client._operations_stub_internal is None) + self.assertTrue(client._table_stub_internal is None) + self.assertEqual(stub._entered, 1) + self.assertEqual(len(make_stub_args), 1) + self.assertEqual(stub._exited, []) + + def test_start_non_admin(self): + self._start_method_helper(admin=False) + + def test_start_with_admin(self): + self._start_method_helper(admin=True) + + def test_start_while_started(self): + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials) + client._data_stub_internal = data_stub = object() + self.assertTrue(client.is_started()) + client.start() + + # Make sure the stub did not change. 
+ self.assertEqual(client._data_stub_internal, data_stub) + + def _stop_method_helper(self, admin): + from gcloud.bigtable._testing import _FakeStub + + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials, + admin=admin) + + stub1 = _FakeStub() + stub2 = _FakeStub() + client._data_stub_internal = stub1 + client._instance_stub_internal = stub2 + client._operations_stub_internal = stub2 + client._table_stub_internal = stub2 + client.stop() + self.assertTrue(client._data_stub_internal is None) + self.assertTrue(client._instance_stub_internal is None) + self.assertTrue(client._operations_stub_internal is None) + self.assertTrue(client._table_stub_internal is None) + self.assertEqual(stub1._entered, 0) + self.assertEqual(stub2._entered, 0) + exc_none_triple = (None, None, None) + self.assertEqual(stub1._exited, [exc_none_triple]) + if admin: + self.assertEqual(stub2._exited, [exc_none_triple] * 3) + else: + self.assertEqual(stub2._exited, []) + + def test_stop_non_admin(self): + self._stop_method_helper(admin=False) + + def test_stop_with_admin(self): + self._stop_method_helper(admin=True) + + def test_stop_while_stopped(self): + credentials = _Credentials() + project = 'PROJECT' + client = self._makeOne(project=project, credentials=credentials) + self.assertFalse(client.is_started()) + + # This is a bit hacky. We set the cluster stub protected value + # since it isn't used in is_started() and make sure that stop + # doesn't reset this value to None. + client._instance_stub_internal = instance_stub = object() + client.stop() + # Make sure the cluster stub did not change. 
+ self.assertEqual(client._instance_stub_internal, instance_stub) + + def test_instance_factory_defaults(self): + from gcloud.bigtable.cluster import DEFAULT_SERVE_NODES + from gcloud.bigtable.instance import Instance + from gcloud.bigtable.instance import _EXISTING_INSTANCE_LOCATION_ID + + PROJECT = 'PROJECT' + INSTANCE_ID = 'instance-id' + DISPLAY_NAME = 'display-name' + credentials = _Credentials() + client = self._makeOne(project=PROJECT, credentials=credentials) + + instance = client.instance(INSTANCE_ID, display_name=DISPLAY_NAME) + + self.assertTrue(isinstance(instance, Instance)) + self.assertEqual(instance.instance_id, INSTANCE_ID) + self.assertEqual(instance.display_name, DISPLAY_NAME) + self.assertEqual(instance._cluster_location_id, + _EXISTING_INSTANCE_LOCATION_ID) + self.assertEqual(instance._cluster_serve_nodes, DEFAULT_SERVE_NODES) + self.assertTrue(instance._client is client) + + def test_instance_factory_w_explicit_serve_nodes(self): + from gcloud.bigtable.instance import Instance + + PROJECT = 'PROJECT' + INSTANCE_ID = 'instance-id' + DISPLAY_NAME = 'display-name' + LOCATION_ID = 'locname' + SERVE_NODES = 5 + credentials = _Credentials() + client = self._makeOne(project=PROJECT, credentials=credentials) + + instance = client.instance( + INSTANCE_ID, display_name=DISPLAY_NAME, + location=LOCATION_ID, serve_nodes=SERVE_NODES) + + self.assertTrue(isinstance(instance, Instance)) + self.assertEqual(instance.instance_id, INSTANCE_ID) + self.assertEqual(instance.display_name, DISPLAY_NAME) + self.assertEqual(instance._cluster_location_id, LOCATION_ID) + self.assertEqual(instance._cluster_serve_nodes, SERVE_NODES) + self.assertTrue(instance._client is client) + + def test_list_instances(self): + from gcloud.bigtable._generated_v2 import ( + instance_pb2 as data_v2_pb2) + from gcloud.bigtable._generated_v2 import ( + bigtable_instance_admin_pb2 as messages_v2_pb2) + from gcloud.bigtable._testing import _FakeStub + + LOCATION = 'projects/' + self.PROJECT + 
'/locations/locname' + FAILED_LOCATION = 'FAILED' + INSTANCE_ID1 = 'instance-id1' + INSTANCE_ID2 = 'instance-id2' + INSTANCE_NAME1 = ( + 'projects/' + self.PROJECT + '/instances/' + INSTANCE_ID1) + INSTANCE_NAME2 = ( + 'projects/' + self.PROJECT + '/instances/' + INSTANCE_ID2) + + credentials = _Credentials() + client = self._makeOne( + project=self.PROJECT, + credentials=credentials, + admin=True, + timeout_seconds=self.TIMEOUT_SECONDS, + ) + + # Create request_pb + request_pb = messages_v2_pb2.ListInstancesRequest( + parent='projects/' + self.PROJECT, + ) + + # Create response_pb + response_pb = messages_v2_pb2.ListInstancesResponse( + failed_locations=[ + FAILED_LOCATION, + ], + instances=[ + data_v2_pb2.Instance( + name=INSTANCE_NAME1, + display_name=INSTANCE_NAME1, + ), + data_v2_pb2.Instance( + name=INSTANCE_NAME2, + display_name=INSTANCE_NAME2, + ), + ], + ) + + # Patch the stub used by the API method. + client._instance_stub_internal = stub = _FakeStub(response_pb) + + # Create expected_result. + failed_locations = [FAILED_LOCATION] + instances = [ + client.instance(INSTANCE_ID1, LOCATION), + client.instance(INSTANCE_ID2, LOCATION), + ] + expected_result = (instances, failed_locations) + + # Perform the method and check the result. 
+ result = client.list_instances() + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'ListInstances', + (request_pb, self.TIMEOUT_SECONDS), + {}, + )]) + + +class Test_MetadataPlugin(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.client import _MetadataPlugin + return _MetadataPlugin + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + from gcloud.bigtable.client import Client + from gcloud.bigtable.client import DATA_SCOPE + PROJECT = 'PROJECT' + USER_AGENT = 'USER_AGENT' + + credentials = _Credentials() + client = Client(project=PROJECT, credentials=credentials, + user_agent=USER_AGENT) + transformer = self._makeOne(client) + self.assertTrue(transformer._credentials is credentials) + self.assertEqual(transformer._user_agent, USER_AGENT) + self.assertEqual(credentials.scopes, [DATA_SCOPE]) + + def test___call__(self): + from gcloud.bigtable.client import Client + from gcloud.bigtable.client import DATA_SCOPE + from gcloud.bigtable.client import DEFAULT_USER_AGENT + + access_token_expected = 'FOOBARBAZ' + credentials = _Credentials(access_token=access_token_expected) + project = 'PROJECT' + client = Client(project=project, credentials=credentials) + callback_args = [] + + def callback(*args): + callback_args.append(args) + + transformer = self._makeOne(client) + result = transformer(None, callback) + cb_headers = [ + ('Authorization', 'Bearer ' + access_token_expected), + ('User-agent', DEFAULT_USER_AGENT), + ] + self.assertEqual(result, None) + self.assertEqual(callback_args, [(cb_headers, None)]) + self.assertEqual(credentials.scopes, [DATA_SCOPE]) + self.assertEqual(len(credentials._tokens), 1) + + +class Test__make_stub(unittest2.TestCase): + + def _callFUT(self, *args, **kwargs): + from gcloud.bigtable.client import _make_stub + return _make_stub(*args, **kwargs) + + def test_it(self): + from gcloud._testing import _Monkey + 
from gcloud.bigtable import client as MUT + + mock_result = object() + stub_inputs = [] + + SSL_CREDS = object() + METADATA_CREDS = object() + COMPOSITE_CREDS = object() + CHANNEL = object() + + class _ImplementationsModule(object): + + def __init__(self): + self.ssl_channel_credentials_args = None + self.metadata_call_credentials_args = None + self.composite_channel_credentials_args = None + self.secure_channel_args = None + + def ssl_channel_credentials(self, *args): + self.ssl_channel_credentials_args = args + return SSL_CREDS + + def metadata_call_credentials(self, *args, **kwargs): + self.metadata_call_credentials_args = (args, kwargs) + return METADATA_CREDS + + def composite_channel_credentials(self, *args): + self.composite_channel_credentials_args = args + return COMPOSITE_CREDS + + def secure_channel(self, *args): + self.secure_channel_args = args + return CHANNEL + + implementations_mod = _ImplementationsModule() + + def mock_stub_factory(channel): + stub_inputs.append(channel) + return mock_result + + metadata_plugin = object() + clients = [] + + def mock_plugin(client): + clients.append(client) + return metadata_plugin + + host = 'HOST' + port = 1025 + client = object() + with _Monkey(MUT, implementations=implementations_mod, + _MetadataPlugin=mock_plugin): + result = self._callFUT(client, mock_stub_factory, host, port) + + self.assertTrue(result is mock_result) + self.assertEqual(stub_inputs, [CHANNEL]) + self.assertEqual(clients, [client]) + self.assertEqual(implementations_mod.ssl_channel_credentials_args, + (None, None, None)) + self.assertEqual(implementations_mod.metadata_call_credentials_args, + ((metadata_plugin,), {'name': 'google_creds'})) + self.assertEqual( + implementations_mod.composite_channel_credentials_args, + (SSL_CREDS, METADATA_CREDS)) + self.assertEqual(implementations_mod.secure_channel_args, + (host, port, COMPOSITE_CREDS)) + + +class _Credentials(object): + + scopes = None + + def __init__(self, access_token=None): + 
self._access_token = access_token + self._tokens = [] + + def get_access_token(self): + from oauth2client.client import AccessTokenInfo + token = AccessTokenInfo(access_token=self._access_token, + expires_in=None) + self._tokens.append(token) + return token + + def create_scoped(self, scope): + self.scopes = scope + return self + + def __eq__(self, other): + return self._access_token == other._access_token diff --git a/env/Lib/site-packages/gcloud/bigtable/test_cluster.py b/env/Lib/site-packages/gcloud/bigtable/test_cluster.py new file mode 100644 index 0000000..4f8da61 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/test_cluster.py @@ -0,0 +1,643 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import unittest2 + + +class TestOperation(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.cluster import Operation + return Operation + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def _constructor_test_helper(self, cluster=None): + op_type = 'fake-op' + op_id = 8915 + operation = self._makeOne(op_type, op_id, cluster=cluster) + + self.assertEqual(operation.op_type, op_type) + self.assertEqual(operation.op_id, op_id) + self.assertEqual(operation._cluster, cluster) + self.assertFalse(operation._complete) + + def test_constructor_defaults(self): + self._constructor_test_helper() + + def test_constructor_explicit_cluster(self): + cluster = object() + self._constructor_test_helper(cluster=cluster) + + def test___eq__(self): + op_type = 'fake-op' + op_id = 8915 + cluster = object() + operation1 = self._makeOne(op_type, op_id, cluster=cluster) + operation2 = self._makeOne(op_type, op_id, cluster=cluster) + self.assertEqual(operation1, operation2) + + def test___eq__type_differ(self): + operation1 = self._makeOne('foo', 123, None) + operation2 = object() + self.assertNotEqual(operation1, operation2) + + def test___ne__same_value(self): + op_type = 'fake-op' + op_id = 8915 + cluster = object() + operation1 = self._makeOne(op_type, op_id, cluster=cluster) + operation2 = self._makeOne(op_type, op_id, cluster=cluster) + comparison_val = (operation1 != operation2) + self.assertFalse(comparison_val) + + def test___ne__(self): + operation1 = self._makeOne('foo', 123, None) + operation2 = self._makeOne('bar', 456, None) + self.assertNotEqual(operation1, operation2) + + def test_finished_without_operation(self): + operation = self._makeOne(None, None, None) + operation._complete = True + with self.assertRaises(ValueError): + operation.finished() + + def _finished_helper(self, done): + from google.longrunning import operations_pb2 + from gcloud.bigtable._testing import _FakeStub + from 
gcloud.bigtable.cluster import Cluster + + PROJECT = 'PROJECT' + INSTANCE_ID = 'instance-id' + CLUSTER_ID = 'cluster-id' + OP_TYPE = 'fake-op' + OP_ID = 789 + timeout_seconds = 1 + + client = _Client(PROJECT, timeout_seconds=timeout_seconds) + instance = _Instance(INSTANCE_ID, client) + cluster = Cluster(CLUSTER_ID, instance) + operation = self._makeOne(OP_TYPE, OP_ID, cluster=cluster) + + # Create request_pb + op_name = ('operations/projects/' + PROJECT + + '/instances/' + INSTANCE_ID + + '/clusters/' + CLUSTER_ID + + '/operations/%d' % (OP_ID,)) + request_pb = operations_pb2.GetOperationRequest(name=op_name) + + # Create response_pb + response_pb = operations_pb2.Operation(done=done) + + # Patch the stub used by the API method. + client._operations_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + expected_result = done + + # Perform the method and check the result. + result = operation.finished() + + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'GetOperation', + (request_pb, timeout_seconds), + {}, + )]) + + if done: + self.assertTrue(operation._complete) + else: + self.assertFalse(operation._complete) + + def test_finished(self): + self._finished_helper(done=True) + + def test_finished_not_done(self): + self._finished_helper(done=False) + + +class TestCluster(unittest2.TestCase): + + PROJECT = 'project' + INSTANCE_ID = 'instance-id' + CLUSTER_ID = 'cluster-id' + CLUSTER_NAME = ('projects/' + PROJECT + + '/instances/' + INSTANCE_ID + + '/clusters/' + CLUSTER_ID) + TIMEOUT_SECONDS = 123 + + def _getTargetClass(self): + from gcloud.bigtable.cluster import Cluster + return Cluster + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor_defaults(self): + from gcloud.bigtable.cluster import DEFAULT_SERVE_NODES + client = _Client(self.PROJECT) + instance = _Instance(self.INSTANCE_ID, client) + + cluster = self._makeOne(self.CLUSTER_ID, instance) + 
self.assertEqual(cluster.cluster_id, self.CLUSTER_ID) + self.assertTrue(cluster._instance is instance) + self.assertEqual(cluster.serve_nodes, DEFAULT_SERVE_NODES) + + def test_constructor_non_default(self): + SERVE_NODES = 8 + client = _Client(self.PROJECT) + instance = _Instance(self.INSTANCE_ID, client) + + cluster = self._makeOne(self.CLUSTER_ID, instance, + serve_nodes=SERVE_NODES) + self.assertEqual(cluster.cluster_id, self.CLUSTER_ID) + self.assertTrue(cluster._instance is instance) + self.assertEqual(cluster.serve_nodes, SERVE_NODES) + + def test_copy(self): + SERVE_NODES = 8 + + client = _Client(self.PROJECT) + instance = _Instance(self.INSTANCE_ID, client) + cluster = self._makeOne(self.CLUSTER_ID, instance, + serve_nodes=SERVE_NODES) + new_cluster = cluster.copy() + + # Make sure the client copy succeeded. + self.assertFalse(new_cluster._instance is instance) + self.assertEqual(new_cluster.serve_nodes, SERVE_NODES) + # Make sure the client got copied to a new instance. + self.assertFalse(cluster is new_cluster) + self.assertEqual(cluster, new_cluster) + + def test__update_from_pb_success(self): + from gcloud.bigtable.cluster import DEFAULT_SERVE_NODES + + SERVE_NODES = 8 + cluster_pb = _ClusterPB( + serve_nodes=SERVE_NODES, + ) + client = _Client(self.PROJECT) + instance = _Instance(self.INSTANCE_ID, client) + + cluster = self._makeOne(self.CLUSTER_ID, instance) + self.assertEqual(cluster.serve_nodes, DEFAULT_SERVE_NODES) + cluster._update_from_pb(cluster_pb) + self.assertEqual(cluster.serve_nodes, SERVE_NODES) + + def test__update_from_pb_no_serve_nodes(self): + from gcloud.bigtable.cluster import DEFAULT_SERVE_NODES + + cluster_pb = _ClusterPB() + client = _Client(self.PROJECT) + instance = _Instance(self.INSTANCE_ID, client) + + cluster = self._makeOne(self.CLUSTER_ID, instance) + self.assertEqual(cluster.serve_nodes, DEFAULT_SERVE_NODES) + with self.assertRaises(ValueError): + cluster._update_from_pb(cluster_pb) + 
self.assertEqual(cluster.serve_nodes, DEFAULT_SERVE_NODES) + + def test_from_pb_success(self): + SERVE_NODES = 331 + client = _Client(self.PROJECT) + instance = _Instance(self.INSTANCE_ID, client) + + cluster_pb = _ClusterPB( + name=self.CLUSTER_NAME, + serve_nodes=SERVE_NODES, + ) + + klass = self._getTargetClass() + cluster = klass.from_pb(cluster_pb, instance) + self.assertTrue(isinstance(cluster, klass)) + self.assertTrue(cluster._instance is instance) + self.assertEqual(cluster.cluster_id, self.CLUSTER_ID) + self.assertEqual(cluster.serve_nodes, SERVE_NODES) + + def test_from_pb_bad_cluster_name(self): + BAD_CLUSTER_NAME = 'INCORRECT_FORMAT' + client = _Client(self.PROJECT) + instance = _Instance(self.INSTANCE_ID, client) + cluster_pb = _ClusterPB(name=BAD_CLUSTER_NAME) + + klass = self._getTargetClass() + with self.assertRaises(ValueError): + klass.from_pb(cluster_pb, instance) + + def test_from_pb_project_mistmatch(self): + ALT_PROJECT = 'ALT_PROJECT' + client = _Client(ALT_PROJECT) + instance = _Instance(self.INSTANCE_ID, client) + + self.assertNotEqual(self.PROJECT, ALT_PROJECT) + + cluster_pb = _ClusterPB(name=self.CLUSTER_NAME) + + klass = self._getTargetClass() + with self.assertRaises(ValueError): + klass.from_pb(cluster_pb, instance) + + def test_from_pb_instance_mistmatch(self): + ALT_INSTANCE_ID = 'ALT_INSTANCE_ID' + client = _Client(self.PROJECT) + instance = _Instance(ALT_INSTANCE_ID, client) + + self.assertNotEqual(self.INSTANCE_ID, ALT_INSTANCE_ID) + + cluster_pb = _ClusterPB(name=self.CLUSTER_NAME) + + klass = self._getTargetClass() + with self.assertRaises(ValueError): + klass.from_pb(cluster_pb, instance) + + def test_name_property(self): + client = _Client(self.PROJECT) + instance = _Instance(self.INSTANCE_ID, client) + + cluster = self._makeOne(self.CLUSTER_ID, instance) + self.assertEqual(cluster.name, self.CLUSTER_NAME) + + def test___eq__(self): + client = _Client(self.PROJECT) + instance = _Instance(self.INSTANCE_ID, client) + cluster1 
= self._makeOne(self.CLUSTER_ID, instance) + cluster2 = self._makeOne(self.CLUSTER_ID, instance) + self.assertEqual(cluster1, cluster2) + + def test___eq__type_differ(self): + client = _Client(self.PROJECT) + instance = _Instance(self.INSTANCE_ID, client) + cluster1 = self._makeOne(self.CLUSTER_ID, instance) + cluster2 = object() + self.assertNotEqual(cluster1, cluster2) + + def test___ne__same_value(self): + client = _Client(self.PROJECT) + instance = _Instance(self.INSTANCE_ID, client) + cluster1 = self._makeOne(self.CLUSTER_ID, instance) + cluster2 = self._makeOne(self.CLUSTER_ID, instance) + comparison_val = (cluster1 != cluster2) + self.assertFalse(comparison_val) + + def test___ne__(self): + client = _Client(self.PROJECT) + instance = _Instance(self.INSTANCE_ID, client) + cluster1 = self._makeOne('cluster_id1', instance) + cluster2 = self._makeOne('cluster_id2', instance) + self.assertNotEqual(cluster1, cluster2) + + def test_reload(self): + from gcloud.bigtable._testing import _FakeStub + from gcloud.bigtable.cluster import DEFAULT_SERVE_NODES + + SERVE_NODES = 31 + LOCATION = 'LOCATION' + client = _Client(self.PROJECT, timeout_seconds=self.TIMEOUT_SECONDS) + instance = _Instance(self.INSTANCE_ID, client) + cluster = self._makeOne(self.CLUSTER_ID, instance) + + # Create request_pb + request_pb = _GetClusterRequestPB(name=self.CLUSTER_NAME) + + # Create response_pb + response_pb = _ClusterPB( + serve_nodes=SERVE_NODES, + location=LOCATION, + ) + + # Patch the stub used by the API method. + client._instance_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + expected_result = None # reload() has no return value. + + # Check Cluster optional config values before. + self.assertEqual(cluster.serve_nodes, DEFAULT_SERVE_NODES) + + # Perform the method and check the result. 
+ result = cluster.reload() + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'GetCluster', + (request_pb, self.TIMEOUT_SECONDS), + {}, + )]) + + # Check Cluster optional config values before. + self.assertEqual(cluster.serve_nodes, SERVE_NODES) + self.assertEqual(cluster.location, LOCATION) + + def test_create(self): + from google.longrunning import operations_pb2 + from gcloud._testing import _Monkey + from gcloud.bigtable._testing import _FakeStub + from gcloud.bigtable import cluster as MUT + + client = _Client(self.PROJECT, timeout_seconds=self.TIMEOUT_SECONDS) + instance = _Instance(self.INSTANCE_ID, client) + cluster = self._makeOne(self.CLUSTER_ID, instance) + + # Create request_pb. Just a mock since we monkey patch + # _prepare_create_request + request_pb = object() + + # Create response_pb + OP_ID = 5678 + OP_NAME = ( + 'operations/projects/%s/instances/%s/clusters/%s/operations/%d' % + (self.PROJECT, self.INSTANCE_ID, self.CLUSTER_ID, OP_ID)) + response_pb = operations_pb2.Operation(name=OP_NAME) + + # Patch the stub used by the API method. + client._instance_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + expected_result = MUT.Operation('create', OP_ID, cluster=cluster) + + # Create the mocks. + prep_create_called = [] + + def mock_prep_create_req(cluster): + prep_create_called.append(cluster) + return request_pb + + process_operation_called = [] + + def mock_process_operation(operation_pb): + process_operation_called.append(operation_pb) + return OP_ID + + # Perform the method and check the result. 
+ with _Monkey(MUT, _prepare_create_request=mock_prep_create_req, + _process_operation=mock_process_operation): + result = cluster.create() + + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'CreateCluster', + (request_pb, self.TIMEOUT_SECONDS), + {}, + )]) + self.assertEqual(prep_create_called, [cluster]) + self.assertEqual(process_operation_called, [response_pb]) + + def test_update(self): + from google.longrunning import operations_pb2 + from gcloud._testing import _Monkey + from gcloud.bigtable._testing import _FakeStub + from gcloud.bigtable import cluster as MUT + + SERVE_NODES = 81 + + client = _Client(self.PROJECT, timeout_seconds=self.TIMEOUT_SECONDS) + instance = _Instance(self.INSTANCE_ID, client) + cluster = self._makeOne(self.CLUSTER_ID, instance, + serve_nodes=SERVE_NODES) + + # Create request_pb + request_pb = _ClusterPB( + name=self.CLUSTER_NAME, + serve_nodes=SERVE_NODES, + ) + + # Create response_pb + response_pb = operations_pb2.Operation() + + # Patch the stub used by the API method. + client._instance_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + OP_ID = 5678 + expected_result = MUT.Operation('update', OP_ID, cluster=cluster) + + # Create mocks + process_operation_called = [] + + def mock_process_operation(operation_pb): + process_operation_called.append(operation_pb) + return OP_ID + + # Perform the method and check the result. 
+ with _Monkey(MUT, _process_operation=mock_process_operation): + result = cluster.update() + + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'UpdateCluster', + (request_pb, self.TIMEOUT_SECONDS), + {}, + )]) + self.assertEqual(process_operation_called, [response_pb]) + + def test_delete(self): + from google.protobuf import empty_pb2 + from gcloud.bigtable._testing import _FakeStub + + client = _Client(self.PROJECT, timeout_seconds=self.TIMEOUT_SECONDS) + instance = _Instance(self.INSTANCE_ID, client) + cluster = self._makeOne(self.CLUSTER_ID, instance) + + # Create request_pb + request_pb = _DeleteClusterRequestPB(name=self.CLUSTER_NAME) + + # Create response_pb + response_pb = empty_pb2.Empty() + + # Patch the stub used by the API method. + client._instance_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + expected_result = None # delete() has no return value. + + # Perform the method and check the result. + result = cluster.delete() + + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'DeleteCluster', + (request_pb, self.TIMEOUT_SECONDS), + {}, + )]) + + +class Test__prepare_create_request(unittest2.TestCase): + + def _callFUT(self, cluster): + from gcloud.bigtable.cluster import _prepare_create_request + return _prepare_create_request(cluster) + + def test_it(self): + from gcloud.bigtable.cluster import Cluster + + PROJECT = 'PROJECT' + INSTANCE_ID = 'instance-id' + CLUSTER_ID = 'cluster-id' + SERVE_NODES = 8 + + client = _Client(PROJECT) + instance = _Instance(INSTANCE_ID, client) + cluster = Cluster(CLUSTER_ID, instance, + serve_nodes=SERVE_NODES) + + request_pb = self._callFUT(cluster) + + self.assertEqual(request_pb.cluster_id, CLUSTER_ID) + self.assertEqual(request_pb.parent, instance.name) + self.assertEqual(request_pb.cluster.serve_nodes, SERVE_NODES) + + +class Test__parse_pb_any_to_native(unittest2.TestCase): + + def _callFUT(self, any_val, 
expected_type=None): + from gcloud.bigtable.cluster import _parse_pb_any_to_native + return _parse_pb_any_to_native(any_val, expected_type=expected_type) + + def test_with_known_type_url(self): + from google.protobuf import any_pb2 + from gcloud._testing import _Monkey + from gcloud.bigtable import cluster as MUT + + cell = _CellPB( + timestamp_micros=0, + value=b'foobar', + ) + + type_url = 'type.googleapis.com/' + cell.DESCRIPTOR.full_name + fake_type_url_map = {type_url: cell.__class__} + + any_val = any_pb2.Any( + type_url=type_url, + value=cell.SerializeToString(), + ) + with _Monkey(MUT, _TYPE_URL_MAP=fake_type_url_map): + result = self._callFUT(any_val) + + self.assertEqual(result, cell) + + def test_unknown_type_url(self): + from google.protobuf import any_pb2 + from gcloud._testing import _Monkey + from gcloud.bigtable import cluster as MUT + + fake_type_url_map = {} + any_val = any_pb2.Any() + with _Monkey(MUT, _TYPE_URL_MAP=fake_type_url_map): + with self.assertRaises(KeyError): + self._callFUT(any_val) + + def test_disagreeing_type_url(self): + from google.protobuf import any_pb2 + from gcloud._testing import _Monkey + from gcloud.bigtable import cluster as MUT + + type_url1 = 'foo' + type_url2 = 'bar' + fake_type_url_map = {type_url1: None} + any_val = any_pb2.Any(type_url=type_url2) + with _Monkey(MUT, _TYPE_URL_MAP=fake_type_url_map): + with self.assertRaises(ValueError): + self._callFUT(any_val, expected_type=type_url1) + + +class Test__process_operation(unittest2.TestCase): + + def _callFUT(self, operation_pb): + from gcloud.bigtable.cluster import _process_operation + return _process_operation(operation_pb) + + def test_it(self): + from google.longrunning import operations_pb2 + + PROJECT = 'project' + INSTANCE_ID = 'instance-id' + CLUSTER_ID = 'cluster-id' + EXPECTED_OPERATION_ID = 234 + OPERATION_NAME = ( + 'operations/projects/%s/instances/%s/clusters/%s/operations/%d' % + (PROJECT, INSTANCE_ID, CLUSTER_ID, EXPECTED_OPERATION_ID)) + + 
operation_pb = operations_pb2.Operation(name=OPERATION_NAME) + + # Exectute method with mocks in place. + operation_id = self._callFUT(operation_pb) + + # Check outputs. + self.assertEqual(operation_id, EXPECTED_OPERATION_ID) + + def test_op_name_parsing_failure(self): + from google.longrunning import operations_pb2 + + operation_pb = operations_pb2.Operation(name='invalid') + with self.assertRaises(ValueError): + self._callFUT(operation_pb) + + +def _CellPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + return data_v2_pb2.Cell(*args, **kw) + + +def _ClusterPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + instance_pb2 as instance_v2_pb2) + return instance_v2_pb2.Cluster(*args, **kw) + + +def _DeleteClusterRequestPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + bigtable_instance_admin_pb2 as messages_v2_pb2) + return messages_v2_pb2.DeleteClusterRequest(*args, **kw) + + +def _GetClusterRequestPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + bigtable_instance_admin_pb2 as messages_v2_pb2) + return messages_v2_pb2.GetClusterRequest(*args, **kw) + + +class _Instance(object): + + def __init__(self, instance_id, client): + self.instance_id = instance_id + self._client = client + + @property + def name(self): + return 'projects/%s/instances/%s' % ( + self._client.project, self.instance_id) + + def copy(self): + return self.__class__(self.instance_id, self._client) + + def __eq__(self, other): + return (other.instance_id == self.instance_id and + other._client == self._client) + + +class _Client(object): + + def __init__(self, project, timeout_seconds=None): + self.project = project + self.project_name = 'projects/' + self.project + self.timeout_seconds = timeout_seconds + + def __eq__(self, other): + return (other.project == self.project and + other.project_name == self.project_name and + other.timeout_seconds == self.timeout_seconds) diff --git 
a/env/Lib/site-packages/gcloud/bigtable/test_column_family.py b/env/Lib/site-packages/gcloud/bigtable/test_column_family.py new file mode 100644 index 0000000..d9deaf8 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/test_column_family.py @@ -0,0 +1,669 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import unittest2 + + +class Test__timedelta_to_duration_pb(unittest2.TestCase): + + def _callFUT(self, *args, **kwargs): + from gcloud.bigtable.column_family import _timedelta_to_duration_pb + return _timedelta_to_duration_pb(*args, **kwargs) + + def test_it(self): + import datetime + from google.protobuf import duration_pb2 + + seconds = microseconds = 1 + timedelta_val = datetime.timedelta(seconds=seconds, + microseconds=microseconds) + result = self._callFUT(timedelta_val) + self.assertTrue(isinstance(result, duration_pb2.Duration)) + self.assertEqual(result.seconds, seconds) + self.assertEqual(result.nanos, 1000 * microseconds) + + def test_with_negative_microseconds(self): + import datetime + from google.protobuf import duration_pb2 + + seconds = 1 + microseconds = -5 + timedelta_val = datetime.timedelta(seconds=seconds, + microseconds=microseconds) + result = self._callFUT(timedelta_val) + self.assertTrue(isinstance(result, duration_pb2.Duration)) + self.assertEqual(result.seconds, seconds - 1) + self.assertEqual(result.nanos, 10**9 + 1000 * microseconds) + + def test_with_negative_seconds(self): + 
import datetime + from google.protobuf import duration_pb2 + + seconds = -1 + microseconds = 5 + timedelta_val = datetime.timedelta(seconds=seconds, + microseconds=microseconds) + result = self._callFUT(timedelta_val) + self.assertTrue(isinstance(result, duration_pb2.Duration)) + self.assertEqual(result.seconds, seconds + 1) + self.assertEqual(result.nanos, -(10**9 - 1000 * microseconds)) + + +class Test__duration_pb_to_timedelta(unittest2.TestCase): + + def _callFUT(self, *args, **kwargs): + from gcloud.bigtable.column_family import _duration_pb_to_timedelta + return _duration_pb_to_timedelta(*args, **kwargs) + + def test_it(self): + import datetime + from google.protobuf import duration_pb2 + + seconds = microseconds = 1 + duration_pb = duration_pb2.Duration(seconds=seconds, + nanos=1000 * microseconds) + timedelta_val = datetime.timedelta(seconds=seconds, + microseconds=microseconds) + result = self._callFUT(duration_pb) + self.assertTrue(isinstance(result, datetime.timedelta)) + self.assertEqual(result, timedelta_val) + + +class TestMaxVersionsGCRule(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.column_family import MaxVersionsGCRule + return MaxVersionsGCRule + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test___eq__type_differ(self): + gc_rule1 = self._makeOne(10) + gc_rule2 = object() + self.assertNotEqual(gc_rule1, gc_rule2) + + def test___eq__same_value(self): + gc_rule1 = self._makeOne(2) + gc_rule2 = self._makeOne(2) + self.assertEqual(gc_rule1, gc_rule2) + + def test___ne__same_value(self): + gc_rule1 = self._makeOne(99) + gc_rule2 = self._makeOne(99) + comparison_val = (gc_rule1 != gc_rule2) + self.assertFalse(comparison_val) + + def test_to_pb(self): + max_num_versions = 1337 + gc_rule = self._makeOne(max_num_versions=max_num_versions) + pb_val = gc_rule.to_pb() + expected = _GcRulePB(max_num_versions=max_num_versions) + self.assertEqual(pb_val, expected) + + +class 
TestMaxAgeGCRule(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.column_family import MaxAgeGCRule + return MaxAgeGCRule + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test___eq__type_differ(self): + max_age = object() + gc_rule1 = self._makeOne(max_age=max_age) + gc_rule2 = object() + self.assertNotEqual(gc_rule1, gc_rule2) + + def test___eq__same_value(self): + max_age = object() + gc_rule1 = self._makeOne(max_age=max_age) + gc_rule2 = self._makeOne(max_age=max_age) + self.assertEqual(gc_rule1, gc_rule2) + + def test___ne__same_value(self): + max_age = object() + gc_rule1 = self._makeOne(max_age=max_age) + gc_rule2 = self._makeOne(max_age=max_age) + comparison_val = (gc_rule1 != gc_rule2) + self.assertFalse(comparison_val) + + def test_to_pb(self): + import datetime + from google.protobuf import duration_pb2 + + max_age = datetime.timedelta(seconds=1) + duration = duration_pb2.Duration(seconds=1) + gc_rule = self._makeOne(max_age=max_age) + pb_val = gc_rule.to_pb() + self.assertEqual(pb_val, _GcRulePB(max_age=duration)) + + +class TestGCRuleUnion(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.column_family import GCRuleUnion + return GCRuleUnion + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + rules = object() + rule_union = self._makeOne(rules) + self.assertTrue(rule_union.rules is rules) + + def test___eq__(self): + rules = object() + gc_rule1 = self._makeOne(rules) + gc_rule2 = self._makeOne(rules) + self.assertEqual(gc_rule1, gc_rule2) + + def test___eq__type_differ(self): + rules = object() + gc_rule1 = self._makeOne(rules) + gc_rule2 = object() + self.assertNotEqual(gc_rule1, gc_rule2) + + def test___ne__same_value(self): + rules = object() + gc_rule1 = self._makeOne(rules) + gc_rule2 = self._makeOne(rules) + comparison_val = (gc_rule1 != gc_rule2) + 
self.assertFalse(comparison_val) + + def test_to_pb(self): + import datetime + from google.protobuf import duration_pb2 + from gcloud.bigtable.column_family import MaxAgeGCRule + from gcloud.bigtable.column_family import MaxVersionsGCRule + + max_num_versions = 42 + rule1 = MaxVersionsGCRule(max_num_versions) + pb_rule1 = _GcRulePB(max_num_versions=max_num_versions) + + max_age = datetime.timedelta(seconds=1) + rule2 = MaxAgeGCRule(max_age) + pb_rule2 = _GcRulePB( + max_age=duration_pb2.Duration(seconds=1)) + + rule3 = self._makeOne(rules=[rule1, rule2]) + pb_rule3 = _GcRulePB( + union=_GcRuleUnionPB(rules=[pb_rule1, pb_rule2])) + + gc_rule_pb = rule3.to_pb() + self.assertEqual(gc_rule_pb, pb_rule3) + + def test_to_pb_nested(self): + import datetime + from google.protobuf import duration_pb2 + from gcloud.bigtable.column_family import MaxAgeGCRule + from gcloud.bigtable.column_family import MaxVersionsGCRule + + max_num_versions1 = 42 + rule1 = MaxVersionsGCRule(max_num_versions1) + pb_rule1 = _GcRulePB(max_num_versions=max_num_versions1) + + max_age = datetime.timedelta(seconds=1) + rule2 = MaxAgeGCRule(max_age) + pb_rule2 = _GcRulePB( + max_age=duration_pb2.Duration(seconds=1)) + + rule3 = self._makeOne(rules=[rule1, rule2]) + pb_rule3 = _GcRulePB( + union=_GcRuleUnionPB(rules=[pb_rule1, pb_rule2])) + + max_num_versions2 = 1337 + rule4 = MaxVersionsGCRule(max_num_versions2) + pb_rule4 = _GcRulePB(max_num_versions=max_num_versions2) + + rule5 = self._makeOne(rules=[rule3, rule4]) + pb_rule5 = _GcRulePB( + union=_GcRuleUnionPB(rules=[pb_rule3, pb_rule4])) + + gc_rule_pb = rule5.to_pb() + self.assertEqual(gc_rule_pb, pb_rule5) + + +class TestGCRuleIntersection(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.column_family import GCRuleIntersection + return GCRuleIntersection + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + rules = object() + rule_intersection = 
self._makeOne(rules) + self.assertTrue(rule_intersection.rules is rules) + + def test___eq__(self): + rules = object() + gc_rule1 = self._makeOne(rules) + gc_rule2 = self._makeOne(rules) + self.assertEqual(gc_rule1, gc_rule2) + + def test___eq__type_differ(self): + rules = object() + gc_rule1 = self._makeOne(rules) + gc_rule2 = object() + self.assertNotEqual(gc_rule1, gc_rule2) + + def test___ne__same_value(self): + rules = object() + gc_rule1 = self._makeOne(rules) + gc_rule2 = self._makeOne(rules) + comparison_val = (gc_rule1 != gc_rule2) + self.assertFalse(comparison_val) + + def test_to_pb(self): + import datetime + from google.protobuf import duration_pb2 + from gcloud.bigtable.column_family import MaxAgeGCRule + from gcloud.bigtable.column_family import MaxVersionsGCRule + + max_num_versions = 42 + rule1 = MaxVersionsGCRule(max_num_versions) + pb_rule1 = _GcRulePB(max_num_versions=max_num_versions) + + max_age = datetime.timedelta(seconds=1) + rule2 = MaxAgeGCRule(max_age) + pb_rule2 = _GcRulePB( + max_age=duration_pb2.Duration(seconds=1)) + + rule3 = self._makeOne(rules=[rule1, rule2]) + pb_rule3 = _GcRulePB( + intersection=_GcRuleIntersectionPB( + rules=[pb_rule1, pb_rule2])) + + gc_rule_pb = rule3.to_pb() + self.assertEqual(gc_rule_pb, pb_rule3) + + def test_to_pb_nested(self): + import datetime + from google.protobuf import duration_pb2 + from gcloud.bigtable.column_family import MaxAgeGCRule + from gcloud.bigtable.column_family import MaxVersionsGCRule + + max_num_versions1 = 42 + rule1 = MaxVersionsGCRule(max_num_versions1) + pb_rule1 = _GcRulePB(max_num_versions=max_num_versions1) + + max_age = datetime.timedelta(seconds=1) + rule2 = MaxAgeGCRule(max_age) + pb_rule2 = _GcRulePB( + max_age=duration_pb2.Duration(seconds=1)) + + rule3 = self._makeOne(rules=[rule1, rule2]) + pb_rule3 = _GcRulePB( + intersection=_GcRuleIntersectionPB( + rules=[pb_rule1, pb_rule2])) + + max_num_versions2 = 1337 + rule4 = MaxVersionsGCRule(max_num_versions2) + pb_rule4 = 
_GcRulePB(max_num_versions=max_num_versions2) + + rule5 = self._makeOne(rules=[rule3, rule4]) + pb_rule5 = _GcRulePB( + intersection=_GcRuleIntersectionPB( + rules=[pb_rule3, pb_rule4])) + + gc_rule_pb = rule5.to_pb() + self.assertEqual(gc_rule_pb, pb_rule5) + + +class TestColumnFamily(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.column_family import ColumnFamily + return ColumnFamily + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + column_family_id = u'column-family-id' + table = object() + gc_rule = object() + column_family = self._makeOne( + column_family_id, table, gc_rule=gc_rule) + + self.assertEqual(column_family.column_family_id, column_family_id) + self.assertTrue(column_family._table is table) + self.assertTrue(column_family.gc_rule is gc_rule) + + def test_name_property(self): + column_family_id = u'column-family-id' + table_name = 'table_name' + table = _Table(table_name) + column_family = self._makeOne(column_family_id, table) + + expected_name = table_name + '/columnFamilies/' + column_family_id + self.assertEqual(column_family.name, expected_name) + + def test___eq__(self): + column_family_id = 'column_family_id' + table = object() + gc_rule = object() + column_family1 = self._makeOne(column_family_id, table, + gc_rule=gc_rule) + column_family2 = self._makeOne(column_family_id, table, + gc_rule=gc_rule) + self.assertEqual(column_family1, column_family2) + + def test___eq__type_differ(self): + column_family1 = self._makeOne('column_family_id', None) + column_family2 = object() + self.assertNotEqual(column_family1, column_family2) + + def test___ne__same_value(self): + column_family_id = 'column_family_id' + table = object() + gc_rule = object() + column_family1 = self._makeOne(column_family_id, table, + gc_rule=gc_rule) + column_family2 = self._makeOne(column_family_id, table, + gc_rule=gc_rule) + comparison_val = (column_family1 != column_family2) 
+ self.assertFalse(comparison_val) + + def test___ne__(self): + column_family1 = self._makeOne('column_family_id1', None) + column_family2 = self._makeOne('column_family_id2', None) + self.assertNotEqual(column_family1, column_family2) + + def _create_test_helper(self, gc_rule=None): + from gcloud.bigtable._generated_v2 import ( + bigtable_table_admin_pb2 as table_admin_v2_pb2) + from gcloud.bigtable._testing import _FakeStub + + project_id = 'project-id' + zone = 'zone' + cluster_id = 'cluster-id' + table_id = 'table-id' + column_family_id = 'column-family-id' + timeout_seconds = 4 + table_name = ('projects/' + project_id + '/zones/' + zone + + '/clusters/' + cluster_id + '/tables/' + table_id) + + client = _Client(timeout_seconds=timeout_seconds) + table = _Table(table_name, client=client) + column_family = self._makeOne( + column_family_id, table, gc_rule=gc_rule) + + # Create request_pb + if gc_rule is None: + column_family_pb = _ColumnFamilyPB() + else: + column_family_pb = _ColumnFamilyPB(gc_rule=gc_rule.to_pb()) + request_pb = table_admin_v2_pb2.ModifyColumnFamiliesRequest( + name=table_name) + request_pb.modifications.add( + id=column_family_id, + create=column_family_pb, + ) + + # Create response_pb + response_pb = _ColumnFamilyPB() + + # Patch the stub used by the API method. + client._table_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + expected_result = None # create() has no return value. + + # Perform the method and check the result. 
+ self.assertEqual(stub.results, (response_pb,)) + result = column_family.create() + self.assertEqual(stub.results, ()) + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'ModifyColumnFamilies', + (request_pb, timeout_seconds), + {}, + )]) + + def test_create(self): + self._create_test_helper(gc_rule=None) + + def test_create_with_gc_rule(self): + from gcloud.bigtable.column_family import MaxVersionsGCRule + gc_rule = MaxVersionsGCRule(1337) + self._create_test_helper(gc_rule=gc_rule) + + def _update_test_helper(self, gc_rule=None): + from gcloud.bigtable._testing import _FakeStub + from gcloud.bigtable._generated_v2 import ( + bigtable_table_admin_pb2 as table_admin_v2_pb2) + + project_id = 'project-id' + zone = 'zone' + cluster_id = 'cluster-id' + table_id = 'table-id' + column_family_id = 'column-family-id' + timeout_seconds = 28 + table_name = ('projects/' + project_id + '/zones/' + zone + + '/clusters/' + cluster_id + '/tables/' + table_id) + + client = _Client(timeout_seconds=timeout_seconds) + table = _Table(table_name, client=client) + column_family = self._makeOne( + column_family_id, table, gc_rule=gc_rule) + + # Create request_pb + if gc_rule is None: + column_family_pb = _ColumnFamilyPB() + else: + column_family_pb = _ColumnFamilyPB(gc_rule=gc_rule.to_pb()) + request_pb = table_admin_v2_pb2.ModifyColumnFamiliesRequest( + name=table_name) + request_pb.modifications.add( + id=column_family_id, + update=column_family_pb, + ) + + # Create response_pb + response_pb = _ColumnFamilyPB() + + # Patch the stub used by the API method. + client._table_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + expected_result = None # update() has no return value. + + # Perform the method and check the result. 
+ self.assertEqual(stub.results, (response_pb,)) + result = column_family.update() + self.assertEqual(stub.results, ()) + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'ModifyColumnFamilies', + (request_pb, timeout_seconds), + {}, + )]) + + def test_update(self): + self._update_test_helper(gc_rule=None) + + def test_update_with_gc_rule(self): + from gcloud.bigtable.column_family import MaxVersionsGCRule + gc_rule = MaxVersionsGCRule(1337) + self._update_test_helper(gc_rule=gc_rule) + + def test_delete(self): + from google.protobuf import empty_pb2 + from gcloud.bigtable._generated_v2 import ( + bigtable_table_admin_pb2 as table_admin_v2_pb2) + from gcloud.bigtable._testing import _FakeStub + + project_id = 'project-id' + zone = 'zone' + cluster_id = 'cluster-id' + table_id = 'table-id' + column_family_id = 'column-family-id' + timeout_seconds = 7 + table_name = ('projects/' + project_id + '/zones/' + zone + + '/clusters/' + cluster_id + '/tables/' + table_id) + + client = _Client(timeout_seconds=timeout_seconds) + table = _Table(table_name, client=client) + column_family = self._makeOne(column_family_id, table) + + # Create request_pb + request_pb = table_admin_v2_pb2.ModifyColumnFamiliesRequest( + name=table_name) + request_pb.modifications.add( + id=column_family_id, + drop=True) + + # Create response_pb + response_pb = empty_pb2.Empty() + + # Patch the stub used by the API method. + client._table_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + expected_result = None # delete() has no return value. + + # Perform the method and check the result. 
+ self.assertEqual(stub.results, (response_pb,)) + result = column_family.delete() + self.assertEqual(stub.results, ()) + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'ModifyColumnFamilies', + (request_pb, timeout_seconds), + {}, + )]) + + +class Test__gc_rule_from_pb(unittest2.TestCase): + + def _callFUT(self, *args, **kwargs): + from gcloud.bigtable.column_family import _gc_rule_from_pb + return _gc_rule_from_pb(*args, **kwargs) + + def test_empty(self): + + gc_rule_pb = _GcRulePB() + self.assertEqual(self._callFUT(gc_rule_pb), None) + + def test_max_num_versions(self): + from gcloud.bigtable.column_family import MaxVersionsGCRule + + orig_rule = MaxVersionsGCRule(1) + gc_rule_pb = orig_rule.to_pb() + result = self._callFUT(gc_rule_pb) + self.assertTrue(isinstance(result, MaxVersionsGCRule)) + self.assertEqual(result, orig_rule) + + def test_max_age(self): + import datetime + from gcloud.bigtable.column_family import MaxAgeGCRule + + orig_rule = MaxAgeGCRule(datetime.timedelta(seconds=1)) + gc_rule_pb = orig_rule.to_pb() + result = self._callFUT(gc_rule_pb) + self.assertTrue(isinstance(result, MaxAgeGCRule)) + self.assertEqual(result, orig_rule) + + def test_union(self): + import datetime + from gcloud.bigtable.column_family import GCRuleUnion + from gcloud.bigtable.column_family import MaxAgeGCRule + from gcloud.bigtable.column_family import MaxVersionsGCRule + + rule1 = MaxVersionsGCRule(1) + rule2 = MaxAgeGCRule(datetime.timedelta(seconds=1)) + orig_rule = GCRuleUnion([rule1, rule2]) + gc_rule_pb = orig_rule.to_pb() + result = self._callFUT(gc_rule_pb) + self.assertTrue(isinstance(result, GCRuleUnion)) + self.assertEqual(result, orig_rule) + + def test_intersection(self): + import datetime + from gcloud.bigtable.column_family import GCRuleIntersection + from gcloud.bigtable.column_family import MaxAgeGCRule + from gcloud.bigtable.column_family import MaxVersionsGCRule + + rule1 = MaxVersionsGCRule(1) + rule2 = 
MaxAgeGCRule(datetime.timedelta(seconds=1)) + orig_rule = GCRuleIntersection([rule1, rule2]) + gc_rule_pb = orig_rule.to_pb() + result = self._callFUT(gc_rule_pb) + self.assertTrue(isinstance(result, GCRuleIntersection)) + self.assertEqual(result, orig_rule) + + def test_unknown_field_name(self): + class MockProto(object): + + names = [] + + @classmethod + def WhichOneof(cls, name): + cls.names.append(name) + return 'unknown' + + self.assertEqual(MockProto.names, []) + self.assertRaises(ValueError, self._callFUT, MockProto) + self.assertEqual(MockProto.names, ['rule']) + + +def _GcRulePB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + table_pb2 as table_v2_pb2) + return table_v2_pb2.GcRule(*args, **kw) + + +def _GcRuleIntersectionPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + table_pb2 as table_v2_pb2) + return table_v2_pb2.GcRule.Intersection(*args, **kw) + + +def _GcRuleUnionPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + table_pb2 as table_v2_pb2) + return table_v2_pb2.GcRule.Union(*args, **kw) + + +def _ColumnFamilyPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + table_pb2 as table_v2_pb2) + return table_v2_pb2.ColumnFamily(*args, **kw) + + +class _Instance(object): + + def __init__(self, client=None): + self._client = client + + +class _Client(object): + + def __init__(self, timeout_seconds=None): + self.timeout_seconds = timeout_seconds + + +class _Table(object): + + def __init__(self, name, client=None): + self.name = name + self._instance = _Instance(client) diff --git a/env/Lib/site-packages/gcloud/bigtable/test_instance.py b/env/Lib/site-packages/gcloud/bigtable/test_instance.py new file mode 100644 index 0000000..da88276 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/test_instance.py @@ -0,0 +1,866 @@ +# Copyright 2015 Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import datetime +import unittest2 + + +class TestOperation(unittest2.TestCase): + + OP_TYPE = 'fake-op' + OP_ID = 8915 + BEGIN = datetime.datetime(2015, 10, 22, 1, 1) + LOCATION_ID = 'loc-id' + + def _getTargetClass(self): + from gcloud.bigtable.instance import Operation + return Operation + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def _constructor_test_helper(self, instance=None): + operation = self._makeOne( + self.OP_TYPE, self.OP_ID, self.BEGIN, self.LOCATION_ID, + instance=instance) + + self.assertEqual(operation.op_type, self.OP_TYPE) + self.assertEqual(operation.op_id, self.OP_ID) + self.assertEqual(operation.begin, self.BEGIN) + self.assertEqual(operation.location_id, self.LOCATION_ID) + self.assertEqual(operation._instance, instance) + self.assertFalse(operation._complete) + + def test_constructor_defaults(self): + self._constructor_test_helper() + + def test_constructor_explicit_instance(self): + instance = object() + self._constructor_test_helper(instance=instance) + + def test___eq__(self): + instance = object() + operation1 = self._makeOne( + self.OP_TYPE, self.OP_ID, self.BEGIN, self.LOCATION_ID, + instance=instance) + operation2 = self._makeOne( + self.OP_TYPE, self.OP_ID, self.BEGIN, self.LOCATION_ID, + instance=instance) + self.assertEqual(operation1, operation2) + + def test___eq__type_differ(self): + operation1 = self._makeOne('foo', 123, None, 
self.LOCATION_ID) + operation2 = object() + self.assertNotEqual(operation1, operation2) + + def test___ne__same_value(self): + instance = object() + operation1 = self._makeOne( + self.OP_TYPE, self.OP_ID, self.BEGIN, self.LOCATION_ID, + instance=instance) + operation2 = self._makeOne( + self.OP_TYPE, self.OP_ID, self.BEGIN, self.LOCATION_ID, + instance=instance) + comparison_val = (operation1 != operation2) + self.assertFalse(comparison_val) + + def test___ne__(self): + operation1 = self._makeOne('foo', 123, None, self.LOCATION_ID) + operation2 = self._makeOne('bar', 456, None, self.LOCATION_ID) + self.assertNotEqual(operation1, operation2) + + def test_finished_without_operation(self): + operation = self._makeOne(None, None, None, None) + operation._complete = True + with self.assertRaises(ValueError): + operation.finished() + + def _finished_helper(self, done): + from google.longrunning import operations_pb2 + from gcloud.bigtable._testing import _FakeStub + from gcloud.bigtable.instance import Instance + + PROJECT = 'PROJECT' + INSTANCE_ID = 'instance-id' + TIMEOUT_SECONDS = 1 + + client = _Client(PROJECT, timeout_seconds=TIMEOUT_SECONDS) + instance = Instance(INSTANCE_ID, client, self.LOCATION_ID) + operation = self._makeOne( + self.OP_TYPE, self.OP_ID, self.BEGIN, self.LOCATION_ID, + instance=instance) + + # Create request_pb + op_name = ('operations/projects/' + PROJECT + + '/instances/' + INSTANCE_ID + + '/locations/' + self.LOCATION_ID + + '/operations/%d' % (self.OP_ID,)) + request_pb = operations_pb2.GetOperationRequest(name=op_name) + + # Create response_pb + response_pb = operations_pb2.Operation(done=done) + + # Patch the stub used by the API method. + client._operations_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + expected_result = done + + # Perform the method and check the result. 
+ result = operation.finished() + + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'GetOperation', + (request_pb, TIMEOUT_SECONDS), + {}, + )]) + + if done: + self.assertTrue(operation._complete) + else: + self.assertFalse(operation._complete) + + def test_finished(self): + self._finished_helper(done=True) + + def test_finished_not_done(self): + self._finished_helper(done=False) + + +class TestInstance(unittest2.TestCase): + + PROJECT = 'project' + INSTANCE_ID = 'instance-id' + INSTANCE_NAME = 'projects/' + PROJECT + '/instances/' + INSTANCE_ID + LOCATION_ID = 'locname' + LOCATION = 'projects/' + PROJECT + '/locations/' + LOCATION_ID + DISPLAY_NAME = 'display_name' + OP_ID = 8915 + OP_NAME = ('operations/projects/%s/instances/%soperations/%d' % + (PROJECT, INSTANCE_ID, OP_ID)) + TABLE_ID = 'table_id' + TABLE_NAME = INSTANCE_NAME + '/tables/' + TABLE_ID + TIMEOUT_SECONDS = 1 + + def _getTargetClass(self): + from gcloud.bigtable.instance import Instance + return Instance + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor_defaults(self): + from gcloud.bigtable.cluster import DEFAULT_SERVE_NODES + + client = object() + instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID) + self.assertEqual(instance.instance_id, self.INSTANCE_ID) + self.assertEqual(instance.display_name, self.INSTANCE_ID) + self.assertTrue(instance._client is client) + self.assertEqual(instance._cluster_location_id, self.LOCATION_ID) + self.assertEqual(instance._cluster_serve_nodes, DEFAULT_SERVE_NODES) + + def test_constructor_non_default(self): + display_name = 'display_name' + client = object() + + instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID, + display_name=display_name) + self.assertEqual(instance.instance_id, self.INSTANCE_ID) + self.assertEqual(instance.display_name, display_name) + self.assertTrue(instance._client is client) + + def test_copy(self): + 
display_name = 'display_name' + + client = _Client(self.PROJECT) + instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID, + display_name=display_name) + new_instance = instance.copy() + + # Make sure the client copy succeeded. + self.assertFalse(new_instance._client is client) + self.assertEqual(new_instance._client, client) + # Make sure the client got copied to a new instance. + self.assertFalse(instance is new_instance) + self.assertEqual(instance, new_instance) + + def test_table_factory(self): + from gcloud.bigtable.table import Table + + instance = self._makeOne(self.INSTANCE_ID, None, self.LOCATION_ID) + + table = instance.table(self.TABLE_ID) + self.assertTrue(isinstance(table, Table)) + self.assertEqual(table.table_id, self.TABLE_ID) + self.assertEqual(table._instance, instance) + + def test__update_from_pb_success(self): + from gcloud.bigtable._generated_v2 import ( + instance_pb2 as data_v2_pb2) + + display_name = 'display_name' + instance_pb = data_v2_pb2.Instance( + display_name=display_name, + ) + + instance = self._makeOne(None, None, None, None) + self.assertEqual(instance.display_name, None) + instance._update_from_pb(instance_pb) + self.assertEqual(instance.display_name, display_name) + + def test__update_from_pb_no_display_name(self): + from gcloud.bigtable._generated_v2 import ( + instance_pb2 as data_v2_pb2) + + instance_pb = data_v2_pb2.Instance() + instance = self._makeOne(None, None, None, None) + self.assertEqual(instance.display_name, None) + with self.assertRaises(ValueError): + instance._update_from_pb(instance_pb) + self.assertEqual(instance.display_name, None) + + def test_from_pb_success(self): + from gcloud.bigtable.instance import _EXISTING_INSTANCE_LOCATION_ID + from gcloud.bigtable._generated_v2 import ( + instance_pb2 as data_v2_pb2) + + client = _Client(project=self.PROJECT) + + instance_pb = data_v2_pb2.Instance( + name=self.INSTANCE_NAME, + display_name=self.INSTANCE_ID, + ) + + klass = self._getTargetClass() + 
instance = klass.from_pb(instance_pb, client) + self.assertTrue(isinstance(instance, klass)) + self.assertEqual(instance._client, client) + self.assertEqual(instance.instance_id, self.INSTANCE_ID) + self.assertEqual(instance._cluster_location_id, + _EXISTING_INSTANCE_LOCATION_ID) + + def test_from_pb_bad_instance_name(self): + from gcloud.bigtable._generated_v2 import ( + instance_pb2 as data_v2_pb2) + + instance_name = 'INCORRECT_FORMAT' + instance_pb = data_v2_pb2.Instance(name=instance_name) + + klass = self._getTargetClass() + with self.assertRaises(ValueError): + klass.from_pb(instance_pb, None) + + def test_from_pb_project_mistmatch(self): + from gcloud.bigtable._generated_v2 import ( + instance_pb2 as data_v2_pb2) + + ALT_PROJECT = 'ALT_PROJECT' + client = _Client(project=ALT_PROJECT) + + self.assertNotEqual(self.PROJECT, ALT_PROJECT) + + instance_pb = data_v2_pb2.Instance(name=self.INSTANCE_NAME) + + klass = self._getTargetClass() + with self.assertRaises(ValueError): + klass.from_pb(instance_pb, client) + + def test_name_property(self): + client = _Client(project=self.PROJECT) + + instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID) + self.assertEqual(instance.name, self.INSTANCE_NAME) + + def test___eq__(self): + client = object() + instance1 = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID) + instance2 = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID) + self.assertEqual(instance1, instance2) + + def test___eq__type_differ(self): + client = object() + instance1 = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID) + instance2 = object() + self.assertNotEqual(instance1, instance2) + + def test___ne__same_value(self): + client = object() + instance1 = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID) + instance2 = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID) + comparison_val = (instance1 != instance2) + self.assertFalse(comparison_val) + + def test___ne__(self): + instance1 = 
self._makeOne('instance_id1', 'client1', self.LOCATION_ID) + instance2 = self._makeOne('instance_id2', 'client2', self.LOCATION_ID) + self.assertNotEqual(instance1, instance2) + + def test_reload(self): + from gcloud.bigtable._generated_v2 import ( + instance_pb2 as data_v2_pb2) + from gcloud.bigtable._generated_v2 import ( + bigtable_instance_admin_pb2 as messages_v2_pb) + from gcloud.bigtable._testing import _FakeStub + + client = _Client(self.PROJECT, timeout_seconds=self.TIMEOUT_SECONDS) + instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID) + + # Create request_pb + request_pb = messages_v2_pb.GetInstanceRequest( + name=self.INSTANCE_NAME) + + # Create response_pb + DISPLAY_NAME = u'hey-hi-hello' + response_pb = data_v2_pb2.Instance( + display_name=DISPLAY_NAME, + ) + + # Patch the stub used by the API method. + client._instance_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + expected_result = None # reload() has no return value. + + # Check Instance optional config values before. + self.assertEqual(instance.display_name, self.INSTANCE_ID) + + # Perform the method and check the result. + result = instance.reload() + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'GetInstance', + (request_pb, self.TIMEOUT_SECONDS), + {}, + )]) + + # Check Instance optional config values before. + self.assertEqual(instance.display_name, DISPLAY_NAME) + + def test_create(self): + from google.longrunning import operations_pb2 + from gcloud._testing import _Monkey + from gcloud.bigtable._testing import _FakeStub + from gcloud.bigtable import instance as MUT + + client = _Client(self.PROJECT, timeout_seconds=self.TIMEOUT_SECONDS) + instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID) + + # Create request_pb. 
Just a mock since we monkey patch + # _prepare_create_request + request_pb = object() + + # Create response_pb + OP_BEGIN = object() + response_pb = operations_pb2.Operation(name=self.OP_NAME) + + # Patch the stub used by the API method. + client._instance_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + expected_result = MUT.Operation('create', self.OP_ID, OP_BEGIN, + self.LOCATION_ID, instance=instance) + + # Create the mocks. + prep_create_called = [] + + def mock_prep_create_req(instance): + prep_create_called.append(instance) + return request_pb + + process_operation_called = [] + + def mock_process_operation(operation_pb): + process_operation_called.append(operation_pb) + return self.OP_ID, self.LOCATION_ID, OP_BEGIN + + # Perform the method and check the result. + with _Monkey(MUT, + _prepare_create_request=mock_prep_create_req, + _process_operation=mock_process_operation): + result = instance.create() + + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'CreateInstance', + (request_pb, self.TIMEOUT_SECONDS), + {}, + )]) + self.assertEqual(prep_create_called, [instance]) + self.assertEqual(process_operation_called, [response_pb]) + + def test_create_w_explicit_serve_nodes(self): + from google.longrunning import operations_pb2 + from gcloud._testing import _Monkey + from gcloud.bigtable._testing import _FakeStub + from gcloud.bigtable import instance as MUT + + SERVE_NODES = 5 + + client = _Client(self.PROJECT, timeout_seconds=self.TIMEOUT_SECONDS) + instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID, + serve_nodes=SERVE_NODES) + + # Create request_pb. Just a mock since we monkey patch + # _prepare_create_request + request_pb = object() + + # Create response_pb + OP_BEGIN = object() + response_pb = operations_pb2.Operation(name=self.OP_NAME) + + # Patch the stub used by the API method. + client._instance_stub = stub = _FakeStub(response_pb) + + # Create expected_result. 
+ expected_result = MUT.Operation('create', self.OP_ID, OP_BEGIN, + self.LOCATION_ID, instance=instance) + + # Create the mocks. + prep_create_called = [] + + def mock_prep_create_req(instance): + prep_create_called.append(instance) + return request_pb + + process_operation_called = [] + + def mock_process_operation(operation_pb): + process_operation_called.append(operation_pb) + return self.OP_ID, self.LOCATION_ID, OP_BEGIN + + # Perform the method and check the result. + with _Monkey(MUT, + _prepare_create_request=mock_prep_create_req, + _process_operation=mock_process_operation): + result = instance.create() + + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'CreateInstance', + (request_pb, self.TIMEOUT_SECONDS), + {}, + )]) + self.assertEqual(prep_create_called, [instance]) + self.assertEqual(process_operation_called, [response_pb]) + + def test_update(self): + from gcloud.bigtable._generated_v2 import ( + instance_pb2 as data_v2_pb2) + from gcloud.bigtable._testing import _FakeStub + + client = _Client(self.PROJECT, timeout_seconds=self.TIMEOUT_SECONDS) + instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID, + display_name=self.DISPLAY_NAME) + + # Create request_pb + request_pb = data_v2_pb2.Instance( + name=self.INSTANCE_NAME, + display_name=self.DISPLAY_NAME, + ) + + # Create response_pb + response_pb = data_v2_pb2.Instance() + + # Patch the stub used by the API method. + client._instance_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + expected_result = None + + # Perform the method and check the result. 
+ result = instance.update() + + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'UpdateInstance', + (request_pb, self.TIMEOUT_SECONDS), + {}, + )]) + + def test_delete(self): + from google.protobuf import empty_pb2 + from gcloud.bigtable._generated_v2 import ( + bigtable_instance_admin_pb2 as messages_v2_pb) + from gcloud.bigtable._testing import _FakeStub + + client = _Client(self.PROJECT, timeout_seconds=self.TIMEOUT_SECONDS) + instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID) + + # Create request_pb + request_pb = messages_v2_pb.DeleteInstanceRequest( + name=self.INSTANCE_NAME) + + # Create response_pb + response_pb = empty_pb2.Empty() + + # Patch the stub used by the API method. + client._instance_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + expected_result = None # delete() has no return value. + + # Perform the method and check the result. + result = instance.delete() + + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'DeleteInstance', + (request_pb, self.TIMEOUT_SECONDS), + {}, + )]) + + def test_list_clusters(self): + from gcloud.bigtable._generated_v2 import ( + instance_pb2 as instance_v2_pb2) + from gcloud.bigtable._generated_v2 import ( + bigtable_instance_admin_pb2 as messages_v2_pb2) + from gcloud.bigtable._testing import _FakeStub + + FAILED_LOCATION = 'FAILED' + FAILED_LOCATIONS = [FAILED_LOCATION] + CLUSTER_ID1 = 'cluster-id1' + CLUSTER_ID2 = 'cluster-id2' + SERVE_NODES = 4 + + client = _Client(self.PROJECT, timeout_seconds=self.TIMEOUT_SECONDS) + instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID) + + CLUSTER_NAME1 = (instance.name + '/clusters/' + CLUSTER_ID1) + CLUSTER_NAME2 = (instance.name + '/clusters/' + CLUSTER_ID2) + # Create request_pb + request_pb = messages_v2_pb2.ListClustersRequest( + parent=instance.name, + ) + + # Create response_pb + response_pb = messages_v2_pb2.ListClustersResponse( + 
failed_locations=[FAILED_LOCATION], + clusters=[ + instance_v2_pb2.Cluster( + name=CLUSTER_NAME1, + serve_nodes=SERVE_NODES, + ), + instance_v2_pb2.Cluster( + name=CLUSTER_NAME2, + serve_nodes=SERVE_NODES, + ), + ], + ) + + # Patch the stub used by the API method. + client._instance_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + clusters = [ + instance.cluster(CLUSTER_ID1), + instance.cluster(CLUSTER_ID2), + ] + expected_result = (clusters, FAILED_LOCATIONS) + + # Perform the method and check the result. + result = instance.list_clusters() + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'ListClusters', + (request_pb, self.TIMEOUT_SECONDS), + {}, + )]) + + def _list_tables_helper(self, table_name=None): + from gcloud.bigtable._generated_v2 import ( + table_pb2 as table_data_v2_pb2) + from gcloud.bigtable._generated_v2 import ( + bigtable_table_admin_pb2 as table_messages_v1_pb2) + from gcloud.bigtable._testing import _FakeStub + + client = _Client(self.PROJECT, timeout_seconds=self.TIMEOUT_SECONDS) + instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID) + + # Create request_ + request_pb = table_messages_v1_pb2.ListTablesRequest( + parent=self.INSTANCE_NAME) + + # Create response_pb + if table_name is None: + table_name = self.TABLE_NAME + + response_pb = table_messages_v1_pb2.ListTablesResponse( + tables=[ + table_data_v2_pb2.Table(name=table_name), + ], + ) + + # Patch the stub used by the API method. + client._table_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + expected_table = instance.table(self.TABLE_ID) + expected_result = [expected_table] + + # Perform the method and check the result. 
+ result = instance.list_tables() + + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'ListTables', + (request_pb, self.TIMEOUT_SECONDS), + {}, + )]) + + def test_list_tables(self): + self._list_tables_helper() + + def test_list_tables_failure_bad_split(self): + with self.assertRaises(ValueError): + self._list_tables_helper(table_name='wrong-format') + + def test_list_tables_failure_name_bad_before(self): + BAD_TABLE_NAME = ('nonempty-section-before' + + 'projects/' + self.PROJECT + + '/instances/' + self.INSTANCE_ID + + '/tables/' + self.TABLE_ID) + with self.assertRaises(ValueError): + self._list_tables_helper(table_name=BAD_TABLE_NAME) + + +class Test__prepare_create_request(unittest2.TestCase): + PROJECT = 'PROJECT' + PARENT = 'projects/' + PROJECT + LOCATION_ID = 'locname' + LOCATION_NAME = 'projects/' + PROJECT + '/locations/' + LOCATION_ID + INSTANCE_ID = 'instance-id' + INSTANCE_NAME = PARENT + '/instances/' + INSTANCE_ID + CLUSTER_NAME = INSTANCE_NAME + '/clusters/' + INSTANCE_ID + + def _callFUT(self, instance, **kw): + from gcloud.bigtable.instance import _prepare_create_request + return _prepare_create_request(instance, **kw) + + def test_w_defaults(self): + from gcloud.bigtable.cluster import DEFAULT_SERVE_NODES + from gcloud.bigtable._generated_v2 import ( + instance_pb2 as data_v2_pb2) + from gcloud.bigtable._generated_v2 import ( + bigtable_instance_admin_pb2 as messages_v2_pb) + from gcloud.bigtable.instance import Instance + + client = _Client(self.PROJECT) + + instance = Instance(self.INSTANCE_ID, client, self.LOCATION_ID) + request_pb = self._callFUT(instance) + self.assertTrue(isinstance(request_pb, + messages_v2_pb.CreateInstanceRequest)) + self.assertEqual(request_pb.instance_id, self.INSTANCE_ID) + self.assertEqual(request_pb.parent, self.PARENT) + self.assertTrue(isinstance(request_pb.instance, data_v2_pb2.Instance)) + self.assertEqual(request_pb.instance.name, u'') + 
self.assertEqual(request_pb.instance.display_name, self.INSTANCE_ID) + + # An instance must also define a same-named cluster + cluster = request_pb.clusters[self.INSTANCE_ID] + self.assertTrue(isinstance(cluster, data_v2_pb2.Cluster)) + self.assertEqual(cluster.name, self.CLUSTER_NAME) + self.assertEqual(cluster.location, self.LOCATION_NAME) + self.assertEqual(cluster.serve_nodes, DEFAULT_SERVE_NODES) + + def test_w_explicit_serve_nodes(self): + from gcloud.bigtable._generated_v2 import ( + instance_pb2 as data_v2_pb2) + from gcloud.bigtable._generated_v2 import ( + bigtable_instance_admin_pb2 as messages_v2_pb) + from gcloud.bigtable.instance import Instance + DISPLAY_NAME = u'DISPLAY_NAME' + SERVE_NODES = 5 + client = _Client(self.PROJECT) + instance = Instance(self.INSTANCE_ID, client, self.LOCATION_ID, + display_name=DISPLAY_NAME, + serve_nodes=SERVE_NODES) + + request_pb = self._callFUT(instance) + + self.assertTrue(isinstance(request_pb, + messages_v2_pb.CreateInstanceRequest)) + self.assertEqual(request_pb.instance_id, self.INSTANCE_ID) + self.assertEqual(request_pb.parent, + 'projects/' + self.PROJECT) + self.assertTrue(isinstance(request_pb.instance, data_v2_pb2.Instance)) + self.assertEqual(request_pb.instance.display_name, DISPLAY_NAME) + # An instance must also define a same-named cluster + cluster = request_pb.clusters[self.INSTANCE_ID] + self.assertTrue(isinstance(cluster, data_v2_pb2.Cluster)) + self.assertEqual(cluster.location, self.LOCATION_NAME) + self.assertEqual(cluster.serve_nodes, SERVE_NODES) + + +class Test__parse_pb_any_to_native(unittest2.TestCase): + + def _callFUT(self, any_val, expected_type=None): + from gcloud.bigtable.instance import _parse_pb_any_to_native + return _parse_pb_any_to_native(any_val, expected_type=expected_type) + + def test_with_known_type_url(self): + from google.protobuf import any_pb2 + from gcloud._testing import _Monkey + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + from 
gcloud.bigtable import instance as MUT + + TYPE_URL = 'type.googleapis.com/' + data_v2_pb2._CELL.full_name + fake_type_url_map = {TYPE_URL: data_v2_pb2.Cell} + + cell = data_v2_pb2.Cell( + timestamp_micros=0, + value=b'foobar', + ) + any_val = any_pb2.Any( + type_url=TYPE_URL, + value=cell.SerializeToString(), + ) + with _Monkey(MUT, _TYPE_URL_MAP=fake_type_url_map): + result = self._callFUT(any_val) + + self.assertEqual(result, cell) + + def test_with_create_instance_metadata(self): + from google.protobuf import any_pb2 + from google.protobuf.timestamp_pb2 import Timestamp + from gcloud.bigtable._generated_v2 import ( + instance_pb2 as data_v2_pb2) + from gcloud.bigtable._generated_v2 import ( + bigtable_instance_admin_pb2 as messages_v2_pb) + + TYPE_URL = ('type.googleapis.com/' + + messages_v2_pb._CREATEINSTANCEMETADATA.full_name) + metadata = messages_v2_pb.CreateInstanceMetadata( + request_time=Timestamp(seconds=1, nanos=1234), + finish_time=Timestamp(seconds=10, nanos=891011), + original_request=messages_v2_pb.CreateInstanceRequest( + parent='foo', + instance_id='bar', + instance=data_v2_pb2.Instance( + display_name='quux', + ), + ), + ) + + any_val = any_pb2.Any( + type_url=TYPE_URL, + value=metadata.SerializeToString(), + ) + result = self._callFUT(any_val) + self.assertEqual(result, metadata) + + def test_unknown_type_url(self): + from google.protobuf import any_pb2 + from gcloud._testing import _Monkey + from gcloud.bigtable import instance as MUT + + fake_type_url_map = {} + any_val = any_pb2.Any() + with _Monkey(MUT, _TYPE_URL_MAP=fake_type_url_map): + with self.assertRaises(KeyError): + self._callFUT(any_val) + + def test_disagreeing_type_url(self): + from google.protobuf import any_pb2 + from gcloud._testing import _Monkey + from gcloud.bigtable import instance as MUT + + TYPE_URL1 = 'foo' + TYPE_URL2 = 'bar' + fake_type_url_map = {TYPE_URL1: None} + any_val = any_pb2.Any(type_url=TYPE_URL2) + with _Monkey(MUT, _TYPE_URL_MAP=fake_type_url_map): + with 
self.assertRaises(ValueError): + self._callFUT(any_val, expected_type=TYPE_URL1) + + +class Test__process_operation(unittest2.TestCase): + + def _callFUT(self, operation_pb): + from gcloud.bigtable.instance import _process_operation + return _process_operation(operation_pb) + + def test_it(self): + from google.longrunning import operations_pb2 + from gcloud._testing import _Monkey + from gcloud.bigtable._generated_v2 import ( + bigtable_instance_admin_pb2 as messages_v2_pb) + from gcloud.bigtable import instance as MUT + + PROJECT = 'PROJECT' + INSTANCE_ID = 'instance-id' + LOCATION_ID = 'location' + OP_ID = 234 + OPERATION_NAME = ( + 'operations/projects/%s/instances/%s/locations/%s/operations/%d' % + (PROJECT, INSTANCE_ID, LOCATION_ID, OP_ID)) + + current_op = operations_pb2.Operation(name=OPERATION_NAME) + + # Create mocks. + request_metadata = messages_v2_pb.CreateInstanceMetadata() + parse_pb_any_called = [] + + def mock_parse_pb_any_to_native(any_val, expected_type=None): + parse_pb_any_called.append((any_val, expected_type)) + return request_metadata + + expected_operation_begin = object() + ts_to_dt_called = [] + + def mock_pb_timestamp_to_datetime(timestamp): + ts_to_dt_called.append(timestamp) + return expected_operation_begin + + # Exectute method with mocks in place. + with _Monkey(MUT, _parse_pb_any_to_native=mock_parse_pb_any_to_native, + _pb_timestamp_to_datetime=mock_pb_timestamp_to_datetime): + op_id, loc_id, op_begin = self._callFUT(current_op) + + # Check outputs. + self.assertEqual(op_id, OP_ID) + self.assertTrue(op_begin is expected_operation_begin) + self.assertEqual(loc_id, LOCATION_ID) + + # Check mocks were used correctly. 
+ self.assertEqual(parse_pb_any_called, [(current_op.metadata, None)]) + self.assertEqual(ts_to_dt_called, [request_metadata.request_time]) + + def test_op_name_parsing_failure(self): + from google.longrunning import operations_pb2 + + operation_pb = operations_pb2.Operation(name='invalid') + with self.assertRaises(ValueError): + self._callFUT(operation_pb) + + +class _Client(object): + + def __init__(self, project, timeout_seconds=None): + self.project = project + self.project_name = 'projects/' + self.project + self.timeout_seconds = timeout_seconds + + def copy(self): + from copy import deepcopy + return deepcopy(self) + + def __eq__(self, other): + return (other.project == self.project and + other.project_name == self.project_name and + other.timeout_seconds == self.timeout_seconds) diff --git a/env/Lib/site-packages/gcloud/bigtable/test_row.py b/env/Lib/site-packages/gcloud/bigtable/test_row.py new file mode 100644 index 0000000..ff18945 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/test_row.py @@ -0,0 +1,915 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import unittest2 + + +class Test_SetDeleteRow(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row import _SetDeleteRow + return _SetDeleteRow + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test__get_mutations_virtual(self): + row = self._makeOne(b'row-key', None) + with self.assertRaises(NotImplementedError): + row._get_mutations(None) + + +class TestDirectRow(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row import DirectRow + return DirectRow + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + row_key = b'row_key' + table = object() + + row = self._makeOne(row_key, table) + self.assertEqual(row._row_key, row_key) + self.assertTrue(row._table is table) + self.assertEqual(row._pb_mutations, []) + + def test_constructor_with_unicode(self): + row_key = u'row_key' + row_key_bytes = b'row_key' + table = object() + + row = self._makeOne(row_key, table) + self.assertEqual(row._row_key, row_key_bytes) + self.assertTrue(row._table is table) + + def test_constructor_with_non_bytes(self): + row_key = object() + with self.assertRaises(TypeError): + self._makeOne(row_key, None) + + def test__get_mutations(self): + row_key = b'row_key' + row = self._makeOne(row_key, None) + + row._pb_mutations = mutations = object() + self.assertTrue(mutations is row._get_mutations(None)) + + def _set_cell_helper(self, column=None, column_bytes=None, + value=b'foobar', timestamp=None, + timestamp_micros=-1): + import six + import struct + row_key = b'row_key' + column_family_id = u'column_family_id' + if column is None: + column = b'column' + table = object() + row = self._makeOne(row_key, table) + self.assertEqual(row._pb_mutations, []) + row.set_cell(column_family_id, column, + value, timestamp=timestamp) + + if isinstance(value, six.integer_types): + value = struct.pack('>q', value) + expected_pb = _MutationPB( 
+ set_cell=_MutationSetCellPB( + family_name=column_family_id, + column_qualifier=column_bytes or column, + timestamp_micros=timestamp_micros, + value=value, + ), + ) + self.assertEqual(row._pb_mutations, [expected_pb]) + + def test_set_cell(self): + self._set_cell_helper() + + def test_set_cell_with_string_column(self): + column_bytes = b'column' + column_non_bytes = u'column' + self._set_cell_helper(column=column_non_bytes, + column_bytes=column_bytes) + + def test_set_cell_with_integer_value(self): + value = 1337 + self._set_cell_helper(value=value) + + def test_set_cell_with_non_bytes_value(self): + row_key = b'row_key' + column = b'column' + column_family_id = u'column_family_id' + table = object() + + row = self._makeOne(row_key, table) + value = object() # Not bytes + with self.assertRaises(TypeError): + row.set_cell(column_family_id, column, value) + + def test_set_cell_with_non_null_timestamp(self): + import datetime + from gcloud._helpers import _EPOCH + + microseconds = 898294371 + millis_granularity = microseconds - (microseconds % 1000) + timestamp = _EPOCH + datetime.timedelta(microseconds=microseconds) + self._set_cell_helper(timestamp=timestamp, + timestamp_micros=millis_granularity) + + def test_delete(self): + row_key = b'row_key' + row = self._makeOne(row_key, object()) + self.assertEqual(row._pb_mutations, []) + row.delete() + + expected_pb = _MutationPB( + delete_from_row=_MutationDeleteFromRowPB(), + ) + self.assertEqual(row._pb_mutations, [expected_pb]) + + def test_delete_cell(self): + klass = self._getTargetClass() + + class MockRow(klass): + + def __init__(self, *args, **kwargs): + super(MockRow, self).__init__(*args, **kwargs) + self._args = [] + self._kwargs = [] + + # Replace the called method with one that logs arguments. 
+ def _delete_cells(self, *args, **kwargs): + self._args.append(args) + self._kwargs.append(kwargs) + + row_key = b'row_key' + column = b'column' + column_family_id = u'column_family_id' + table = object() + + mock_row = MockRow(row_key, table) + # Make sure no values are set before calling the method. + self.assertEqual(mock_row._pb_mutations, []) + self.assertEqual(mock_row._args, []) + self.assertEqual(mock_row._kwargs, []) + + # Actually make the request against the mock class. + time_range = object() + mock_row.delete_cell(column_family_id, column, time_range=time_range) + self.assertEqual(mock_row._pb_mutations, []) + self.assertEqual(mock_row._args, [(column_family_id, [column])]) + self.assertEqual(mock_row._kwargs, [{ + 'state': None, + 'time_range': time_range, + }]) + + def test_delete_cells_non_iterable(self): + row_key = b'row_key' + column_family_id = u'column_family_id' + table = object() + + row = self._makeOne(row_key, table) + columns = object() # Not iterable + with self.assertRaises(TypeError): + row.delete_cells(column_family_id, columns) + + def test_delete_cells_all_columns(self): + row_key = b'row_key' + column_family_id = u'column_family_id' + table = object() + + row = self._makeOne(row_key, table) + klass = self._getTargetClass() + self.assertEqual(row._pb_mutations, []) + row.delete_cells(column_family_id, klass.ALL_COLUMNS) + + expected_pb = _MutationPB( + delete_from_family=_MutationDeleteFromFamilyPB( + family_name=column_family_id, + ), + ) + self.assertEqual(row._pb_mutations, [expected_pb]) + + def test_delete_cells_no_columns(self): + row_key = b'row_key' + column_family_id = u'column_family_id' + table = object() + + row = self._makeOne(row_key, table) + columns = [] + self.assertEqual(row._pb_mutations, []) + row.delete_cells(column_family_id, columns) + self.assertEqual(row._pb_mutations, []) + + def _delete_cells_helper(self, time_range=None): + row_key = b'row_key' + column = b'column' + column_family_id = u'column_family_id' 
+ table = object() + + row = self._makeOne(row_key, table) + columns = [column] + self.assertEqual(row._pb_mutations, []) + row.delete_cells(column_family_id, columns, time_range=time_range) + + expected_pb = _MutationPB( + delete_from_column=_MutationDeleteFromColumnPB( + family_name=column_family_id, + column_qualifier=column, + ), + ) + if time_range is not None: + expected_pb.delete_from_column.time_range.CopyFrom( + time_range.to_pb()) + self.assertEqual(row._pb_mutations, [expected_pb]) + + def test_delete_cells_no_time_range(self): + self._delete_cells_helper() + + def test_delete_cells_with_time_range(self): + import datetime + from gcloud._helpers import _EPOCH + from gcloud.bigtable.row_filters import TimestampRange + + microseconds = 30871000 # Makes sure already milliseconds granularity + start = _EPOCH + datetime.timedelta(microseconds=microseconds) + time_range = TimestampRange(start=start) + self._delete_cells_helper(time_range=time_range) + + def test_delete_cells_with_bad_column(self): + # This makes sure a failure on one of the columns doesn't leave + # the row's mutations in a bad state. 
+ row_key = b'row_key' + column = b'column' + column_family_id = u'column_family_id' + table = object() + + row = self._makeOne(row_key, table) + columns = [column, object()] + self.assertEqual(row._pb_mutations, []) + with self.assertRaises(TypeError): + row.delete_cells(column_family_id, columns) + self.assertEqual(row._pb_mutations, []) + + def test_delete_cells_with_string_columns(self): + row_key = b'row_key' + column_family_id = u'column_family_id' + column1 = u'column1' + column1_bytes = b'column1' + column2 = u'column2' + column2_bytes = b'column2' + table = object() + + row = self._makeOne(row_key, table) + columns = [column1, column2] + self.assertEqual(row._pb_mutations, []) + row.delete_cells(column_family_id, columns) + + expected_pb1 = _MutationPB( + delete_from_column=_MutationDeleteFromColumnPB( + family_name=column_family_id, + column_qualifier=column1_bytes, + ), + ) + expected_pb2 = _MutationPB( + delete_from_column=_MutationDeleteFromColumnPB( + family_name=column_family_id, + column_qualifier=column2_bytes, + ), + ) + self.assertEqual(row._pb_mutations, [expected_pb1, expected_pb2]) + + def test_commit(self): + from google.protobuf import empty_pb2 + from gcloud.bigtable._testing import _FakeStub + + row_key = b'row_key' + table_name = 'projects/more-stuff' + column_family_id = u'column_family_id' + column = b'column' + timeout_seconds = 711 + client = _Client(timeout_seconds=timeout_seconds) + table = _Table(table_name, client=client) + row = self._makeOne(row_key, table) + + # Create request_pb + value = b'bytes-value' + mutation = _MutationPB( + set_cell=_MutationSetCellPB( + family_name=column_family_id, + column_qualifier=column, + timestamp_micros=-1, # Default value. + value=value, + ), + ) + request_pb = _MutateRowRequestPB( + table_name=table_name, + row_key=row_key, + mutations=[mutation], + ) + + # Create response_pb + response_pb = empty_pb2.Empty() + + # Patch the stub used by the API method. 
+ client._data_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + expected_result = None # commit() has no return value when no filter. + + # Perform the method and check the result. + row.set_cell(column_family_id, column, value) + result = row.commit() + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'MutateRow', + (request_pb, timeout_seconds), + {}, + )]) + self.assertEqual(row._pb_mutations, []) + + def test_commit_too_many_mutations(self): + from gcloud._testing import _Monkey + from gcloud.bigtable import row as MUT + + row_key = b'row_key' + table = object() + row = self._makeOne(row_key, table) + row._pb_mutations = [1, 2, 3] + num_mutations = len(row._pb_mutations) + with _Monkey(MUT, MAX_MUTATIONS=num_mutations - 1): + with self.assertRaises(ValueError): + row.commit() + + def test_commit_no_mutations(self): + from gcloud.bigtable._testing import _FakeStub + + row_key = b'row_key' + client = _Client() + table = _Table(None, client=client) + row = self._makeOne(row_key, table) + self.assertEqual(row._pb_mutations, []) + + # Patch the stub used by the API method. + client._data_stub = stub = _FakeStub() + + # Perform the method and check the result. + result = row.commit() + self.assertEqual(result, None) + # Make sure no request was sent. 
+ self.assertEqual(stub.method_calls, []) + + +class TestConditionalRow(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row import ConditionalRow + return ConditionalRow + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + row_key = b'row_key' + table = object() + filter_ = object() + + row = self._makeOne(row_key, table, filter_=filter_) + self.assertEqual(row._row_key, row_key) + self.assertTrue(row._table is table) + self.assertTrue(row._filter is filter_) + self.assertEqual(row._true_pb_mutations, []) + self.assertEqual(row._false_pb_mutations, []) + + def test__get_mutations(self): + row_key = b'row_key' + filter_ = object() + row = self._makeOne(row_key, None, filter_=filter_) + + row._true_pb_mutations = true_mutations = object() + row._false_pb_mutations = false_mutations = object() + self.assertTrue(true_mutations is row._get_mutations(True)) + self.assertTrue(false_mutations is row._get_mutations(False)) + self.assertTrue(false_mutations is row._get_mutations(None)) + + def test_commit(self): + from gcloud.bigtable._testing import _FakeStub + from gcloud.bigtable.row_filters import RowSampleFilter + + row_key = b'row_key' + table_name = 'projects/more-stuff' + column_family_id1 = u'column_family_id1' + column_family_id2 = u'column_family_id2' + column_family_id3 = u'column_family_id3' + column1 = b'column1' + column2 = b'column2' + timeout_seconds = 262 + client = _Client(timeout_seconds=timeout_seconds) + table = _Table(table_name, client=client) + row_filter = RowSampleFilter(0.33) + row = self._makeOne(row_key, table, filter_=row_filter) + + # Create request_pb + value1 = b'bytes-value' + mutation1 = _MutationPB( + set_cell=_MutationSetCellPB( + family_name=column_family_id1, + column_qualifier=column1, + timestamp_micros=-1, # Default value. 
+ value=value1, + ), + ) + mutation2 = _MutationPB( + delete_from_row=_MutationDeleteFromRowPB(), + ) + mutation3 = _MutationPB( + delete_from_column=_MutationDeleteFromColumnPB( + family_name=column_family_id2, + column_qualifier=column2, + ), + ) + mutation4 = _MutationPB( + delete_from_family=_MutationDeleteFromFamilyPB( + family_name=column_family_id3, + ), + ) + request_pb = _CheckAndMutateRowRequestPB( + table_name=table_name, + row_key=row_key, + predicate_filter=row_filter.to_pb(), + true_mutations=[mutation1, mutation3, mutation4], + false_mutations=[mutation2], + ) + + # Create response_pb + predicate_matched = True + response_pb = _CheckAndMutateRowResponsePB( + predicate_matched=predicate_matched) + + # Patch the stub used by the API method. + client._data_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + expected_result = predicate_matched + + # Perform the method and check the result. + row.set_cell(column_family_id1, column1, value1, state=True) + row.delete(state=False) + row.delete_cell(column_family_id2, column2, state=True) + row.delete_cells(column_family_id3, row.ALL_COLUMNS, state=True) + result = row.commit() + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'CheckAndMutateRow', + (request_pb, timeout_seconds), + {}, + )]) + self.assertEqual(row._true_pb_mutations, []) + self.assertEqual(row._false_pb_mutations, []) + + def test_commit_too_many_mutations(self): + from gcloud._testing import _Monkey + from gcloud.bigtable import row as MUT + + row_key = b'row_key' + table = object() + filter_ = object() + row = self._makeOne(row_key, table, filter_=filter_) + row._true_pb_mutations = [1, 2, 3] + num_mutations = len(row._true_pb_mutations) + with _Monkey(MUT, MAX_MUTATIONS=num_mutations - 1): + with self.assertRaises(ValueError): + row.commit() + + def test_commit_no_mutations(self): + from gcloud.bigtable._testing import _FakeStub + + row_key = b'row_key' + client = _Client() + table = 
_Table(None, client=client) + filter_ = object() + row = self._makeOne(row_key, table, filter_=filter_) + self.assertEqual(row._true_pb_mutations, []) + self.assertEqual(row._false_pb_mutations, []) + + # Patch the stub used by the API method. + client._data_stub = stub = _FakeStub() + + # Perform the method and check the result. + result = row.commit() + self.assertEqual(result, None) + # Make sure no request was sent. + self.assertEqual(stub.method_calls, []) + + +class TestAppendRow(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row import AppendRow + return AppendRow + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + row_key = b'row_key' + table = object() + + row = self._makeOne(row_key, table) + self.assertEqual(row._row_key, row_key) + self.assertTrue(row._table is table) + self.assertEqual(row._rule_pb_list, []) + + def test_clear(self): + row_key = b'row_key' + table = object() + row = self._makeOne(row_key, table) + row._rule_pb_list = [1, 2, 3] + row.clear() + self.assertEqual(row._rule_pb_list, []) + + def test_append_cell_value(self): + table = object() + row_key = b'row_key' + row = self._makeOne(row_key, table) + self.assertEqual(row._rule_pb_list, []) + + column = b'column' + column_family_id = u'column_family_id' + value = b'bytes-val' + row.append_cell_value(column_family_id, column, value) + expected_pb = _ReadModifyWriteRulePB( + family_name=column_family_id, column_qualifier=column, + append_value=value) + self.assertEqual(row._rule_pb_list, [expected_pb]) + + def test_increment_cell_value(self): + table = object() + row_key = b'row_key' + row = self._makeOne(row_key, table) + self.assertEqual(row._rule_pb_list, []) + + column = b'column' + column_family_id = u'column_family_id' + int_value = 281330 + row.increment_cell_value(column_family_id, column, int_value) + expected_pb = _ReadModifyWriteRulePB( + family_name=column_family_id, 
column_qualifier=column, + increment_amount=int_value) + self.assertEqual(row._rule_pb_list, [expected_pb]) + + def test_commit(self): + from gcloud._testing import _Monkey + from gcloud.bigtable._testing import _FakeStub + from gcloud.bigtable import row as MUT + + row_key = b'row_key' + table_name = 'projects/more-stuff' + column_family_id = u'column_family_id' + column = b'column' + timeout_seconds = 87 + client = _Client(timeout_seconds=timeout_seconds) + table = _Table(table_name, client=client) + row = self._makeOne(row_key, table) + + # Create request_pb + value = b'bytes-value' + # We will call row.append_cell_value(COLUMN_FAMILY_ID, COLUMN, value). + request_pb = _ReadModifyWriteRowRequestPB( + table_name=table_name, + row_key=row_key, + rules=[ + _ReadModifyWriteRulePB( + family_name=column_family_id, + column_qualifier=column, + append_value=value, + ), + ], + ) + + # Create response_pb + response_pb = object() + + # Patch the stub used by the API method. + client._data_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + row_responses = [] + expected_result = object() + + def mock_parse_rmw_row_response(row_response): + row_responses.append(row_response) + return expected_result + + # Perform the method and check the result. + with _Monkey(MUT, _parse_rmw_row_response=mock_parse_rmw_row_response): + row.append_cell_value(column_family_id, column, value) + result = row.commit() + + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'ReadModifyWriteRow', + (request_pb, timeout_seconds), + {}, + )]) + self.assertEqual(row_responses, [response_pb]) + self.assertEqual(row._rule_pb_list, []) + + def test_commit_no_rules(self): + from gcloud.bigtable._testing import _FakeStub + + row_key = b'row_key' + client = _Client() + table = _Table(None, client=client) + row = self._makeOne(row_key, table) + self.assertEqual(row._rule_pb_list, []) + + # Patch the stub used by the API method. 
+ client._data_stub = stub = _FakeStub() + + # Perform the method and check the result. + result = row.commit() + self.assertEqual(result, {}) + # Make sure no request was sent. + self.assertEqual(stub.method_calls, []) + + def test_commit_too_many_mutations(self): + from gcloud._testing import _Monkey + from gcloud.bigtable import row as MUT + + row_key = b'row_key' + table = object() + row = self._makeOne(row_key, table) + row._rule_pb_list = [1, 2, 3] + num_mutations = len(row._rule_pb_list) + with _Monkey(MUT, MAX_MUTATIONS=num_mutations - 1): + with self.assertRaises(ValueError): + row.commit() + + +class Test__parse_rmw_row_response(unittest2.TestCase): + + def _callFUT(self, row_response): + from gcloud.bigtable.row import _parse_rmw_row_response + return _parse_rmw_row_response(row_response) + + def test_it(self): + from gcloud._helpers import _datetime_from_microseconds + col_fam1 = u'col-fam-id' + col_fam2 = u'col-fam-id2' + col_name1 = b'col-name1' + col_name2 = b'col-name2' + col_name3 = b'col-name3-but-other-fam' + cell_val1 = b'cell-val' + cell_val2 = b'cell-val-newer' + cell_val3 = b'altcol-cell-val' + cell_val4 = b'foo' + + microseconds = 1000871 + timestamp = _datetime_from_microseconds(microseconds) + expected_output = { + col_fam1: { + col_name1: [ + (cell_val1, timestamp), + (cell_val2, timestamp), + ], + col_name2: [ + (cell_val3, timestamp), + ], + }, + col_fam2: { + col_name3: [ + (cell_val4, timestamp), + ], + }, + } + response_row = _RowPB( + families=[ + _FamilyPB( + name=col_fam1, + columns=[ + _ColumnPB( + qualifier=col_name1, + cells=[ + _CellPB( + value=cell_val1, + timestamp_micros=microseconds, + ), + _CellPB( + value=cell_val2, + timestamp_micros=microseconds, + ), + ], + ), + _ColumnPB( + qualifier=col_name2, + cells=[ + _CellPB( + value=cell_val3, + timestamp_micros=microseconds, + ), + ], + ), + ], + ), + _FamilyPB( + name=col_fam2, + columns=[ + _ColumnPB( + qualifier=col_name3, + cells=[ + _CellPB( + value=cell_val4, + 
timestamp_micros=microseconds, + ), + ], + ), + ], + ), + ], + ) + sample_input = _ReadModifyWriteRowResponsePB(row=response_row) + self.assertEqual(expected_output, self._callFUT(sample_input)) + + +class Test__parse_family_pb(unittest2.TestCase): + + def _callFUT(self, family_pb): + from gcloud.bigtable.row import _parse_family_pb + return _parse_family_pb(family_pb) + + def test_it(self): + from gcloud._helpers import _datetime_from_microseconds + col_fam1 = u'col-fam-id' + col_name1 = b'col-name1' + col_name2 = b'col-name2' + cell_val1 = b'cell-val' + cell_val2 = b'cell-val-newer' + cell_val3 = b'altcol-cell-val' + + microseconds = 5554441037 + timestamp = _datetime_from_microseconds(microseconds) + expected_dict = { + col_name1: [ + (cell_val1, timestamp), + (cell_val2, timestamp), + ], + col_name2: [ + (cell_val3, timestamp), + ], + } + expected_output = (col_fam1, expected_dict) + sample_input = _FamilyPB( + name=col_fam1, + columns=[ + _ColumnPB( + qualifier=col_name1, + cells=[ + _CellPB( + value=cell_val1, + timestamp_micros=microseconds, + ), + _CellPB( + value=cell_val2, + timestamp_micros=microseconds, + ), + ], + ), + _ColumnPB( + qualifier=col_name2, + cells=[ + _CellPB( + value=cell_val3, + timestamp_micros=microseconds, + ), + ], + ), + ], + ) + self.assertEqual(expected_output, self._callFUT(sample_input)) + + +def _CheckAndMutateRowRequestPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + bigtable_pb2 as messages_v2_pb2) + return messages_v2_pb2.CheckAndMutateRowRequest(*args, **kw) + + +def _CheckAndMutateRowResponsePB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + bigtable_pb2 as messages_v2_pb2) + return messages_v2_pb2.CheckAndMutateRowResponse(*args, **kw) + + +def _MutateRowRequestPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + bigtable_pb2 as messages_v2_pb2) + return messages_v2_pb2.MutateRowRequest(*args, **kw) + + +def _ReadModifyWriteRowRequestPB(*args, **kw): + from 
gcloud.bigtable._generated_v2 import ( + bigtable_pb2 as messages_v2_pb2) + return messages_v2_pb2.ReadModifyWriteRowRequest(*args, **kw) + + +def _ReadModifyWriteRowResponsePB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + bigtable_pb2 as messages_v2_pb2) + return messages_v2_pb2.ReadModifyWriteRowResponse(*args, **kw) + + +def _CellPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + return data_v2_pb2.Cell(*args, **kw) + + +def _ColumnPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + return data_v2_pb2.Column(*args, **kw) + + +def _FamilyPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + return data_v2_pb2.Family(*args, **kw) + + +def _MutationPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + return data_v2_pb2.Mutation(*args, **kw) + + +def _MutationSetCellPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + return data_v2_pb2.Mutation.SetCell(*args, **kw) + + +def _MutationDeleteFromColumnPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + return data_v2_pb2.Mutation.DeleteFromColumn(*args, **kw) + + +def _MutationDeleteFromFamilyPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + return data_v2_pb2.Mutation.DeleteFromFamily(*args, **kw) + + +def _MutationDeleteFromRowPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + return data_v2_pb2.Mutation.DeleteFromRow(*args, **kw) + + +def _RowPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + return data_v2_pb2.Row(*args, **kw) + + +def _ReadModifyWriteRulePB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + return data_v2_pb2.ReadModifyWriteRule(*args, **kw) + + +class _Client(object): + + data_stub = None 
+ + def __init__(self, timeout_seconds=None): + self.timeout_seconds = timeout_seconds + + +class _Instance(object): + + def __init__(self, client=None): + self._client = client + + +class _Table(object): + + def __init__(self, name, client=None): + self.name = name + self._instance = _Instance(client) diff --git a/env/Lib/site-packages/gcloud/bigtable/test_row_data.py b/env/Lib/site-packages/gcloud/bigtable/test_row_data.py new file mode 100644 index 0000000..2162212 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/test_row_data.py @@ -0,0 +1,727 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import unittest2 + + +class TestCell(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_data import Cell + return Cell + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def _from_pb_test_helper(self, labels=None): + import datetime + from gcloud._helpers import _EPOCH + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + + timestamp_micros = 18738724000 # Make sure millis granularity + timestamp = _EPOCH + datetime.timedelta(microseconds=timestamp_micros) + value = b'value-bytes' + + if labels is None: + cell_pb = data_v2_pb2.Cell( + value=value, timestamp_micros=timestamp_micros) + cell_expected = self._makeOne(value, timestamp) + else: + cell_pb = data_v2_pb2.Cell( + value=value, timestamp_micros=timestamp_micros, labels=labels) + cell_expected = self._makeOne(value, timestamp, labels=labels) + + klass = self._getTargetClass() + result = klass.from_pb(cell_pb) + self.assertEqual(result, cell_expected) + + def test_from_pb(self): + self._from_pb_test_helper() + + def test_from_pb_with_labels(self): + labels = [u'label1', u'label2'] + self._from_pb_test_helper(labels) + + def test_constructor(self): + value = object() + timestamp = object() + cell = self._makeOne(value, timestamp) + self.assertEqual(cell.value, value) + self.assertEqual(cell.timestamp, timestamp) + + def test___eq__(self): + value = object() + timestamp = object() + cell1 = self._makeOne(value, timestamp) + cell2 = self._makeOne(value, timestamp) + self.assertEqual(cell1, cell2) + + def test___eq__type_differ(self): + cell1 = self._makeOne(None, None) + cell2 = object() + self.assertNotEqual(cell1, cell2) + + def test___ne__same_value(self): + value = object() + timestamp = object() + cell1 = self._makeOne(value, timestamp) + cell2 = self._makeOne(value, timestamp) + comparison_val = (cell1 != cell2) + self.assertFalse(comparison_val) + + def test___ne__(self): + value1 = 'value1' + value2 = 'value2' + 
timestamp = object() + cell1 = self._makeOne(value1, timestamp) + cell2 = self._makeOne(value2, timestamp) + self.assertNotEqual(cell1, cell2) + + +class TestPartialRowData(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_data import PartialRowData + return PartialRowData + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + row_key = object() + partial_row_data = self._makeOne(row_key) + self.assertTrue(partial_row_data._row_key is row_key) + self.assertEqual(partial_row_data._cells, {}) + + def test___eq__(self): + row_key = object() + partial_row_data1 = self._makeOne(row_key) + partial_row_data2 = self._makeOne(row_key) + self.assertEqual(partial_row_data1, partial_row_data2) + + def test___eq__type_differ(self): + partial_row_data1 = self._makeOne(None) + partial_row_data2 = object() + self.assertNotEqual(partial_row_data1, partial_row_data2) + + def test___ne__same_value(self): + row_key = object() + partial_row_data1 = self._makeOne(row_key) + partial_row_data2 = self._makeOne(row_key) + comparison_val = (partial_row_data1 != partial_row_data2) + self.assertFalse(comparison_val) + + def test___ne__(self): + row_key1 = object() + partial_row_data1 = self._makeOne(row_key1) + row_key2 = object() + partial_row_data2 = self._makeOne(row_key2) + self.assertNotEqual(partial_row_data1, partial_row_data2) + + def test___ne__cells(self): + row_key = object() + partial_row_data1 = self._makeOne(row_key) + partial_row_data1._cells = object() + partial_row_data2 = self._makeOne(row_key) + self.assertNotEqual(partial_row_data1, partial_row_data2) + + def test_to_dict(self): + cell1 = object() + cell2 = object() + cell3 = object() + + family_name1 = u'name1' + family_name2 = u'name2' + qual1 = b'col1' + qual2 = b'col2' + qual3 = b'col3' + + partial_row_data = self._makeOne(None) + partial_row_data._cells = { + family_name1: { + qual1: cell1, + qual2: cell2, + }, + 
family_name2: { + qual3: cell3, + }, + } + + result = partial_row_data.to_dict() + expected_result = { + b'name1:col1': cell1, + b'name1:col2': cell2, + b'name2:col3': cell3, + } + self.assertEqual(result, expected_result) + + def test_cells_property(self): + partial_row_data = self._makeOne(None) + cells = {1: 2} + partial_row_data._cells = cells + # Make sure we get a copy, not the original. + self.assertFalse(partial_row_data.cells is cells) + self.assertEqual(partial_row_data.cells, cells) + + def test_row_key_getter(self): + row_key = object() + partial_row_data = self._makeOne(row_key) + self.assertTrue(partial_row_data.row_key is row_key) + + +class TestPartialRowsData(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_data import PartialRowsData + return PartialRowsData + + def _getDoNothingClass(self): + klass = self._getTargetClass() + + class FakePartialRowsData(klass): + + def __init__(self, *args, **kwargs): + super(FakePartialRowsData, self).__init__(*args, **kwargs) + self._consumed = [] + + def consume_next(self): + value = self._response_iterator.next() + self._consumed.append(value) + return value + + return FakePartialRowsData + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + response_iterator = object() + partial_rows_data = self._makeOne(response_iterator) + self.assertTrue(partial_rows_data._response_iterator + is response_iterator) + self.assertEqual(partial_rows_data._rows, {}) + + def test___eq__(self): + response_iterator = object() + partial_rows_data1 = self._makeOne(response_iterator) + partial_rows_data2 = self._makeOne(response_iterator) + self.assertEqual(partial_rows_data1, partial_rows_data2) + + def test___eq__type_differ(self): + partial_rows_data1 = self._makeOne(None) + partial_rows_data2 = object() + self.assertNotEqual(partial_rows_data1, partial_rows_data2) + + def test___ne__same_value(self): + response_iterator = object() 
+ partial_rows_data1 = self._makeOne(response_iterator) + partial_rows_data2 = self._makeOne(response_iterator) + comparison_val = (partial_rows_data1 != partial_rows_data2) + self.assertFalse(comparison_val) + + def test___ne__(self): + response_iterator1 = object() + partial_rows_data1 = self._makeOne(response_iterator1) + response_iterator2 = object() + partial_rows_data2 = self._makeOne(response_iterator2) + self.assertNotEqual(partial_rows_data1, partial_rows_data2) + + def test_state_start(self): + prd = self._makeOne([]) + self.assertEqual(prd.state, prd.START) + + def test_state_new_row_w_row(self): + prd = self._makeOne([]) + prd._last_scanned_row_key = '' + prd._row = object() + self.assertEqual(prd.state, prd.NEW_ROW) + + def test_rows_getter(self): + partial_rows_data = self._makeOne(None) + partial_rows_data._rows = value = object() + self.assertTrue(partial_rows_data.rows is value) + + def test_cancel(self): + response_iterator = _MockCancellableIterator() + partial_rows_data = self._makeOne(response_iterator) + self.assertEqual(response_iterator.cancel_calls, 0) + partial_rows_data.cancel() + self.assertEqual(response_iterator.cancel_calls, 1) + + # 'consume_nest' tested via 'TestPartialRowsData_JSON_acceptance_tests' + + def test_consume_all(self): + klass = self._getDoNothingClass() + + value1, value2, value3 = object(), object(), object() + response_iterator = _MockCancellableIterator(value1, value2, value3) + partial_rows_data = klass(response_iterator) + self.assertEqual(partial_rows_data._consumed, []) + partial_rows_data.consume_all() + self.assertEqual( + partial_rows_data._consumed, [value1, value2, value3]) + + def test_consume_all_with_max_loops(self): + klass = self._getDoNothingClass() + + value1, value2, value3 = object(), object(), object() + response_iterator = _MockCancellableIterator(value1, value2, value3) + partial_rows_data = klass(response_iterator) + self.assertEqual(partial_rows_data._consumed, []) + 
partial_rows_data.consume_all(max_loops=1) + self.assertEqual(partial_rows_data._consumed, [value1]) + # Make sure the iterator still has the remaining values. + self.assertEqual( + list(response_iterator.iter_values), [value2, value3]) + + def test__copy_from_current_unset(self): + prd = self._makeOne([]) + chunks = _generate_cell_chunks(['']) + chunk = chunks[0] + prd._copy_from_current(chunk) + self.assertEqual(chunk.row_key, b'') + self.assertEqual(chunk.family_name.value, u'') + self.assertEqual(chunk.qualifier.value, b'') + self.assertEqual(chunk.timestamp_micros, 0) + self.assertEqual(chunk.labels, []) + + def test__copy_from_current_blank(self): + ROW_KEY = b'RK' + FAMILY_NAME = u'A' + QUALIFIER = b'C' + TIMESTAMP_MICROS = 100 + LABELS = ['L1', 'L2'] + prd = self._makeOne([]) + prd._cell = _PartialCellData() + chunks = _generate_cell_chunks(['']) + chunk = chunks[0] + chunk.row_key = ROW_KEY + chunk.family_name.value = FAMILY_NAME + chunk.qualifier.value = QUALIFIER + chunk.timestamp_micros = TIMESTAMP_MICROS + chunk.labels.extend(LABELS) + prd._copy_from_current(chunk) + self.assertEqual(chunk.row_key, ROW_KEY) + self.assertEqual(chunk.family_name.value, FAMILY_NAME) + self.assertEqual(chunk.qualifier.value, QUALIFIER) + self.assertEqual(chunk.timestamp_micros, TIMESTAMP_MICROS) + self.assertEqual(chunk.labels, LABELS) + + def test__copy_from_previous_unset(self): + prd = self._makeOne([]) + cell = _PartialCellData() + prd._copy_from_previous(cell) + self.assertEqual(cell.row_key, '') + self.assertEqual(cell.family_name, u'') + self.assertEqual(cell.qualifier, b'') + self.assertEqual(cell.timestamp_micros, 0) + self.assertEqual(cell.labels, []) + + def test__copy_from_previous_blank(self): + ROW_KEY = 'RK' + FAMILY_NAME = u'A' + QUALIFIER = b'C' + TIMESTAMP_MICROS = 100 + LABELS = ['L1', 'L2'] + prd = self._makeOne([]) + cell = _PartialCellData( + row_key=ROW_KEY, + family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + 
labels=LABELS, + ) + prd._previous_cell = _PartialCellData() + prd._copy_from_previous(cell) + self.assertEqual(cell.row_key, ROW_KEY) + self.assertEqual(cell.family_name, FAMILY_NAME) + self.assertEqual(cell.qualifier, QUALIFIER) + self.assertEqual(cell.timestamp_micros, TIMESTAMP_MICROS) + self.assertEqual(cell.labels, LABELS) + + def test__copy_from_previous_filled(self): + ROW_KEY = 'RK' + FAMILY_NAME = u'A' + QUALIFIER = b'C' + TIMESTAMP_MICROS = 100 + LABELS = ['L1', 'L2'] + prd = self._makeOne([]) + prd._previous_cell = _PartialCellData( + row_key=ROW_KEY, + family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + labels=LABELS, + ) + cell = _PartialCellData() + prd._copy_from_previous(cell) + self.assertEqual(cell.row_key, ROW_KEY) + self.assertEqual(cell.family_name, FAMILY_NAME) + self.assertEqual(cell.qualifier, QUALIFIER) + self.assertEqual(cell.timestamp_micros, 0) + self.assertEqual(cell.labels, []) + + def test__save_row_no_cell(self): + ROW_KEY = 'RK' + prd = self._makeOne([]) + row = prd._row = _Dummy(row_key=ROW_KEY) + prd._cell = None + prd._save_current_row() + self.assertTrue(prd._rows[ROW_KEY] is row) + + def test_invalid_last_scanned_row_key_on_start(self): + from gcloud.bigtable.row_data import InvalidReadRowsResponse + response = _ReadRowsResponseV2(chunks=(), last_scanned_row_key='ABC') + iterator = _MockCancellableIterator(response) + prd = self._makeOne(iterator) + with self.assertRaises(InvalidReadRowsResponse): + prd.consume_next() + + def test_valid_last_scanned_row_key_on_start(self): + response = _ReadRowsResponseV2( + chunks=(), last_scanned_row_key='AFTER') + iterator = _MockCancellableIterator(response) + prd = self._makeOne(iterator) + prd._last_scanned_row_key = 'BEFORE' + prd.consume_next() + self.assertEqual(prd._last_scanned_row_key, 'AFTER') + + def test_invalid_empty_chunk(self): + from gcloud.bigtable.row_data import InvalidChunk + chunks = _generate_cell_chunks(['']) + response = 
_ReadRowsResponseV2(chunks) + iterator = _MockCancellableIterator(response) + prd = self._makeOne(iterator) + with self.assertRaises(InvalidChunk): + prd.consume_next() + + def test_invalid_empty_second_chunk(self): + from gcloud.bigtable.row_data import InvalidChunk + chunks = _generate_cell_chunks(['', '']) + first = chunks[0] + first.row_key = b'RK' + first.family_name.value = 'A' + first.qualifier.value = b'C' + response = _ReadRowsResponseV2(chunks) + iterator = _MockCancellableIterator(response) + prd = self._makeOne(iterator) + with self.assertRaises(InvalidChunk): + prd.consume_next() + + +class TestPartialRowsData_JSON_acceptance_tests(unittest2.TestCase): + + _json_tests = None + + def _getTargetClass(self): + from gcloud.bigtable.row_data import PartialRowsData + return PartialRowsData + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def _load_json_test(self, test_name): + import os + if self.__class__._json_tests is None: + dirname = os.path.dirname(__file__) + filename = os.path.join(dirname, 'read-rows-acceptance-test.json') + raw = _parse_readrows_acceptance_tests(filename) + tests = self.__class__._json_tests = {} + for (name, chunks, results) in raw: + tests[name] = chunks, results + return self.__class__._json_tests[test_name] + + # JSON Error cases: invalid chunks + + def _fail_during_consume(self, testcase_name): + from gcloud.bigtable.row_data import InvalidChunk + chunks, results = self._load_json_test(testcase_name) + response = _ReadRowsResponseV2(chunks) + iterator = _MockCancellableIterator(response) + prd = self._makeOne(iterator) + with self.assertRaises(InvalidChunk): + prd.consume_next() + expected_result = self._sort_flattend_cells( + [result for result in results if not result['error']]) + flattened = self._sort_flattend_cells(_flatten_cells(prd)) + self.assertEqual(flattened, expected_result) + + def test_invalid_no_cell_key_before_commit(self): + self._fail_during_consume('invalid - no 
cell key before commit') + + def test_invalid_no_cell_key_before_value(self): + self._fail_during_consume('invalid - no cell key before value') + + def test_invalid_new_col_family_wo_qualifier(self): + self._fail_during_consume( + 'invalid - new col family must specify qualifier') + + def test_invalid_no_commit_between_rows(self): + self._fail_during_consume('invalid - no commit between rows') + + def test_invalid_no_commit_after_first_row(self): + self._fail_during_consume('invalid - no commit after first row') + + def test_invalid_duplicate_row_key(self): + self._fail_during_consume('invalid - duplicate row key') + + def test_invalid_new_row_missing_row_key(self): + self._fail_during_consume('invalid - new row missing row key') + + def test_invalid_bare_reset(self): + self._fail_during_consume('invalid - bare reset') + + def test_invalid_bad_reset_no_commit(self): + self._fail_during_consume('invalid - bad reset, no commit') + + def test_invalid_missing_key_after_reset(self): + self._fail_during_consume('invalid - missing key after reset') + + def test_invalid_reset_with_chunk(self): + self._fail_during_consume('invalid - reset with chunk') + + def test_invalid_commit_with_chunk(self): + self._fail_during_consume('invalid - commit with chunk') + + # JSON Error cases: incomplete final row + + def _sort_flattend_cells(self, flattened): + import operator + key_func = operator.itemgetter('rk', 'fm', 'qual') + return sorted(flattened, key=key_func) + + def _incomplete_final_row(self, testcase_name): + chunks, results = self._load_json_test(testcase_name) + response = _ReadRowsResponseV2(chunks) + iterator = _MockCancellableIterator(response) + prd = self._makeOne(iterator) + prd.consume_next() + self.assertEqual(prd.state, prd.ROW_IN_PROGRESS) + expected_result = self._sort_flattend_cells( + [result for result in results if not result['error']]) + flattened = self._sort_flattend_cells(_flatten_cells(prd)) + self.assertEqual(flattened, expected_result) + + def 
test_invalid_no_commit(self): + self._incomplete_final_row('invalid - no commit') + + def test_invalid_last_row_missing_commit(self): + self._incomplete_final_row('invalid - last row missing commit') + + # Non-error cases + + _marker = object() + + def _match_results(self, testcase_name, expected_result=_marker): + chunks, results = self._load_json_test(testcase_name) + response = _ReadRowsResponseV2(chunks) + iterator = _MockCancellableIterator(response) + prd = self._makeOne(iterator) + prd.consume_next() + flattened = self._sort_flattend_cells(_flatten_cells(prd)) + if expected_result is self._marker: + expected_result = self._sort_flattend_cells(results) + self.assertEqual(flattened, expected_result) + + def test_bare_commit_implies_ts_zero(self): + self._match_results('bare commit implies ts=0') + + def test_simple_row_with_timestamp(self): + self._match_results('simple row with timestamp') + + def test_missing_timestamp_implies_ts_zero(self): + self._match_results('missing timestamp, implied ts=0') + + def test_empty_cell_value(self): + self._match_results('empty cell value') + + def test_two_unsplit_cells(self): + self._match_results('two unsplit cells') + + def test_two_qualifiers(self): + self._match_results('two qualifiers') + + def test_two_families(self): + self._match_results('two families') + + def test_with_labels(self): + self._match_results('with labels') + + def test_split_cell_bare_commit(self): + self._match_results('split cell, bare commit') + + def test_split_cell(self): + self._match_results('split cell') + + def test_split_four_ways(self): + self._match_results('split four ways') + + def test_two_split_cells(self): + self._match_results('two split cells') + + def test_multi_qualifier_splits(self): + self._match_results('multi-qualifier splits') + + def test_multi_qualifier_multi_split(self): + self._match_results('multi-qualifier multi-split') + + def test_multi_family_split(self): + self._match_results('multi-family split') + + def 
test_two_rows(self): + self._match_results('two rows') + + def test_two_rows_implicit_timestamp(self): + self._match_results('two rows implicit timestamp') + + def test_two_rows_empty_value(self): + self._match_results('two rows empty value') + + def test_two_rows_one_with_multiple_cells(self): + self._match_results('two rows, one with multiple cells') + + def test_two_rows_multiple_cells_multiple_families(self): + self._match_results('two rows, multiple cells, multiple families') + + def test_two_rows_multiple_cells(self): + self._match_results('two rows, multiple cells') + + def test_two_rows_four_cells_two_labels(self): + self._match_results('two rows, four cells, 2 labels') + + def test_two_rows_with_splits_same_timestamp(self): + self._match_results('two rows with splits, same timestamp') + + def test_no_data_after_reset(self): + # JSON testcase has `"results": null` + self._match_results('no data after reset', expected_result=[]) + + def test_simple_reset(self): + self._match_results('simple reset') + + def test_reset_to_new_val(self): + self._match_results('reset to new val') + + def test_reset_to_new_qual(self): + self._match_results('reset to new qual') + + def test_reset_with_splits(self): + self._match_results('reset with splits') + + def test_two_resets(self): + self._match_results('two resets') + + def test_reset_to_new_row(self): + self._match_results('reset to new row') + + def test_reset_in_between_chunks(self): + self._match_results('reset in between chunks') + + def test_empty_cell_chunk(self): + self._match_results('empty cell chunk') + + +def _flatten_cells(prd): + # Match results format from JSON testcases. + # Doesn't handle error cases. 
+ from gcloud._helpers import _bytes_to_unicode + from gcloud._helpers import _microseconds_from_datetime + for row_key, row in prd.rows.items(): + for family_name, family in row.cells.items(): + for qualifier, column in family.items(): + for cell in column: + yield { + u'rk': _bytes_to_unicode(row_key), + u'fm': family_name, + u'qual': _bytes_to_unicode(qualifier), + u'ts': _microseconds_from_datetime(cell.timestamp), + u'value': _bytes_to_unicode(cell.value), + u'label': u' '.join(cell.labels), + u'error': False, + } + + +class _MockCancellableIterator(object): + + cancel_calls = 0 + + def __init__(self, *values): + self.iter_values = iter(values) + + def cancel(self): + self.cancel_calls += 1 + + def next(self): + return next(self.iter_values) + + def __next__(self): # pragma: NO COVER Py3k + return self.next() + + +class _Dummy(object): + + def __init__(self, **kw): + self.__dict__.update(kw) + + +class _PartialCellData(object): + + row_key = '' + family_name = u'' + qualifier = b'' + timestamp_micros = 0 + + def __init__(self, **kw): + self.labels = kw.pop('labels', []) + self.__dict__.update(kw) + + +class _ReadRowsResponseV2(object): + + def __init__(self, chunks, last_scanned_row_key=''): + self.chunks = chunks + self.last_scanned_row_key = last_scanned_row_key + + +def _generate_cell_chunks(chunk_text_pbs): + from google.protobuf.text_format import Merge + from gcloud.bigtable._generated_v2.bigtable_pb2 import ReadRowsResponse + + chunks = [] + + for chunk_text_pb in chunk_text_pbs: + chunk = ReadRowsResponse.CellChunk() + chunks.append(Merge(chunk_text_pb, chunk)) + + return chunks + + +def _parse_readrows_acceptance_tests(filename): + """Parse acceptance tests from JSON + + See: + https://github.com/GoogleCloudPlatform/cloud-bigtable-client/blob/master/bigtable-client-core/src/test/resources/com/google/cloud/bigtable/grpc/scanner/v2/read-rows-acceptance-test.json + """ + import json + + with open(filename) as json_file: + test_json = json.load(json_file) 
+ + for test in test_json['tests']: + name = test['name'] + chunks = _generate_cell_chunks(test['chunks']) + results = test['results'] + yield name, chunks, results diff --git a/env/Lib/site-packages/gcloud/bigtable/test_row_filters.py b/env/Lib/site-packages/gcloud/bigtable/test_row_filters.py new file mode 100644 index 0000000..594a4fe --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/test_row_filters.py @@ -0,0 +1,1001 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import unittest2 + + +class Test_BoolFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import _BoolFilter + return _BoolFilter + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + flag = object() + row_filter = self._makeOne(flag) + self.assertTrue(row_filter.flag is flag) + + def test___eq__type_differ(self): + flag = object() + row_filter1 = self._makeOne(flag) + row_filter2 = object() + self.assertNotEqual(row_filter1, row_filter2) + + def test___eq__same_value(self): + flag = object() + row_filter1 = self._makeOne(flag) + row_filter2 = self._makeOne(flag) + self.assertEqual(row_filter1, row_filter2) + + def test___ne__same_value(self): + flag = object() + row_filter1 = self._makeOne(flag) + row_filter2 = self._makeOne(flag) + comparison_val = (row_filter1 != row_filter2) + self.assertFalse(comparison_val) + + +class TestSinkFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import SinkFilter + return SinkFilter + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_to_pb(self): + flag = True + row_filter = self._makeOne(flag) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(sink=flag) + self.assertEqual(pb_val, expected_pb) + + +class TestPassAllFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import PassAllFilter + return PassAllFilter + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_to_pb(self): + flag = True + row_filter = self._makeOne(flag) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(pass_all_filter=flag) + self.assertEqual(pb_val, expected_pb) + + +class TestBlockAllFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import BlockAllFilter + return BlockAllFilter + + def _makeOne(self, 
*args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_to_pb(self): + flag = True + row_filter = self._makeOne(flag) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(block_all_filter=flag) + self.assertEqual(pb_val, expected_pb) + + +class Test_RegexFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import _RegexFilter + return _RegexFilter + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + regex = b'abc' + row_filter = self._makeOne(regex) + self.assertTrue(row_filter.regex is regex) + + def test_constructor_non_bytes(self): + regex = u'abc' + row_filter = self._makeOne(regex) + self.assertEqual(row_filter.regex, b'abc') + + def test___eq__type_differ(self): + regex = b'def-rgx' + row_filter1 = self._makeOne(regex) + row_filter2 = object() + self.assertNotEqual(row_filter1, row_filter2) + + def test___eq__same_value(self): + regex = b'trex-regex' + row_filter1 = self._makeOne(regex) + row_filter2 = self._makeOne(regex) + self.assertEqual(row_filter1, row_filter2) + + def test___ne__same_value(self): + regex = b'abc' + row_filter1 = self._makeOne(regex) + row_filter2 = self._makeOne(regex) + comparison_val = (row_filter1 != row_filter2) + self.assertFalse(comparison_val) + + +class TestRowKeyRegexFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import RowKeyRegexFilter + return RowKeyRegexFilter + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_to_pb(self): + regex = b'row-key-regex' + row_filter = self._makeOne(regex) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(row_key_regex_filter=regex) + self.assertEqual(pb_val, expected_pb) + + +class TestRowSampleFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import RowSampleFilter + return RowSampleFilter + + def 
_makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + sample = object() + row_filter = self._makeOne(sample) + self.assertTrue(row_filter.sample is sample) + + def test___eq__type_differ(self): + sample = object() + row_filter1 = self._makeOne(sample) + row_filter2 = object() + self.assertNotEqual(row_filter1, row_filter2) + + def test___eq__same_value(self): + sample = object() + row_filter1 = self._makeOne(sample) + row_filter2 = self._makeOne(sample) + self.assertEqual(row_filter1, row_filter2) + + def test_to_pb(self): + sample = 0.25 + row_filter = self._makeOne(sample) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(row_sample_filter=sample) + self.assertEqual(pb_val, expected_pb) + + +class TestFamilyNameRegexFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import FamilyNameRegexFilter + return FamilyNameRegexFilter + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_to_pb(self): + regex = u'family-regex' + row_filter = self._makeOne(regex) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(family_name_regex_filter=regex) + self.assertEqual(pb_val, expected_pb) + + +class TestColumnQualifierRegexFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import ColumnQualifierRegexFilter + return ColumnQualifierRegexFilter + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_to_pb(self): + regex = b'column-regex' + row_filter = self._makeOne(regex) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB( + column_qualifier_regex_filter=regex) + self.assertEqual(pb_val, expected_pb) + + +class TestTimestampRange(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import TimestampRange + return TimestampRange + + def _makeOne(self, *args, **kwargs): + return 
self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + start = object() + end = object() + time_range = self._makeOne(start=start, end=end) + self.assertTrue(time_range.start is start) + self.assertTrue(time_range.end is end) + + def test___eq__(self): + start = object() + end = object() + time_range1 = self._makeOne(start=start, end=end) + time_range2 = self._makeOne(start=start, end=end) + self.assertEqual(time_range1, time_range2) + + def test___eq__type_differ(self): + start = object() + end = object() + time_range1 = self._makeOne(start=start, end=end) + time_range2 = object() + self.assertNotEqual(time_range1, time_range2) + + def test___ne__same_value(self): + start = object() + end = object() + time_range1 = self._makeOne(start=start, end=end) + time_range2 = self._makeOne(start=start, end=end) + comparison_val = (time_range1 != time_range2) + self.assertFalse(comparison_val) + + def _to_pb_helper(self, start_micros=None, end_micros=None): + import datetime + from gcloud._helpers import _EPOCH + pb_kwargs = {} + + start = None + if start_micros is not None: + start = _EPOCH + datetime.timedelta(microseconds=start_micros) + pb_kwargs['start_timestamp_micros'] = start_micros + end = None + if end_micros is not None: + end = _EPOCH + datetime.timedelta(microseconds=end_micros) + pb_kwargs['end_timestamp_micros'] = end_micros + time_range = self._makeOne(start=start, end=end) + + expected_pb = _TimestampRangePB(**pb_kwargs) + self.assertEqual(time_range.to_pb(), expected_pb) + + def test_to_pb(self): + # Makes sure already milliseconds granularity + start_micros = 30871000 + end_micros = 12939371000 + self._to_pb_helper(start_micros=start_micros, + end_micros=end_micros) + + def test_to_pb_start_only(self): + # Makes sure already milliseconds granularity + start_micros = 30871000 + self._to_pb_helper(start_micros=start_micros) + + def test_to_pb_end_only(self): + # Makes sure already milliseconds granularity + end_micros = 12939371000 + 
self._to_pb_helper(end_micros=end_micros) + + +class TestTimestampRangeFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import TimestampRangeFilter + return TimestampRangeFilter + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + range_ = object() + row_filter = self._makeOne(range_) + self.assertTrue(row_filter.range_ is range_) + + def test___eq__type_differ(self): + range_ = object() + row_filter1 = self._makeOne(range_) + row_filter2 = object() + self.assertNotEqual(row_filter1, row_filter2) + + def test___eq__same_value(self): + range_ = object() + row_filter1 = self._makeOne(range_) + row_filter2 = self._makeOne(range_) + self.assertEqual(row_filter1, row_filter2) + + def test_to_pb(self): + from gcloud.bigtable.row_filters import TimestampRange + + range_ = TimestampRange() + row_filter = self._makeOne(range_) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB( + timestamp_range_filter=_TimestampRangePB()) + self.assertEqual(pb_val, expected_pb) + + +class TestColumnRangeFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import ColumnRangeFilter + return ColumnRangeFilter + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor_defaults(self): + column_family_id = object() + row_filter = self._makeOne(column_family_id) + self.assertTrue(row_filter.column_family_id is column_family_id) + self.assertEqual(row_filter.start_column, None) + self.assertEqual(row_filter.end_column, None) + self.assertTrue(row_filter.inclusive_start) + self.assertTrue(row_filter.inclusive_end) + + def test_constructor_explicit(self): + column_family_id = object() + start_column = object() + end_column = object() + inclusive_start = object() + inclusive_end = object() + row_filter = self._makeOne( + column_family_id, + start_column=start_column, + 
end_column=end_column, + inclusive_start=inclusive_start, + inclusive_end=inclusive_end) + self.assertTrue(row_filter.column_family_id is column_family_id) + self.assertTrue(row_filter.start_column is start_column) + self.assertTrue(row_filter.end_column is end_column) + self.assertTrue(row_filter.inclusive_start is inclusive_start) + self.assertTrue(row_filter.inclusive_end is inclusive_end) + + def test_constructor_bad_start(self): + column_family_id = object() + self.assertRaises(ValueError, self._makeOne, + column_family_id, inclusive_start=True) + + def test_constructor_bad_end(self): + column_family_id = object() + self.assertRaises(ValueError, self._makeOne, + column_family_id, inclusive_end=True) + + def test___eq__(self): + column_family_id = object() + start_column = object() + end_column = object() + inclusive_start = object() + inclusive_end = object() + row_filter1 = self._makeOne(column_family_id, + start_column=start_column, + end_column=end_column, + inclusive_start=inclusive_start, + inclusive_end=inclusive_end) + row_filter2 = self._makeOne(column_family_id, + start_column=start_column, + end_column=end_column, + inclusive_start=inclusive_start, + inclusive_end=inclusive_end) + self.assertEqual(row_filter1, row_filter2) + + def test___eq__type_differ(self): + column_family_id = object() + row_filter1 = self._makeOne(column_family_id) + row_filter2 = object() + self.assertNotEqual(row_filter1, row_filter2) + + def test_to_pb(self): + column_family_id = u'column-family-id' + row_filter = self._makeOne(column_family_id) + col_range_pb = _ColumnRangePB(family_name=column_family_id) + expected_pb = _RowFilterPB(column_range_filter=col_range_pb) + self.assertEqual(row_filter.to_pb(), expected_pb) + + def test_to_pb_inclusive_start(self): + column_family_id = u'column-family-id' + column = b'column' + row_filter = self._makeOne(column_family_id, start_column=column) + col_range_pb = _ColumnRangePB( + family_name=column_family_id, + 
start_qualifier_closed=column, + ) + expected_pb = _RowFilterPB(column_range_filter=col_range_pb) + self.assertEqual(row_filter.to_pb(), expected_pb) + + def test_to_pb_exclusive_start(self): + column_family_id = u'column-family-id' + column = b'column' + row_filter = self._makeOne(column_family_id, start_column=column, + inclusive_start=False) + col_range_pb = _ColumnRangePB( + family_name=column_family_id, + start_qualifier_open=column, + ) + expected_pb = _RowFilterPB(column_range_filter=col_range_pb) + self.assertEqual(row_filter.to_pb(), expected_pb) + + def test_to_pb_inclusive_end(self): + column_family_id = u'column-family-id' + column = b'column' + row_filter = self._makeOne(column_family_id, end_column=column) + col_range_pb = _ColumnRangePB( + family_name=column_family_id, + end_qualifier_closed=column, + ) + expected_pb = _RowFilterPB(column_range_filter=col_range_pb) + self.assertEqual(row_filter.to_pb(), expected_pb) + + def test_to_pb_exclusive_end(self): + column_family_id = u'column-family-id' + column = b'column' + row_filter = self._makeOne(column_family_id, end_column=column, + inclusive_end=False) + col_range_pb = _ColumnRangePB( + family_name=column_family_id, + end_qualifier_open=column, + ) + expected_pb = _RowFilterPB(column_range_filter=col_range_pb) + self.assertEqual(row_filter.to_pb(), expected_pb) + + +class TestValueRegexFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import ValueRegexFilter + return ValueRegexFilter + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_to_pb(self): + regex = b'value-regex' + row_filter = self._makeOne(regex) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(value_regex_filter=regex) + self.assertEqual(pb_val, expected_pb) + + +class TestValueRangeFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import ValueRangeFilter + return ValueRangeFilter + + def 
_makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor_defaults(self): + row_filter = self._makeOne() + self.assertEqual(row_filter.start_value, None) + self.assertEqual(row_filter.end_value, None) + self.assertTrue(row_filter.inclusive_start) + self.assertTrue(row_filter.inclusive_end) + + def test_constructor_explicit(self): + start_value = object() + end_value = object() + inclusive_start = object() + inclusive_end = object() + row_filter = self._makeOne(start_value=start_value, + end_value=end_value, + inclusive_start=inclusive_start, + inclusive_end=inclusive_end) + self.assertTrue(row_filter.start_value is start_value) + self.assertTrue(row_filter.end_value is end_value) + self.assertTrue(row_filter.inclusive_start is inclusive_start) + self.assertTrue(row_filter.inclusive_end is inclusive_end) + + def test_constructor_bad_start(self): + self.assertRaises(ValueError, self._makeOne, inclusive_start=True) + + def test_constructor_bad_end(self): + self.assertRaises(ValueError, self._makeOne, inclusive_end=True) + + def test___eq__(self): + start_value = object() + end_value = object() + inclusive_start = object() + inclusive_end = object() + row_filter1 = self._makeOne(start_value=start_value, + end_value=end_value, + inclusive_start=inclusive_start, + inclusive_end=inclusive_end) + row_filter2 = self._makeOne(start_value=start_value, + end_value=end_value, + inclusive_start=inclusive_start, + inclusive_end=inclusive_end) + self.assertEqual(row_filter1, row_filter2) + + def test___eq__type_differ(self): + row_filter1 = self._makeOne() + row_filter2 = object() + self.assertNotEqual(row_filter1, row_filter2) + + def test_to_pb(self): + row_filter = self._makeOne() + expected_pb = _RowFilterPB( + value_range_filter=_ValueRangePB()) + self.assertEqual(row_filter.to_pb(), expected_pb) + + def test_to_pb_inclusive_start(self): + value = b'some-value' + row_filter = self._makeOne(start_value=value) + val_range_pb = 
_ValueRangePB(start_value_closed=value) + expected_pb = _RowFilterPB(value_range_filter=val_range_pb) + self.assertEqual(row_filter.to_pb(), expected_pb) + + def test_to_pb_exclusive_start(self): + value = b'some-value' + row_filter = self._makeOne(start_value=value, inclusive_start=False) + val_range_pb = _ValueRangePB(start_value_open=value) + expected_pb = _RowFilterPB(value_range_filter=val_range_pb) + self.assertEqual(row_filter.to_pb(), expected_pb) + + def test_to_pb_inclusive_end(self): + value = b'some-value' + row_filter = self._makeOne(end_value=value) + val_range_pb = _ValueRangePB(end_value_closed=value) + expected_pb = _RowFilterPB(value_range_filter=val_range_pb) + self.assertEqual(row_filter.to_pb(), expected_pb) + + def test_to_pb_exclusive_end(self): + value = b'some-value' + row_filter = self._makeOne(end_value=value, inclusive_end=False) + val_range_pb = _ValueRangePB(end_value_open=value) + expected_pb = _RowFilterPB(value_range_filter=val_range_pb) + self.assertEqual(row_filter.to_pb(), expected_pb) + + +class Test_CellCountFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import _CellCountFilter + return _CellCountFilter + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + num_cells = object() + row_filter = self._makeOne(num_cells) + self.assertTrue(row_filter.num_cells is num_cells) + + def test___eq__type_differ(self): + num_cells = object() + row_filter1 = self._makeOne(num_cells) + row_filter2 = object() + self.assertNotEqual(row_filter1, row_filter2) + + def test___eq__same_value(self): + num_cells = object() + row_filter1 = self._makeOne(num_cells) + row_filter2 = self._makeOne(num_cells) + self.assertEqual(row_filter1, row_filter2) + + def test___ne__same_value(self): + num_cells = object() + row_filter1 = self._makeOne(num_cells) + row_filter2 = self._makeOne(num_cells) + comparison_val = (row_filter1 != row_filter2) + 
self.assertFalse(comparison_val) + + +class TestCellsRowOffsetFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import CellsRowOffsetFilter + return CellsRowOffsetFilter + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_to_pb(self): + num_cells = 76 + row_filter = self._makeOne(num_cells) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB( + cells_per_row_offset_filter=num_cells) + self.assertEqual(pb_val, expected_pb) + + +class TestCellsRowLimitFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import CellsRowLimitFilter + return CellsRowLimitFilter + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_to_pb(self): + num_cells = 189 + row_filter = self._makeOne(num_cells) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB( + cells_per_row_limit_filter=num_cells) + self.assertEqual(pb_val, expected_pb) + + +class TestCellsColumnLimitFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import CellsColumnLimitFilter + return CellsColumnLimitFilter + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_to_pb(self): + num_cells = 10 + row_filter = self._makeOne(num_cells) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB( + cells_per_column_limit_filter=num_cells) + self.assertEqual(pb_val, expected_pb) + + +class TestStripValueTransformerFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import StripValueTransformerFilter + return StripValueTransformerFilter + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_to_pb(self): + flag = True + row_filter = self._makeOne(flag) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(strip_value_transformer=flag) + 
self.assertEqual(pb_val, expected_pb) + + +class TestApplyLabelFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import ApplyLabelFilter + return ApplyLabelFilter + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + label = object() + row_filter = self._makeOne(label) + self.assertTrue(row_filter.label is label) + + def test___eq__type_differ(self): + label = object() + row_filter1 = self._makeOne(label) + row_filter2 = object() + self.assertNotEqual(row_filter1, row_filter2) + + def test___eq__same_value(self): + label = object() + row_filter1 = self._makeOne(label) + row_filter2 = self._makeOne(label) + self.assertEqual(row_filter1, row_filter2) + + def test_to_pb(self): + label = u'label' + row_filter = self._makeOne(label) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(apply_label_transformer=label) + self.assertEqual(pb_val, expected_pb) + + +class Test_FilterCombination(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import _FilterCombination + return _FilterCombination + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor_defaults(self): + row_filter = self._makeOne() + self.assertEqual(row_filter.filters, []) + + def test_constructor_explicit(self): + filters = object() + row_filter = self._makeOne(filters=filters) + self.assertTrue(row_filter.filters is filters) + + def test___eq__(self): + filters = object() + row_filter1 = self._makeOne(filters=filters) + row_filter2 = self._makeOne(filters=filters) + self.assertEqual(row_filter1, row_filter2) + + def test___eq__type_differ(self): + filters = object() + row_filter1 = self._makeOne(filters=filters) + row_filter2 = object() + self.assertNotEqual(row_filter1, row_filter2) + + +class TestRowFilterChain(unittest2.TestCase): + + def _getTargetClass(self): + from 
gcloud.bigtable.row_filters import RowFilterChain + return RowFilterChain + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_to_pb(self): + from gcloud.bigtable.row_filters import RowSampleFilter + from gcloud.bigtable.row_filters import StripValueTransformerFilter + + row_filter1 = StripValueTransformerFilter(True) + row_filter1_pb = row_filter1.to_pb() + + row_filter2 = RowSampleFilter(0.25) + row_filter2_pb = row_filter2.to_pb() + + row_filter3 = self._makeOne(filters=[row_filter1, row_filter2]) + filter_pb = row_filter3.to_pb() + + expected_pb = _RowFilterPB( + chain=_RowFilterChainPB( + filters=[row_filter1_pb, row_filter2_pb], + ), + ) + self.assertEqual(filter_pb, expected_pb) + + def test_to_pb_nested(self): + from gcloud.bigtable.row_filters import CellsRowLimitFilter + from gcloud.bigtable.row_filters import RowSampleFilter + from gcloud.bigtable.row_filters import StripValueTransformerFilter + + row_filter1 = StripValueTransformerFilter(True) + row_filter2 = RowSampleFilter(0.25) + + row_filter3 = self._makeOne(filters=[row_filter1, row_filter2]) + row_filter3_pb = row_filter3.to_pb() + + row_filter4 = CellsRowLimitFilter(11) + row_filter4_pb = row_filter4.to_pb() + + row_filter5 = self._makeOne(filters=[row_filter3, row_filter4]) + filter_pb = row_filter5.to_pb() + + expected_pb = _RowFilterPB( + chain=_RowFilterChainPB( + filters=[row_filter3_pb, row_filter4_pb], + ), + ) + self.assertEqual(filter_pb, expected_pb) + + +class TestRowFilterUnion(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import RowFilterUnion + return RowFilterUnion + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_to_pb(self): + from gcloud.bigtable.row_filters import RowSampleFilter + from gcloud.bigtable.row_filters import StripValueTransformerFilter + + row_filter1 = StripValueTransformerFilter(True) + row_filter1_pb = 
row_filter1.to_pb() + + row_filter2 = RowSampleFilter(0.25) + row_filter2_pb = row_filter2.to_pb() + + row_filter3 = self._makeOne(filters=[row_filter1, row_filter2]) + filter_pb = row_filter3.to_pb() + + expected_pb = _RowFilterPB( + interleave=_RowFilterInterleavePB( + filters=[row_filter1_pb, row_filter2_pb], + ), + ) + self.assertEqual(filter_pb, expected_pb) + + def test_to_pb_nested(self): + from gcloud.bigtable.row_filters import CellsRowLimitFilter + from gcloud.bigtable.row_filters import RowSampleFilter + from gcloud.bigtable.row_filters import StripValueTransformerFilter + + row_filter1 = StripValueTransformerFilter(True) + row_filter2 = RowSampleFilter(0.25) + + row_filter3 = self._makeOne(filters=[row_filter1, row_filter2]) + row_filter3_pb = row_filter3.to_pb() + + row_filter4 = CellsRowLimitFilter(11) + row_filter4_pb = row_filter4.to_pb() + + row_filter5 = self._makeOne(filters=[row_filter3, row_filter4]) + filter_pb = row_filter5.to_pb() + + expected_pb = _RowFilterPB( + interleave=_RowFilterInterleavePB( + filters=[row_filter3_pb, row_filter4_pb], + ), + ) + self.assertEqual(filter_pb, expected_pb) + + +class TestConditionalRowFilter(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.bigtable.row_filters import ConditionalRowFilter + return ConditionalRowFilter + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + base_filter = object() + true_filter = object() + false_filter = object() + cond_filter = self._makeOne(base_filter, + true_filter=true_filter, + false_filter=false_filter) + self.assertTrue(cond_filter.base_filter is base_filter) + self.assertTrue(cond_filter.true_filter is true_filter) + self.assertTrue(cond_filter.false_filter is false_filter) + + def test___eq__(self): + base_filter = object() + true_filter = object() + false_filter = object() + cond_filter1 = self._makeOne(base_filter, + true_filter=true_filter, + false_filter=false_filter) + 
cond_filter2 = self._makeOne(base_filter, + true_filter=true_filter, + false_filter=false_filter) + self.assertEqual(cond_filter1, cond_filter2) + + def test___eq__type_differ(self): + base_filter = object() + true_filter = object() + false_filter = object() + cond_filter1 = self._makeOne(base_filter, + true_filter=true_filter, + false_filter=false_filter) + cond_filter2 = object() + self.assertNotEqual(cond_filter1, cond_filter2) + + def test_to_pb(self): + from gcloud.bigtable.row_filters import CellsRowOffsetFilter + from gcloud.bigtable.row_filters import RowSampleFilter + from gcloud.bigtable.row_filters import StripValueTransformerFilter + + row_filter1 = StripValueTransformerFilter(True) + row_filter1_pb = row_filter1.to_pb() + + row_filter2 = RowSampleFilter(0.25) + row_filter2_pb = row_filter2.to_pb() + + row_filter3 = CellsRowOffsetFilter(11) + row_filter3_pb = row_filter3.to_pb() + + row_filter4 = self._makeOne(row_filter1, true_filter=row_filter2, + false_filter=row_filter3) + filter_pb = row_filter4.to_pb() + + expected_pb = _RowFilterPB( + condition=_RowFilterConditionPB( + predicate_filter=row_filter1_pb, + true_filter=row_filter2_pb, + false_filter=row_filter3_pb, + ), + ) + self.assertEqual(filter_pb, expected_pb) + + def test_to_pb_true_only(self): + from gcloud.bigtable.row_filters import RowSampleFilter + from gcloud.bigtable.row_filters import StripValueTransformerFilter + + row_filter1 = StripValueTransformerFilter(True) + row_filter1_pb = row_filter1.to_pb() + + row_filter2 = RowSampleFilter(0.25) + row_filter2_pb = row_filter2.to_pb() + + row_filter3 = self._makeOne(row_filter1, true_filter=row_filter2) + filter_pb = row_filter3.to_pb() + + expected_pb = _RowFilterPB( + condition=_RowFilterConditionPB( + predicate_filter=row_filter1_pb, + true_filter=row_filter2_pb, + ), + ) + self.assertEqual(filter_pb, expected_pb) + + def test_to_pb_false_only(self): + from gcloud.bigtable.row_filters import RowSampleFilter + from 
gcloud.bigtable.row_filters import StripValueTransformerFilter + + row_filter1 = StripValueTransformerFilter(True) + row_filter1_pb = row_filter1.to_pb() + + row_filter2 = RowSampleFilter(0.25) + row_filter2_pb = row_filter2.to_pb() + + row_filter3 = self._makeOne(row_filter1, false_filter=row_filter2) + filter_pb = row_filter3.to_pb() + + expected_pb = _RowFilterPB( + condition=_RowFilterConditionPB( + predicate_filter=row_filter1_pb, + false_filter=row_filter2_pb, + ), + ) + self.assertEqual(filter_pb, expected_pb) + + +def _ColumnRangePB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + return data_v2_pb2.ColumnRange(*args, **kw) + + +def _RowFilterPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + return data_v2_pb2.RowFilter(*args, **kw) + + +def _RowFilterChainPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + return data_v2_pb2.RowFilter.Chain(*args, **kw) + + +def _RowFilterConditionPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + return data_v2_pb2.RowFilter.Condition(*args, **kw) + + +def _RowFilterInterleavePB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + return data_v2_pb2.RowFilter.Interleave(*args, **kw) + + +def _TimestampRangePB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + return data_v2_pb2.TimestampRange(*args, **kw) + + +def _ValueRangePB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + data_pb2 as data_v2_pb2) + return data_v2_pb2.ValueRange(*args, **kw) diff --git a/env/Lib/site-packages/gcloud/bigtable/test_table.py b/env/Lib/site-packages/gcloud/bigtable/test_table.py new file mode 100644 index 0000000..1494b39 --- /dev/null +++ b/env/Lib/site-packages/gcloud/bigtable/test_table.py @@ -0,0 +1,565 @@ +# Copyright 2015 Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import unittest2 + + +class TestTable(unittest2.TestCase): + + PROJECT_ID = 'project-id' + INSTANCE_ID = 'instance-id' + INSTANCE_NAME = ('projects/' + PROJECT_ID + '/instances/' + INSTANCE_ID) + TABLE_ID = 'table-id' + TABLE_NAME = INSTANCE_NAME + '/tables/' + TABLE_ID + TIMEOUT_SECONDS = 1333 + ROW_KEY = b'row-key' + FAMILY_NAME = u'family' + QUALIFIER = b'qualifier' + TIMESTAMP_MICROS = 100 + VALUE = b'value' + + def _getTargetClass(self): + from gcloud.bigtable.table import Table + return Table + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + table_id = 'table-id' + instance = object() + + table = self._makeOne(table_id, instance) + self.assertEqual(table.table_id, table_id) + self.assertTrue(table._instance is instance) + + def test_name_property(self): + table_id = 'table-id' + instance_name = 'instance_name' + + instance = _Instance(instance_name) + table = self._makeOne(table_id, instance) + expected_name = instance_name + '/tables/' + table_id + self.assertEqual(table.name, expected_name) + + def test_column_family_factory(self): + from gcloud.bigtable.column_family import ColumnFamily + + table_id = 'table-id' + gc_rule = object() + table = self._makeOne(table_id, None) + column_family_id = 'column_family_id' + column_family = table.column_family(column_family_id, gc_rule=gc_rule) + + self.assertTrue(isinstance(column_family, 
ColumnFamily)) + self.assertEqual(column_family.column_family_id, column_family_id) + self.assertTrue(column_family.gc_rule is gc_rule) + self.assertEqual(column_family._table, table) + + def test_row_factory_direct(self): + from gcloud.bigtable.row import DirectRow + + table_id = 'table-id' + table = self._makeOne(table_id, None) + row_key = b'row_key' + row = table.row(row_key) + + self.assertTrue(isinstance(row, DirectRow)) + self.assertEqual(row._row_key, row_key) + self.assertEqual(row._table, table) + + def test_row_factory_conditional(self): + from gcloud.bigtable.row import ConditionalRow + + table_id = 'table-id' + table = self._makeOne(table_id, None) + row_key = b'row_key' + filter_ = object() + row = table.row(row_key, filter_=filter_) + + self.assertTrue(isinstance(row, ConditionalRow)) + self.assertEqual(row._row_key, row_key) + self.assertEqual(row._table, table) + + def test_row_factory_append(self): + from gcloud.bigtable.row import AppendRow + + table_id = 'table-id' + table = self._makeOne(table_id, None) + row_key = b'row_key' + row = table.row(row_key, append=True) + + self.assertTrue(isinstance(row, AppendRow)) + self.assertEqual(row._row_key, row_key) + self.assertEqual(row._table, table) + + def test_row_factory_failure(self): + table = self._makeOne(self.TABLE_ID, None) + with self.assertRaises(ValueError): + table.row(b'row_key', filter_=object(), append=True) + + def test___eq__(self): + instance = object() + table1 = self._makeOne(self.TABLE_ID, instance) + table2 = self._makeOne(self.TABLE_ID, instance) + self.assertEqual(table1, table2) + + def test___eq__type_differ(self): + table1 = self._makeOne(self.TABLE_ID, None) + table2 = object() + self.assertNotEqual(table1, table2) + + def test___ne__same_value(self): + instance = object() + table1 = self._makeOne(self.TABLE_ID, instance) + table2 = self._makeOne(self.TABLE_ID, instance) + comparison_val = (table1 != table2) + self.assertFalse(comparison_val) + + def test___ne__(self): + 
table1 = self._makeOne('table_id1', 'instance1') + table2 = self._makeOne('table_id2', 'instance2') + self.assertNotEqual(table1, table2) + + def _create_test_helper(self, initial_split_keys): + from gcloud._helpers import _to_bytes + from gcloud.bigtable._testing import _FakeStub + + client = _Client(timeout_seconds=self.TIMEOUT_SECONDS) + instance = _Instance(self.INSTANCE_NAME, client=client) + table = self._makeOne(self.TABLE_ID, instance) + + # Create request_pb + splits_pb = [ + _CreateTableRequestSplitPB(key=_to_bytes(key)) + for key in initial_split_keys or ()] + request_pb = _CreateTableRequestPB( + initial_splits=splits_pb, + parent=self.INSTANCE_NAME, + table_id=self.TABLE_ID, + ) + + # Create response_pb + response_pb = _TablePB() + + # Patch the stub used by the API method. + client._table_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + expected_result = None # create() has no return value. + + # Perform the method and check the result. + result = table.create(initial_split_keys=initial_split_keys) + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'CreateTable', + (request_pb, self.TIMEOUT_SECONDS), + {}, + )]) + + def test_create(self): + initial_split_keys = None + self._create_test_helper(initial_split_keys) + + def test_create_with_split_keys(self): + initial_split_keys = [b's1', b's2'] + self._create_test_helper(initial_split_keys) + + def _list_column_families_helper(self): + from gcloud.bigtable._testing import _FakeStub + + client = _Client(timeout_seconds=self.TIMEOUT_SECONDS) + instance = _Instance(self.INSTANCE_NAME, client=client) + table = self._makeOne(self.TABLE_ID, instance) + + # Create request_pb + request_pb = _GetTableRequestPB(name=self.TABLE_NAME) + + # Create response_pb + COLUMN_FAMILY_ID = 'foo' + column_family = _ColumnFamilyPB() + response_pb = _TablePB( + column_families={COLUMN_FAMILY_ID: column_family}, + ) + + # Patch the stub used by the API method. 
+ client._table_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + expected_result = { + COLUMN_FAMILY_ID: table.column_family(COLUMN_FAMILY_ID), + } + + # Perform the method and check the result. + result = table.list_column_families() + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'GetTable', + (request_pb, self.TIMEOUT_SECONDS), + {}, + )]) + + def test_list_column_families(self): + self._list_column_families_helper() + + def test_delete(self): + from google.protobuf import empty_pb2 + from gcloud.bigtable._testing import _FakeStub + + client = _Client(timeout_seconds=self.TIMEOUT_SECONDS) + instance = _Instance(self.INSTANCE_NAME, client=client) + table = self._makeOne(self.TABLE_ID, instance) + + # Create request_pb + request_pb = _DeleteTableRequestPB(name=self.TABLE_NAME) + + # Create response_pb + response_pb = empty_pb2.Empty() + + # Patch the stub used by the API method. + client._table_stub = stub = _FakeStub(response_pb) + + # Create expected_result. + expected_result = None # delete() has no return value. + + # Perform the method and check the result. + result = table.delete() + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'DeleteTable', + (request_pb, self.TIMEOUT_SECONDS), + {}, + )]) + + def _read_row_helper(self, chunks, expected_result): + from gcloud._testing import _Monkey + from gcloud.bigtable._testing import _FakeStub + from gcloud.bigtable import table as MUT + + client = _Client(timeout_seconds=self.TIMEOUT_SECONDS) + instance = _Instance(self.INSTANCE_NAME, client=client) + table = self._makeOne(self.TABLE_ID, instance) + + # Create request_pb + request_pb = object() # Returned by our mock. 
+ mock_created = [] + + def mock_create_row_request(table_name, row_key, filter_): + mock_created.append((table_name, row_key, filter_)) + return request_pb + + # Create response_iterator + if chunks is None: + response_iterator = iter(()) # no responses at all + else: + response_pb = _ReadRowsResponsePB(chunks=chunks) + response_iterator = iter([response_pb]) + + # Patch the stub used by the API method. + client._data_stub = stub = _FakeStub(response_iterator) + + # Perform the method and check the result. + filter_obj = object() + with _Monkey(MUT, _create_row_request=mock_create_row_request): + result = table.read_row(self.ROW_KEY, filter_=filter_obj) + + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'ReadRows', + (request_pb, self.TIMEOUT_SECONDS), + {}, + )]) + self.assertEqual(mock_created, + [(table.name, self.ROW_KEY, filter_obj)]) + + def test_read_row_miss_no__responses(self): + self._read_row_helper(None, None) + + def test_read_row_miss_no_chunks_in_response(self): + chunks = [] + self._read_row_helper(chunks, None) + + def test_read_row_complete(self): + from gcloud.bigtable.row_data import Cell + from gcloud.bigtable.row_data import PartialRowData + + chunk = _ReadRowsResponseCellChunkPB( + row_key=self.ROW_KEY, + family_name=self.FAMILY_NAME, + qualifier=self.QUALIFIER, + timestamp_micros=self.TIMESTAMP_MICROS, + value=self.VALUE, + commit_row=True, + ) + chunks = [chunk] + expected_result = PartialRowData(row_key=self.ROW_KEY) + family = expected_result._cells.setdefault(self.FAMILY_NAME, {}) + column = family.setdefault(self.QUALIFIER, []) + column.append(Cell.from_pb(chunk)) + self._read_row_helper(chunks, expected_result) + + def test_read_row_still_partial(self): + chunk = _ReadRowsResponseCellChunkPB( + row_key=self.ROW_KEY, + family_name=self.FAMILY_NAME, + qualifier=self.QUALIFIER, + timestamp_micros=self.TIMESTAMP_MICROS, + value=self.VALUE, + ) + # No "commit row". 
+ chunks = [chunk] + with self.assertRaises(ValueError): + self._read_row_helper(chunks, None) + + def test_read_rows(self): + from gcloud._testing import _Monkey + from gcloud.bigtable._testing import _FakeStub + from gcloud.bigtable.row_data import PartialRowsData + from gcloud.bigtable import table as MUT + + client = _Client(timeout_seconds=self.TIMEOUT_SECONDS) + instance = _Instance(self.INSTANCE_NAME, client=client) + table = self._makeOne(self.TABLE_ID, instance) + + # Create request_pb + request_pb = object() # Returned by our mock. + mock_created = [] + + def mock_create_row_request(table_name, **kwargs): + mock_created.append((table_name, kwargs)) + return request_pb + + # Create response_iterator + response_iterator = object() + + # Patch the stub used by the API method. + client._data_stub = stub = _FakeStub(response_iterator) + + # Create expected_result. + expected_result = PartialRowsData(response_iterator) + + # Perform the method and check the result. + start_key = b'start-key' + end_key = b'end-key' + filter_obj = object() + limit = 22 + with _Monkey(MUT, _create_row_request=mock_create_row_request): + result = table.read_rows( + start_key=start_key, end_key=end_key, filter_=filter_obj, + limit=limit) + + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'ReadRows', + (request_pb, self.TIMEOUT_SECONDS), + {}, + )]) + created_kwargs = { + 'start_key': start_key, + 'end_key': end_key, + 'filter_': filter_obj, + 'limit': limit, + } + self.assertEqual(mock_created, [(table.name, created_kwargs)]) + + def test_sample_row_keys(self): + from gcloud.bigtable._testing import _FakeStub + + client = _Client(timeout_seconds=self.TIMEOUT_SECONDS) + instance = _Instance(self.INSTANCE_NAME, client=client) + table = self._makeOne(self.TABLE_ID, instance) + + # Create request_pb + request_pb = _SampleRowKeysRequestPB(table_name=self.TABLE_NAME) + + # Create response_iterator + response_iterator = object() # Just passed to a 
mock. + + # Patch the stub used by the API method. + client._data_stub = stub = _FakeStub(response_iterator) + + # Create expected_result. + expected_result = response_iterator + + # Perform the method and check the result. + result = table.sample_row_keys() + self.assertEqual(result, expected_result) + self.assertEqual(stub.method_calls, [( + 'SampleRowKeys', + (request_pb, self.TIMEOUT_SECONDS), + {}, + )]) + + +class Test__create_row_request(unittest2.TestCase): + + def _callFUT(self, table_name, row_key=None, start_key=None, end_key=None, + filter_=None, limit=None): + from gcloud.bigtable.table import _create_row_request + return _create_row_request( + table_name, row_key=row_key, start_key=start_key, end_key=end_key, + filter_=filter_, limit=limit) + + def test_table_name_only(self): + table_name = 'table_name' + result = self._callFUT(table_name) + expected_result = _ReadRowsRequestPB( + table_name=table_name) + self.assertEqual(result, expected_result) + + def test_row_key_row_range_conflict(self): + with self.assertRaises(ValueError): + self._callFUT(None, row_key=object(), end_key=object()) + + def test_row_key(self): + table_name = 'table_name' + row_key = b'row_key' + result = self._callFUT(table_name, row_key=row_key) + expected_result = _ReadRowsRequestPB( + table_name=table_name, + ) + expected_result.rows.row_keys.append(row_key) + self.assertEqual(result, expected_result) + + def test_row_range_start_key(self): + table_name = 'table_name' + start_key = b'start_key' + result = self._callFUT(table_name, start_key=start_key) + expected_result = _ReadRowsRequestPB(table_name=table_name) + expected_result.rows.row_ranges.add(start_key_closed=start_key) + self.assertEqual(result, expected_result) + + def test_row_range_end_key(self): + table_name = 'table_name' + end_key = b'end_key' + result = self._callFUT(table_name, end_key=end_key) + expected_result = _ReadRowsRequestPB(table_name=table_name) + 
expected_result.rows.row_ranges.add(end_key_open=end_key) + self.assertEqual(result, expected_result) + + def test_row_range_both_keys(self): + table_name = 'table_name' + start_key = b'start_key' + end_key = b'end_key' + result = self._callFUT(table_name, start_key=start_key, + end_key=end_key) + expected_result = _ReadRowsRequestPB(table_name=table_name) + expected_result.rows.row_ranges.add( + start_key_closed=start_key, end_key_open=end_key) + self.assertEqual(result, expected_result) + + def test_with_filter(self): + from gcloud.bigtable.row_filters import RowSampleFilter + table_name = 'table_name' + row_filter = RowSampleFilter(0.33) + result = self._callFUT(table_name, filter_=row_filter) + expected_result = _ReadRowsRequestPB( + table_name=table_name, + filter=row_filter.to_pb(), + ) + self.assertEqual(result, expected_result) + + def test_with_limit(self): + table_name = 'table_name' + limit = 1337 + result = self._callFUT(table_name, limit=limit) + expected_result = _ReadRowsRequestPB( + table_name=table_name, + rows_limit=limit, + ) + self.assertEqual(result, expected_result) + + +def _CreateTableRequestPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + bigtable_table_admin_pb2 as table_admin_v2_pb2) + return table_admin_v2_pb2.CreateTableRequest(*args, **kw) + + +def _CreateTableRequestSplitPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + bigtable_table_admin_pb2 as table_admin_v2_pb2) + return table_admin_v2_pb2.CreateTableRequest.Split(*args, **kw) + + +def _DeleteTableRequestPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + bigtable_table_admin_pb2 as table_admin_v2_pb2) + return table_admin_v2_pb2.DeleteTableRequest(*args, **kw) + + +def _GetTableRequestPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + bigtable_table_admin_pb2 as table_admin_v2_pb2) + return table_admin_v2_pb2.GetTableRequest(*args, **kw) + + +def _ReadRowsRequestPB(*args, **kw): + from gcloud.bigtable._generated_v2 import 
( + bigtable_pb2 as messages_v2_pb2) + return messages_v2_pb2.ReadRowsRequest(*args, **kw) + + +def _ReadRowsResponseCellChunkPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + bigtable_pb2 as messages_v2_pb2) + family_name = kw.pop('family_name') + qualifier = kw.pop('qualifier') + message = messages_v2_pb2.ReadRowsResponse.CellChunk(*args, **kw) + message.family_name.value = family_name + message.qualifier.value = qualifier + return message + + +def _ReadRowsResponsePB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + bigtable_pb2 as messages_v2_pb2) + return messages_v2_pb2.ReadRowsResponse(*args, **kw) + + +def _SampleRowKeysRequestPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + bigtable_pb2 as messages_v2_pb2) + return messages_v2_pb2.SampleRowKeysRequest(*args, **kw) + + +def _TablePB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + table_pb2 as table_v2_pb2) + return table_v2_pb2.Table(*args, **kw) + + +def _ColumnFamilyPB(*args, **kw): + from gcloud.bigtable._generated_v2 import ( + table_pb2 as table_v2_pb2) + return table_v2_pb2.ColumnFamily(*args, **kw) + + +class _Client(object): + + data_stub = None + instance_stub = None + operations_stub = None + table_stub = None + + def __init__(self, timeout_seconds=None): + self.timeout_seconds = timeout_seconds + + +class _Instance(object): + + def __init__(self, name, client=None): + self.name = name + self._client = client diff --git a/env/Lib/site-packages/gcloud/client.py b/env/Lib/site-packages/gcloud/client.py new file mode 100644 index 0000000..a3bc021 --- /dev/null +++ b/env/Lib/site-packages/gcloud/client.py @@ -0,0 +1,186 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Base classes for client used to interact with Google Cloud APIs.""" + +from oauth2client.service_account import ServiceAccountCredentials +import six + +from gcloud._helpers import _determine_default_project +from gcloud.connection import Connection +from gcloud.credentials import get_credentials + + +class _ClientFactoryMixin(object): + """Mixin to allow factories that create credentials. + + .. note:: + + This class is virtual. + """ + + @classmethod + def from_service_account_json(cls, json_credentials_path, *args, **kwargs): + """Factory to retrieve JSON credentials while creating client. + + :type json_credentials_path: string + :param json_credentials_path: The path to a private key file (this file + was given to you when you created the + service account). This file must contain + a JSON object with a private key and + other credentials information (downloaded + from the Google APIs console). + + :type args: tuple + :param args: Remaining positional arguments to pass to constructor. + + :type kwargs: dict + :param kwargs: Remaining keyword arguments to pass to constructor. + + :rtype: :class:`gcloud.pubsub.client.Client` + :returns: The client created with the retrieved JSON credentials. + :raises: :class:`TypeError` if there is a conflict with the kwargs + and the credentials created by the factory. 
+ """ + if 'credentials' in kwargs: + raise TypeError('credentials must not be in keyword arguments') + credentials = ServiceAccountCredentials.from_json_keyfile_name( + json_credentials_path) + kwargs['credentials'] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_p12(cls, client_email, private_key_path, + *args, **kwargs): + """Factory to retrieve P12 credentials while creating client. + + .. note:: + Unless you have an explicit reason to use a PKCS12 key for your + service account, we recommend using a JSON key. + + :type client_email: string + :param client_email: The e-mail attached to the service account. + + :type private_key_path: string + :param private_key_path: The path to a private key file (this file was + given to you when you created the service + account). This file must be in P12 format. + + :type args: tuple + :param args: Remaining positional arguments to pass to constructor. + + :type kwargs: dict + :param kwargs: Remaining keyword arguments to pass to constructor. + + :rtype: :class:`gcloud.client.Client` + :returns: The client created with the retrieved P12 credentials. + :raises: :class:`TypeError` if there is a conflict with the kwargs + and the credentials created by the factory. + """ + if 'credentials' in kwargs: + raise TypeError('credentials must not be in keyword arguments') + credentials = ServiceAccountCredentials.from_p12_keyfile( + client_email, private_key_path) + kwargs['credentials'] = credentials + return cls(*args, **kwargs) + + +class Client(_ClientFactoryMixin): + """Client to bundle configuration needed for API requests. + + Assumes that the associated ``_connection_class`` only accepts + ``http`` and ``credentials`` in its constructor. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :param credentials: The OAuth2 Credentials to use for the connection + owned by this client. 
If not passed (and if no ``http`` + object is passed), falls back to the default inferred + from the environment. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: An optional HTTP object to make requests. If not passed, an + ``http`` object is created that is bound to the + ``credentials`` for the current object. + """ + + _connection_class = Connection + + def __init__(self, credentials=None, http=None): + if credentials is None and http is None: + credentials = get_credentials() + self.connection = self._connection_class( + credentials=credentials, http=http) + + +class _ClientProjectMixin(object): + """Mixin to allow setting the project on the client. + + :type project: string + :param project: the project which the client acts on behalf of. If not + passed falls back to the default inferred from the + environment. + + :raises: :class:`EnvironmentError` if the project is neither passed in nor + set in the environment. :class:`ValueError` if the project value + is invalid. + """ + + def __init__(self, project=None): + project = self._determine_default(project) + if project is None: + raise EnvironmentError('Project was not passed and could not be ' + 'determined from the environment.') + if isinstance(project, six.binary_type): + project = project.decode('utf-8') + if not isinstance(project, six.string_types): + raise ValueError('Project must be a string.') + self.project = project + + @staticmethod + def _determine_default(project): + """Helper: use default project detection.""" + return _determine_default_project(project) + + +class JSONClient(Client, _ClientProjectMixin): + """Client to for Google JSON-based API. + + Assumes such APIs use the ``project`` and the client needs to store this + value. + + :type project: string + :param project: the project which the client acts on behalf of. If not + passed falls back to the default inferred from the + environment. 
+ + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :param credentials: The OAuth2 Credentials to use for the connection + owned by this client. If not passed (and if no ``http`` + object is passed), falls back to the default inferred + from the environment. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: An optional HTTP object to make requests. If not passed, an + ``http`` object is created that is bound to the + ``credentials`` for the current object. + + :raises: :class:`ValueError` if the project is neither passed in nor + set in the environment. + """ + + def __init__(self, project=None, credentials=None, http=None): + _ClientProjectMixin.__init__(self, project=project) + Client.__init__(self, credentials=credentials, http=http) diff --git a/env/Lib/site-packages/gcloud/connection.py b/env/Lib/site-packages/gcloud/connection.py new file mode 100644 index 0000000..b7518d0 --- /dev/null +++ b/env/Lib/site-packages/gcloud/connection.py @@ -0,0 +1,355 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Shared implementation of connections to API servers.""" + +import json +from pkg_resources import get_distribution +import six +from six.moves.urllib.parse import urlencode + +import httplib2 + +from gcloud.exceptions import make_exception + + +API_BASE_URL = 'https://www.googleapis.com' +"""The base of the API call URL.""" + + +class Connection(object): + """A generic connection to Google Cloud Platform. + + Subclasses should understand only the basic types in method arguments, + however they should be capable of returning advanced types. + + If no value is passed in for ``http``, a :class:`httplib2.Http` object + will be created and authorized with the ``credentials``. If not, the + ``credentials`` and ``http`` need not be related. + + Subclasses may seek to use the private key from ``credentials`` to sign + data. + + A custom (non-``httplib2``) HTTP object must have a ``request`` method + which accepts the following arguments: + + * ``uri`` + * ``method`` + * ``body`` + * ``headers`` + + In addition, ``redirections`` and ``connection_type`` may be used. + + Without the use of ``credentials.authorize(http)``, a custom ``http`` + object will also need to be able to add a bearer token to API + requests and handle token refresh on 401 errors. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :param credentials: The OAuth2 Credentials to use for this connection. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: An optional HTTP object to make requests. + """ + + USER_AGENT = "gcloud-python/{0}".format(get_distribution('gcloud').version) + """The user agent for gcloud-python requests.""" + + SCOPE = None + """The scopes required for authenticating with a service. + + Needs to be set by subclasses. 
+ """ + + def __init__(self, credentials=None, http=None): + self._http = http + self._credentials = self._create_scoped_credentials( + credentials, self.SCOPE) + + @property + def credentials(self): + """Getter for current credentials. + + :rtype: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :returns: The credentials object associated with this connection. + """ + return self._credentials + + @property + def http(self): + """A getter for the HTTP transport used in talking to the API. + + :rtype: :class:`httplib2.Http` + :returns: A Http object used to transport data. + """ + if self._http is None: + self._http = httplib2.Http() + if self._credentials: + self._http = self._credentials.authorize(self._http) + return self._http + + @staticmethod + def _create_scoped_credentials(credentials, scope): + """Create a scoped set of credentials if it is required. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :param credentials: The OAuth2 Credentials to add a scope to. + + :type scope: list of URLs + :param scope: the effective service auth scopes for the connection. + + :rtype: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :returns: A new credentials object that has a scope added (if needed). + """ + if credentials: + try: + if credentials.create_scoped_required(): + credentials = credentials.create_scoped(scope) + except AttributeError: + pass + return credentials + + +class JSONConnection(Connection): + """A connection to a Google JSON-based API. + + These APIs are discovery based. For reference: + + https://developers.google.com/discovery/ + + This defines :meth:`api_request` for making a generic JSON + API request and API requests are created elsewhere. + + The class constants + + * :attr:`API_BASE_URL` + * :attr:`API_VERSION` + * :attr:`API_URL_TEMPLATE` + + must be updated by subclasses. 
+ """ + + API_BASE_URL = None + """The base of the API call URL.""" + + API_VERSION = None + """The version of the API, used in building the API call's URL.""" + + API_URL_TEMPLATE = None + """A template for the URL of a particular API call.""" + + @classmethod + def build_api_url(cls, path, query_params=None, + api_base_url=None, api_version=None): + """Construct an API url given a few components, some optional. + + Typically, you shouldn't need to use this method. + + :type path: string + :param path: The path to the resource (ie, ``'/b/bucket-name'``). + + :type query_params: dict or list + :param query_params: A dictionary of keys and values (or list of + key-value pairs) to insert into the query + string of the URL. + + :type api_base_url: string + :param api_base_url: The base URL for the API endpoint. + Typically you won't have to provide this. + + :type api_version: string + :param api_version: The version of the API to call. + Typically you shouldn't provide this and instead + use the default for the library. + + :rtype: string + :returns: The URL assembled from the pieces provided. + """ + url = cls.API_URL_TEMPLATE.format( + api_base_url=(api_base_url or cls.API_BASE_URL), + api_version=(api_version or cls.API_VERSION), + path=path) + + query_params = query_params or {} + if query_params: + url += '?' + urlencode(query_params) + + return url + + def _make_request(self, method, url, data=None, content_type=None, + headers=None, target_object=None): + """A low level method to send a request to the API. + + Typically, you shouldn't need to use this method. + + :type method: string + :param method: The HTTP method to use in the request. + + :type url: string + :param url: The URL to send the request to. + + :type data: string + :param data: The data to send as the body of the request. + + :type content_type: string + :param content_type: The proper MIME type of the data provided. 
+ + :type headers: dict + :param headers: A dictionary of HTTP headers to send with the request. + + :type target_object: object or :class:`NoneType` + :param target_object: Argument to be used by library callers. + This can allow custom behavior, for example, to + defer an HTTP request and complete initialization + of the object at a later time. + + :rtype: tuple of ``response`` (a dictionary of sorts) + and ``content`` (a string). + :returns: The HTTP response object and the content of the response, + returned by :meth:`_do_request`. + """ + headers = headers or {} + headers['Accept-Encoding'] = 'gzip' + + if data: + content_length = len(str(data)) + else: + content_length = 0 + + # NOTE: str is intended, bytes are sufficient for headers. + headers['Content-Length'] = str(content_length) + + if content_type: + headers['Content-Type'] = content_type + + headers['User-Agent'] = self.USER_AGENT + + return self._do_request(method, url, headers, data, target_object) + + def _do_request(self, method, url, headers, data, + target_object): # pylint: disable=unused-argument + """Low-level helper: perform the actual API request over HTTP. + + Allows batch context managers to override and defer a request. + + :type method: string + :param method: The HTTP method to use in the request. + + :type url: string + :param url: The URL to send the request to. + + :type headers: dict + :param headers: A dictionary of HTTP headers to send with the request. + + :type data: string + :param data: The data to send as the body of the request. + + :type target_object: object or :class:`NoneType` + :param target_object: Unused ``target_object`` here but may be used + by a superclass. + + :rtype: tuple of ``response`` (a dictionary of sorts) + and ``content`` (a string). + :returns: The HTTP response object and the content of the response. 
+ """ + return self.http.request(uri=url, method=method, headers=headers, + body=data) + + def api_request(self, method, path, query_params=None, + data=None, content_type=None, + api_base_url=None, api_version=None, + expect_json=True, _target_object=None): + """Make a request over the HTTP transport to the API. + + You shouldn't need to use this method, but if you plan to + interact with the API using these primitives, this is the + correct one to use. + + :type method: string + :param method: The HTTP method name (ie, ``GET``, ``POST``, etc). + Required. + + :type path: string + :param path: The path to the resource (ie, ``'/b/bucket-name'``). + Required. + + :type query_params: dict or list + :param query_params: A dictionary of keys and values (or list of + key-value pairs) to insert into the query + string of the URL. + + :type data: string + :param data: The data to send as the body of the request. Default is + the empty string. + + :type content_type: string + :param content_type: The proper MIME type of the data provided. Default + is None. + + :type api_base_url: string + :param api_base_url: The base URL for the API endpoint. + Typically you won't have to provide this. + Default is the standard API base URL. + + :type api_version: string + :param api_version: The version of the API to call. Typically + you shouldn't provide this and instead use + the default for the library. Default is the + latest API version supported by + gcloud-python. + + :type expect_json: bool + :param expect_json: If True, this method will try to parse the + response as JSON and raise an exception if + that cannot be done. Default is True. + + :type _target_object: :class:`object` or :class:`NoneType` + :param _target_object: Protected argument to be used by library + callers. This can allow custom behavior, for + example, to defer an HTTP request and complete + initialization of the object at a later time. + + :raises: Exception if the response code is not 200 OK. 
+ """ + url = self.build_api_url(path=path, query_params=query_params, + api_base_url=api_base_url, + api_version=api_version) + + # Making the executive decision that any dictionary + # data will be sent properly as JSON. + if data and isinstance(data, dict): + data = json.dumps(data) + content_type = 'application/json' + + response, content = self._make_request( + method=method, url=url, data=data, content_type=content_type, + target_object=_target_object) + + if not 200 <= response.status < 300: + raise make_exception(response, content, + error_info=method + ' ' + url) + + string_or_bytes = (six.binary_type, six.text_type) + if content and expect_json and isinstance(content, string_or_bytes): + content_type = response.get('content-type', '') + if not content_type.startswith('application/json'): + raise TypeError('Expected JSON, got %s' % content_type) + if isinstance(content, six.binary_type): + content = content.decode('utf-8') + return json.loads(content) + + return content diff --git a/env/Lib/site-packages/gcloud/credentials.py b/env/Lib/site-packages/gcloud/credentials.py new file mode 100644 index 0000000..3d95733 --- /dev/null +++ b/env/Lib/site-packages/gcloud/credentials.py @@ -0,0 +1,234 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""A simple wrapper around the OAuth2 credentials library.""" + +import base64 +import datetime +import six +from six.moves.urllib.parse import urlencode + +from oauth2client import client + +from gcloud._helpers import UTC +from gcloud._helpers import _NOW +from gcloud._helpers import _microseconds_from_datetime + + +def get_credentials(): + """Gets credentials implicitly from the current environment. + + .. note:: + + You should not need to use this function directly. Instead, use a + helper method which uses this method under the hood. + + Checks environment in order of precedence: + + * Google App Engine (production and testing) + * Environment variable :envvar:`GOOGLE_APPLICATION_CREDENTIALS` pointing to + a file with stored credentials information. + * Stored "well known" file associated with ``gcloud`` command line tool. + * Google Compute Engine production environment. + + The file referred to in :envvar:`GOOGLE_APPLICATION_CREDENTIALS` is + expected to contain information about credentials that are ready to use. + This means either service account information or user account information + with a ready-to-use refresh token: + + .. code:: json + + { + 'type': 'authorized_user', + 'client_id': '...', + 'client_secret': '...', + 'refresh_token': '...' + } + + or + + .. code:: json + + { + 'type': 'service_account', + 'client_id': '...', + 'client_email': '...', + 'private_key_id': '...', + 'private_key': '...' + } + + The second of these is simply a JSON key downloaded from the Google APIs + console. The first is a close cousin of the "client secrets" JSON file + used by :mod:`oauth2client.clientsecrets` but differs in formatting. 
+ + :rtype: :class:`oauth2client.client.GoogleCredentials`, + :class:`oauth2client.contrib.appengine.AppAssertionCredentials`, + :class:`oauth2client.contrib.gce.AppAssertionCredentials`, + :class:`oauth2client.service_account.ServiceAccountCredentials` + :returns: A new credentials instance corresponding to the implicit + environment. + """ + return client.GoogleCredentials.get_application_default() + + +def _get_signed_query_params(credentials, expiration, string_to_sign): + """Gets query parameters for creating a signed URL. + + :type credentials: :class:`oauth2client.client.AssertionCredentials` + :param credentials: The credentials used to create a private key + for signing text. + + :type expiration: int or long + :param expiration: When the signed URL should expire. + + :type string_to_sign: string + :param string_to_sign: The string to be signed by the credentials. + + :rtype: dict + :returns: Query parameters matching the signing credentials with a + signed payload. + """ + _, signature_bytes = credentials.sign_blob(string_to_sign) + signature = base64.b64encode(signature_bytes) + service_account_name = credentials.service_account_email + return { + 'GoogleAccessId': service_account_name, + 'Expires': str(expiration), + 'Signature': signature, + } + + +def _get_expiration_seconds(expiration): + """Convert 'expiration' to a number of seconds in the future. + + :type expiration: int, long, datetime.datetime, datetime.timedelta + :param expiration: When the signed URL should expire. + + :rtype: int + :returns: a timestamp as an absolute number of seconds. + """ + # If it's a timedelta, add it to `now` in UTC. + if isinstance(expiration, datetime.timedelta): + now = _NOW().replace(tzinfo=UTC) + expiration = now + expiration + + # If it's a datetime, convert to a timestamp. 
+ if isinstance(expiration, datetime.datetime): + micros = _microseconds_from_datetime(expiration) + expiration = micros // 10**6 + + if not isinstance(expiration, six.integer_types): + raise TypeError('Expected an integer timestamp, datetime, or ' + 'timedelta. Got %s' % type(expiration)) + return expiration + + +def generate_signed_url(credentials, resource, expiration, + api_access_endpoint='', + method='GET', content_md5=None, + content_type=None, response_type=None, + response_disposition=None, generation=None): + """Generate signed URL to provide query-string auth'n to a resource. + + .. note:: + + Assumes ``credentials`` implements a ``sign_blob()`` method that takes + bytes to sign and returns a pair of the key ID (unused here) and the + signed bytes (this is abstract in the base class + :class:`oauth2client.client.AssertionCredentials`). Also assumes + ``credentials`` has a ``service_account_email`` property which + identifies the credentials. + + .. note:: + + If you are on Google Compute Engine, you can't generate a signed URL. + Follow `Issue 922`_ for updates on this. If you'd like to be able to + generate a signed URL from GCE, you can use a standard service account + from a JSON file rather than a GCE service account. + + See headers `reference`_ for more details on optional arguments. + + .. _Issue 922: https://github.com/GoogleCloudPlatform/\ + gcloud-python/issues/922 + .. _reference: https://cloud.google.com/storage/docs/reference-headers + + :type credentials: :class:`oauth2client.appengine.AppAssertionCredentials` + :param credentials: Credentials object with an associated private key to + sign text. + + :type resource: string + :param resource: A pointer to a specific resource + (typically, ``/bucket-name/path/to/blob.txt``). + + :type expiration: :class:`int`, :class:`long`, :class:`datetime.datetime`, + :class:`datetime.timedelta` + :param expiration: When the signed URL should expire. 
+ + :type api_access_endpoint: str + :param api_access_endpoint: Optional URI base. Defaults to empty string. + + :type method: str + :param method: The HTTP verb that will be used when requesting the URL. + Defaults to ``'GET'``. + + :type content_md5: str + :param content_md5: (Optional) The MD5 hash of the object referenced by + ``resource``. + + :type content_type: str + :param content_type: (Optional) The content type of the object referenced + by ``resource``. + + :type response_type: str + :param response_type: (Optional) Content type of responses to requests for + the signed URL. Used to over-ride the content type of + the underlying resource. + + :type response_disposition: str + :param response_disposition: (Optional) Content disposition of responses to + requests for the signed URL. + + :type generation: str + :param generation: (Optional) A value that indicates which generation of + the resource to fetch. + + :rtype: string + :returns: A signed URL you can use to access the resource + until expiration. + """ + expiration = _get_expiration_seconds(expiration) + + # Generate the string to sign. + string_to_sign = '\n'.join([ + method, + content_md5 or '', + content_type or '', + str(expiration), + resource]) + + # Set the right query parameters. + query_params = _get_signed_query_params(credentials, + expiration, + string_to_sign) + if response_type is not None: + query_params['response-content-type'] = response_type + if response_disposition is not None: + query_params['response-content-disposition'] = response_disposition + if generation is not None: + query_params['generation'] = generation + + # Return the built URL. 
+ return '{endpoint}{resource}?{querystring}'.format( + endpoint=api_access_endpoint, resource=resource, + querystring=urlencode(query_params)) diff --git a/env/Lib/site-packages/gcloud/datastore/__init__.py b/env/Lib/site-packages/gcloud/datastore/__init__.py new file mode 100644 index 0000000..6393c2e --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/__init__.py @@ -0,0 +1,62 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Shortcut methods for getting set up with Google Cloud Datastore. + +You'll typically use these to get started with the API: + +>>> from gcloud import datastore +>>> +>>> client = datastore.Client() +>>> key = client.key('EntityKind', 1234) +>>> entity = datastore.Entity(key) +>>> query = client.query(kind='EntityKind') + +The main concepts with this API are: + +- :class:`gcloud.datastore.connection.Connection` + which represents a connection between your machine and the Cloud Datastore + API. + +- :class:`gcloud.datastore.client.Client` + which represents a project (string) and namespace (string) bundled with + a connection and has convenience methods for constructing objects with that + project / namespace. + +- :class:`gcloud.datastore.entity.Entity` + which represents a single entity in the datastore + (akin to a row in relational database world). 
+ +- :class:`gcloud.datastore.key.Key` + which represents a pointer to a particular entity in the datastore + (akin to a unique identifier in relational database world). + +- :class:`gcloud.datastore.query.Query` + which represents a lookup or search over the rows in the datastore. + +- :class:`gcloud.datastore.transaction.Transaction` + which represents an all-or-none transaction and enables consistency + when race conditions may occur. +""" + +from gcloud.datastore.batch import Batch +from gcloud.datastore.connection import Connection +from gcloud.datastore.client import Client +from gcloud.datastore.entity import Entity +from gcloud.datastore.key import Key +from gcloud.datastore.query import Query +from gcloud.datastore.transaction import Transaction + + +SCOPE = Connection.SCOPE diff --git a/env/Lib/site-packages/gcloud/datastore/_generated/__init__.py b/env/Lib/site-packages/gcloud/datastore/_generated/__init__.py new file mode 100644 index 0000000..19a0f26 --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/_generated/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Generated protobuf modules for Google Cloud Datastore API.""" diff --git a/env/Lib/site-packages/gcloud/datastore/_generated/_datastore.proto b/env/Lib/site-packages/gcloud/datastore/_generated/_datastore.proto new file mode 100644 index 0000000..6f6aedb --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/_generated/_datastore.proto @@ -0,0 +1,289 @@ +// Copyright (c) 2015, Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.datastore.v1beta3; + +import "google/api/annotations.proto"; +import "google/datastore/v1beta3/entity.proto"; +import "google/datastore/v1beta3/query.proto"; + +option java_multiple_files = true; +option java_outer_classname = "DatastoreProto"; +option java_package = "com.google.datastore.v1beta3"; + + +// Each RPC normalizes the partition IDs of the keys in its input entities, +// and always returns entities with keys with normalized partition IDs. +// This applies to all keys and entities, including those in values, except keys +// with both an empty path and an empty or unset partition ID. Normalization of +// input keys sets the project ID (if not already set) to the project ID from +// the request. +// +service Datastore { + // Look up entities by key. + rpc Lookup(LookupRequest) returns (LookupResponse) { + option (google.api.http) = { post: "/v1beta3/projects/{project_id}:lookup" body: "*" }; + } + + // Query for entities. 
+ rpc RunQuery(RunQueryRequest) returns (RunQueryResponse) { + option (google.api.http) = { post: "/v1beta3/projects/{project_id}:runQuery" body: "*" }; + } + + // Begin a new transaction. + rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) { + option (google.api.http) = { post: "/v1beta3/projects/{project_id}:beginTransaction" body: "*" }; + } + + // Commit a transaction, optionally creating, deleting or modifying some + // entities. + rpc Commit(CommitRequest) returns (CommitResponse) { + option (google.api.http) = { post: "/v1beta3/projects/{project_id}:commit" body: "*" }; + } + + // Roll back a transaction. + rpc Rollback(RollbackRequest) returns (RollbackResponse) { + option (google.api.http) = { post: "/v1beta3/projects/{project_id}:rollback" body: "*" }; + } + + // Allocate IDs for the given keys (useful for referencing an entity before + // it is inserted). + rpc AllocateIds(AllocateIdsRequest) returns (AllocateIdsResponse) { + option (google.api.http) = { post: "/v1beta3/projects/{project_id}:allocateIds" body: "*" }; + } +} + +// The request for [google.datastore.v1beta3.Datastore.Lookup][google.datastore.v1beta3.Datastore.Lookup]. +message LookupRequest { + // Project ID against which to make the request. + string project_id = 8; + + // Options for this lookup request. + ReadOptions read_options = 1; + + // Keys of entities to look up. + repeated Key keys = 3; +} + +// The response for [google.datastore.v1beta3.Datastore.Lookup][google.datastore.v1beta3.Datastore.Lookup]. +message LookupResponse { + // Entities found as `ResultType.FULL` entities. The order of results in this + // field is undefined and has no relation to the order of the keys in the + // input. + repeated EntityResult found = 1; + + // Entities not found as `ResultType.KEY_ONLY` entities. The order of results + // in this field is undefined and has no relation to the order of the keys + // in the input. 
+ repeated EntityResult missing = 2; + + // A list of keys that were not looked up due to resource constraints. The + // order of results in this field is undefined and has no relation to the + // order of the keys in the input. + repeated Key deferred = 3; +} + +// The request for [google.datastore.v1beta3.Datastore.RunQuery][google.datastore.v1beta3.Datastore.RunQuery]. +message RunQueryRequest { + // Project ID against which to make the request. + string project_id = 8; + + // Entities are partitioned into subsets, identified by a partition ID. + // Queries are scoped to a single partition. + // This partition ID is normalized with the standard default context + // partition ID. + PartitionId partition_id = 2; + + // The options for this query. + ReadOptions read_options = 1; + + // The type of query. + oneof query_type { + // The query to run. + Query query = 3; + + // The GQL query to run. + GqlQuery gql_query = 7; + } +} + +// The response for [google.datastore.v1beta3.Datastore.RunQuery][google.datastore.v1beta3.Datastore.RunQuery]. +message RunQueryResponse { + // A batch of query results (always present). + QueryResultBatch batch = 1; + + // The parsed form of the `GqlQuery` from the request, if it was set. + Query query = 2; +} + +// The request for [google.datastore.v1beta3.Datastore.BeginTransaction][google.datastore.v1beta3.Datastore.BeginTransaction]. +message BeginTransactionRequest { + // Project ID against which to make the request. + string project_id = 8; +} + +// The response for [google.datastore.v1beta3.Datastore.BeginTransaction][google.datastore.v1beta3.Datastore.BeginTransaction]. +message BeginTransactionResponse { + // The transaction identifier (always present). + bytes transaction = 1; +} + +// The request for [google.datastore.v1beta3.Datastore.Rollback][google.datastore.v1beta3.Datastore.Rollback]. +message RollbackRequest { + // Project ID against which to make the request. 
+ string project_id = 8; + + // The transaction identifier, returned by a call to + // [google.datastore.v1beta3.Datastore.BeginTransaction][google.datastore.v1beta3.Datastore.BeginTransaction]. + bytes transaction = 1; +} + +// The response for [google.datastore.v1beta3.Datastore.Rollback][google.datastore.v1beta3.Datastore.Rollback] +// (an empty message). +message RollbackResponse { + +} + +// The request for [google.datastore.v1beta3.Datastore.Commit][google.datastore.v1beta3.Datastore.Commit]. +message CommitRequest { + // Commit modes. + enum Mode { + // Unspecified. + MODE_UNSPECIFIED = 0; + + // Transactional. + TRANSACTIONAL = 1; + + // Non-transactional. + NON_TRANSACTIONAL = 2; + } + + // Project ID against which to make the request. + string project_id = 8; + + // The type of commit to perform. Defaults to `TRANSACTIONAL`. + Mode mode = 5; + + // Must be set when mode is `TRANSACTIONAL`. + oneof transaction_selector { + // The transaction in which to write. + bytes transaction = 1; + } + + // The mutations to perform. + // + // When mode is `TRANSACTIONAL`, mutations affecting a single entity are + // applied in order. The following sequences of mutations affecting a single + // entity are not permitted in a single `Commit` request: + // - `insert` followed by `insert` + // - `update` followed by `insert` + // - `upsert` followed by `insert` + // - `delete` followed by `update` + // + // When mode is `NON_TRANSACTIONAL`, no two mutations may affect a single + // entity. + repeated Mutation mutations = 6; +} + +// The response for [google.datastore.v1beta3.Datastore.Commit][google.datastore.v1beta3.Datastore.Commit]. +message CommitResponse { + // The result of performing the mutations. + // The i-th mutation result corresponds to the i-th mutation in the request. + repeated MutationResult mutation_results = 3; + + // The number of index entries updated during the commit. 
+ int32 index_updates = 4; +} + +// The request for [google.datastore.v1beta3.Datastore.AllocateIds][google.datastore.v1beta3.Datastore.AllocateIds]. +message AllocateIdsRequest { + // Project ID against which to make the request. + string project_id = 8; + + // A list of keys with incomplete key paths for which to allocate IDs. + // No key may be reserved/read-only. + repeated Key keys = 1; +} + +// The response for [google.datastore.v1beta3.Datastore.AllocateIds][google.datastore.v1beta3.Datastore.AllocateIds]. +message AllocateIdsResponse { + // The keys specified in the request (in the same order), each with + // its key path completed with a newly allocated ID. + repeated Key keys = 1; +} + +// A mutation to apply to an entity. +message Mutation { + // The mutation operation. + // + // For `insert`, `update`, and `upsert`: + // - The entity's key must not be reserved/read-only. + // - No property in the entity may have a reserved name, + // not even a property in an entity in a value. + // - No value in the entity may have meaning 18, + // not even a value in an entity in another value. + oneof operation { + // The entity to insert. The entity must not already exist. + // The entity's key's final path element may be incomplete. + Entity insert = 4; + + // The entity to update. The entity must already exist. + // Must have a complete key path. + Entity update = 5; + + // The entity to upsert. The entity may or may not already exist. + // The entity's key's final path element may be incomplete. + Entity upsert = 6; + + // The key of the entity to delete. The entity may or may not already exist. + // Must have a complete key path and must not be reserved/read-only. + Key delete = 7; + } +} + +// The result of applying a mutation. +message MutationResult { + // The automatically allocated key. + // Set only when the mutation allocated a key. + Key key = 3; +} + +// Options shared by read requests. +message ReadOptions { + // Read consistencies. 
+ enum ReadConsistency { + // Unspecified. + READ_CONSISTENCY_UNSPECIFIED = 0; + + // Strong consistency. + STRONG = 1; + + // Eventual consistency. + EVENTUAL = 2; + } + + // If not specified, lookups and ancestor queries default to + // `read_consistency`=`STRONG`, global queries default to + // `read_consistency`=`EVENTUAL`. + oneof consistency_type { + // The non-transactional read consistency to use. + // Cannot be set to `STRONG` for global queries. + ReadConsistency read_consistency = 1; + + // The transaction in which to read. + bytes transaction = 2; + } +} diff --git a/env/Lib/site-packages/gcloud/datastore/_generated/_entity.proto b/env/Lib/site-packages/gcloud/datastore/_generated/_entity.proto new file mode 100644 index 0000000..12423eb --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/_generated/_entity.proto @@ -0,0 +1,196 @@ +// Copyright (c) 2015, Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.datastore.v1beta3; + +import "google/api/annotations.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/timestamp.proto"; +import "google/type/latlng.proto"; + +option java_multiple_files = true; +option java_outer_classname = "EntityProto"; +option java_package = "com.google.datastore.v1beta3"; + + +// A partition ID identifies a grouping of entities. The grouping is always +// by project and namespace, however the namespace ID may be empty. 
+//
+// A partition ID contains several dimensions:
+// project ID and namespace ID.
+// Partition dimensions:
+// - A dimension may be `""`.
+// - A dimension must be valid UTF-8 bytes.
+// - A dimension's value must match regex `[A-Za-z\d\.\-_]{1,100}`
+// If the value of any dimension matches regex `__.*__`, the partition is
+// reserved/read-only.
+// A reserved/read-only partition ID is forbidden in certain documented
+// contexts.
+//
+// Foreign partition IDs (in which the project ID does
+// not match the context project ID) are discouraged.
+// Reads and writes of foreign partition IDs may fail if the project is not in an active state.
+message PartitionId {
+  // Project ID.
+  string project_id = 2;
+
+  // Namespace ID.
+  string namespace_id = 4;
+}
+
+// A unique identifier for an entity.
+// If a key's partition id or any of its path kinds or names are
+// reserved/read-only, the key is reserved/read-only.
+// A reserved/read-only key is forbidden in certain documented contexts.
+message Key {
+  // A (kind, ID/name) pair used to construct a key path.
+  //
+  // If either name or ID is set, the element is complete.
+  // If neither is set, the element is incomplete.
+  message PathElement {
+    // The kind of the entity.
+    // A kind matching regex `__.*__` is reserved/read-only.
+    // A kind must not contain more than 1500 bytes when UTF-8 encoded.
+    // Cannot be `""`.
+    string kind = 1;
+
+    // The type of id.
+    oneof id_type {
+      // The auto allocated ID of the entity.
+      // Never equal to zero. Values less than zero are discouraged and may not
+      // be supported in the future.
+      int64 id = 2;
+
+      // The name of the entity.
+      // A name matching regex `__.*__` is reserved/read-only.
+      // A name must not be more than 1500 bytes when UTF-8 encoded.
+      // Cannot be `""`.
+      string name = 3;
+    }
+  }
+
+  // Entities are partitioned into subsets, currently identified by a dataset
+  // (usually implicitly specified by the project) and namespace ID.
+ // Queries are scoped to a single partition. + PartitionId partition_id = 1; + + // The entity path. + // An entity path consists of one or more elements composed of a kind and a + // string or numerical identifier, which identify entities. The first + // element identifies a _root entity_, the second element identifies + // a _child_ of the root entity, the third element a child of the + // second entity, and so forth. The entities identified by all prefixes of + // the path are called the element's _ancestors_. + // An entity path is always fully complete: *all* of the entity's ancestors + // are required to be in the path along with the entity identifier itself. + // The only exception is that in some documented cases, the identifier in the + // last path element (for the entity) itself may be omitted. A path can never + // be empty. The path can have at most 100 elements. + repeated PathElement path = 2; +} + +// An array value. +message ArrayValue { + // Values in the array. + // The order of this array may not be preserved if it contains a mix of + // indexed and unindexed values. + repeated Value values = 1; +} + +// A message that can hold any of the supported value types and associated +// metadata. +message Value { + // Must have a value set. + oneof value_type { + // A null value. + google.protobuf.NullValue null_value = 11; + + // A boolean value. + bool boolean_value = 1; + + // An integer value. + int64 integer_value = 2; + + // A double value. + double double_value = 3; + + // A timestamp value. + // When stored in the Datastore, precise only to microseconds; + // any additional precision is rounded down. + google.protobuf.Timestamp timestamp_value = 10; + + // A key value. + Key key_value = 5; + + // A UTF-8 encoded string value. + // When `exclude_from_indexes` is false (it is indexed) and meaning is not + // 2, may have at most 1500 bytes. + // When meaning is 2, may have at most 2083 bytes. 
+ // Otherwise, may be set to at least 1,000,000 bytes + string string_value = 17; + + // A blob value. + // May have at most 1,000,000 bytes. + // When `exclude_from_indexes` is false, may have at most 1500 bytes. + // In JSON requests, must be base64-encoded. + bytes blob_value = 18; + + // A geo point value representing a point on the surface of Earth. + google.type.LatLng geo_point_value = 8; + + // An entity value. + // May have no key. + // May have a key with an incomplete key path. + // May have a reserved/read-only key. + Entity entity_value = 6; + + // An array value. + // Cannot contain another array value. + // A `Value` instance that sets field `array_value` must not set fields + // `meaning` or `exclude_from_indexes`. + ArrayValue array_value = 9; + } + + // The `meaning` field should only be populated for backwards compatibility. + int32 meaning = 14; + + // If the value should be excluded from all indexes including those defined + // explicitly. + bool exclude_from_indexes = 19; +} + +// An entity. +// +// An entity is limited to 1 megabyte when stored. That _roughly_ +// corresponds to a limit of 1 megabyte for the serialized form of this +// message. +message Entity { + // The entity's key. + // + // An entity must have a key, unless otherwise documented (for example, + // an entity in `Value.entity_value` may have no key). + // An entity's kind is its key's path's last element's kind, + // or null if it has no key. + Key key = 1; + + // The entity's properties. + // The map's keys are property names. + // A property name matching regex `__.*__` is reserved. + // A reserved property name is forbidden in certain documented contexts. + // The name must not contain more than 500 characters. + // The name cannot be `""`. 
+ map properties = 3; +} diff --git a/env/Lib/site-packages/gcloud/datastore/_generated/_query.proto b/env/Lib/site-packages/gcloud/datastore/_generated/_query.proto new file mode 100644 index 0000000..80cbb20 --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/_generated/_query.proto @@ -0,0 +1,281 @@ +// Copyright (c) 2015, Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.datastore.v1beta3; + +import "google/api/annotations.proto"; +import "google/datastore/v1beta3/entity.proto"; +import "google/protobuf/wrappers.proto"; + +option java_multiple_files = true; +option java_outer_classname = "QueryProto"; +option java_package = "com.google.datastore.v1beta3"; + + +// The result of fetching an entity from the datastore. +message EntityResult { + // Specifies what data the 'entity' field contains. + // A `ResultType` is either implied (for example, in `LookupResponse.found` + // from `datastore.proto`, it is always `FULL`) or specified by context (for + // example, in message `QueryResultBatch`, field `entity_result_type` + // specifies a `ResultType` for all the values in field `entity_results`). + enum ResultType { + // Unspecified. + RESULT_TYPE_UNSPECIFIED = 0; + + // The entire entity. + FULL = 1; + + // A projected subset of properties. The entity may have no key. A property + // value may have meaning 18. + PROJECTION = 2; + + // Only the key. + KEY_ONLY = 3; + } + + // The resulting entity. 
+ Entity entity = 1; + + // A cursor that points to the position after the result entity. + // Set only when the `EntityResult` is part of a `QueryResultBatch` message. + bytes cursor = 3; +} + +// A query. +message Query { + // The projection to return. Defaults to returning all properties. + repeated Projection projection = 2; + + // The kinds to query (if empty, returns entities of all kinds). + // Currently at most 1 kind may be specified. + repeated KindExpression kind = 3; + + // The filter to apply. + Filter filter = 4; + + // The order to apply to the query results (if empty, order is unspecified). + repeated PropertyOrder order = 5; + + // The properties to make distinct. The query results will contain the first + // result for each distinct combination of values for the given properties + // (if empty, all results are returned). + repeated PropertyReference distinct_on = 6; + + // A starting point for the query results. Query cursors are + // returned in query result batches. + bytes start_cursor = 7; + + // An ending point for the query results. Query cursors are + // returned in query result batches. + bytes end_cursor = 8; + + // The number of results to skip. Applies before limit, but after all other + // constraints. + // Must be >= 0. + int32 offset = 10; + + // The maximum number of results to return. Applies after all other + // constraints. + // Unspecified is interpreted as no limit. + // Must be >= 0. + google.protobuf.Int32Value limit = 12; +} + +// A representation of a kind. +message KindExpression { + // The name of the kind. + string name = 1; +} + +// A reference to a property relative to the kind expressions. +message PropertyReference { + // The name of the property. + string name = 2; +} + +// A representation of a property in a projection. +message Projection { + // The property to project. + PropertyReference property = 1; +} + +// The desired order for a specific property. +message PropertyOrder { + // Direction. 
+ enum Direction { + // Unspecified. + DIRECTION_UNSPECIFIED = 0; + + // Ascending. + ASCENDING = 1; + + // Descending. + DESCENDING = 2; + } + + // The property to order by. + PropertyReference property = 1; + + // The direction to order by. Defaults to `ASCENDING`. + Direction direction = 2; +} + +// A holder for any type of filter. +message Filter { + // The type of filter. + oneof filter_type { + // A composite filter. + CompositeFilter composite_filter = 1; + + // A filter on a property. + PropertyFilter property_filter = 2; + } +} + +// A filter that merges the multiple other filters using the given operator. +message CompositeFilter { + // Composite filter operator. + enum Operator { + // Unspecified. This value must not be used. + OPERATOR_UNSPECIFIED = 0; + + // And. + AND = 1; + } + + // The operator for combining multiple filters. + Operator op = 1; + + // The list of filters to combine. + // Must contain at least one filter. + repeated Filter filters = 2; +} + +// A filter on a specific property. +message PropertyFilter { + // Property filter operator. + enum Operator { + // Unspecified. This value must not be used. + OPERATOR_UNSPECIFIED = 0; + + // Less than. + LESS_THAN = 1; + + // Less than or equal. + LESS_THAN_OR_EQUAL = 2; + + // Greater than. + GREATER_THAN = 3; + + // Greater than or equal. + GREATER_THAN_OR_EQUAL = 4; + + // Equal. + EQUAL = 5; + + // Has ancestor. + HAS_ANCESTOR = 11; + } + + // The property to filter by. + PropertyReference property = 1; + + // The operator to filter by. + Operator op = 2; + + // The value to compare the property to. + Value value = 3; +} + +// A GQL query. +message GqlQuery { + // A string of the format described + // [here](https://developers.google.com/datastore/docs/concepts/gql). + string query_string = 1; + + // When false, the query string must not contain any literals and instead + // must bind all values. 
For example, + // `SELECT * FROM Kind WHERE a = 'string literal'` is not allowed, while + // `SELECT * FROM Kind WHERE a = @value` is. + bool allow_literals = 2; + + // For each non-reserved named binding site in the query string, + // there must be a named parameter with that name, + // but not necessarily the inverse. + // Key must match regex `[A-Za-z_$][A-Za-z_$0-9]*`, must not match regex + // `__.*__`, and must not be `""`. + map named_bindings = 5; + + // Numbered binding site @1 references the first numbered parameter, + // effectively using 1-based indexing, rather than the usual 0. + // For each binding site numbered i in `query_string`, + // there must be an i-th numbered parameter. + // The inverse must also be true. + repeated GqlQueryParameter positional_bindings = 4; +} + +// A binding parameter for a GQL query. +message GqlQueryParameter { + // The type of parameter. + oneof parameter_type { + // Value. + Value value = 2; + + // Cursor. + bytes cursor = 3; + } +} + +// A batch of results produced by a query. +message QueryResultBatch { + // The possible values for the `more_results` field. + enum MoreResultsType { + // Unspecified. This value is never used. + MORE_RESULTS_TYPE_UNSPECIFIED = 0; + + // There may be additional batches to fetch from this query. + NOT_FINISHED = 1; + + // The query is finished, but there may be more results after the limit. + MORE_RESULTS_AFTER_LIMIT = 2; + + // The query is finished, but there may be more results after the end cursor. + MORE_RESULTS_AFTER_CURSOR = 4; + + // The query has been exhausted. + NO_MORE_RESULTS = 3; + } + + // The number of results skipped, typically because of an offset. + int32 skipped_results = 6; + + // A cursor that points to the position after the last skipped result. + // Will be set when `skipped_results` != 0. + bytes skipped_cursor = 3; + + // The result type for every entity in `entity_results`. + EntityResult.ResultType entity_result_type = 1; + + // The results for this batch. 
+ repeated EntityResult entity_results = 2; + + // A cursor that points to the position after the last result in the batch. + bytes end_cursor = 4; + + // The state of the query after the current batch. + MoreResultsType more_results = 5; +} diff --git a/env/Lib/site-packages/gcloud/datastore/_generated/datastore_grpc_pb2.py b/env/Lib/site-packages/gcloud/datastore/_generated/datastore_grpc_pb2.py new file mode 100644 index 0000000..5e64834 --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/_generated/datastore_grpc_pb2.py @@ -0,0 +1,279 @@ +import abc +from grpc.beta import implementations as beta_implementations +from grpc.early_adopter import implementations as early_adopter_implementations +from grpc.framework.alpha import utilities as alpha_utilities +from grpc.framework.common import cardinality +from grpc.framework.interfaces.face import utilities as face_utilities +class EarlyAdopterDatastoreServicer(object): + """""" + __metaclass__ = abc.ABCMeta + @abc.abstractmethod + def Lookup(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def RunQuery(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def BeginTransaction(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def Commit(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def Rollback(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def AllocateIds(self, request, context): + raise NotImplementedError() +class EarlyAdopterDatastoreServer(object): + """""" + __metaclass__ = abc.ABCMeta + @abc.abstractmethod + def start(self): + raise NotImplementedError() + @abc.abstractmethod + def stop(self): + raise NotImplementedError() +class EarlyAdopterDatastoreStub(object): + """""" + __metaclass__ = abc.ABCMeta + @abc.abstractmethod + def Lookup(self, request): + raise NotImplementedError() + Lookup.async = None + @abc.abstractmethod + def RunQuery(self, request): + 
raise NotImplementedError() + RunQuery.async = None + @abc.abstractmethod + def BeginTransaction(self, request): + raise NotImplementedError() + BeginTransaction.async = None + @abc.abstractmethod + def Commit(self, request): + raise NotImplementedError() + Commit.async = None + @abc.abstractmethod + def Rollback(self, request): + raise NotImplementedError() + Rollback.async = None + @abc.abstractmethod + def AllocateIds(self, request): + raise NotImplementedError() + AllocateIds.async = None +def early_adopter_create_Datastore_server(servicer, port, private_key=None, certificate_chain=None): + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + method_service_descriptions = { + "AllocateIds": alpha_utilities.unary_unary_service_description( + servicer.AllocateIds, + gcloud.datastore._generated.datastore_pb2.AllocateIdsRequest.FromString, + gcloud.datastore._generated.datastore_pb2.AllocateIdsResponse.SerializeToString, + ), + "BeginTransaction": alpha_utilities.unary_unary_service_description( + servicer.BeginTransaction, + gcloud.datastore._generated.datastore_pb2.BeginTransactionRequest.FromString, + gcloud.datastore._generated.datastore_pb2.BeginTransactionResponse.SerializeToString, + ), + "Commit": alpha_utilities.unary_unary_service_description( + servicer.Commit, + gcloud.datastore._generated.datastore_pb2.CommitRequest.FromString, + gcloud.datastore._generated.datastore_pb2.CommitResponse.SerializeToString, 
+ ), + "Lookup": alpha_utilities.unary_unary_service_description( + servicer.Lookup, + gcloud.datastore._generated.datastore_pb2.LookupRequest.FromString, + gcloud.datastore._generated.datastore_pb2.LookupResponse.SerializeToString, + ), + "Rollback": alpha_utilities.unary_unary_service_description( + servicer.Rollback, + gcloud.datastore._generated.datastore_pb2.RollbackRequest.FromString, + gcloud.datastore._generated.datastore_pb2.RollbackResponse.SerializeToString, + ), + "RunQuery": alpha_utilities.unary_unary_service_description( + servicer.RunQuery, + gcloud.datastore._generated.datastore_pb2.RunQueryRequest.FromString, + gcloud.datastore._generated.datastore_pb2.RunQueryResponse.SerializeToString, + ), + } + return early_adopter_implementations.server("google.datastore.v1beta3.Datastore", method_service_descriptions, port, private_key=private_key, certificate_chain=certificate_chain) +def early_adopter_create_Datastore_stub(host, port, metadata_transformer=None, secure=False, root_certificates=None, private_key=None, certificate_chain=None, server_host_override=None): + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + method_invocation_descriptions = { + "AllocateIds": alpha_utilities.unary_unary_invocation_description( + gcloud.datastore._generated.datastore_pb2.AllocateIdsRequest.SerializeToString, + gcloud.datastore._generated.datastore_pb2.AllocateIdsResponse.FromString, + ), + "BeginTransaction": 
alpha_utilities.unary_unary_invocation_description( + gcloud.datastore._generated.datastore_pb2.BeginTransactionRequest.SerializeToString, + gcloud.datastore._generated.datastore_pb2.BeginTransactionResponse.FromString, + ), + "Commit": alpha_utilities.unary_unary_invocation_description( + gcloud.datastore._generated.datastore_pb2.CommitRequest.SerializeToString, + gcloud.datastore._generated.datastore_pb2.CommitResponse.FromString, + ), + "Lookup": alpha_utilities.unary_unary_invocation_description( + gcloud.datastore._generated.datastore_pb2.LookupRequest.SerializeToString, + gcloud.datastore._generated.datastore_pb2.LookupResponse.FromString, + ), + "Rollback": alpha_utilities.unary_unary_invocation_description( + gcloud.datastore._generated.datastore_pb2.RollbackRequest.SerializeToString, + gcloud.datastore._generated.datastore_pb2.RollbackResponse.FromString, + ), + "RunQuery": alpha_utilities.unary_unary_invocation_description( + gcloud.datastore._generated.datastore_pb2.RunQueryRequest.SerializeToString, + gcloud.datastore._generated.datastore_pb2.RunQueryResponse.FromString, + ), + } + return early_adopter_implementations.stub("google.datastore.v1beta3.Datastore", method_invocation_descriptions, host, port, metadata_transformer=metadata_transformer, secure=secure, root_certificates=root_certificates, private_key=private_key, certificate_chain=certificate_chain, server_host_override=server_host_override) + +class BetaDatastoreServicer(object): + """""" + __metaclass__ = abc.ABCMeta + @abc.abstractmethod + def Lookup(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def RunQuery(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def BeginTransaction(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def Commit(self, request, context): + raise NotImplementedError() + @abc.abstractmethod + def Rollback(self, request, context): + raise NotImplementedError() + @abc.abstractmethod 
+ def AllocateIds(self, request, context): + raise NotImplementedError() + +class BetaDatastoreStub(object): + """The interface to which stubs will conform.""" + __metaclass__ = abc.ABCMeta + @abc.abstractmethod + def Lookup(self, request, timeout): + raise NotImplementedError() + Lookup.future = None + @abc.abstractmethod + def RunQuery(self, request, timeout): + raise NotImplementedError() + RunQuery.future = None + @abc.abstractmethod + def BeginTransaction(self, request, timeout): + raise NotImplementedError() + BeginTransaction.future = None + @abc.abstractmethod + def Commit(self, request, timeout): + raise NotImplementedError() + Commit.future = None + @abc.abstractmethod + def Rollback(self, request, timeout): + raise NotImplementedError() + Rollback.future = None + @abc.abstractmethod + def AllocateIds(self, request, timeout): + raise NotImplementedError() + AllocateIds.future = None + +def beta_create_Datastore_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + request_deserializers = { + ('google.datastore.v1beta3.Datastore', 'AllocateIds'): gcloud.datastore._generated.datastore_pb2.AllocateIdsRequest.FromString, + ('google.datastore.v1beta3.Datastore', 'BeginTransaction'): gcloud.datastore._generated.datastore_pb2.BeginTransactionRequest.FromString, + ('google.datastore.v1beta3.Datastore', 'Commit'): 
gcloud.datastore._generated.datastore_pb2.CommitRequest.FromString, + ('google.datastore.v1beta3.Datastore', 'Lookup'): gcloud.datastore._generated.datastore_pb2.LookupRequest.FromString, + ('google.datastore.v1beta3.Datastore', 'Rollback'): gcloud.datastore._generated.datastore_pb2.RollbackRequest.FromString, + ('google.datastore.v1beta3.Datastore', 'RunQuery'): gcloud.datastore._generated.datastore_pb2.RunQueryRequest.FromString, + } + response_serializers = { + ('google.datastore.v1beta3.Datastore', 'AllocateIds'): gcloud.datastore._generated.datastore_pb2.AllocateIdsResponse.SerializeToString, + ('google.datastore.v1beta3.Datastore', 'BeginTransaction'): gcloud.datastore._generated.datastore_pb2.BeginTransactionResponse.SerializeToString, + ('google.datastore.v1beta3.Datastore', 'Commit'): gcloud.datastore._generated.datastore_pb2.CommitResponse.SerializeToString, + ('google.datastore.v1beta3.Datastore', 'Lookup'): gcloud.datastore._generated.datastore_pb2.LookupResponse.SerializeToString, + ('google.datastore.v1beta3.Datastore', 'Rollback'): gcloud.datastore._generated.datastore_pb2.RollbackResponse.SerializeToString, + ('google.datastore.v1beta3.Datastore', 'RunQuery'): gcloud.datastore._generated.datastore_pb2.RunQueryResponse.SerializeToString, + } + method_implementations = { + ('google.datastore.v1beta3.Datastore', 'AllocateIds'): face_utilities.unary_unary_inline(servicer.AllocateIds), + ('google.datastore.v1beta3.Datastore', 'BeginTransaction'): face_utilities.unary_unary_inline(servicer.BeginTransaction), + ('google.datastore.v1beta3.Datastore', 'Commit'): face_utilities.unary_unary_inline(servicer.Commit), + ('google.datastore.v1beta3.Datastore', 'Lookup'): face_utilities.unary_unary_inline(servicer.Lookup), + ('google.datastore.v1beta3.Datastore', 'Rollback'): face_utilities.unary_unary_inline(servicer.Rollback), + ('google.datastore.v1beta3.Datastore', 'RunQuery'): face_utilities.unary_unary_inline(servicer.RunQuery), + } + server_options = 
beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + +def beta_create_Datastore_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + import gcloud.datastore._generated.datastore_pb2 + request_serializers = { + ('google.datastore.v1beta3.Datastore', 'AllocateIds'): gcloud.datastore._generated.datastore_pb2.AllocateIdsRequest.SerializeToString, + ('google.datastore.v1beta3.Datastore', 'BeginTransaction'): gcloud.datastore._generated.datastore_pb2.BeginTransactionRequest.SerializeToString, + ('google.datastore.v1beta3.Datastore', 'Commit'): gcloud.datastore._generated.datastore_pb2.CommitRequest.SerializeToString, + ('google.datastore.v1beta3.Datastore', 'Lookup'): gcloud.datastore._generated.datastore_pb2.LookupRequest.SerializeToString, + ('google.datastore.v1beta3.Datastore', 'Rollback'): gcloud.datastore._generated.datastore_pb2.RollbackRequest.SerializeToString, + ('google.datastore.v1beta3.Datastore', 'RunQuery'): gcloud.datastore._generated.datastore_pb2.RunQueryRequest.SerializeToString, + } + response_deserializers = { + ('google.datastore.v1beta3.Datastore', 'AllocateIds'): 
gcloud.datastore._generated.datastore_pb2.AllocateIdsResponse.FromString, + ('google.datastore.v1beta3.Datastore', 'BeginTransaction'): gcloud.datastore._generated.datastore_pb2.BeginTransactionResponse.FromString, + ('google.datastore.v1beta3.Datastore', 'Commit'): gcloud.datastore._generated.datastore_pb2.CommitResponse.FromString, + ('google.datastore.v1beta3.Datastore', 'Lookup'): gcloud.datastore._generated.datastore_pb2.LookupResponse.FromString, + ('google.datastore.v1beta3.Datastore', 'Rollback'): gcloud.datastore._generated.datastore_pb2.RollbackResponse.FromString, + ('google.datastore.v1beta3.Datastore', 'RunQuery'): gcloud.datastore._generated.datastore_pb2.RunQueryResponse.FromString, + } + cardinalities = { + 'AllocateIds': cardinality.Cardinality.UNARY_UNARY, + 'BeginTransaction': cardinality.Cardinality.UNARY_UNARY, + 'Commit': cardinality.Cardinality.UNARY_UNARY, + 'Lookup': cardinality.Cardinality.UNARY_UNARY, + 'Rollback': cardinality.Cardinality.UNARY_UNARY, + 'RunQuery': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.datastore.v1beta3.Datastore', cardinalities, options=stub_options) diff --git a/env/Lib/site-packages/gcloud/datastore/_generated/datastore_pb2.py b/env/Lib/site-packages/gcloud/datastore/_generated/datastore_pb2.py new file mode 100644 index 0000000..ffba033 --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/_generated/datastore_pb2.py @@ -0,0 +1,862 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/datastore/v1beta3/datastore.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from gcloud.datastore._generated import entity_pb2 as google_dot_datastore_dot_v1beta3_dot_entity__pb2 +from gcloud.datastore._generated import query_pb2 as google_dot_datastore_dot_v1beta3_dot_query__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/datastore/v1beta3/datastore.proto', + package='google.datastore.v1beta3', + syntax='proto3', + serialized_pb=b'\n(google/datastore/v1beta3/datastore.proto\x12\x18google.datastore.v1beta3\x1a\x1cgoogle/api/annotations.proto\x1a%google/datastore/v1beta3/entity.proto\x1a$google/datastore/v1beta3/query.proto\"\x8d\x01\n\rLookupRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12;\n\x0cread_options\x18\x01 \x01(\x0b\x32%.google.datastore.v1beta3.ReadOptions\x12+\n\x04keys\x18\x03 \x03(\x0b\x32\x1d.google.datastore.v1beta3.Key\"\xb1\x01\n\x0eLookupResponse\x12\x35\n\x05\x66ound\x18\x01 \x03(\x0b\x32&.google.datastore.v1beta3.EntityResult\x12\x37\n\x07missing\x18\x02 \x03(\x0b\x32&.google.datastore.v1beta3.EntityResult\x12/\n\x08\x64\x65\x66\x65rred\x18\x03 \x03(\x0b\x32\x1d.google.datastore.v1beta3.Key\"\x98\x02\n\x0fRunQueryRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12;\n\x0cpartition_id\x18\x02 \x01(\x0b\x32%.google.datastore.v1beta3.PartitionId\x12;\n\x0cread_options\x18\x01 \x01(\x0b\x32%.google.datastore.v1beta3.ReadOptions\x12\x30\n\x05query\x18\x03 \x01(\x0b\x32\x1f.google.datastore.v1beta3.QueryH\x00\x12\x37\n\tgql_query\x18\x07 
\x01(\x0b\x32\".google.datastore.v1beta3.GqlQueryH\x00\x42\x0c\n\nquery_type\"}\n\x10RunQueryResponse\x12\x39\n\x05\x62\x61tch\x18\x01 \x01(\x0b\x32*.google.datastore.v1beta3.QueryResultBatch\x12.\n\x05query\x18\x02 \x01(\x0b\x32\x1f.google.datastore.v1beta3.Query\"-\n\x17\x42\x65ginTransactionRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\":\n\x0fRollbackRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\"\x12\n\x10RollbackResponse\"\x8d\x02\n\rCommitRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12:\n\x04mode\x18\x05 \x01(\x0e\x32,.google.datastore.v1beta3.CommitRequest.Mode\x12\x15\n\x0btransaction\x18\x01 \x01(\x0cH\x00\x12\x35\n\tmutations\x18\x06 \x03(\x0b\x32\".google.datastore.v1beta3.Mutation\"F\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\x11\n\rTRANSACTIONAL\x10\x01\x12\x15\n\x11NON_TRANSACTIONAL\x10\x02\x42\x16\n\x14transaction_selector\"k\n\x0e\x43ommitResponse\x12\x42\n\x10mutation_results\x18\x03 \x03(\x0b\x32(.google.datastore.v1beta3.MutationResult\x12\x15\n\rindex_updates\x18\x04 \x01(\x05\"U\n\x12\x41llocateIdsRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12+\n\x04keys\x18\x01 \x03(\x0b\x32\x1d.google.datastore.v1beta3.Key\"B\n\x13\x41llocateIdsResponse\x12+\n\x04keys\x18\x01 \x03(\x0b\x32\x1d.google.datastore.v1beta3.Key\"\xe4\x01\n\x08Mutation\x12\x32\n\x06insert\x18\x04 \x01(\x0b\x32 .google.datastore.v1beta3.EntityH\x00\x12\x32\n\x06update\x18\x05 \x01(\x0b\x32 .google.datastore.v1beta3.EntityH\x00\x12\x32\n\x06upsert\x18\x06 \x01(\x0b\x32 .google.datastore.v1beta3.EntityH\x00\x12/\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x1d.google.datastore.v1beta3.KeyH\x00\x42\x0b\n\toperation\"<\n\x0eMutationResult\x12*\n\x03key\x18\x03 \x01(\x0b\x32\x1d.google.datastore.v1beta3.Key\"\xda\x01\n\x0bReadOptions\x12Q\n\x10read_consistency\x18\x01 
\x01(\x0e\x32\x35.google.datastore.v1beta3.ReadOptions.ReadConsistencyH\x00\x12\x15\n\x0btransaction\x18\x02 \x01(\x0cH\x00\"M\n\x0fReadConsistency\x12 \n\x1cREAD_CONSISTENCY_UNSPECIFIED\x10\x00\x12\n\n\x06STRONG\x10\x01\x12\x0c\n\x08\x45VENTUAL\x10\x02\x42\x12\n\x10\x63onsistency_type2\xb7\x07\n\tDatastore\x12\x8d\x01\n\x06Lookup\x12\'.google.datastore.v1beta3.LookupRequest\x1a(.google.datastore.v1beta3.LookupResponse\"0\x82\xd3\xe4\x93\x02*\"%/v1beta3/projects/{project_id}:lookup:\x01*\x12\x95\x01\n\x08RunQuery\x12).google.datastore.v1beta3.RunQueryRequest\x1a*.google.datastore.v1beta3.RunQueryResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1beta3/projects/{project_id}:runQuery:\x01*\x12\xb5\x01\n\x10\x42\x65ginTransaction\x12\x31.google.datastore.v1beta3.BeginTransactionRequest\x1a\x32.google.datastore.v1beta3.BeginTransactionResponse\":\x82\xd3\xe4\x93\x02\x34\"//v1beta3/projects/{project_id}:beginTransaction:\x01*\x12\x8d\x01\n\x06\x43ommit\x12\'.google.datastore.v1beta3.CommitRequest\x1a(.google.datastore.v1beta3.CommitResponse\"0\x82\xd3\xe4\x93\x02*\"%/v1beta3/projects/{project_id}:commit:\x01*\x12\x95\x01\n\x08Rollback\x12).google.datastore.v1beta3.RollbackRequest\x1a*.google.datastore.v1beta3.RollbackResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1beta3/projects/{project_id}:rollback:\x01*\x12\xa1\x01\n\x0b\x41llocateIds\x12,.google.datastore.v1beta3.AllocateIdsRequest\x1a-.google.datastore.v1beta3.AllocateIdsResponse\"5\x82\xd3\xe4\x93\x02/\"*/v1beta3/projects/{project_id}:allocateIds:\x01*B0\n\x1c\x63om.google.datastore.v1beta3B\x0e\x44\x61tastoreProtoP\x01\x62\x06proto3' + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_datastore_dot_v1beta3_dot_entity__pb2.DESCRIPTOR,google_dot_datastore_dot_v1beta3_dot_query__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_COMMITREQUEST_MODE = _descriptor.EnumDescriptor( + name='Mode', + full_name='google.datastore.v1beta3.CommitRequest.Mode', + filename=None, + file=DESCRIPTOR, + 
values=[ + _descriptor.EnumValueDescriptor( + name='MODE_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TRANSACTIONAL', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NON_TRANSACTIONAL', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1263, + serialized_end=1333, +) +_sym_db.RegisterEnumDescriptor(_COMMITREQUEST_MODE) + +_READOPTIONS_READCONSISTENCY = _descriptor.EnumDescriptor( + name='ReadConsistency', + full_name='google.datastore.v1beta3.ReadOptions.ReadConsistency', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='READ_CONSISTENCY_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='STRONG', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='EVENTUAL', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2038, + serialized_end=2115, +) +_sym_db.RegisterEnumDescriptor(_READOPTIONS_READCONSISTENCY) + + +_LOOKUPREQUEST = _descriptor.Descriptor( + name='LookupRequest', + full_name='google.datastore.v1beta3.LookupRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='google.datastore.v1beta3.LookupRequest.project_id', index=0, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='read_options', full_name='google.datastore.v1beta3.LookupRequest.read_options', index=1, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='keys', full_name='google.datastore.v1beta3.LookupRequest.keys', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=178, + serialized_end=319, +) + + +_LOOKUPRESPONSE = _descriptor.Descriptor( + name='LookupResponse', + full_name='google.datastore.v1beta3.LookupResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='found', full_name='google.datastore.v1beta3.LookupResponse.found', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='missing', full_name='google.datastore.v1beta3.LookupResponse.missing', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='deferred', full_name='google.datastore.v1beta3.LookupResponse.deferred', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=322, + serialized_end=499, +) + + +_RUNQUERYREQUEST = 
_descriptor.Descriptor( + name='RunQueryRequest', + full_name='google.datastore.v1beta3.RunQueryRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='google.datastore.v1beta3.RunQueryRequest.project_id', index=0, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='partition_id', full_name='google.datastore.v1beta3.RunQueryRequest.partition_id', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='read_options', full_name='google.datastore.v1beta3.RunQueryRequest.read_options', index=2, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='query', full_name='google.datastore.v1beta3.RunQueryRequest.query', index=3, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='gql_query', full_name='google.datastore.v1beta3.RunQueryRequest.gql_query', index=4, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + 
extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='query_type', full_name='google.datastore.v1beta3.RunQueryRequest.query_type', + index=0, containing_type=None, fields=[]), + ], + serialized_start=502, + serialized_end=782, +) + + +_RUNQUERYRESPONSE = _descriptor.Descriptor( + name='RunQueryResponse', + full_name='google.datastore.v1beta3.RunQueryResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='batch', full_name='google.datastore.v1beta3.RunQueryResponse.batch', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='query', full_name='google.datastore.v1beta3.RunQueryResponse.query', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=784, + serialized_end=909, +) + + +_BEGINTRANSACTIONREQUEST = _descriptor.Descriptor( + name='BeginTransactionRequest', + full_name='google.datastore.v1beta3.BeginTransactionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='google.datastore.v1beta3.BeginTransactionRequest.project_id', index=0, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + 
is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=911, + serialized_end=956, +) + + +_BEGINTRANSACTIONRESPONSE = _descriptor.Descriptor( + name='BeginTransactionResponse', + full_name='google.datastore.v1beta3.BeginTransactionResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='transaction', full_name='google.datastore.v1beta3.BeginTransactionResponse.transaction', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=958, + serialized_end=1005, +) + + +_ROLLBACKREQUEST = _descriptor.Descriptor( + name='RollbackRequest', + full_name='google.datastore.v1beta3.RollbackRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='google.datastore.v1beta3.RollbackRequest.project_id', index=0, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='transaction', full_name='google.datastore.v1beta3.RollbackRequest.transaction', index=1, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1007, + 
serialized_end=1065, +) + + +_ROLLBACKRESPONSE = _descriptor.Descriptor( + name='RollbackResponse', + full_name='google.datastore.v1beta3.RollbackResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1067, + serialized_end=1085, +) + + +_COMMITREQUEST = _descriptor.Descriptor( + name='CommitRequest', + full_name='google.datastore.v1beta3.CommitRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='google.datastore.v1beta3.CommitRequest.project_id', index=0, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mode', full_name='google.datastore.v1beta3.CommitRequest.mode', index=1, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='transaction', full_name='google.datastore.v1beta3.CommitRequest.transaction', index=2, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mutations', full_name='google.datastore.v1beta3.CommitRequest.mutations', index=3, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + 
nested_types=[], + enum_types=[ + _COMMITREQUEST_MODE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='transaction_selector', full_name='google.datastore.v1beta3.CommitRequest.transaction_selector', + index=0, containing_type=None, fields=[]), + ], + serialized_start=1088, + serialized_end=1357, +) + + +_COMMITRESPONSE = _descriptor.Descriptor( + name='CommitResponse', + full_name='google.datastore.v1beta3.CommitResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='mutation_results', full_name='google.datastore.v1beta3.CommitResponse.mutation_results', index=0, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='index_updates', full_name='google.datastore.v1beta3.CommitResponse.index_updates', index=1, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1359, + serialized_end=1466, +) + + +_ALLOCATEIDSREQUEST = _descriptor.Descriptor( + name='AllocateIdsRequest', + full_name='google.datastore.v1beta3.AllocateIdsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='google.datastore.v1beta3.AllocateIdsRequest.project_id', index=0, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='keys', full_name='google.datastore.v1beta3.AllocateIdsRequest.keys', index=1, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1468, + serialized_end=1553, +) + + +_ALLOCATEIDSRESPONSE = _descriptor.Descriptor( + name='AllocateIdsResponse', + full_name='google.datastore.v1beta3.AllocateIdsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='keys', full_name='google.datastore.v1beta3.AllocateIdsResponse.keys', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1555, + serialized_end=1621, +) + + +_MUTATION = _descriptor.Descriptor( + name='Mutation', + full_name='google.datastore.v1beta3.Mutation', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='insert', full_name='google.datastore.v1beta3.Mutation.insert', index=0, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update', full_name='google.datastore.v1beta3.Mutation.update', index=1, + number=5, type=11, cpp_type=10, 
label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='upsert', full_name='google.datastore.v1beta3.Mutation.upsert', index=2, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='delete', full_name='google.datastore.v1beta3.Mutation.delete', index=3, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='operation', full_name='google.datastore.v1beta3.Mutation.operation', + index=0, containing_type=None, fields=[]), + ], + serialized_start=1624, + serialized_end=1852, +) + + +_MUTATIONRESULT = _descriptor.Descriptor( + name='MutationResult', + full_name='google.datastore.v1beta3.MutationResult', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.datastore.v1beta3.MutationResult.key', index=0, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1854, + serialized_end=1914, +) + + +_READOPTIONS = _descriptor.Descriptor( + name='ReadOptions', + 
full_name='google.datastore.v1beta3.ReadOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='read_consistency', full_name='google.datastore.v1beta3.ReadOptions.read_consistency', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='transaction', full_name='google.datastore.v1beta3.ReadOptions.transaction', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _READOPTIONS_READCONSISTENCY, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='consistency_type', full_name='google.datastore.v1beta3.ReadOptions.consistency_type', + index=0, containing_type=None, fields=[]), + ], + serialized_start=1917, + serialized_end=2135, +) + +_LOOKUPREQUEST.fields_by_name['read_options'].message_type = _READOPTIONS +_LOOKUPREQUEST.fields_by_name['keys'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._KEY +_LOOKUPRESPONSE.fields_by_name['found'].message_type = google_dot_datastore_dot_v1beta3_dot_query__pb2._ENTITYRESULT +_LOOKUPRESPONSE.fields_by_name['missing'].message_type = google_dot_datastore_dot_v1beta3_dot_query__pb2._ENTITYRESULT +_LOOKUPRESPONSE.fields_by_name['deferred'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._KEY +_RUNQUERYREQUEST.fields_by_name['partition_id'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._PARTITIONID +_RUNQUERYREQUEST.fields_by_name['read_options'].message_type = _READOPTIONS 
+_RUNQUERYREQUEST.fields_by_name['query'].message_type = google_dot_datastore_dot_v1beta3_dot_query__pb2._QUERY +_RUNQUERYREQUEST.fields_by_name['gql_query'].message_type = google_dot_datastore_dot_v1beta3_dot_query__pb2._GQLQUERY +_RUNQUERYREQUEST.oneofs_by_name['query_type'].fields.append( + _RUNQUERYREQUEST.fields_by_name['query']) +_RUNQUERYREQUEST.fields_by_name['query'].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name['query_type'] +_RUNQUERYREQUEST.oneofs_by_name['query_type'].fields.append( + _RUNQUERYREQUEST.fields_by_name['gql_query']) +_RUNQUERYREQUEST.fields_by_name['gql_query'].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name['query_type'] +_RUNQUERYRESPONSE.fields_by_name['batch'].message_type = google_dot_datastore_dot_v1beta3_dot_query__pb2._QUERYRESULTBATCH +_RUNQUERYRESPONSE.fields_by_name['query'].message_type = google_dot_datastore_dot_v1beta3_dot_query__pb2._QUERY +_COMMITREQUEST.fields_by_name['mode'].enum_type = _COMMITREQUEST_MODE +_COMMITREQUEST.fields_by_name['mutations'].message_type = _MUTATION +_COMMITREQUEST_MODE.containing_type = _COMMITREQUEST +_COMMITREQUEST.oneofs_by_name['transaction_selector'].fields.append( + _COMMITREQUEST.fields_by_name['transaction']) +_COMMITREQUEST.fields_by_name['transaction'].containing_oneof = _COMMITREQUEST.oneofs_by_name['transaction_selector'] +_COMMITRESPONSE.fields_by_name['mutation_results'].message_type = _MUTATIONRESULT +_ALLOCATEIDSREQUEST.fields_by_name['keys'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._KEY +_ALLOCATEIDSRESPONSE.fields_by_name['keys'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._KEY +_MUTATION.fields_by_name['insert'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._ENTITY +_MUTATION.fields_by_name['update'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._ENTITY +_MUTATION.fields_by_name['upsert'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._ENTITY 
+_MUTATION.fields_by_name['delete'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._KEY +_MUTATION.oneofs_by_name['operation'].fields.append( + _MUTATION.fields_by_name['insert']) +_MUTATION.fields_by_name['insert'].containing_oneof = _MUTATION.oneofs_by_name['operation'] +_MUTATION.oneofs_by_name['operation'].fields.append( + _MUTATION.fields_by_name['update']) +_MUTATION.fields_by_name['update'].containing_oneof = _MUTATION.oneofs_by_name['operation'] +_MUTATION.oneofs_by_name['operation'].fields.append( + _MUTATION.fields_by_name['upsert']) +_MUTATION.fields_by_name['upsert'].containing_oneof = _MUTATION.oneofs_by_name['operation'] +_MUTATION.oneofs_by_name['operation'].fields.append( + _MUTATION.fields_by_name['delete']) +_MUTATION.fields_by_name['delete'].containing_oneof = _MUTATION.oneofs_by_name['operation'] +_MUTATIONRESULT.fields_by_name['key'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._KEY +_READOPTIONS.fields_by_name['read_consistency'].enum_type = _READOPTIONS_READCONSISTENCY +_READOPTIONS_READCONSISTENCY.containing_type = _READOPTIONS +_READOPTIONS.oneofs_by_name['consistency_type'].fields.append( + _READOPTIONS.fields_by_name['read_consistency']) +_READOPTIONS.fields_by_name['read_consistency'].containing_oneof = _READOPTIONS.oneofs_by_name['consistency_type'] +_READOPTIONS.oneofs_by_name['consistency_type'].fields.append( + _READOPTIONS.fields_by_name['transaction']) +_READOPTIONS.fields_by_name['transaction'].containing_oneof = _READOPTIONS.oneofs_by_name['consistency_type'] +DESCRIPTOR.message_types_by_name['LookupRequest'] = _LOOKUPREQUEST +DESCRIPTOR.message_types_by_name['LookupResponse'] = _LOOKUPRESPONSE +DESCRIPTOR.message_types_by_name['RunQueryRequest'] = _RUNQUERYREQUEST +DESCRIPTOR.message_types_by_name['RunQueryResponse'] = _RUNQUERYRESPONSE +DESCRIPTOR.message_types_by_name['BeginTransactionRequest'] = _BEGINTRANSACTIONREQUEST +DESCRIPTOR.message_types_by_name['BeginTransactionResponse'] = 
_BEGINTRANSACTIONRESPONSE +DESCRIPTOR.message_types_by_name['RollbackRequest'] = _ROLLBACKREQUEST +DESCRIPTOR.message_types_by_name['RollbackResponse'] = _ROLLBACKRESPONSE +DESCRIPTOR.message_types_by_name['CommitRequest'] = _COMMITREQUEST +DESCRIPTOR.message_types_by_name['CommitResponse'] = _COMMITRESPONSE +DESCRIPTOR.message_types_by_name['AllocateIdsRequest'] = _ALLOCATEIDSREQUEST +DESCRIPTOR.message_types_by_name['AllocateIdsResponse'] = _ALLOCATEIDSRESPONSE +DESCRIPTOR.message_types_by_name['Mutation'] = _MUTATION +DESCRIPTOR.message_types_by_name['MutationResult'] = _MUTATIONRESULT +DESCRIPTOR.message_types_by_name['ReadOptions'] = _READOPTIONS + +LookupRequest = _reflection.GeneratedProtocolMessageType('LookupRequest', (_message.Message,), dict( + DESCRIPTOR = _LOOKUPREQUEST, + __module__ = 'google.datastore.v1beta3.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.LookupRequest) + )) +_sym_db.RegisterMessage(LookupRequest) + +LookupResponse = _reflection.GeneratedProtocolMessageType('LookupResponse', (_message.Message,), dict( + DESCRIPTOR = _LOOKUPRESPONSE, + __module__ = 'google.datastore.v1beta3.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.LookupResponse) + )) +_sym_db.RegisterMessage(LookupResponse) + +RunQueryRequest = _reflection.GeneratedProtocolMessageType('RunQueryRequest', (_message.Message,), dict( + DESCRIPTOR = _RUNQUERYREQUEST, + __module__ = 'google.datastore.v1beta3.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.RunQueryRequest) + )) +_sym_db.RegisterMessage(RunQueryRequest) + +RunQueryResponse = _reflection.GeneratedProtocolMessageType('RunQueryResponse', (_message.Message,), dict( + DESCRIPTOR = _RUNQUERYRESPONSE, + __module__ = 'google.datastore.v1beta3.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.RunQueryResponse) + )) +_sym_db.RegisterMessage(RunQueryResponse) + +BeginTransactionRequest = 
_reflection.GeneratedProtocolMessageType('BeginTransactionRequest', (_message.Message,), dict( + DESCRIPTOR = _BEGINTRANSACTIONREQUEST, + __module__ = 'google.datastore.v1beta3.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.BeginTransactionRequest) + )) +_sym_db.RegisterMessage(BeginTransactionRequest) + +BeginTransactionResponse = _reflection.GeneratedProtocolMessageType('BeginTransactionResponse', (_message.Message,), dict( + DESCRIPTOR = _BEGINTRANSACTIONRESPONSE, + __module__ = 'google.datastore.v1beta3.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.BeginTransactionResponse) + )) +_sym_db.RegisterMessage(BeginTransactionResponse) + +RollbackRequest = _reflection.GeneratedProtocolMessageType('RollbackRequest', (_message.Message,), dict( + DESCRIPTOR = _ROLLBACKREQUEST, + __module__ = 'google.datastore.v1beta3.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.RollbackRequest) + )) +_sym_db.RegisterMessage(RollbackRequest) + +RollbackResponse = _reflection.GeneratedProtocolMessageType('RollbackResponse', (_message.Message,), dict( + DESCRIPTOR = _ROLLBACKRESPONSE, + __module__ = 'google.datastore.v1beta3.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.RollbackResponse) + )) +_sym_db.RegisterMessage(RollbackResponse) + +CommitRequest = _reflection.GeneratedProtocolMessageType('CommitRequest', (_message.Message,), dict( + DESCRIPTOR = _COMMITREQUEST, + __module__ = 'google.datastore.v1beta3.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.CommitRequest) + )) +_sym_db.RegisterMessage(CommitRequest) + +CommitResponse = _reflection.GeneratedProtocolMessageType('CommitResponse', (_message.Message,), dict( + DESCRIPTOR = _COMMITRESPONSE, + __module__ = 'google.datastore.v1beta3.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.CommitResponse) + )) 
+_sym_db.RegisterMessage(CommitResponse) + +AllocateIdsRequest = _reflection.GeneratedProtocolMessageType('AllocateIdsRequest', (_message.Message,), dict( + DESCRIPTOR = _ALLOCATEIDSREQUEST, + __module__ = 'google.datastore.v1beta3.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.AllocateIdsRequest) + )) +_sym_db.RegisterMessage(AllocateIdsRequest) + +AllocateIdsResponse = _reflection.GeneratedProtocolMessageType('AllocateIdsResponse', (_message.Message,), dict( + DESCRIPTOR = _ALLOCATEIDSRESPONSE, + __module__ = 'google.datastore.v1beta3.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.AllocateIdsResponse) + )) +_sym_db.RegisterMessage(AllocateIdsResponse) + +Mutation = _reflection.GeneratedProtocolMessageType('Mutation', (_message.Message,), dict( + DESCRIPTOR = _MUTATION, + __module__ = 'google.datastore.v1beta3.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Mutation) + )) +_sym_db.RegisterMessage(Mutation) + +MutationResult = _reflection.GeneratedProtocolMessageType('MutationResult', (_message.Message,), dict( + DESCRIPTOR = _MUTATIONRESULT, + __module__ = 'google.datastore.v1beta3.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.MutationResult) + )) +_sym_db.RegisterMessage(MutationResult) + +ReadOptions = _reflection.GeneratedProtocolMessageType('ReadOptions', (_message.Message,), dict( + DESCRIPTOR = _READOPTIONS, + __module__ = 'google.datastore.v1beta3.datastore_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.ReadOptions) + )) +_sym_db.RegisterMessage(ReadOptions) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), b'\n\034com.google.datastore.v1beta3B\016DatastoreProtoP\001') +# @@protoc_insertion_point(module_scope) diff --git a/env/Lib/site-packages/gcloud/datastore/_generated/entity_pb2.py 
b/env/Lib/site-packages/gcloud/datastore/_generated/entity_pb2.py new file mode 100644 index 0000000..3295047 --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/_generated/entity_pb2.py @@ -0,0 +1,493 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/datastore/v1beta3/entity.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/datastore/v1beta3/entity.proto', + package='google.datastore.v1beta3', + syntax='proto3', + serialized_pb=b'\n%google/datastore/v1beta3/entity.proto\x12\x18google.datastore.v1beta3\x1a\x1cgoogle/api/annotations.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x18google/type/latlng.proto\"7\n\x0bPartitionId\x12\x12\n\nproject_id\x18\x02 \x01(\t\x12\x14\n\x0cnamespace_id\x18\x04 \x01(\t\"\xc1\x01\n\x03Key\x12;\n\x0cpartition_id\x18\x01 \x01(\x0b\x32%.google.datastore.v1beta3.PartitionId\x12\x37\n\x04path\x18\x02 \x03(\x0b\x32).google.datastore.v1beta3.Key.PathElement\x1a\x44\n\x0bPathElement\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x0c\n\x02id\x18\x02 \x01(\x03H\x00\x12\x0e\n\x04name\x18\x03 \x01(\tH\x00\x42\t\n\x07id_type\"=\n\nArrayValue\x12/\n\x06values\x18\x01 \x03(\x0b\x32\x1f.google.datastore.v1beta3.Value\"\x80\x04\n\x05Value\x12\x30\n\nnull_value\x18\x0b 
\x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x17\n\rboolean_value\x18\x01 \x01(\x08H\x00\x12\x17\n\rinteger_value\x18\x02 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x03 \x01(\x01H\x00\x12\x35\n\x0ftimestamp_value\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x32\n\tkey_value\x18\x05 \x01(\x0b\x32\x1d.google.datastore.v1beta3.KeyH\x00\x12\x16\n\x0cstring_value\x18\x11 \x01(\tH\x00\x12\x14\n\nblob_value\x18\x12 \x01(\x0cH\x00\x12.\n\x0fgeo_point_value\x18\x08 \x01(\x0b\x32\x13.google.type.LatLngH\x00\x12\x38\n\x0c\x65ntity_value\x18\x06 \x01(\x0b\x32 .google.datastore.v1beta3.EntityH\x00\x12;\n\x0b\x61rray_value\x18\t \x01(\x0b\x32$.google.datastore.v1beta3.ArrayValueH\x00\x12\x0f\n\x07meaning\x18\x0e \x01(\x05\x12\x1c\n\x14\x65xclude_from_indexes\x18\x13 \x01(\x08\x42\x0c\n\nvalue_type\"\xce\x01\n\x06\x45ntity\x12*\n\x03key\x18\x01 \x01(\x0b\x32\x1d.google.datastore.v1beta3.Key\x12\x44\n\nproperties\x18\x03 \x03(\x0b\x32\x30.google.datastore.v1beta3.Entity.PropertiesEntry\x1aR\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.google.datastore.v1beta3.Value:\x02\x38\x01\x42-\n\x1c\x63om.google.datastore.v1beta3B\x0b\x45ntityProtoP\x01\x62\x06proto3' + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_type_dot_latlng__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_PARTITIONID = _descriptor.Descriptor( + name='PartitionId', + full_name='google.datastore.v1beta3.PartitionId', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='google.datastore.v1beta3.PartitionId.project_id', index=0, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='namespace_id', full_name='google.datastore.v1beta3.PartitionId.namespace_id', index=1, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=186, + serialized_end=241, +) + + +_KEY_PATHELEMENT = _descriptor.Descriptor( + name='PathElement', + full_name='google.datastore.v1beta3.Key.PathElement', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='kind', full_name='google.datastore.v1beta3.Key.PathElement.kind', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='id', full_name='google.datastore.v1beta3.Key.PathElement.id', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='name', full_name='google.datastore.v1beta3.Key.PathElement.name', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='id_type', 
full_name='google.datastore.v1beta3.Key.PathElement.id_type', + index=0, containing_type=None, fields=[]), + ], + serialized_start=369, + serialized_end=437, +) + +_KEY = _descriptor.Descriptor( + name='Key', + full_name='google.datastore.v1beta3.Key', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='partition_id', full_name='google.datastore.v1beta3.Key.partition_id', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='path', full_name='google.datastore.v1beta3.Key.path', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_KEY_PATHELEMENT, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=244, + serialized_end=437, +) + + +_ARRAYVALUE = _descriptor.Descriptor( + name='ArrayValue', + full_name='google.datastore.v1beta3.ArrayValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='values', full_name='google.datastore.v1beta3.ArrayValue.values', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=439, + serialized_end=500, +) + + +_VALUE = _descriptor.Descriptor( + name='Value', + 
full_name='google.datastore.v1beta3.Value', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='null_value', full_name='google.datastore.v1beta3.Value.null_value', index=0, + number=11, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='boolean_value', full_name='google.datastore.v1beta3.Value.boolean_value', index=1, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='integer_value', full_name='google.datastore.v1beta3.Value.integer_value', index=2, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='double_value', full_name='google.datastore.v1beta3.Value.double_value', index=3, + number=3, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timestamp_value', full_name='google.datastore.v1beta3.Value.timestamp_value', index=4, + number=10, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key_value', full_name='google.datastore.v1beta3.Value.key_value', index=5, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='string_value', full_name='google.datastore.v1beta3.Value.string_value', index=6, + number=17, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='blob_value', full_name='google.datastore.v1beta3.Value.blob_value', index=7, + number=18, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='geo_point_value', full_name='google.datastore.v1beta3.Value.geo_point_value', index=8, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='entity_value', full_name='google.datastore.v1beta3.Value.entity_value', index=9, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='array_value', full_name='google.datastore.v1beta3.Value.array_value', index=10, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='meaning', full_name='google.datastore.v1beta3.Value.meaning', index=11, + number=14, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='exclude_from_indexes', full_name='google.datastore.v1beta3.Value.exclude_from_indexes', index=12, + number=19, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='value_type', full_name='google.datastore.v1beta3.Value.value_type', + index=0, containing_type=None, fields=[]), + ], + serialized_start=503, + serialized_end=1015, +) + + +_ENTITY_PROPERTIESENTRY = _descriptor.Descriptor( + name='PropertiesEntry', + full_name='google.datastore.v1beta3.Entity.PropertiesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.datastore.v1beta3.Entity.PropertiesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.datastore.v1beta3.Entity.PropertiesEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), b'8\001'), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1142, + serialized_end=1224, +) + +_ENTITY = _descriptor.Descriptor( + 
name='Entity', + full_name='google.datastore.v1beta3.Entity', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.datastore.v1beta3.Entity.key', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='properties', full_name='google.datastore.v1beta3.Entity.properties', index=1, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_ENTITY_PROPERTIESENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1018, + serialized_end=1224, +) + +_KEY_PATHELEMENT.containing_type = _KEY +_KEY_PATHELEMENT.oneofs_by_name['id_type'].fields.append( + _KEY_PATHELEMENT.fields_by_name['id']) +_KEY_PATHELEMENT.fields_by_name['id'].containing_oneof = _KEY_PATHELEMENT.oneofs_by_name['id_type'] +_KEY_PATHELEMENT.oneofs_by_name['id_type'].fields.append( + _KEY_PATHELEMENT.fields_by_name['name']) +_KEY_PATHELEMENT.fields_by_name['name'].containing_oneof = _KEY_PATHELEMENT.oneofs_by_name['id_type'] +_KEY.fields_by_name['partition_id'].message_type = _PARTITIONID +_KEY.fields_by_name['path'].message_type = _KEY_PATHELEMENT +_ARRAYVALUE.fields_by_name['values'].message_type = _VALUE +_VALUE.fields_by_name['null_value'].enum_type = google_dot_protobuf_dot_struct__pb2._NULLVALUE +_VALUE.fields_by_name['timestamp_value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_VALUE.fields_by_name['key_value'].message_type = _KEY +_VALUE.fields_by_name['geo_point_value'].message_type = google_dot_type_dot_latlng__pb2._LATLNG 
+_VALUE.fields_by_name['entity_value'].message_type = _ENTITY +_VALUE.fields_by_name['array_value'].message_type = _ARRAYVALUE +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['null_value']) +_VALUE.fields_by_name['null_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['boolean_value']) +_VALUE.fields_by_name['boolean_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['integer_value']) +_VALUE.fields_by_name['integer_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['double_value']) +_VALUE.fields_by_name['double_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['timestamp_value']) +_VALUE.fields_by_name['timestamp_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['key_value']) +_VALUE.fields_by_name['key_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['string_value']) +_VALUE.fields_by_name['string_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['blob_value']) +_VALUE.fields_by_name['blob_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['geo_point_value']) +_VALUE.fields_by_name['geo_point_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['entity_value']) +_VALUE.fields_by_name['entity_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] 
+_VALUE.oneofs_by_name['value_type'].fields.append( + _VALUE.fields_by_name['array_value']) +_VALUE.fields_by_name['array_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] +_ENTITY_PROPERTIESENTRY.fields_by_name['value'].message_type = _VALUE +_ENTITY_PROPERTIESENTRY.containing_type = _ENTITY +_ENTITY.fields_by_name['key'].message_type = _KEY +_ENTITY.fields_by_name['properties'].message_type = _ENTITY_PROPERTIESENTRY +DESCRIPTOR.message_types_by_name['PartitionId'] = _PARTITIONID +DESCRIPTOR.message_types_by_name['Key'] = _KEY +DESCRIPTOR.message_types_by_name['ArrayValue'] = _ARRAYVALUE +DESCRIPTOR.message_types_by_name['Value'] = _VALUE +DESCRIPTOR.message_types_by_name['Entity'] = _ENTITY + +PartitionId = _reflection.GeneratedProtocolMessageType('PartitionId', (_message.Message,), dict( + DESCRIPTOR = _PARTITIONID, + __module__ = 'google.datastore.v1beta3.entity_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.PartitionId) + )) +_sym_db.RegisterMessage(PartitionId) + +Key = _reflection.GeneratedProtocolMessageType('Key', (_message.Message,), dict( + + PathElement = _reflection.GeneratedProtocolMessageType('PathElement', (_message.Message,), dict( + DESCRIPTOR = _KEY_PATHELEMENT, + __module__ = 'google.datastore.v1beta3.entity_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Key.PathElement) + )) + , + DESCRIPTOR = _KEY, + __module__ = 'google.datastore.v1beta3.entity_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Key) + )) +_sym_db.RegisterMessage(Key) +_sym_db.RegisterMessage(Key.PathElement) + +ArrayValue = _reflection.GeneratedProtocolMessageType('ArrayValue', (_message.Message,), dict( + DESCRIPTOR = _ARRAYVALUE, + __module__ = 'google.datastore.v1beta3.entity_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.ArrayValue) + )) +_sym_db.RegisterMessage(ArrayValue) + +Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), dict( + 
DESCRIPTOR = _VALUE, + __module__ = 'google.datastore.v1beta3.entity_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Value) + )) +_sym_db.RegisterMessage(Value) + +Entity = _reflection.GeneratedProtocolMessageType('Entity', (_message.Message,), dict( + + PropertiesEntry = _reflection.GeneratedProtocolMessageType('PropertiesEntry', (_message.Message,), dict( + DESCRIPTOR = _ENTITY_PROPERTIESENTRY, + __module__ = 'google.datastore.v1beta3.entity_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Entity.PropertiesEntry) + )) + , + DESCRIPTOR = _ENTITY, + __module__ = 'google.datastore.v1beta3.entity_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Entity) + )) +_sym_db.RegisterMessage(Entity) +_sym_db.RegisterMessage(Entity.PropertiesEntry) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), b'\n\034com.google.datastore.v1beta3B\013EntityProtoP\001') +_ENTITY_PROPERTIESENTRY.has_options = True +_ENTITY_PROPERTIESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), b'8\001') +# @@protoc_insertion_point(module_scope) diff --git a/env/Lib/site-packages/gcloud/datastore/_generated/query_pb2.py b/env/Lib/site-packages/gcloud/datastore/_generated/query_pb2.py new file mode 100644 index 0000000..e843253 --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/_generated/query_pb2.py @@ -0,0 +1,917 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/datastore/v1beta3/query.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from gcloud.datastore._generated import entity_pb2 as google_dot_datastore_dot_v1beta3_dot_entity__pb2 +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/datastore/v1beta3/query.proto', + package='google.datastore.v1beta3', + syntax='proto3', + serialized_pb=b'\n$google/datastore/v1beta3/query.proto\x12\x18google.datastore.v1beta3\x1a\x1cgoogle/api/annotations.proto\x1a%google/datastore/v1beta3/entity.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xa3\x01\n\x0c\x45ntityResult\x12\x30\n\x06\x65ntity\x18\x01 \x01(\x0b\x32 .google.datastore.v1beta3.Entity\x12\x0e\n\x06\x63ursor\x18\x03 \x01(\x0c\"Q\n\nResultType\x12\x1b\n\x17RESULT_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x46ULL\x10\x01\x12\x0e\n\nPROJECTION\x10\x02\x12\x0c\n\x08KEY_ONLY\x10\x03\"\x8b\x03\n\x05Query\x12\x38\n\nprojection\x18\x02 \x03(\x0b\x32$.google.datastore.v1beta3.Projection\x12\x36\n\x04kind\x18\x03 \x03(\x0b\x32(.google.datastore.v1beta3.KindExpression\x12\x30\n\x06\x66ilter\x18\x04 \x01(\x0b\x32 .google.datastore.v1beta3.Filter\x12\x36\n\x05order\x18\x05 \x03(\x0b\x32\'.google.datastore.v1beta3.PropertyOrder\x12@\n\x0b\x64istinct_on\x18\x06 \x03(\x0b\x32+.google.datastore.v1beta3.PropertyReference\x12\x14\n\x0cstart_cursor\x18\x07 \x01(\x0c\x12\x12\n\nend_cursor\x18\x08 \x01(\x0c\x12\x0e\n\x06offset\x18\n \x01(\x05\x12*\n\x05limit\x18\x0c \x01(\x0b\x32\x1b.google.protobuf.Int32Value\"\x1e\n\x0eKindExpression\x12\x0c\n\x04name\x18\x01 
\x01(\t\"!\n\x11PropertyReference\x12\x0c\n\x04name\x18\x02 \x01(\t\"K\n\nProjection\x12=\n\x08property\x18\x01 \x01(\x0b\x32+.google.datastore.v1beta3.PropertyReference\"\xdb\x01\n\rPropertyOrder\x12=\n\x08property\x18\x01 \x01(\x0b\x32+.google.datastore.v1beta3.PropertyReference\x12\x44\n\tdirection\x18\x02 \x01(\x0e\x32\x31.google.datastore.v1beta3.PropertyOrder.Direction\"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02\"\xa3\x01\n\x06\x46ilter\x12\x45\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32).google.datastore.v1beta3.CompositeFilterH\x00\x12\x43\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32(.google.datastore.v1beta3.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type\"\xb3\x01\n\x0f\x43ompositeFilter\x12>\n\x02op\x18\x01 \x01(\x0e\x32\x32.google.datastore.v1beta3.CompositeFilter.Operator\x12\x31\n\x07\x66ilters\x18\x02 \x03(\x0b\x32 .google.datastore.v1beta3.Filter\"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\"\xd6\x02\n\x0ePropertyFilter\x12=\n\x08property\x18\x01 \x01(\x0b\x32+.google.datastore.v1beta3.PropertyReference\x12=\n\x02op\x18\x02 \x01(\x0e\x32\x31.google.datastore.v1beta3.PropertyFilter.Operator\x12.\n\x05value\x18\x03 \x01(\x0b\x32\x1f.google.datastore.v1beta3.Value\"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b\"\xb4\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12M\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x35.google.datastore.v1beta3.GqlQuery.NamedBindingsEntry\x12H\n\x13positional_bindings\x18\x04 \x03(\x0b\x32+.google.datastore.v1beta3.GqlQueryParameter\x1a\x61\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12:\n\x05value\x18\x02 
\x01(\x0b\x32+.google.datastore.v1beta3.GqlQueryParameter:\x02\x38\x01\"i\n\x11GqlQueryParameter\x12\x30\n\x05value\x18\x02 \x01(\x0b\x32\x1f.google.datastore.v1beta3.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type\"\xd3\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12M\n\x12\x65ntity_result_type\x18\x01 \x01(\x0e\x32\x31.google.datastore.v1beta3.EntityResult.ResultType\x12>\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32&.google.datastore.v1beta3.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12P\n\x0cmore_results\x18\x05 \x01(\x0e\x32:.google.datastore.v1beta3.QueryResultBatch.MoreResultsType\"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42,\n\x1c\x63om.google.datastore.v1beta3B\nQueryProtoP\x01\x62\x06proto3' + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_datastore_dot_v1beta3_dot_entity__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_ENTITYRESULT_RESULTTYPE = _descriptor.EnumDescriptor( + name='ResultType', + full_name='google.datastore.v1beta3.EntityResult.ResultType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='RESULT_TYPE_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FULL', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PROJECTION', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='KEY_ONLY', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=250, + serialized_end=331, +) 
+_sym_db.RegisterEnumDescriptor(_ENTITYRESULT_RESULTTYPE) + +_PROPERTYORDER_DIRECTION = _descriptor.EnumDescriptor( + name='Direction', + full_name='google.datastore.v1beta3.PropertyOrder.Direction', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='DIRECTION_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ASCENDING', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DESCENDING', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1026, + serialized_end=1095, +) +_sym_db.RegisterEnumDescriptor(_PROPERTYORDER_DIRECTION) + +_COMPOSITEFILTER_OPERATOR = _descriptor.EnumDescriptor( + name='Operator', + full_name='google.datastore.v1beta3.CompositeFilter.Operator', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='OPERATOR_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AND', index=1, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1398, + serialized_end=1443, +) +_sym_db.RegisterEnumDescriptor(_COMPOSITEFILTER_OPERATOR) + +_PROPERTYFILTER_OPERATOR = _descriptor.EnumDescriptor( + name='Operator', + full_name='google.datastore.v1beta3.PropertyFilter.Operator', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='OPERATOR_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LESS_THAN', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LESS_THAN_OR_EQUAL', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GREATER_THAN', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GREATER_THAN_OR_EQUAL', index=4, number=4, + 
options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='EQUAL', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='HAS_ANCESTOR', index=6, number=11, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1639, + serialized_end=1788, +) +_sym_db.RegisterEnumDescriptor(_PROPERTYFILTER_OPERATOR) + +_QUERYRESULTBATCH_MORERESULTSTYPE = _descriptor.EnumDescriptor( + name='MoreResultsType', + full_name='google.datastore.v1beta3.QueryResultBatch.MoreResultsType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='MORE_RESULTS_TYPE_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOT_FINISHED', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MORE_RESULTS_AFTER_LIMIT', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MORE_RESULTS_AFTER_CURSOR', index=3, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NO_MORE_RESULTS', index=4, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2524, + serialized_end=2676, +) +_sym_db.RegisterEnumDescriptor(_QUERYRESULTBATCH_MORERESULTSTYPE) + + +_ENTITYRESULT = _descriptor.Descriptor( + name='EntityResult', + full_name='google.datastore.v1beta3.EntityResult', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='entity', full_name='google.datastore.v1beta3.EntityResult.entity', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cursor', full_name='google.datastore.v1beta3.EntityResult.cursor', index=1, + number=3, type=12, 
cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _ENTITYRESULT_RESULTTYPE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=168, + serialized_end=331, +) + + +_QUERY = _descriptor.Descriptor( + name='Query', + full_name='google.datastore.v1beta3.Query', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='projection', full_name='google.datastore.v1beta3.Query.projection', index=0, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='kind', full_name='google.datastore.v1beta3.Query.kind', index=1, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filter', full_name='google.datastore.v1beta3.Query.filter', index=2, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='order', full_name='google.datastore.v1beta3.Query.order', index=3, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='distinct_on', full_name='google.datastore.v1beta3.Query.distinct_on', index=4, + number=6, 
type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start_cursor', full_name='google.datastore.v1beta3.Query.start_cursor', index=5, + number=7, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_cursor', full_name='google.datastore.v1beta3.Query.end_cursor', index=6, + number=8, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='offset', full_name='google.datastore.v1beta3.Query.offset', index=7, + number=10, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='limit', full_name='google.datastore.v1beta3.Query.limit', index=8, + number=12, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=334, + serialized_end=729, +) + + +_KINDEXPRESSION = _descriptor.Descriptor( + name='KindExpression', + full_name='google.datastore.v1beta3.KindExpression', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.datastore.v1beta3.KindExpression.name', index=0, 
+ number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=731, + serialized_end=761, +) + + +_PROPERTYREFERENCE = _descriptor.Descriptor( + name='PropertyReference', + full_name='google.datastore.v1beta3.PropertyReference', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.datastore.v1beta3.PropertyReference.name', index=0, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=763, + serialized_end=796, +) + + +_PROJECTION = _descriptor.Descriptor( + name='Projection', + full_name='google.datastore.v1beta3.Projection', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='property', full_name='google.datastore.v1beta3.Projection.property', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=798, + serialized_end=873, +) + + +_PROPERTYORDER = _descriptor.Descriptor( + name='PropertyOrder', + 
full_name='google.datastore.v1beta3.PropertyOrder', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='property', full_name='google.datastore.v1beta3.PropertyOrder.property', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='direction', full_name='google.datastore.v1beta3.PropertyOrder.direction', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _PROPERTYORDER_DIRECTION, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=876, + serialized_end=1095, +) + + +_FILTER = _descriptor.Descriptor( + name='Filter', + full_name='google.datastore.v1beta3.Filter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='composite_filter', full_name='google.datastore.v1beta3.Filter.composite_filter', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='property_filter', full_name='google.datastore.v1beta3.Filter.property_filter', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + 
extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='filter_type', full_name='google.datastore.v1beta3.Filter.filter_type', + index=0, containing_type=None, fields=[]), + ], + serialized_start=1098, + serialized_end=1261, +) + + +_COMPOSITEFILTER = _descriptor.Descriptor( + name='CompositeFilter', + full_name='google.datastore.v1beta3.CompositeFilter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='op', full_name='google.datastore.v1beta3.CompositeFilter.op', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filters', full_name='google.datastore.v1beta3.CompositeFilter.filters', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _COMPOSITEFILTER_OPERATOR, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1264, + serialized_end=1443, +) + + +_PROPERTYFILTER = _descriptor.Descriptor( + name='PropertyFilter', + full_name='google.datastore.v1beta3.PropertyFilter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='property', full_name='google.datastore.v1beta3.PropertyFilter.property', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='op', full_name='google.datastore.v1beta3.PropertyFilter.op', index=1, + number=2, type=14, cpp_type=8, 
label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.datastore.v1beta3.PropertyFilter.value', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _PROPERTYFILTER_OPERATOR, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1446, + serialized_end=1788, +) + + +_GQLQUERY_NAMEDBINDINGSENTRY = _descriptor.Descriptor( + name='NamedBindingsEntry', + full_name='google.datastore.v1beta3.GqlQuery.NamedBindingsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.datastore.v1beta3.GqlQuery.NamedBindingsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.datastore.v1beta3.GqlQuery.NamedBindingsEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), b'8\001'), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2002, + serialized_end=2099, +) + +_GQLQUERY = _descriptor.Descriptor( + name='GqlQuery', + 
full_name='google.datastore.v1beta3.GqlQuery', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='query_string', full_name='google.datastore.v1beta3.GqlQuery.query_string', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='allow_literals', full_name='google.datastore.v1beta3.GqlQuery.allow_literals', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='named_bindings', full_name='google.datastore.v1beta3.GqlQuery.named_bindings', index=2, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='positional_bindings', full_name='google.datastore.v1beta3.GqlQuery.positional_bindings', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_GQLQUERY_NAMEDBINDINGSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1791, + serialized_end=2099, +) + + +_GQLQUERYPARAMETER = _descriptor.Descriptor( + name='GqlQueryParameter', + full_name='google.datastore.v1beta3.GqlQueryParameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', 
full_name='google.datastore.v1beta3.GqlQueryParameter.value', index=0, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cursor', full_name='google.datastore.v1beta3.GqlQueryParameter.cursor', index=1, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='parameter_type', full_name='google.datastore.v1beta3.GqlQueryParameter.parameter_type', + index=0, containing_type=None, fields=[]), + ], + serialized_start=2101, + serialized_end=2206, +) + + +_QUERYRESULTBATCH = _descriptor.Descriptor( + name='QueryResultBatch', + full_name='google.datastore.v1beta3.QueryResultBatch', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='skipped_results', full_name='google.datastore.v1beta3.QueryResultBatch.skipped_results', index=0, + number=6, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='skipped_cursor', full_name='google.datastore.v1beta3.QueryResultBatch.skipped_cursor', index=1, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='entity_result_type', 
full_name='google.datastore.v1beta3.QueryResultBatch.entity_result_type', index=2, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='entity_results', full_name='google.datastore.v1beta3.QueryResultBatch.entity_results', index=3, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_cursor', full_name='google.datastore.v1beta3.QueryResultBatch.end_cursor', index=4, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='more_results', full_name='google.datastore.v1beta3.QueryResultBatch.more_results', index=5, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _QUERYRESULTBATCH_MORERESULTSTYPE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2209, + serialized_end=2676, +) + +_ENTITYRESULT.fields_by_name['entity'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._ENTITY +_ENTITYRESULT_RESULTTYPE.containing_type = _ENTITYRESULT +_QUERY.fields_by_name['projection'].message_type = _PROJECTION +_QUERY.fields_by_name['kind'].message_type = _KINDEXPRESSION +_QUERY.fields_by_name['filter'].message_type = _FILTER +_QUERY.fields_by_name['order'].message_type = _PROPERTYORDER 
+_QUERY.fields_by_name['distinct_on'].message_type = _PROPERTYREFERENCE +_QUERY.fields_by_name['limit'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE +_PROJECTION.fields_by_name['property'].message_type = _PROPERTYREFERENCE +_PROPERTYORDER.fields_by_name['property'].message_type = _PROPERTYREFERENCE +_PROPERTYORDER.fields_by_name['direction'].enum_type = _PROPERTYORDER_DIRECTION +_PROPERTYORDER_DIRECTION.containing_type = _PROPERTYORDER +_FILTER.fields_by_name['composite_filter'].message_type = _COMPOSITEFILTER +_FILTER.fields_by_name['property_filter'].message_type = _PROPERTYFILTER +_FILTER.oneofs_by_name['filter_type'].fields.append( + _FILTER.fields_by_name['composite_filter']) +_FILTER.fields_by_name['composite_filter'].containing_oneof = _FILTER.oneofs_by_name['filter_type'] +_FILTER.oneofs_by_name['filter_type'].fields.append( + _FILTER.fields_by_name['property_filter']) +_FILTER.fields_by_name['property_filter'].containing_oneof = _FILTER.oneofs_by_name['filter_type'] +_COMPOSITEFILTER.fields_by_name['op'].enum_type = _COMPOSITEFILTER_OPERATOR +_COMPOSITEFILTER.fields_by_name['filters'].message_type = _FILTER +_COMPOSITEFILTER_OPERATOR.containing_type = _COMPOSITEFILTER +_PROPERTYFILTER.fields_by_name['property'].message_type = _PROPERTYREFERENCE +_PROPERTYFILTER.fields_by_name['op'].enum_type = _PROPERTYFILTER_OPERATOR +_PROPERTYFILTER.fields_by_name['value'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._VALUE +_PROPERTYFILTER_OPERATOR.containing_type = _PROPERTYFILTER +_GQLQUERY_NAMEDBINDINGSENTRY.fields_by_name['value'].message_type = _GQLQUERYPARAMETER +_GQLQUERY_NAMEDBINDINGSENTRY.containing_type = _GQLQUERY +_GQLQUERY.fields_by_name['named_bindings'].message_type = _GQLQUERY_NAMEDBINDINGSENTRY +_GQLQUERY.fields_by_name['positional_bindings'].message_type = _GQLQUERYPARAMETER +_GQLQUERYPARAMETER.fields_by_name['value'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._VALUE 
+_GQLQUERYPARAMETER.oneofs_by_name['parameter_type'].fields.append( + _GQLQUERYPARAMETER.fields_by_name['value']) +_GQLQUERYPARAMETER.fields_by_name['value'].containing_oneof = _GQLQUERYPARAMETER.oneofs_by_name['parameter_type'] +_GQLQUERYPARAMETER.oneofs_by_name['parameter_type'].fields.append( + _GQLQUERYPARAMETER.fields_by_name['cursor']) +_GQLQUERYPARAMETER.fields_by_name['cursor'].containing_oneof = _GQLQUERYPARAMETER.oneofs_by_name['parameter_type'] +_QUERYRESULTBATCH.fields_by_name['entity_result_type'].enum_type = _ENTITYRESULT_RESULTTYPE +_QUERYRESULTBATCH.fields_by_name['entity_results'].message_type = _ENTITYRESULT +_QUERYRESULTBATCH.fields_by_name['more_results'].enum_type = _QUERYRESULTBATCH_MORERESULTSTYPE +_QUERYRESULTBATCH_MORERESULTSTYPE.containing_type = _QUERYRESULTBATCH +DESCRIPTOR.message_types_by_name['EntityResult'] = _ENTITYRESULT +DESCRIPTOR.message_types_by_name['Query'] = _QUERY +DESCRIPTOR.message_types_by_name['KindExpression'] = _KINDEXPRESSION +DESCRIPTOR.message_types_by_name['PropertyReference'] = _PROPERTYREFERENCE +DESCRIPTOR.message_types_by_name['Projection'] = _PROJECTION +DESCRIPTOR.message_types_by_name['PropertyOrder'] = _PROPERTYORDER +DESCRIPTOR.message_types_by_name['Filter'] = _FILTER +DESCRIPTOR.message_types_by_name['CompositeFilter'] = _COMPOSITEFILTER +DESCRIPTOR.message_types_by_name['PropertyFilter'] = _PROPERTYFILTER +DESCRIPTOR.message_types_by_name['GqlQuery'] = _GQLQUERY +DESCRIPTOR.message_types_by_name['GqlQueryParameter'] = _GQLQUERYPARAMETER +DESCRIPTOR.message_types_by_name['QueryResultBatch'] = _QUERYRESULTBATCH + +EntityResult = _reflection.GeneratedProtocolMessageType('EntityResult', (_message.Message,), dict( + DESCRIPTOR = _ENTITYRESULT, + __module__ = 'google.datastore.v1beta3.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.EntityResult) + )) +_sym_db.RegisterMessage(EntityResult) + +Query = _reflection.GeneratedProtocolMessageType('Query', (_message.Message,), dict( + 
DESCRIPTOR = _QUERY, + __module__ = 'google.datastore.v1beta3.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Query) + )) +_sym_db.RegisterMessage(Query) + +KindExpression = _reflection.GeneratedProtocolMessageType('KindExpression', (_message.Message,), dict( + DESCRIPTOR = _KINDEXPRESSION, + __module__ = 'google.datastore.v1beta3.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.KindExpression) + )) +_sym_db.RegisterMessage(KindExpression) + +PropertyReference = _reflection.GeneratedProtocolMessageType('PropertyReference', (_message.Message,), dict( + DESCRIPTOR = _PROPERTYREFERENCE, + __module__ = 'google.datastore.v1beta3.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.PropertyReference) + )) +_sym_db.RegisterMessage(PropertyReference) + +Projection = _reflection.GeneratedProtocolMessageType('Projection', (_message.Message,), dict( + DESCRIPTOR = _PROJECTION, + __module__ = 'google.datastore.v1beta3.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Projection) + )) +_sym_db.RegisterMessage(Projection) + +PropertyOrder = _reflection.GeneratedProtocolMessageType('PropertyOrder', (_message.Message,), dict( + DESCRIPTOR = _PROPERTYORDER, + __module__ = 'google.datastore.v1beta3.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.PropertyOrder) + )) +_sym_db.RegisterMessage(PropertyOrder) + +Filter = _reflection.GeneratedProtocolMessageType('Filter', (_message.Message,), dict( + DESCRIPTOR = _FILTER, + __module__ = 'google.datastore.v1beta3.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Filter) + )) +_sym_db.RegisterMessage(Filter) + +CompositeFilter = _reflection.GeneratedProtocolMessageType('CompositeFilter', (_message.Message,), dict( + DESCRIPTOR = _COMPOSITEFILTER, + __module__ = 'google.datastore.v1beta3.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.CompositeFilter) 
+ )) +_sym_db.RegisterMessage(CompositeFilter) + +PropertyFilter = _reflection.GeneratedProtocolMessageType('PropertyFilter', (_message.Message,), dict( + DESCRIPTOR = _PROPERTYFILTER, + __module__ = 'google.datastore.v1beta3.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.PropertyFilter) + )) +_sym_db.RegisterMessage(PropertyFilter) + +GqlQuery = _reflection.GeneratedProtocolMessageType('GqlQuery', (_message.Message,), dict( + + NamedBindingsEntry = _reflection.GeneratedProtocolMessageType('NamedBindingsEntry', (_message.Message,), dict( + DESCRIPTOR = _GQLQUERY_NAMEDBINDINGSENTRY, + __module__ = 'google.datastore.v1beta3.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.GqlQuery.NamedBindingsEntry) + )) + , + DESCRIPTOR = _GQLQUERY, + __module__ = 'google.datastore.v1beta3.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.GqlQuery) + )) +_sym_db.RegisterMessage(GqlQuery) +_sym_db.RegisterMessage(GqlQuery.NamedBindingsEntry) + +GqlQueryParameter = _reflection.GeneratedProtocolMessageType('GqlQueryParameter', (_message.Message,), dict( + DESCRIPTOR = _GQLQUERYPARAMETER, + __module__ = 'google.datastore.v1beta3.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.GqlQueryParameter) + )) +_sym_db.RegisterMessage(GqlQueryParameter) + +QueryResultBatch = _reflection.GeneratedProtocolMessageType('QueryResultBatch', (_message.Message,), dict( + DESCRIPTOR = _QUERYRESULTBATCH, + __module__ = 'google.datastore.v1beta3.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.QueryResultBatch) + )) +_sym_db.RegisterMessage(QueryResultBatch) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), b'\n\034com.google.datastore.v1beta3B\nQueryProtoP\001') +_GQLQUERY_NAMEDBINDINGSENTRY.has_options = True +_GQLQUERY_NAMEDBINDINGSENTRY._options = 
_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), b'8\001') +# @@protoc_insertion_point(module_scope) diff --git a/env/Lib/site-packages/gcloud/datastore/batch.py b/env/Lib/site-packages/gcloud/datastore/batch.py new file mode 100644 index 0000000..5d4fc01 --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/batch.py @@ -0,0 +1,301 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Create / interact with a batch of updates / deletes. + +Batches provide the ability to execute multiple operations +in a single request to the Cloud Datastore API. + +See +https://cloud.google.com/datastore/docs/concepts/entities#Datastore_Batch_operations +""" + +from gcloud.datastore import helpers +from gcloud.datastore._generated import datastore_pb2 as _datastore_pb2 + + +class Batch(object): + """An abstraction representing a collected group of updates / deletes. + + Used to build up a bulk mutuation. 
+ + For example, the following snippet of code will put the two ``save`` + operations and the ``delete`` operation into the same mutation, and send + them to the server in a single API request:: + + >>> from gcloud import datastore + >>> client = datastore.Client() + >>> batch = client.batch() + >>> batch.put(entity1) + >>> batch.put(entity2) + >>> batch.delete(key3) + >>> batch.commit() + + You can also use a batch as a context manager, in which case + :meth:`commit` will be called automatically if its block exits without + raising an exception:: + + >>> with batch: + ... batch.put(entity1) + ... batch.put(entity2) + ... batch.delete(key3) + + By default, no updates will be sent if the block exits with an error:: + + >>> with batch: + ... do_some_work(batch) + ... raise Exception() # rolls back + + :type client: :class:`gcloud.datastore.client.Client` + :param client: The client used to connect to datastore. + """ + + _id = None # "protected" attribute, always None for non-transactions + + _INITIAL = 0 + """Enum value for _INITIAL status of batch/transaction.""" + + _IN_PROGRESS = 1 + """Enum value for _IN_PROGRESS status of batch/transaction.""" + + _ABORTED = 2 + """Enum value for _ABORTED status of batch/transaction.""" + + _FINISHED = 3 + """Enum value for _FINISHED status of batch/transaction.""" + + def __init__(self, client): + self._client = client + self._commit_request = _datastore_pb2.CommitRequest() + self._partial_key_entities = [] + self._status = self._INITIAL + + def current(self): + """Return the topmost batch / transaction, or None.""" + return self._client.current_batch + + @property + def project(self): + """Getter for project in which the batch will run. + + :rtype: :class:`str` + :returns: The project in which the batch will run. + """ + return self._client.project + + @property + def namespace(self): + """Getter for namespace in which the batch will run. + + :rtype: :class:`str` + :returns: The namespace in which the batch will run. 
+ """ + return self._client.namespace + + @property + def connection(self): + """Getter for connection over which the batch will run. + + :rtype: :class:`gcloud.datastore.connection.Connection` + :returns: The connection over which the batch will run. + """ + return self._client.connection + + def _add_partial_key_entity_pb(self): + """Adds a new mutation for an entity with a partial key. + + :rtype: :class:`gcloud.datastore._generated.entity_pb2.Entity` + :returns: The newly created entity protobuf that will be + updated and sent with a commit. + """ + new_mutation = self.mutations.add() + return new_mutation.insert + + def _add_complete_key_entity_pb(self): + """Adds a new mutation for an entity with a completed key. + + :rtype: :class:`gcloud.datastore._generated.entity_pb2.Entity` + :returns: The newly created entity protobuf that will be + updated and sent with a commit. + """ + # We use ``upsert`` for entities with completed keys, rather than + # ``insert`` or ``update``, in order not to create race conditions + # based on prior existence / removal of the entity. + new_mutation = self.mutations.add() + return new_mutation.upsert + + def _add_delete_key_pb(self): + """Adds a new mutation for a key to be deleted. + + :rtype: :class:`gcloud.datastore._generated.entity_pb2.Key` + :returns: The newly created key protobuf that will be + deleted when sent with a commit. + """ + new_mutation = self.mutations.add() + return new_mutation.delete + + @property + def mutations(self): + """Getter for the changes accumulated by this batch. + + Every batch is committed with a single commit request containing all + the work to be done as mutations. Inside a batch, calling :meth:`put` + with an entity, or :meth:`delete` with a key, builds up the request by + adding a new mutation. This getter returns the protobuf that has been + built-up so far. 
+ + :rtype: iterable + :returns: The list of :class:`._generated.datastore_pb2.Mutation` + protobufs to be sent in the commit request. + """ + return self._commit_request.mutations + + def put(self, entity): + """Remember an entity's state to be saved during :meth:`commit`. + + .. note:: + Any existing properties for the entity will be replaced by those + currently set on this instance. Already-stored properties which do + not correspond to keys set on this instance will be removed from + the datastore. + + .. note:: + Property values which are "text" ('unicode' in Python2, 'str' in + Python3) map to 'string_value' in the datastore; values which are + "bytes" ('str' in Python2, 'bytes' in Python3) map to 'blob_value'. + + When an entity has a partial key, calling :meth:`commit` sends it as + an ``insert`` mutation and the key is completed. On return, + the key for the ``entity`` passed in is updated to match the key ID + assigned by the server. + + :type entity: :class:`gcloud.datastore.entity.Entity` + :param entity: the entity to be saved. + + :raises: ValueError if entity has no key assigned, or if the key's + ``project`` does not match ours. + """ + if entity.key is None: + raise ValueError("Entity must have a key") + + if self.project != entity.key.project: + raise ValueError("Key must be from same project as batch") + + if entity.key.is_partial: + entity_pb = self._add_partial_key_entity_pb() + self._partial_key_entities.append(entity) + else: + entity_pb = self._add_complete_key_entity_pb() + + _assign_entity_to_pb(entity_pb, entity) + + def delete(self, key): + """Remember a key to be deleted during :meth:`commit`. + + :type key: :class:`gcloud.datastore.key.Key` + :param key: the key to be deleted. + + :raises: ValueError if key is not complete, or if the key's + ``project`` does not match ours. 
+ """ + if key.is_partial: + raise ValueError("Key must be complete") + + if self.project != key.project: + raise ValueError("Key must be from same project as batch") + + key_pb = key.to_protobuf() + self._add_delete_key_pb().CopyFrom(key_pb) + + def begin(self): + """Begins a batch. + + This method is called automatically when entering a with + statement, however it can be called explicitly if you don't want + to use a context manager. + + Overridden by :class:`gcloud.datastore.transaction.Transaction`. + + :raises: :class:`ValueError` if the batch has already begun. + """ + if self._status != self._INITIAL: + raise ValueError('Batch already started previously.') + self._status = self._IN_PROGRESS + + def _commit(self): + """Commits the batch. + + This is called by :meth:`commit`. + """ + # NOTE: ``self._commit_request`` will be modified. + _, updated_keys = self.connection.commit( + self.project, self._commit_request, self._id) + # If the back-end returns without error, we are guaranteed that + # :meth:`Connection.commit` will return keys that match (length and + # order) directly ``_partial_key_entities``. + for new_key_pb, entity in zip(updated_keys, + self._partial_key_entities): + new_id = new_key_pb.path[-1].id + entity.key = entity.key.completed_key(new_id) + + def commit(self): + """Commits the batch. + + This is called automatically upon exiting a with statement, + however it can be called explicitly if you don't want to use a + context manager. + """ + try: + self._commit() + finally: + self._status = self._FINISHED + + def rollback(self): + """Rolls back the current batch. + + Marks the batch as aborted (can't be used again). + + Overridden by :class:`gcloud.datastore.transaction.Transaction`. 
+ """ + self._status = self._ABORTED + + def __enter__(self): + self._client._push_batch(self) + self.begin() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + try: + if exc_type is None: + self.commit() + else: + self.rollback() + finally: + self._client._pop_batch() + + +def _assign_entity_to_pb(entity_pb, entity): + """Copy ``entity`` into ``entity_pb``. + + Helper method for ``Batch.put``. + + :type entity_pb: :class:`gcloud.datastore._generated.entity_pb2.Entity` + :param entity_pb: The entity owned by a mutation. + + :type entity: :class:`gcloud.datastore.entity.Entity` + :param entity: The entity being updated within the batch / transaction. + """ + bare_entity_pb = helpers.entity_to_protobuf(entity) + bare_entity_pb.key.CopyFrom(bare_entity_pb.key) + entity_pb.CopyFrom(bare_entity_pb) diff --git a/env/Lib/site-packages/gcloud/datastore/client.py b/env/Lib/site-packages/gcloud/datastore/client.py new file mode 100644 index 0000000..fc8b36b --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/client.py @@ -0,0 +1,454 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Convenience wrapper for invoking APIs/factories w/ a project.""" + +import os + +from gcloud._helpers import _LocalStack +from gcloud._helpers import _determine_default_project as _base_default_project +from gcloud.client import _ClientProjectMixin +from gcloud.client import Client as _BaseClient +from gcloud.datastore import helpers +from gcloud.datastore.connection import Connection +from gcloud.datastore.batch import Batch +from gcloud.datastore.entity import Entity +from gcloud.datastore.key import Key +from gcloud.datastore.query import Query +from gcloud.datastore.transaction import Transaction +from gcloud.environment_vars import GCD_DATASET + + +_MAX_LOOPS = 128 +"""Maximum number of iterations to wait for deferred keys.""" + + +def _get_gcd_project(): + """Gets the GCD application ID if it can be inferred.""" + return os.getenv(GCD_DATASET) + + +def _determine_default_project(project=None): + """Determine default project explicitly or implicitly as fall-back. + + In implicit case, supports four environments. In order of precedence, the + implicit environments are: + + * DATASTORE_DATASET environment variable (for ``gcd`` / emulator testing) + * GCLOUD_PROJECT environment variable + * Google App Engine application ID + * Google Compute Engine project ID (from metadata server) + + :type project: string + :param project: Optional. The project to use as default. + + :rtype: string or ``NoneType`` + :returns: Default project if it can be determined. + """ + if project is None: + project = _get_gcd_project() + + if project is None: + project = _base_default_project(project=project) + + return project + + +def _extended_lookup(connection, project, key_pbs, + missing=None, deferred=None, + eventual=False, transaction_id=None): + """Repeat lookup until all keys found (unless stop requested). + + Helper function for :meth:`Client.get_multi`. 
+ + :type connection: :class:`gcloud.datastore.connection.Connection` + :param connection: The connection used to connect to datastore. + + :type project: string + :param project: The project to make the request for. + + :type key_pbs: list of :class:`gcloud.datastore._generated.entity_pb2.Key` + :param key_pbs: The keys to retrieve from the datastore. + + :type missing: list + :param missing: (Optional) If a list is passed, the key-only entity + protobufs returned by the backend as "missing" will be + copied into it. + + :type deferred: list + :param deferred: (Optional) If a list is passed, the key protobufs returned + by the backend as "deferred" will be copied into it. + + :type eventual: bool + :param eventual: If False (the default), request ``STRONG`` read + consistency. If True, request ``EVENTUAL`` read + consistency. + + :type transaction_id: string + :param transaction_id: If passed, make the request in the scope of + the given transaction. Incompatible with + ``eventual==True``. + + :rtype: list of :class:`gcloud.datastore._generated.entity_pb2.Entity` + :returns: The requested entities. + :raises: :class:`ValueError` if missing / deferred are not null or + empty list. + """ + if missing is not None and missing != []: + raise ValueError('missing must be None or an empty list') + + if deferred is not None and deferred != []: + raise ValueError('deferred must be None or an empty list') + + results = [] + + loop_num = 0 + while loop_num < _MAX_LOOPS: # loop against possible deferred. 
+ loop_num += 1 + + results_found, missing_found, deferred_found = connection.lookup( + project=project, + key_pbs=key_pbs, + eventual=eventual, + transaction_id=transaction_id, + ) + + results.extend(results_found) + + if missing is not None: + missing.extend(missing_found) + + if deferred is not None: + deferred.extend(deferred_found) + break + + if len(deferred_found) == 0: + break + + # We have deferred keys, and the user didn't ask to know about + # them, so retry (but only with the deferred ones). + key_pbs = deferred_found + + return results + + +class Client(_BaseClient, _ClientProjectMixin): + """Convenience wrapper for invoking APIs/factories w/ a project. + + :type project: string + :param project: (optional) The project to pass to proxied API methods. + + :type namespace: string + :param namespace: (optional) namespace to pass to proxied API methods. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :param credentials: The OAuth2 Credentials to use for the connection + owned by this client. If not passed (and if no ``http`` + object is passed), falls back to the default inferred + from the environment. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: An optional HTTP object to make requests. If not passed, an + ``http`` object is created that is bound to the + ``credentials`` for the current object. + """ + _connection_class = Connection + + def __init__(self, project=None, namespace=None, + credentials=None, http=None): + _ClientProjectMixin.__init__(self, project=project) + self.namespace = namespace + self._batch_stack = _LocalStack() + super(Client, self).__init__(credentials, http) + + @staticmethod + def _determine_default(project): + """Helper: override default project detection.""" + return _determine_default_project(project) + + def _push_batch(self, batch): + """Push a batch/transaction onto our stack. 
+ + "Protected", intended for use by batch / transaction context mgrs. + + :type batch: :class:`gcloud.datastore.batch.Batch`, or an object + implementing its API. + :param batch: newly-active batch/transaction. + """ + self._batch_stack.push(batch) + + def _pop_batch(self): + """Pop a batch/transaction from our stack. + + "Protected", intended for use by batch / transaction context mgrs. + + :raises: IndexError if the stack is empty. + :rtype: :class:`gcloud.datastore.batch.Batch`, or an object + implementing its API. + :returns: the top-most batch/transaction, after removing it. + """ + return self._batch_stack.pop() + + @property + def current_batch(self): + """Currently-active batch. + + :rtype: :class:`gcloud.datastore.batch.Batch`, or an object + implementing its API, or ``NoneType`` (if no batch is active). + :returns: The batch/transaction at the top of the batch stack. + """ + return self._batch_stack.top + + @property + def current_transaction(self): + """Currently-active transaction. + + :rtype: :class:`gcloud.datastore.transaction.Transaction`, or an object + implementing its API, or ``NoneType`` (if no transaction is + active). + :returns: The transaction at the top of the batch stack. + """ + transaction = self.current_batch + if isinstance(transaction, Transaction): + return transaction + + def get(self, key, missing=None, deferred=None, transaction=None): + """Retrieve an entity from a single key (if it exists). + + .. note:: + + This is just a thin wrapper over :meth:`get_multi`. + The backend API does not make a distinction between a single key or + multiple keys in a lookup request. + + :type key: :class:`gcloud.datastore.key.Key` + :param key: The key to be retrieved from the datastore. + + :type missing: list + :param missing: (Optional) If a list is passed, the key-only entities + returned by the backend as "missing" will be copied + into it. 
+ + :type deferred: list + :param deferred: (Optional) If a list is passed, the keys returned + by the backend as "deferred" will be copied into it. + + :type transaction: :class:`gcloud.datastore.transaction.Transaction` + :param transaction: (Optional) Transaction to use for read consistency. + If not passed, uses current transaction, if set. + + :rtype: :class:`gcloud.datastore.entity.Entity` or ``NoneType`` + :returns: The requested entity if it exists. + """ + entities = self.get_multi(keys=[key], missing=missing, + deferred=deferred, transaction=transaction) + if entities: + return entities[0] + + def get_multi(self, keys, missing=None, deferred=None, transaction=None): + """Retrieve entities, along with their attributes. + + :type keys: list of :class:`gcloud.datastore.key.Key` + :param keys: The keys to be retrieved from the datastore. + + :type missing: list + :param missing: (Optional) If a list is passed, the key-only entities + returned by the backend as "missing" will be copied + into it. If the list is not empty, an error will occur. + + :type deferred: list + :param deferred: (Optional) If a list is passed, the keys returned + by the backend as "deferred" will be copied into it. + If the list is not empty, an error will occur. + + :type transaction: :class:`gcloud.datastore.transaction.Transaction` + :param transaction: (Optional) Transaction to use for read consistency. + If not passed, uses current transaction, if set. + + :rtype: list of :class:`gcloud.datastore.entity.Entity` + :returns: The requested entities. + :raises: :class:`ValueError` if one or more of ``keys`` has a project + which does not match our project. 
+ """ + if not keys: + return [] + + ids = set(key.project for key in keys) + for current_id in ids: + if current_id != self.project: + raise ValueError('Keys do not match project') + + if transaction is None: + transaction = self.current_transaction + + entity_pbs = _extended_lookup( + connection=self.connection, + project=self.project, + key_pbs=[k.to_protobuf() for k in keys], + missing=missing, + deferred=deferred, + transaction_id=transaction and transaction.id, + ) + + if missing is not None: + missing[:] = [ + helpers.entity_from_protobuf(missed_pb) + for missed_pb in missing] + + if deferred is not None: + deferred[:] = [ + helpers.key_from_protobuf(deferred_pb) + for deferred_pb in deferred] + + return [helpers.entity_from_protobuf(entity_pb) + for entity_pb in entity_pbs] + + def put(self, entity): + """Save an entity in the Cloud Datastore. + + .. note:: + + This is just a thin wrapper over :meth:`put_multi`. + The backend API does not make a distinction between a single + entity or multiple entities in a commit request. + + :type entity: :class:`gcloud.datastore.entity.Entity` + :param entity: The entity to be saved to the datastore. + """ + self.put_multi(entities=[entity]) + + def put_multi(self, entities): + """Save entities in the Cloud Datastore. + + :type entities: list of :class:`gcloud.datastore.entity.Entity` + :param entities: The entities to be saved to the datastore. + + :raises: :class:`ValueError` if ``entities`` is a single entity. + """ + if isinstance(entities, Entity): + raise ValueError("Pass a sequence of entities") + + if not entities: + return + + current = self.current_batch + in_batch = current is not None + + if not in_batch: + current = self.batch() + + for entity in entities: + current.put(entity) + + if not in_batch: + current.commit() + + def delete(self, key): + """Delete the key in the Cloud Datastore. + + .. note:: + + This is just a thin wrapper over :meth:`delete_multi`. 
+ The backend API does not make a distinction between a single key or + multiple keys in a commit request. + + :type key: :class:`gcloud.datastore.key.Key` + :param key: The key to be deleted from the datastore. + """ + return self.delete_multi(keys=[key]) + + def delete_multi(self, keys): + """Delete keys from the Cloud Datastore. + + :type keys: list of :class:`gcloud.datastore.key.Key` + :param keys: The keys to be deleted from the datastore. + """ + if not keys: + return + + # We allow partial keys to attempt a delete, the backend will fail. + current = self.current_batch + in_batch = current is not None + + if not in_batch: + current = self.batch() + + for key in keys: + current.delete(key) + + if not in_batch: + current.commit() + + def allocate_ids(self, incomplete_key, num_ids): + """Allocate a list of IDs from a partial key. + + :type incomplete_key: :class:`gcloud.datastore.key.Key` + :param incomplete_key: Partial key to use as base for allocated IDs. + + :type num_ids: int + :param num_ids: The number of IDs to allocate. + + :rtype: list of :class:`gcloud.datastore.key.Key` + :returns: The (complete) keys allocated with ``incomplete_key`` as + root. + :raises: :class:`ValueError` if ``incomplete_key`` is not a + partial key. + """ + if not incomplete_key.is_partial: + raise ValueError(('Key is not partial.', incomplete_key)) + + incomplete_key_pb = incomplete_key.to_protobuf() + incomplete_key_pbs = [incomplete_key_pb] * num_ids + + conn = self.connection + allocated_key_pbs = conn.allocate_ids(incomplete_key.project, + incomplete_key_pbs) + allocated_ids = [allocated_key_pb.path[-1].id + for allocated_key_pb in allocated_key_pbs] + return [incomplete_key.completed_key(allocated_id) + for allocated_id in allocated_ids] + + def key(self, *path_args, **kwargs): + """Proxy to :class:`gcloud.datastore.key.Key`. + + Passes our ``project``. 
+ """ + if 'project' in kwargs: + raise TypeError('Cannot pass project') + kwargs['project'] = self.project + if 'namespace' not in kwargs: + kwargs['namespace'] = self.namespace + return Key(*path_args, **kwargs) + + def batch(self): + """Proxy to :class:`gcloud.datastore.batch.Batch`.""" + return Batch(self) + + def transaction(self): + """Proxy to :class:`gcloud.datastore.transaction.Transaction`.""" + return Transaction(self) + + def query(self, **kwargs): + """Proxy to :class:`gcloud.datastore.query.Query`. + + Passes our ``project``. + """ + if 'client' in kwargs: + raise TypeError('Cannot pass client') + if 'project' in kwargs: + raise TypeError('Cannot pass project') + kwargs['project'] = self.project + if 'namespace' not in kwargs: + kwargs['namespace'] = self.namespace + return Query(self, **kwargs) diff --git a/env/Lib/site-packages/gcloud/datastore/connection.py b/env/Lib/site-packages/gcloud/datastore/connection.py new file mode 100644 index 0000000..63a82ad --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/connection.py @@ -0,0 +1,426 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Connections to gcloud datastore API servers.""" + +import os + +from gcloud import connection +from gcloud.environment_vars import GCD_HOST +from gcloud.exceptions import make_exception +from gcloud.datastore._generated import datastore_pb2 as _datastore_pb2 +from google.rpc import status_pb2 + + +class Connection(connection.Connection): + """A connection to the Google Cloud Datastore via the Protobuf API. + + This class should understand only the basic types (and protobufs) + in method arguments, however should be capable of returning advanced types. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` + :param credentials: The OAuth2 Credentials to use for this connection. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: An optional HTTP object to make requests. + + :type api_base_url: string + :param api_base_url: The base of the API call URL. Defaults to + :attr:`API_BASE_URL`. + """ + + API_BASE_URL = 'https://datastore.googleapis.com' + """The base of the API call URL.""" + + API_VERSION = 'v1beta3' + """The version of the API, used in building the API call's URL.""" + + API_URL_TEMPLATE = ('{api_base}/{api_version}/projects' + '/{project}:{method}') + """A template for the URL of a particular API call.""" + + SCOPE = ('https://www.googleapis.com/auth/datastore',) + """The scopes required for authenticating as a Cloud Datastore consumer.""" + + def __init__(self, credentials=None, http=None, api_base_url=None): + super(Connection, self).__init__(credentials=credentials, http=http) + if api_base_url is None: + try: + # gcd.sh has /datastore/ in the path still since it supports + # v1beta2 and v1beta3 simultaneously. + api_base_url = '%s/datastore' % (os.environ[GCD_HOST],) + except KeyError: + api_base_url = self.__class__.API_BASE_URL + self.api_base_url = api_base_url + + def _request(self, project, method, data): + """Make a request over the Http transport to the Cloud Datastore API. 
+ + :type project: string + :param project: The project to make the request for. + + :type method: string + :param method: The API call method name (ie, ``runQuery``, + ``lookup``, etc) + + :type data: string + :param data: The data to send with the API call. + Typically this is a serialized Protobuf string. + + :rtype: string + :returns: The string response content from the API call. + :raises: :class:`gcloud.exceptions.GCloudError` if the response + code is not 200 OK. + """ + headers = { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': str(len(data)), + 'User-Agent': self.USER_AGENT, + } + headers, content = self.http.request( + uri=self.build_api_url(project=project, method=method), + method='POST', headers=headers, body=data) + + status = headers['status'] + if status != '200': + error_status = status_pb2.Status.FromString(content) + raise make_exception(headers, error_status.message, use_json=False) + + return content + + def _rpc(self, project, method, request_pb, response_pb_cls): + """Make a protobuf RPC request. + + :type project: string + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type method: string + :param method: The name of the method to invoke. + + :type request_pb: :class:`google.protobuf.message.Message` instance + :param request_pb: the protobuf instance representing the request. + + :type response_pb_cls: A :class:`google.protobuf.message.Message' + subclass. + :param response_pb_cls: The class used to unmarshall the response + protobuf. + """ + response = self._request(project=project, method=method, + data=request_pb.SerializeToString()) + return response_pb_cls.FromString(response) + + def build_api_url(self, project, method, base_url=None, + api_version=None): + """Construct the URL for a particular API call. + + This method is used internally to come up with the URL to use when + making RPCs to the Cloud Datastore API. 
+ + :type project: string + :param project: The project to connect to. This is + usually your project name in the cloud console. + + :type method: string + :param method: The API method to call (e.g. 'runQuery', 'lookup'). + + :type base_url: string + :param base_url: The base URL where the API lives. + You shouldn't have to provide this. + + :type api_version: string + :param api_version: The version of the API to connect to. + You shouldn't have to provide this. + """ + return self.API_URL_TEMPLATE.format( + api_base=(base_url or self.api_base_url), + api_version=(api_version or self.API_VERSION), + project=project, method=method) + + def lookup(self, project, key_pbs, + eventual=False, transaction_id=None): + """Lookup keys from a project in the Cloud Datastore. + + Maps the ``DatastoreService.Lookup`` protobuf RPC. + + This uses mostly protobufs + (:class:`gcloud.datastore._generated.entity_pb2.Key` as input and + :class:`gcloud.datastore._generated.entity_pb2.Entity` as output). It + is used under the hood in + :meth:`Client.get() <.datastore.client.Client.get>`: + + >>> from gcloud import datastore + >>> client = datastore.Client(project='project') + >>> key = client.key('MyKind', 1234) + >>> client.get(key) + [] + + Using a :class:`Connection` directly: + + >>> connection.lookup('project', [key.to_protobuf()]) + [] + + :type project: string + :param project: The project to look up the keys in. + + :type key_pbs: list of + :class:`gcloud.datastore._generated.entity_pb2.Key` + :param key_pbs: The keys to retrieve from the datastore. + + :type eventual: bool + :param eventual: If False (the default), request ``STRONG`` read + consistency. If True, request ``EVENTUAL`` read + consistency. + + :type transaction_id: string + :param transaction_id: If passed, make the request in the scope of + the given transaction. Incompatible with + ``eventual==True``. 
+ + :rtype: tuple + :returns: A triple of (``results``, ``missing``, ``deferred``) where + both ``results`` and ``missing`` are lists of + :class:`gcloud.datastore._generated.entity_pb2.Entity` and + ``deferred`` is a list of + :class:`gcloud.datastore._generated.entity_pb2.Key`. + """ + lookup_request = _datastore_pb2.LookupRequest() + _set_read_options(lookup_request, eventual, transaction_id) + _add_keys_to_request(lookup_request.keys, key_pbs) + + lookup_response = self._rpc(project, 'lookup', lookup_request, + _datastore_pb2.LookupResponse) + + results = [result.entity for result in lookup_response.found] + missing = [result.entity for result in lookup_response.missing] + + return results, missing, list(lookup_response.deferred) + + def run_query(self, project, query_pb, namespace=None, + eventual=False, transaction_id=None): + """Run a query on the Cloud Datastore. + + Maps the ``DatastoreService.RunQuery`` protobuf RPC. + + Given a Query protobuf, sends a ``runQuery`` request to the + Cloud Datastore API and returns a list of entity protobufs + matching the query. + + You typically wouldn't use this method directly, in favor of the + :meth:`gcloud.datastore.query.Query.fetch` method. + + Under the hood, the :class:`gcloud.datastore.query.Query` class + uses this method to fetch data: + + >>> from gcloud import datastore + >>> client = datastore.Client() + >>> query = client.query(kind='MyKind') + >>> query.add_filter('property', '=', 'val') + + Using the query iterator's + :meth:`next_page() <.datastore.query.Iterator.next_page>` method: + + >>> query_iter = query.fetch() + >>> entities, more_results, cursor = query_iter.next_page() + >>> entities + [] + >>> more_results + + >>> cursor + + + Under the hood this is doing: + + >>> connection.run_query('project', query.to_protobuf()) + [], cursor, more_results, skipped_results + + :type project: string + :param project: The project over which to run the query. 
+
+        :type query_pb: :class:`gcloud.datastore._generated.query_pb2.Query`
+        :param query_pb: The Protobuf representing the query to run.
+
+        :type namespace: string
+        :param namespace: The namespace over which to run the query.
+
+        :type eventual: bool
+        :param eventual: If False (the default), request ``STRONG`` read
+                         consistency. If True, request ``EVENTUAL`` read
+                         consistency.
+
+        :type transaction_id: string
+        :param transaction_id: If passed, make the request in the scope of
+                               the given transaction. Incompatible with
+                               ``eventual==True``.
+
+        :rtype: tuple
+        :returns: Four-tuple containing the entities returned,
+                  the end cursor of the query, a ``more_results``
+                  enum and a count of the number of skipped results.
+        """
+        request = _datastore_pb2.RunQueryRequest()
+        _set_read_options(request, eventual, transaction_id)
+
+        if namespace:
+            request.partition_id.namespace_id = namespace
+
+        request.query.CopyFrom(query_pb)
+        response = self._rpc(project, 'runQuery', request,
+                             _datastore_pb2.RunQueryResponse)
+        return (
+            [e.entity for e in response.batch.entity_results],
+            response.batch.end_cursor,  # Assume response always has cursor.
+            response.batch.more_results,
+            response.batch.skipped_results,
+        )
+
+    def begin_transaction(self, project):
+        """Begin a transaction.
+
+        Maps the ``DatastoreService.BeginTransaction`` protobuf RPC.
+
+        :type project: string
+        :param project: The project to which the transaction applies.
+
+        :rtype: bytes
+        :returns: The serialized transaction that was begun.
+        """
+        request = _datastore_pb2.BeginTransactionRequest()
+        response = self._rpc(project, 'beginTransaction', request,
+                             _datastore_pb2.BeginTransactionResponse)
+        return response.transaction
+
+    def commit(self, project, request, transaction_id):
+        """Commit mutations in context of current transaction (if any).
+
+        Maps the ``DatastoreService.Commit`` protobuf RPC.
+
+        :type project: string
+        :param project: The project to which the transaction applies.
+
+        :type request: :class:`._generated.datastore_pb2.CommitRequest`
+        :param request: The protobuf with the mutations being committed.
+
+        :type transaction_id: string or None
+        :param transaction_id: The transaction ID returned from
+                               :meth:`begin_transaction`. Non-transactional
+                               batches must pass ``None``.
+
+        .. note::
+
+            This method will mutate ``request`` before using it.
+
+        :rtype: tuple
+        :returns: The pair of the number of index updates and a list of
+                  :class:`._generated.entity_pb2.Key` for each incomplete key
+                  that was completed in the commit.
+        """
+        if transaction_id:
+            request.mode = _datastore_pb2.CommitRequest.TRANSACTIONAL
+            request.transaction = transaction_id
+        else:
+            request.mode = _datastore_pb2.CommitRequest.NON_TRANSACTIONAL
+
+        response = self._rpc(project, 'commit', request,
+                             _datastore_pb2.CommitResponse)
+        return _parse_commit_response(response)
+
+    def rollback(self, project, transaction_id):
+        """Rollback the connection's existing transaction.
+
+        Maps the ``DatastoreService.Rollback`` protobuf RPC.
+
+        :type project: string
+        :param project: The project to which the transaction belongs.
+
+        :type transaction_id: string
+        :param transaction_id: The transaction ID returned from
+                               :meth:`begin_transaction`.
+        """
+        request = _datastore_pb2.RollbackRequest()
+        request.transaction = transaction_id
+        # Nothing to do with this response, so just execute the method.
+        self._rpc(project, 'rollback', request,
+                  _datastore_pb2.RollbackResponse)
+
+    def allocate_ids(self, project, key_pbs):
+        """Obtain backend-generated IDs for a set of keys.
+
+        Maps the ``DatastoreService.AllocateIds`` protobuf RPC.
+
+        :type project: string
+        :param project: The project to which the transaction belongs.
+
+        :type key_pbs: list of
+                       :class:`gcloud.datastore._generated.entity_pb2.Key`
+        :param key_pbs: The keys for which the backend should allocate IDs.
+
+        :rtype: list of :class:`gcloud.datastore._generated.entity_pb2.Key`
+        :returns: An equal number of keys, with IDs filled in by the backend.
+        """
+        request = _datastore_pb2.AllocateIdsRequest()
+        _add_keys_to_request(request.keys, key_pbs)
+        # Nothing to do with this response, so just execute the method.
+        response = self._rpc(project, 'allocateIds', request,
+                             _datastore_pb2.AllocateIdsResponse)
+        return list(response.keys)
+
+
+def _set_read_options(request, eventual, transaction_id):
+    """Validate rules for read options, and assign to the request.
+
+    Helper method for ``lookup()`` and ``run_query``.
+
+    :raises: :class:`ValueError` if ``eventual`` is ``True`` and the
+             ``transaction_id`` is not ``None``.
+    """
+    if eventual and (transaction_id is not None):
+        raise ValueError('eventual must be False when in a transaction')
+
+    opts = request.read_options
+    if eventual:
+        opts.read_consistency = _datastore_pb2.ReadOptions.EVENTUAL
+    elif transaction_id:
+        opts.transaction = transaction_id
+
+
+def _add_keys_to_request(request_field_pb, key_pbs):
+    """Add protobuf keys to a request object.
+
+    :type request_field_pb: `RepeatedCompositeFieldContainer`
+    :param request_field_pb: A repeated proto field that contains keys.
+
+    :type key_pbs: list of :class:`gcloud.datastore._generated.entity_pb2.Key`
+    :param key_pbs: The keys to add to a request.
+    """
+    for key_pb in key_pbs:
+        request_field_pb.add().CopyFrom(key_pb)
+
+
+def _parse_commit_response(commit_response_pb):
+    """Extract response data from a commit response.
+
+    :type commit_response_pb: :class:`._generated.datastore_pb2.CommitResponse`
+    :param commit_response_pb: The protobuf response from a commit request.
+
+    :rtype: tuple
+    :returns: The pair of the number of index updates and a list of
+              :class:`._generated.entity_pb2.Key` for each incomplete key
+              that was completed in the commit.
+ """ + mut_results = commit_response_pb.mutation_results + index_updates = commit_response_pb.index_updates + completed_keys = [mut_result.key for mut_result in mut_results + if mut_result.HasField('key')] # Message field (Key) + return index_updates, completed_keys diff --git a/env/Lib/site-packages/gcloud/datastore/entity.py b/env/Lib/site-packages/gcloud/datastore/entity.py new file mode 100644 index 0000000..0d5ce4b --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/entity.py @@ -0,0 +1,142 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Class for representing a single entity in the Cloud Datastore.""" + + +from gcloud._helpers import _ensure_tuple_or_list + + +class Entity(dict): + """Entities are akin to rows in a relational database + + An entity storing the actual instance of data. + + Each entity is officially represented with a + :class:`gcloud.datastore.key.Key` class, however it is possible that + you might create an Entity with only a partial Key (that is, a Key + with a Kind, and possibly a parent, but without an ID). In such a + case, the datastore service will automatically assign an ID to the + partial key. + + Entities in this API act like dictionaries with extras built in that + allow you to delete or persist the data stored on the entity. + + Entities are mutable and act like a subclass of a dictionary. 
+    This means you could take an existing entity and change the key
+    to duplicate the object.
+
+    Use :func:`gcloud.datastore.get` to retrieve an existing entity.
+
+    >>> from gcloud import datastore
+    >>> client = datastore.Client()
+    >>> client.get(key)
+    
+
+    You can then set values on the entity just like you would on any
+    other dictionary.
+
+    >>> entity['age'] = 20
+    >>> entity['name'] = 'JJ'
+    >>> entity
+    
+
+    And you can convert an entity to a regular Python dictionary with the
+    ``dict`` builtin:
+
+    >>> dict(entity)
+    {'age': 20, 'name': 'JJ'}
+
+    .. note::
+
+        When saving an entity to the backend, values which are "text"
+        (``unicode`` in Python2, ``str`` in Python3) will be saved using
+        the 'text_value' field, after being encoded to UTF-8. When
+        retrieved from the back-end, such values will be decoded to "text"
+        again. Values which are "bytes" (``str`` in Python2, ``bytes`` in
+        Python3), will be saved using the 'blob_value' field, without
+        any decoding / encoding step.
+
+    :type key: :class:`gcloud.datastore.key.Key`
+    :param key: Optional key to be set on entity.
+
+    :type exclude_from_indexes: tuple of string
+    :param exclude_from_indexes: Names of fields whose values are not to be
+                                 indexed for this entity.
+    """
+
+    def __init__(self, key=None, exclude_from_indexes=()):
+        super(Entity, self).__init__()
+        self.key = key
+        self._exclude_from_indexes = set(_ensure_tuple_or_list(
+            'exclude_from_indexes', exclude_from_indexes))
+        # NOTE: This will be populated when parsing a protobuf in
+        # gcloud.datastore.helpers.entity_from_protobuf.
+        self._meanings = {}
+
+    def __eq__(self, other):
+        """Compare two entities for equality.
+
+        Entities compare equal if their keys compare equal, and their
+        properties compare equal.
+
+        :rtype: boolean
+        :returns: True if the entities compare equal, else False.
+ """ + if not isinstance(other, Entity): + return False + + return (self.key == other.key and + self._exclude_from_indexes == other._exclude_from_indexes and + self._meanings == other._meanings and + super(Entity, self).__eq__(other)) + + def __ne__(self, other): + """Compare two entities for inequality. + + Entities compare equal if their keys compare equal, and their + properties compare equal. + + :rtype: boolean + :returns: False if the entities compare equal, else True. + """ + return not self.__eq__(other) + + @property + def kind(self): + """Get the kind of the current entity. + + .. note:: + This relies entirely on the :class:`gcloud.datastore.key.Key` + set on the entity. That means that we're not storing the kind + of the entity at all, just the properties and a pointer to a + Key which knows its Kind. + """ + if self.key: + return self.key.kind + + @property + def exclude_from_indexes(self): + """Names of fields which are *not* to be indexed for this entity. + + :rtype: sequence of field names + """ + return frozenset(self._exclude_from_indexes) + + def __repr__(self): + if self.key: + return '' % (self.key.path, + super(Entity, self).__repr__()) + else: + return '' % (super(Entity, self).__repr__()) diff --git a/env/Lib/site-packages/gcloud/datastore/helpers.py b/env/Lib/site-packages/gcloud/datastore/helpers.py new file mode 100644 index 0000000..6b0ff82 --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/helpers.py @@ -0,0 +1,468 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helper functions for dealing with Cloud Datastore's Protobuf API. + +The non-private functions are part of the API. +""" + +import datetime +import itertools + +from google.protobuf import struct_pb2 +from google.type import latlng_pb2 +import six + +from gcloud._helpers import _datetime_to_pb_timestamp +from gcloud._helpers import _pb_timestamp_to_datetime +from gcloud.datastore._generated import entity_pb2 as _entity_pb2 +from gcloud.datastore.entity import Entity +from gcloud.datastore.key import Key + +__all__ = ('entity_from_protobuf', 'key_from_protobuf') + + +def _get_meaning(value_pb, is_list=False): + """Get the meaning from a protobuf value. + + :type value_pb: :class:`gcloud.datastore._generated.entity_pb2.Value` + :param value_pb: The protobuf value to be checked for an + associated meaning. + + :type is_list: bool + :param is_list: Boolean indicating if the ``value_pb`` contains + a list value. + + :rtype: int + :returns: The meaning for the ``value_pb`` if one is set, else + :data:`None`. For a list value, if there are disagreeing + means it just returns a list of meanings. If all the + list meanings agree, it just condenses them. + """ + meaning = None + if is_list: + # An empty list will have no values, hence no shared meaning + # set among them. + if len(value_pb.array_value.values) == 0: + return None + + # We check among all the meanings, some of which may be None, + # the rest which may be enum/int values. + all_meanings = [_get_meaning(sub_value_pb) + for sub_value_pb in value_pb.array_value.values] + unique_meanings = set(all_meanings) + if len(unique_meanings) == 1: + # If there is a unique meaning, we preserve it. + meaning = unique_meanings.pop() + else: # We know len(value_pb.array_value.values) > 0. + # If the meaning is not unique, just return all of them. 
+ meaning = all_meanings + elif value_pb.meaning: # Simple field (int32) + meaning = value_pb.meaning + + return meaning + + +def _new_value_pb(entity_pb, name): + """Add (by name) a new ``Value`` protobuf to an entity protobuf. + + :type entity_pb: :class:`gcloud.datastore._generated.entity_pb2.Entity` + :param entity_pb: An entity protobuf to add a new property to. + + :type name: string + :param name: The name of the new property. + + :rtype: :class:`gcloud.datastore._generated.entity_pb2.Value` + :returns: The new ``Value`` protobuf that was added to the entity. + """ + return entity_pb.properties.get_or_create(name) + + +def _property_tuples(entity_pb): + """Iterator of name, ``Value`` tuples from entity properties. + + :type entity_pb: :class:`gcloud.datastore._generated.entity_pb2.Entity` + :param entity_pb: An entity protobuf to add a new property to. + + :rtype: :class:`generator` + :returns: An iterator that yields tuples of a name and ``Value`` + corresponding to properties on the entity. + """ + return six.iteritems(entity_pb.properties) + + +def entity_from_protobuf(pb): + """Factory method for creating an entity based on a protobuf. + + The protobuf should be one returned from the Cloud Datastore + Protobuf API. + + :type pb: :class:`gcloud.datastore._generated.entity_pb2.Entity` + :param pb: The Protobuf representing the entity. + + :rtype: :class:`gcloud.datastore.entity.Entity` + :returns: The entity derived from the protobuf. + """ + key = None + if pb.HasField('key'): # Message field (Key) + key = key_from_protobuf(pb.key) + + entity_props = {} + entity_meanings = {} + exclude_from_indexes = [] + + for prop_name, value_pb in _property_tuples(pb): + value = _get_value_from_value_pb(value_pb) + entity_props[prop_name] = value + + # Check if the property has an associated meaning. 
+ is_list = isinstance(value, list) + meaning = _get_meaning(value_pb, is_list=is_list) + if meaning is not None: + entity_meanings[prop_name] = (meaning, value) + + # Check if ``value_pb`` was excluded from index. Lists need to be + # special-cased and we require all ``exclude_from_indexes`` values + # in a list agree. + if is_list: + exclude_values = set(value_pb.exclude_from_indexes + for value_pb in value_pb.array_value.values) + if len(exclude_values) != 1: + raise ValueError('For an array_value, subvalues must either ' + 'all be indexed or all excluded from ' + 'indexes.') + + if exclude_values.pop(): + exclude_from_indexes.append(prop_name) + else: + if value_pb.exclude_from_indexes: + exclude_from_indexes.append(prop_name) + + entity = Entity(key=key, exclude_from_indexes=exclude_from_indexes) + entity.update(entity_props) + entity._meanings.update(entity_meanings) + return entity + + +def _set_pb_meaning_from_entity(entity, name, value, value_pb, + is_list=False): + """Add meaning information (from an entity) to a protobuf. + + :type entity: :class:`gcloud.datastore.entity.Entity` + :param entity: The entity to be turned into a protobuf. + + :type name: string + :param name: The name of the property. + + :type value: object + :param value: The current value stored as property ``name``. + + :type value_pb: :class:`gcloud.datastore._generated.entity_pb2.Value` + :param value_pb: The protobuf value to add meaning / meanings to. + + :type is_list: bool + :param is_list: (Optional) Boolean indicating if the ``value`` is + a list value. + """ + if name not in entity._meanings: + return + + meaning, orig_value = entity._meanings[name] + # Only add the meaning back to the protobuf if the value is + # unchanged from when it was originally read from the API. + if orig_value is not value: + return + + # For lists, we set meaning on each sub-element. 
+ if is_list: + if not isinstance(meaning, list): + meaning = itertools.repeat(meaning) + val_iter = six.moves.zip(value_pb.array_value.values, + meaning) + for sub_value_pb, sub_meaning in val_iter: + if sub_meaning is not None: + sub_value_pb.meaning = sub_meaning + else: + value_pb.meaning = meaning + + +def entity_to_protobuf(entity): + """Converts an entity into a protobuf. + + :type entity: :class:`gcloud.datastore.entity.Entity` + :param entity: The entity to be turned into a protobuf. + + :rtype: :class:`gcloud.datastore._generated.entity_pb2.Entity` + :returns: The protobuf representing the entity. + """ + entity_pb = _entity_pb2.Entity() + if entity.key is not None: + key_pb = entity.key.to_protobuf() + entity_pb.key.CopyFrom(key_pb) + + for name, value in entity.items(): + value_is_list = isinstance(value, list) + if value_is_list and len(value) == 0: + continue + + value_pb = _new_value_pb(entity_pb, name) + # Set the appropriate value. + _set_protobuf_value(value_pb, value) + + # Add index information to protobuf. + if name in entity.exclude_from_indexes: + if not value_is_list: + value_pb.exclude_from_indexes = True + + for sub_value in value_pb.array_value.values: + sub_value.exclude_from_indexes = True + + # Add meaning information to protobuf. + _set_pb_meaning_from_entity(entity, name, value, value_pb, + is_list=value_is_list) + + return entity_pb + + +def key_from_protobuf(pb): + """Factory method for creating a key based on a protobuf. + + The protobuf should be one returned from the Cloud Datastore + Protobuf API. + + :type pb: :class:`gcloud.datastore._generated.entity_pb2.Key` + :param pb: The Protobuf representing the key. 
+ + :rtype: :class:`gcloud.datastore.key.Key` + :returns: a new `Key` instance + """ + path_args = [] + for element in pb.path: + path_args.append(element.kind) + if element.id: # Simple field (int64) + path_args.append(element.id) + # This is safe: we expect proto objects returned will only have + # one of `name` or `id` set. + if element.name: # Simple field (string) + path_args.append(element.name) + + project = None + if pb.partition_id.project_id: # Simple field (string) + project = pb.partition_id.project_id + namespace = None + if pb.partition_id.namespace_id: # Simple field (string) + namespace = pb.partition_id.namespace_id + + return Key(*path_args, namespace=namespace, project=project) + + +def _pb_attr_value(val): + """Given a value, return the protobuf attribute name and proper value. + + The Protobuf API uses different attribute names based on value types + rather than inferring the type. This function simply determines the + proper attribute name based on the type of the value provided and + returns the attribute name as well as a properly formatted value. + + Certain value types need to be coerced into a different type (such + as a `datetime.datetime` into an integer timestamp, or a + `gcloud.datastore.key.Key` into a Protobuf representation. This + function handles that for you. + + .. note:: + Values which are "text" ('unicode' in Python2, 'str' in Python3) map + to 'string_value' in the datastore; values which are "bytes" + ('str' in Python2, 'bytes' in Python3) map to 'blob_value'. + + For example: + + >>> _pb_attr_value(1234) + ('integer_value', 1234) + >>> _pb_attr_value('my_string') + ('string_value', 'my_string') + + :type val: `datetime.datetime`, :class:`gcloud.datastore.key.Key`, + bool, float, integer, string + :param val: The value to be scrutinized. + + :returns: A tuple of the attribute name and proper value type. 
+ """ + + if isinstance(val, datetime.datetime): + name = 'timestamp' + value = _datetime_to_pb_timestamp(val) + elif isinstance(val, Key): + name, value = 'key', val.to_protobuf() + elif isinstance(val, bool): + name, value = 'boolean', val + elif isinstance(val, float): + name, value = 'double', val + elif isinstance(val, six.integer_types): + name, value = 'integer', val + elif isinstance(val, six.text_type): + name, value = 'string', val + elif isinstance(val, (bytes, str)): + name, value = 'blob', val + elif isinstance(val, Entity): + name, value = 'entity', val + elif isinstance(val, list): + name, value = 'array', val + elif isinstance(val, GeoPoint): + name, value = 'geo_point', val.to_protobuf() + elif val is None: + name, value = 'null', struct_pb2.NULL_VALUE + else: + raise ValueError("Unknown protobuf attr type %s" % type(val)) + + return name + '_value', value + + +def _get_value_from_value_pb(value_pb): + """Given a protobuf for a Value, get the correct value. + + The Cloud Datastore Protobuf API returns a Property Protobuf which + has one value set and the rest blank. This function retrieves the + the one value provided. + + Some work is done to coerce the return value into a more useful type + (particularly in the case of a timestamp value, or a key value). + + :type value_pb: :class:`gcloud.datastore._generated.entity_pb2.Value` + :param value_pb: The Value Protobuf. + + :returns: The value provided by the Protobuf. + :raises: :class:`ValueError ` if no value type + has been set. 
+ """ + value_type = value_pb.WhichOneof('value_type') + + if value_type == 'timestamp_value': + result = _pb_timestamp_to_datetime(value_pb.timestamp_value) + + elif value_type == 'key_value': + result = key_from_protobuf(value_pb.key_value) + + elif value_type == 'boolean_value': + result = value_pb.boolean_value + + elif value_type == 'double_value': + result = value_pb.double_value + + elif value_type == 'integer_value': + result = value_pb.integer_value + + elif value_type == 'string_value': + result = value_pb.string_value + + elif value_type == 'blob_value': + result = value_pb.blob_value + + elif value_type == 'entity_value': + result = entity_from_protobuf(value_pb.entity_value) + + elif value_type == 'array_value': + result = [_get_value_from_value_pb(value) + for value in value_pb.array_value.values] + + elif value_type == 'geo_point_value': + result = GeoPoint(value_pb.geo_point_value.latitude, + value_pb.geo_point_value.longitude) + + elif value_type == 'null_value': + result = None + + else: + raise ValueError('Value protobuf did not have any value set') + + return result + + +def _set_protobuf_value(value_pb, val): + """Assign 'val' to the correct subfield of 'value_pb'. + + The Protobuf API uses different attribute names based on value types + rather than inferring the type. + + Some value types (entities, keys, lists) cannot be directly + assigned; this function handles them correctly. + + :type value_pb: :class:`gcloud.datastore._generated.entity_pb2.Value` + :param value_pb: The value protobuf to which the value is being assigned. + + :type val: :class:`datetime.datetime`, boolean, float, integer, string, + :class:`gcloud.datastore.key.Key`, + :class:`gcloud.datastore.entity.Entity` + :param val: The value to be assigned. 
+ """ + attr, val = _pb_attr_value(val) + if attr == 'key_value': + value_pb.key_value.CopyFrom(val) + elif attr == 'timestamp_value': + value_pb.timestamp_value.CopyFrom(val) + elif attr == 'entity_value': + entity_pb = entity_to_protobuf(val) + value_pb.entity_value.CopyFrom(entity_pb) + elif attr == 'array_value': + l_pb = value_pb.array_value.values + for item in val: + i_pb = l_pb.add() + _set_protobuf_value(i_pb, item) + elif attr == 'geo_point_value': + value_pb.geo_point_value.CopyFrom(val) + else: # scalar, just assign + setattr(value_pb, attr, val) + + +class GeoPoint(object): + """Simple container for a geo point value. + + :type latitude: float + :param latitude: Latitude of a point. + + :type longitude: float + :param longitude: Longitude of a point. + """ + + def __init__(self, latitude, longitude): + self.latitude = latitude + self.longitude = longitude + + def to_protobuf(self): + """Convert the current object to protobuf. + + :rtype: :class:`google.type.latlng_pb2.LatLng`. + :returns: The current point as a protobuf. + """ + return latlng_pb2.LatLng(latitude=self.latitude, + longitude=self.longitude) + + def __eq__(self, other): + """Compare two geo points for equality. + + :rtype: boolean + :returns: True if the points compare equal, else False. + """ + if not isinstance(other, GeoPoint): + return False + + return (self.latitude == other.latitude and + self.longitude == other.longitude) + + def __ne__(self, other): + """Compare two geo points for inequality. + + :rtype: boolean + :returns: False if the points compare equal, else True. + """ + return not self.__eq__(other) diff --git a/env/Lib/site-packages/gcloud/datastore/key.py b/env/Lib/site-packages/gcloud/datastore/key.py new file mode 100644 index 0000000..a1356dc --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/key.py @@ -0,0 +1,404 @@ +# Copyright 2014 Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Create / interact with gcloud datastore keys.""" + +import copy +import six + +from gcloud.datastore._generated import entity_pb2 as _entity_pb2 + + +class Key(object): + """An immutable representation of a datastore Key. + + To create a basic key: + + >>> Key('EntityKind', 1234) + + >>> Key('EntityKind', 'foo') + + + To create a key with a parent: + + >>> Key('Parent', 'foo', 'Child', 1234) + + >>> Key('Child', 1234, parent=parent_key) + + + To create a partial key: + + >>> Key('Parent', 'foo', 'Child') + + + :type path_args: tuple of string and integer + :param path_args: May represent a partial (odd length) or full (even + length) key path. + + :type kwargs: dict + :param kwargs: Keyword arguments to be passed in. + + Accepted keyword arguments are + + * namespace (string): A namespace identifier for the key. + * project (string): The project associated with the key. + * parent (:class:`gcloud.datastore.key.Key`): The parent of the key. + + The project argument is required unless it has been set implicitly. + """ + + def __init__(self, *path_args, **kwargs): + self._flat_path = path_args + parent = self._parent = kwargs.get('parent') + self._namespace = kwargs.get('namespace') + project = kwargs.get('project') + self._project = _validate_project(project, parent) + # _flat_path, _parent, _namespace and _project must be set before + # _combine_args() is called. 
+ self._path = self._combine_args() + + def __eq__(self, other): + """Compare two keys for equality. + + Incomplete keys never compare equal to any other key. + + Completed keys compare equal if they have the same path, project, + and namespace. + + :rtype: bool + :returns: True if the keys compare equal, else False. + """ + if not isinstance(other, Key): + return False + + if self.is_partial or other.is_partial: + return False + + return (self.flat_path == other.flat_path and + self.project == other.project and + self.namespace == other.namespace) + + def __ne__(self, other): + """Compare two keys for inequality. + + Incomplete keys never compare equal to any other key. + + Completed keys compare equal if they have the same path, project, + and namespace. + + :rtype: bool + :returns: False if the keys compare equal, else True. + """ + return not self.__eq__(other) + + def __hash__(self): + """Hash a keys for use in a dictionary lookp. + + :rtype: integer + :returns: a hash of the key's state. + """ + return (hash(self.flat_path) + + hash(self.project) + + hash(self.namespace)) + + @staticmethod + def _parse_path(path_args): + """Parses positional arguments into key path with kinds and IDs. + + :type path_args: tuple + :param path_args: A tuple from positional arguments. Should be + alternating list of kinds (string) and ID/name + parts (int or string). + + :rtype: :class:`list` of :class:`dict` + :returns: A list of key parts with kind and ID or name set. + :raises: :class:`ValueError` if there are no ``path_args``, if one of + the kinds is not a string or if one of the IDs/names is not + a string or an integer. + """ + if len(path_args) == 0: + raise ValueError('Key path must not be empty.') + + kind_list = path_args[::2] + id_or_name_list = path_args[1::2] + # Dummy sentinel value to pad incomplete key to even length path. 
+ partial_ending = object() + if len(path_args) % 2 == 1: + id_or_name_list += (partial_ending,) + + result = [] + for kind, id_or_name in zip(kind_list, id_or_name_list): + curr_key_part = {} + if isinstance(kind, six.string_types): + curr_key_part['kind'] = kind + else: + raise ValueError(kind, 'Kind was not a string.') + + if isinstance(id_or_name, six.string_types): + curr_key_part['name'] = id_or_name + elif isinstance(id_or_name, six.integer_types): + curr_key_part['id'] = id_or_name + elif id_or_name is not partial_ending: + raise ValueError(id_or_name, + 'ID/name was not a string or integer.') + + result.append(curr_key_part) + + return result + + def _combine_args(self): + """Sets protected data by combining raw data set from the constructor. + + If a ``_parent`` is set, updates the ``_flat_path`` and sets the + ``_namespace`` and ``_project`` if not already set. + + :rtype: :class:`list` of :class:`dict` + :returns: A list of key parts with kind and ID or name set. + :raises: :class:`ValueError` if the parent key is not complete. + """ + child_path = self._parse_path(self._flat_path) + + if self._parent is not None: + if self._parent.is_partial: + raise ValueError('Parent key must be complete.') + + # We know that _parent.path() will return a copy. + child_path = self._parent.path + child_path + self._flat_path = self._parent.flat_path + self._flat_path + if (self._namespace is not None and + self._namespace != self._parent.namespace): + raise ValueError('Child namespace must agree with parent\'s.') + self._namespace = self._parent.namespace + if (self._project is not None and + self._project != self._parent.project): + raise ValueError('Child project must agree with parent\'s.') + self._project = self._parent.project + + return child_path + + def _clone(self): + """Duplicates the Key. + + Most attributes are simple types, so don't require copying. Other + attributes like ``parent`` are long-lived and so we re-use them. 
+ + :rtype: :class:`gcloud.datastore.key.Key` + :returns: A new ``Key`` instance with the same data as the current one. + """ + cloned_self = self.__class__(*self.flat_path, + project=self.project, + namespace=self.namespace) + # If the current parent has already been set, we re-use + # the same instance + cloned_self._parent = self._parent + return cloned_self + + def completed_key(self, id_or_name): + """Creates new key from existing partial key by adding final ID/name. + + :type id_or_name: string or integer + :param id_or_name: ID or name to be added to the key. + + :rtype: :class:`gcloud.datastore.key.Key` + :returns: A new ``Key`` instance with the same data as the current one + and an extra ID or name added. + :raises: :class:`ValueError` if the current key is not partial or if + ``id_or_name`` is not a string or integer. + """ + if not self.is_partial: + raise ValueError('Only a partial key can be completed.') + + id_or_name_key = None + if isinstance(id_or_name, six.string_types): + id_or_name_key = 'name' + elif isinstance(id_or_name, six.integer_types): + id_or_name_key = 'id' + else: + raise ValueError(id_or_name, + 'ID/name was not a string or integer.') + + new_key = self._clone() + new_key._path[-1][id_or_name_key] = id_or_name + new_key._flat_path += (id_or_name,) + return new_key + + def to_protobuf(self): + """Return a protobuf corresponding to the key. + + :rtype: :class:`gcloud.datastore._generated.entity_pb2.Key` + :returns: The protobuf representing the key. + """ + key = _entity_pb2.Key() + key.partition_id.project_id = self.project + + if self.namespace: + key.partition_id.namespace_id = self.namespace + + for item in self.path: + element = key.path.add() + if 'kind' in item: + element.kind = item['kind'] + if 'id' in item: + element.id = item['id'] + if 'name' in item: + element.name = item['name'] + + return key + + @property + def is_partial(self): + """Boolean indicating if the key has an ID (or name). 
+ + :rtype: bool + :returns: ``True`` if the last element of the key's path does not have + an ``id`` or a ``name``. + """ + return self.id_or_name is None + + @property + def namespace(self): + """Namespace getter. + + :rtype: string + :returns: The namespace of the current key. + """ + return self._namespace + + @property + def path(self): + """Path getter. + + Returns a copy so that the key remains immutable. + + :rtype: :class:`list` of :class:`dict` + :returns: The (key) path of the current key. + """ + return copy.deepcopy(self._path) + + @property + def flat_path(self): + """Getter for the key path as a tuple. + + :rtype: tuple of string and integer + :returns: The tuple of elements in the path. + """ + return self._flat_path + + @property + def kind(self): + """Kind getter. Based on the last element of path. + + :rtype: string + :returns: The kind of the current key. + """ + return self.path[-1]['kind'] + + @property + def id(self): + """ID getter. Based on the last element of path. + + :rtype: integer + :returns: The (integer) ID of the key. + """ + return self.path[-1].get('id') + + @property + def name(self): + """Name getter. Based on the last element of path. + + :rtype: string + :returns: The (string) name of the key. + """ + return self.path[-1].get('name') + + @property + def id_or_name(self): + """Getter. Based on the last element of path. + + :rtype: integer (if ``id``) or string (if ``name``) + :returns: The last element of the key's path if it is either an ``id`` + or a ``name``. + """ + return self.id or self.name + + @property + def project(self): + """Project getter. + + :rtype: string + :returns: The key's project. + """ + return self._project + + def _make_parent(self): + """Creates a parent key for the current path. + + Extracts all but the last element in the key path and creates a new + key, while still matching the namespace and the project. 
+ + :rtype: :class:`gcloud.datastore.key.Key` or :class:`NoneType` + :returns: A new ``Key`` instance, whose path consists of all but the + last element of current path. If the current key has only + one path element, returns ``None``. + """ + if self.is_partial: + parent_args = self.flat_path[:-1] + else: + parent_args = self.flat_path[:-2] + if parent_args: + return self.__class__(*parent_args, project=self.project, + namespace=self.namespace) + + @property + def parent(self): + """The parent of the current key. + + :rtype: :class:`gcloud.datastore.key.Key` or :class:`NoneType` + :returns: A new ``Key`` instance, whose path consists of all but the + last element of current path. If the current key has only + one path element, returns ``None``. + """ + if self._parent is None: + self._parent = self._make_parent() + + return self._parent + + def __repr__(self): + return '' % (self.path, self.project) + + +def _validate_project(project, parent): + """Ensure the project is set appropriately. + + If ``parent`` is passed, skip the test (it will be checked / fixed up + later). + + If ``project`` is unset, attempt to infer the project from the environment. + + :type project: string + :param project: A project. + + :type parent: :class:`gcloud.datastore.key.Key` or ``NoneType`` + :param parent: The parent of the key or ``None``. + + :rtype: string + :returns: The ``project`` passed in, or implied from the environment. + :raises: :class:`ValueError` if ``project`` is ``None`` and no project + can be inferred from the parent. + """ + if parent is None: + if project is None: + raise ValueError("A Key must have a project set.") + + return project diff --git a/env/Lib/site-packages/gcloud/datastore/query.py b/env/Lib/site-packages/gcloud/datastore/query.py new file mode 100644 index 0000000..a6ce955 --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/query.py @@ -0,0 +1,531 @@ +# Copyright 2014 Google Inc. All rights reserved. 
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Create / interact with gcloud datastore queries."""

import base64

from gcloud._helpers import _ensure_tuple_or_list
from gcloud.datastore._generated import query_pb2 as _query_pb2
from gcloud.datastore import helpers
from gcloud.datastore.key import Key


class Query(object):
    """A Query against the Cloud Datastore.

    This class serves as an abstraction for creating a query over data
    stored in the Cloud Datastore.

    :type client: :class:`gcloud.datastore.client.Client`
    :param client: The client used to connect to datastore.

    :type kind: string
    :param kind: The kind to query.

    :type project: string
    :param project: The project associated with the query.  If not passed,
                    uses the client's value.

    :type namespace: string or None
    :param namespace: The namespace to which to restrict results.  If not
                      passed, uses the client's value.

    :type ancestor: :class:`gcloud.datastore.key.Key` or None
    :param ancestor: key of the ancestor to which this query's results are
                     restricted.

    :type filters: sequence of (property_name, operator, value) tuples
    :param filters: property filters applied by this query.

    :type projection: sequence of string
    :param projection: fields returned as part of query results.

    :type order: sequence of string
    :param order: field names used to order query results. Prepend '-'
                  to a field name to sort it in descending order.

    :type distinct_on: sequence of string
    :param distinct_on: field names used to group query results.

    :raises: ValueError if ``project`` is not passed and no implicit
             default is set.
    """

    OPERATORS = {
        '<=': _query_pb2.PropertyFilter.LESS_THAN_OR_EQUAL,
        '>=': _query_pb2.PropertyFilter.GREATER_THAN_OR_EQUAL,
        '<': _query_pb2.PropertyFilter.LESS_THAN,
        '>': _query_pb2.PropertyFilter.GREATER_THAN,
        '=': _query_pb2.PropertyFilter.EQUAL,
    }
    """Mapping of operator strings and their protobuf equivalents."""

    def __init__(self,
                 client,
                 kind=None,
                 project=None,
                 namespace=None,
                 ancestor=None,
                 filters=(),
                 projection=(),
                 order=(),
                 distinct_on=()):

        self._client = client
        self._kind = kind
        self._project = project or client.project
        self._namespace = namespace or client.namespace
        self._ancestor = ancestor
        self._filters = []
        # Verify filters passed in.
        for property_name, operator, value in filters:
            self.add_filter(property_name, operator, value)
        self._projection = _ensure_tuple_or_list('projection', projection)
        self._order = _ensure_tuple_or_list('order', order)
        self._distinct_on = _ensure_tuple_or_list('distinct_on', distinct_on)

    @property
    def project(self):
        """Get the project for this Query.

        :rtype: str
        """
        return self._project or self._client.project

    @property
    def namespace(self):
        """This query's namespace

        :rtype: string or None
        :returns: the namespace assigned to this query
        """
        return self._namespace or self._client.namespace

    @namespace.setter
    def namespace(self, value):
        """Update the query's namespace.

        :type value: string
        :raises: :class:`ValueError` if ``value`` is not a string.
        """
        if not isinstance(value, str):
            raise ValueError("Namespace must be a string")
        self._namespace = value

    @property
    def kind(self):
        """Get the Kind of the Query.

        :rtype: string
        """
        return self._kind

    @kind.setter
    def kind(self, value):
        """Update the Kind of the Query.

        :type value: string
        :param value: updated kind for the query.

        .. note::

            The protobuf specification allows for ``kind`` to be repeated,
            but the current implementation returns an error if more than
            one value is passed.  If the back-end changes in the future to
            allow multiple values, this method will be updated to allow passing
            either a string or a sequence of strings.
        """
        if not isinstance(value, str):
            raise TypeError("Kind must be a string")
        self._kind = value

    @property
    def ancestor(self):
        """The ancestor key for the query.

        :rtype: Key or None
        """
        return self._ancestor

    @ancestor.setter
    def ancestor(self, value):
        """Set the ancestor for the query

        :type value: Key
        :param value: the new ancestor key
        """
        if not isinstance(value, Key):
            raise TypeError("Ancestor must be a Key")
        self._ancestor = value

    @ancestor.deleter
    def ancestor(self):
        """Remove the ancestor for the query."""
        self._ancestor = None

    @property
    def filters(self):
        """Filters set on the query.

        Returns a shallow copy so callers cannot mutate our internal list.

        :rtype: sequence of (property_name, operator, value) tuples.
        """
        return self._filters[:]

    def add_filter(self, property_name, operator, value):
        """Filter the query based on a property name, operator and a value.

        Expressions take the form of::

            .add_filter('<property>', '<operator>', <value>)

        where property is a property stored on the entity in the datastore
        and operator is one of ``OPERATORS``
        (ie, ``=``, ``<``, ``<=``, ``>``, ``>=``)::

            >>> from gcloud import datastore
            >>> client = datastore.Client()
            >>> query = client.query(kind='Person')
            >>> query.add_filter('name', '=', 'James')
            >>> query.add_filter('age', '>', 50)

        :type property_name: string
        :param property_name: A property name.

        :type operator: string
        :param operator: One of ``=``, ``<``, ``<=``, ``>``, ``>=``.

        :type value: :class:`int`, :class:`str`, :class:`bool`,
                     :class:`float`, :class:`NoneType`,
                     :class:`datetime.datetime`,
                     :class:`gcloud.datastore.key.Key`
        :param value: The value to filter on.

        :raises: :class:`ValueError` if ``operation`` is not one of the
                 specified values, or if a filter names ``'__key__'`` but
                 passes an invalid value (a key is required).
        """
        if self.OPERATORS.get(operator) is None:
            error_message = 'Invalid expression: "%s"' % (operator,)
            choices_message = 'Please use one of: =, <, <=, >, >=.'
            raise ValueError(error_message, choices_message)

        if property_name == '__key__' and not isinstance(value, Key):
            raise ValueError('Invalid key: "%s"' % value)

        self._filters.append((property_name, operator, value))

    @property
    def projection(self):
        """Fields names returned by the query.

        :rtype: sequence of string
        :returns: Names of fields in query results.
        """
        return self._projection[:]

    @projection.setter
    def projection(self, projection):
        """Set the fields returned the query.

        :type projection: string or sequence of strings
        :param projection: Each value is a string giving the name of a
                           property to be included in the projection query.
        """
        if isinstance(projection, str):
            projection = [projection]
        self._projection[:] = projection

    def keys_only(self):
        """Set the projection to include only keys."""
        self._projection[:] = ['__key__']

    def key_filter(self, key, operator='='):
        """Filter on a key.

        :type key: :class:`gcloud.datastore.key.Key`
        :param key: The key to filter on.

        :type operator: string
        :param operator: (Optional) One of ``=``, ``<``, ``<=``, ``>``, ``>=``.
                         Defaults to ``=``.
        """
        self.add_filter('__key__', operator, key)

    @property
    def order(self):
        """Names of fields used to sort query results.

        :rtype: sequence of string
        """
        return self._order[:]

    @order.setter
    def order(self, value):
        """Set the fields used to sort query results.

        Sort fields will be applied in the order specified.

        :type value: string or sequence of strings
        :param value: Each value is a string giving the name of the
                      property on which to sort, optionally preceded by a
                      hyphen (-) to specify descending order.
                      Omitting the hyphen implies ascending order.
        """
        if isinstance(value, str):
            value = [value]
        self._order[:] = value

    @property
    def distinct_on(self):
        """Names of fields used to group query results.

        :rtype: sequence of string
        """
        return self._distinct_on[:]

    @distinct_on.setter
    def distinct_on(self, value):
        """Set fields used to group query results.

        :type value: string or sequence of strings
        :param value: Each value is a string giving the name of a
                      property to use to group results together.
        """
        if isinstance(value, str):
            value = [value]
        self._distinct_on[:] = value

    def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None,
              client=None):
        """Execute the Query; return an iterator for the matching entities.

        For example::

            >>> from gcloud import datastore
            >>> client = datastore.Client()
            >>> query = client.query(kind='Person')
            >>> query.add_filter('name', '=', 'Sally')
            >>> list(query.fetch())
            [<Entity object>, <Entity object>, ...]
            >>> list(query.fetch(1))
            [<Entity object>]

        :type limit: integer or None
        :param limit: An optional limit passed through to the iterator.

        :type offset: integer
        :param offset: An optional offset passed through to the iterator.

        :type start_cursor: bytes
        :param start_cursor: An optional cursor passed through to the iterator.

        :type end_cursor: bytes
        :param end_cursor: An optional cursor passed through to the iterator.

        :type client: :class:`gcloud.datastore.client.Client`
        :param client: client used to connect to datastore.
                       If not supplied, uses the query's value.

        :rtype: :class:`Iterator`
        :raises: ValueError if ``connection`` is not passed and no implicit
                 default has been set.
        """
        if client is None:
            client = self._client

        return Iterator(
            self, client, limit, offset, start_cursor, end_cursor)


class Iterator(object):
    """Represent the state of a given execution of a Query.

    :type query: :class:`gcloud.datastore.query.Query`
    :param query: Query object holding permanent configuration (i.e.
                  things that don't change on with each page in
                  a results set).

    :type client: :class:`gcloud.datastore.client.Client`
    :param client: The client used to make a request.

    :type limit: integer
    :param limit: (Optional) Limit the number of results returned.

    :type offset: integer
    :param offset: (Optional) Offset used to begin a query.

    :type start_cursor: bytes
    :param start_cursor: (Optional) Cursor to begin paging through
                         query results.

    :type end_cursor: bytes
    :param end_cursor: (Optional) Cursor to end paging through
                       query results.
    """

    # Enum value meaning "more pages remain" on the back-end.
    _NOT_FINISHED = _query_pb2.QueryResultBatch.NOT_FINISHED

    # Enum values that all mean "this result set is exhausted".
    _FINISHED = (
        _query_pb2.QueryResultBatch.NO_MORE_RESULTS,
        _query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT,
        _query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_CURSOR,
    )

    def __init__(self, query, client, limit=None, offset=None,
                 start_cursor=None, end_cursor=None):
        self._query = query
        self._client = client
        self._limit = limit
        self._offset = offset
        self._start_cursor = start_cursor
        self._end_cursor = end_cursor
        self._page = self._more_results = None
        self._skipped_results = None

    def next_page(self):
        """Fetch a single "page" of query results.

        Low-level API for fine control:  the more convenient API is
        to iterate on the current Iterator.

        :rtype: tuple, (entities, more_results, cursor)
        """
        pb = _pb_from_query(self._query)

        # Cursors are handed to callers as URL-safe base64; the protobuf
        # wants the raw bytes.
        start_cursor = self._start_cursor
        if start_cursor is not None:
            pb.start_cursor = base64.urlsafe_b64decode(start_cursor)

        end_cursor = self._end_cursor
        if end_cursor is not None:
            pb.end_cursor = base64.urlsafe_b64decode(end_cursor)

        if self._limit is not None:
            pb.limit.value = self._limit

        if self._offset is not None:
            pb.offset = self._offset

        transaction = self._client.current_transaction

        query_results = self._client.connection.run_query(
            query_pb=pb,
            project=self._query.project,
            namespace=self._query.namespace,
            transaction_id=transaction and transaction.id,
            )
        (entity_pbs, cursor_as_bytes,
         more_results_enum, self._skipped_results) = query_results

        if cursor_as_bytes == b'':
            self._start_cursor = None
        else:
            self._start_cursor = base64.urlsafe_b64encode(cursor_as_bytes)
        self._end_cursor = None

        if more_results_enum == self._NOT_FINISHED:
            self._more_results = True
        elif more_results_enum in self._FINISHED:
            self._more_results = False
        else:
            raise ValueError('Unexpected value returned for `more_results`.')

        self._page = [
            helpers.entity_from_protobuf(entity)
            for entity in entity_pbs]
        return self._page, self._more_results, self._start_cursor

    def __iter__(self):
        """Generator yielding all results matching our query.

        :rtype: sequence of :class:`gcloud.datastore.entity.Entity`
        """
        while True:
            self.next_page()
            for entity in self._page:
                yield entity
            if not self._more_results:
                break
            num_results = len(self._page)
            # Shrink the remaining limit by the page we just consumed.
            if self._limit is not None:
                self._limit -= num_results
            if self._offset is not None and self._skipped_results is not None:
                # NOTE: The offset goes down relative to the location
                #       because we are updating the cursor each time.
                self._offset -= self._skipped_results


def _pb_from_query(query):
    """Convert a Query instance to the corresponding protobuf.

    :type query: :class:`Query`
    :param query: The source query.

    :rtype: :class:`gcloud.datastore._generated.query_pb2.Query`
    :returns: A protobuf that can be sent to the protobuf API.  N.b. that
              it does not contain "in-flight" fields for ongoing query
              executions (cursors, offset, limit).
    """
    pb = _query_pb2.Query()

    for projection_name in query.projection:
        pb.projection.add().property.name = projection_name

    if query.kind:
        pb.kind.add().name = query.kind

    composite_filter = pb.filter.composite_filter
    composite_filter.op = _query_pb2.CompositeFilter.AND

    if query.ancestor:
        ancestor_pb = query.ancestor.to_protobuf()

        # Filter on __key__ HAS_ANCESTOR == ancestor.
        ancestor_filter = composite_filter.filters.add().property_filter
        ancestor_filter.property.name = '__key__'
        ancestor_filter.op = _query_pb2.PropertyFilter.HAS_ANCESTOR
        ancestor_filter.value.key_value.CopyFrom(ancestor_pb)

    for property_name, operator, value in query.filters:
        pb_op_enum = query.OPERATORS.get(operator)

        # Add the specific filter
        property_filter = composite_filter.filters.add().property_filter
        property_filter.property.name = property_name
        property_filter.op = pb_op_enum

        # Set the value to filter on based on the type.
        if property_name == '__key__':
            key_pb = value.to_protobuf()
            property_filter.value.key_value.CopyFrom(key_pb)
        else:
            helpers._set_protobuf_value(property_filter.value, value)

    if not composite_filter.filters:
        pb.ClearField('filter')

    for prop in query.order:
        property_order = pb.order.add()

        if prop.startswith('-'):
            property_order.property.name = prop[1:]
            property_order.direction = property_order.DESCENDING
        else:
            property_order.property.name = prop
            property_order.direction = property_order.ASCENDING

    for distinct_on_name in query.distinct_on:
        pb.distinct_on.add().name = distinct_on_name

    return pb

# Unit tests for gcloud.datastore.batch.Batch, exercised through
# lightweight in-memory test doubles (no network, no real protobufs
# except where the real generated classes are required).
import unittest2


class TestBatch(unittest2.TestCase):

    def _getTargetClass(self):
        # Imported lazily so a broken import surfaces as a test failure,
        # not a collection error.
        from gcloud.datastore.batch import Batch

        return Batch

    def _makeOne(self, client):
        return self._getTargetClass()(client)

    def test_ctor(self):
        from gcloud.datastore._generated import datastore_pb2
        _PROJECT = 'PROJECT'
        _NAMESPACE = 'NAMESPACE'
        connection = _Connection()
        client = _Client(_PROJECT, connection, _NAMESPACE)
        batch = self._makeOne(client)

        self.assertEqual(batch.project, _PROJECT)
        self.assertEqual(batch.connection, connection)
        self.assertEqual(batch.namespace, _NAMESPACE)
        self.assertTrue(batch._id is None)
        self.assertEqual(batch._status, batch._INITIAL)
        self.assertTrue(isinstance(batch._commit_request,
                                   datastore_pb2.CommitRequest))
        self.assertTrue(batch.mutations is batch._commit_request.mutations)
        self.assertEqual(batch._partial_key_entities, [])

    def test_current(self):
        # Entering/exiting batches as context managers must maintain a
        # client-wide stack of "current" batches.
        _PROJECT = 'PROJECT'
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        batch1 = self._makeOne(client)
        batch2 = self._makeOne(client)
        self.assertTrue(batch1.current() is None)
        self.assertTrue(batch2.current() is None)
        with batch1:
            self.assertTrue(batch1.current() is batch1)
            self.assertTrue(batch2.current() is batch1)
            with batch2:
                self.assertTrue(batch1.current() is batch2)
                self.assertTrue(batch2.current() is batch2)
            self.assertTrue(batch1.current() is batch1)
            self.assertTrue(batch2.current() is batch1)
        self.assertTrue(batch1.current() is None)
        self.assertTrue(batch2.current() is None)

    def test_put_entity_wo_key(self):
        _PROJECT = 'PROJECT'
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        batch = self._makeOne(client)

        self.assertRaises(ValueError, batch.put, _Entity())

    def test_put_entity_w_key_wrong_project(self):
        _PROJECT = 'PROJECT'
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        batch = self._makeOne(client)
        entity = _Entity()
        entity.key = _Key('OTHER')

        self.assertRaises(ValueError, batch.put, entity)

    def test_put_entity_w_partial_key(self):
        _PROJECT = 'PROJECT'
        _PROPERTIES = {'foo': 'bar'}
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        batch = self._makeOne(client)
        entity = _Entity(_PROPERTIES)
        key = entity.key = _Key(_PROJECT)
        key._id = None

        batch.put(entity)

        # A partial key means an 'insert' mutation and the entity is
        # remembered so its key can be completed after commit.
        mutated_entity = _mutated_pb(self, batch.mutations, 'insert')
        self.assertEqual(mutated_entity.key, key._key)
        self.assertEqual(batch._partial_key_entities, [entity])

    def test_put_entity_w_completed_key(self):
        from gcloud.datastore.helpers import _property_tuples

        _PROJECT = 'PROJECT'
        _PROPERTIES = {
            'foo': 'bar',
            'baz': 'qux',
            'spam': [1, 2, 3],
            'frotz': [],  # will be ignored
        }
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        batch = self._makeOne(client)
        entity = _Entity(_PROPERTIES)
        entity.exclude_from_indexes = ('baz', 'spam')
        key = entity.key = _Key(_PROJECT)

        batch.put(entity)

        mutated_entity = _mutated_pb(self, batch.mutations, 'upsert')
        self.assertEqual(mutated_entity.key, key._key)

        prop_dict = dict(_property_tuples(mutated_entity))
        self.assertEqual(len(prop_dict), 3)
        self.assertFalse(prop_dict['foo'].exclude_from_indexes)
        self.assertTrue(prop_dict['baz'].exclude_from_indexes)
        # For a list property the exclusion is applied to each element,
        # not to the array value itself.
        self.assertFalse(prop_dict['spam'].exclude_from_indexes)
        spam_values = prop_dict['spam'].array_value.values
        self.assertTrue(spam_values[0].exclude_from_indexes)
        self.assertTrue(spam_values[1].exclude_from_indexes)
        self.assertTrue(spam_values[2].exclude_from_indexes)
        self.assertFalse('frotz' in prop_dict)

    def test_delete_w_partial_key(self):
        _PROJECT = 'PROJECT'
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        batch = self._makeOne(client)
        key = _Key(_PROJECT)
        key._id = None

        self.assertRaises(ValueError, batch.delete, key)

    def test_delete_w_key_wrong_project(self):
        _PROJECT = 'PROJECT'
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        batch = self._makeOne(client)
        key = _Key('OTHER')

        self.assertRaises(ValueError, batch.delete, key)

    def test_delete_w_completed_key(self):
        _PROJECT = 'PROJECT'
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        batch = self._makeOne(client)
        key = _Key(_PROJECT)

        batch.delete(key)

        mutated_key = _mutated_pb(self, batch.mutations, 'delete')
        self.assertEqual(mutated_key, key._key)

    def test_begin(self):
        _PROJECT = 'PROJECT'
        client = _Client(_PROJECT, None)
        batch = self._makeOne(client)
        self.assertEqual(batch._status, batch._INITIAL)
        batch.begin()
        self.assertEqual(batch._status, batch._IN_PROGRESS)

    def test_begin_fail(self):
        # begin() is only legal from the initial state.
        _PROJECT = 'PROJECT'
        client = _Client(_PROJECT, None)
        batch = self._makeOne(client)
        batch._status = batch._IN_PROGRESS
        with self.assertRaises(ValueError):
            batch.begin()

    def test_rollback(self):
        _PROJECT = 'PROJECT'
        client = _Client(_PROJECT, None)
        batch = self._makeOne(client)
        self.assertEqual(batch._status, batch._INITIAL)
        batch.rollback()
        self.assertEqual(batch._status, batch._ABORTED)

    def test_commit(self):
        _PROJECT = 'PROJECT'
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        batch = self._makeOne(client)

        self.assertEqual(batch._status, batch._INITIAL)
        batch.commit()
        self.assertEqual(batch._status, batch._FINISHED)

        self.assertEqual(connection._committed,
                         [(_PROJECT, batch._commit_request, None)])

    def test_commit_w_partial_key_entities(self):
        # After commit, entities that were stored with partial keys must
        # have their keys completed with the server-assigned IDs.
        _PROJECT = 'PROJECT'
        _NEW_ID = 1234
        connection = _Connection(_NEW_ID)
        client = _Client(_PROJECT, connection)
        batch = self._makeOne(client)
        entity = _Entity({})
        key = entity.key = _Key(_PROJECT)
        key._id = None
        batch._partial_key_entities.append(entity)

        self.assertEqual(batch._status, batch._INITIAL)
        batch.commit()
        self.assertEqual(batch._status, batch._FINISHED)

        self.assertEqual(connection._committed,
                         [(_PROJECT, batch._commit_request, None)])
        self.assertFalse(entity.key.is_partial)
        self.assertEqual(entity.key._id, _NEW_ID)

    def test_as_context_mgr_wo_error(self):
        _PROJECT = 'PROJECT'
        _PROPERTIES = {'foo': 'bar'}
        connection = _Connection()
        entity = _Entity(_PROPERTIES)
        key = entity.key = _Key(_PROJECT)

        client = _Client(_PROJECT, connection)
        self.assertEqual(list(client._batches), [])

        with self._makeOne(client) as batch:
            self.assertEqual(list(client._batches), [batch])
            batch.put(entity)

        self.assertEqual(list(client._batches), [])

        mutated_entity = _mutated_pb(self, batch.mutations, 'upsert')
        self.assertEqual(mutated_entity.key, key._key)
        self.assertEqual(connection._committed,
                         [(_PROJECT, batch._commit_request, None)])

    def test_as_context_mgr_nested(self):
        _PROJECT = 'PROJECT'
        _PROPERTIES = {'foo': 'bar'}
        connection = _Connection()
        entity1 = _Entity(_PROPERTIES)
        key1 = entity1.key = _Key(_PROJECT)
        entity2 = _Entity(_PROPERTIES)
        key2 = entity2.key = _Key(_PROJECT)

        client = _Client(_PROJECT, connection)
        self.assertEqual(list(client._batches), [])

        with self._makeOne(client) as batch1:
            self.assertEqual(list(client._batches), [batch1])
            batch1.put(entity1)
            with self._makeOne(client) as batch2:
                self.assertEqual(list(client._batches), [batch2, batch1])
                batch2.put(entity2)

            self.assertEqual(list(client._batches), [batch1])

        self.assertEqual(list(client._batches), [])

        mutated_entity1 = _mutated_pb(self, batch1.mutations, 'upsert')
        self.assertEqual(mutated_entity1.key, key1._key)

        mutated_entity2 = _mutated_pb(self, batch2.mutations, 'upsert')
        self.assertEqual(mutated_entity2.key, key2._key)

        # Inner batch commits first.
        self.assertEqual(connection._committed,
                         [(_PROJECT, batch2._commit_request, None),
                          (_PROJECT, batch1._commit_request, None)])

    def test_as_context_mgr_w_error(self):
        # An exception inside the context must pop the batch but NOT
        # commit its mutations.
        _PROJECT = 'PROJECT'
        _PROPERTIES = {'foo': 'bar'}
        connection = _Connection()
        entity = _Entity(_PROPERTIES)
        key = entity.key = _Key(_PROJECT)

        client = _Client(_PROJECT, connection)
        self.assertEqual(list(client._batches), [])

        try:
            with self._makeOne(client) as batch:
                self.assertEqual(list(client._batches), [batch])
                batch.put(entity)
                raise ValueError("testing")
        except ValueError:
            pass

        self.assertEqual(list(client._batches), [])

        mutated_entity = _mutated_pb(self, batch.mutations, 'upsert')
        self.assertEqual(mutated_entity.key, key._key)
        self.assertEqual(connection._committed, [])


class _PathElementPB(object):
    # Stand-in for a key path element protobuf: only ``id`` is used.

    def __init__(self, id_):
        self.id = id_


class _KeyPB(object):
    # Stand-in for a key protobuf holding a single path element.

    def __init__(self, id_):
        self.path = [_PathElementPB(id_)]


class _Connection(object):
    # Fake datastore connection recording commit calls and returning
    # pre-canned completed keys.
    _marker = object()
    _save_result = (False, None)

    def __init__(self, *new_keys):
        self._completed_keys = [_KeyPB(key) for key in new_keys]
        self._committed = []
        self._index_updates = 0

    def commit(self, project, commit_request, transaction_id):
        self._committed.append((project, commit_request, transaction_id))
        return self._index_updates, self._completed_keys


class _Entity(dict):
    # Minimal entity double: a dict with the attributes Batch reads.
    key = None
    exclude_from_indexes = ()
    _meanings = {}


class _Key(object):
    # Minimal key double; ``_id is None`` marks it as partial.
    _MARKER = object()
    _kind = 'KIND'
    _key = 'KEY'
    _path = None
    _id = 1234
    _stored = None

    def __init__(self, project):
        self.project = project

    @property
    def is_partial(self):
        return self._id is None

    def to_protobuf(self):
        from gcloud.datastore._generated import entity_pb2
        key = self._key = entity_pb2.Key()
        # Don't assign it, because it will just get ripped out
        # key.partition_id.project_id = self.project

        element = key.path.add()
        element.kind = self._kind
        if self._id is not None:
            element.id = self._id

        return key

    def completed_key(self, new_id):
        assert self.is_partial
        new_key = self.__class__(self.project)
        new_key._id = new_id
        return new_key


class _Client(object):
    # Fake client exposing the batch stack Batch manipulates.

    def __init__(self, project, connection, namespace=None):
        self.project = project
        self.connection = connection
        self.namespace = namespace
        self._batches = []

    def _push_batch(self, batch):
        self._batches.insert(0, batch)

    def _pop_batch(self):
        return self._batches.pop(0)

    @property
    def current_batch(self):
        if self._batches:
            return self._batches[0]


def _assert_num_mutations(test_case, mutation_pb_list, num_mutations):
    # Helper: assert the batch holds exactly ``num_mutations`` mutations.
    test_case.assertEqual(len(mutation_pb_list), num_mutations)


def _mutated_pb(test_case, mutation_pb_list, mutation_type):
    # Make sure there is only one mutation.
    _assert_num_mutations(test_case, mutation_pb_list, 1)

    # We grab the only mutation.
    mutated_pb = mutation_pb_list[0]
    # Then check if it is the correct type.
    test_case.assertEqual(mutated_pb.WhichOneof('operation'),
                          mutation_type)

    return getattr(mutated_pb, mutation_type)
+ +import unittest2 + + +def _make_entity_pb(project, kind, integer_id, name=None, str_val=None): + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore.helpers import _new_value_pb + + entity_pb = entity_pb2.Entity() + entity_pb.key.partition_id.project_id = project + path_element = entity_pb.key.path.add() + path_element.kind = kind + path_element.id = integer_id + if name is not None and str_val is not None: + value_pb = _new_value_pb(entity_pb, name) + value_pb.string_value = str_val + + return entity_pb + + +class Test__get_gcd_project(unittest2.TestCase): + + def _callFUT(self): + from gcloud.datastore.client import _get_gcd_project + return _get_gcd_project() + + def test_no_value(self): + import os + from gcloud._testing import _Monkey + + environ = {} + with _Monkey(os, getenv=environ.get): + project = self._callFUT() + self.assertEqual(project, None) + + def test_value_set(self): + import os + from gcloud._testing import _Monkey + from gcloud.datastore.client import GCD_DATASET + + MOCK_PROJECT = object() + environ = {GCD_DATASET: MOCK_PROJECT} + with _Monkey(os, getenv=environ.get): + project = self._callFUT() + self.assertEqual(project, MOCK_PROJECT) + + +class Test__determine_default_project(unittest2.TestCase): + + def _callFUT(self, project=None): + from gcloud.datastore.client import ( + _determine_default_project) + return _determine_default_project(project=project) + + def _determine_default_helper(self, gcd=None, fallback=None, + project_called=None): + from gcloud._testing import _Monkey + from gcloud.datastore import client + + _callers = [] + + def gcd_mock(): + _callers.append('gcd_mock') + return gcd + + def fallback_mock(project=None): + _callers.append(('fallback_mock', project)) + return fallback + + patched_methods = { + '_get_gcd_project': gcd_mock, + '_base_default_project': fallback_mock, + } + + with _Monkey(client, **patched_methods): + returned_project = self._callFUT(project_called) + + return 
returned_project, _callers + + def test_no_value(self): + project, callers = self._determine_default_helper() + self.assertEqual(project, None) + self.assertEqual(callers, ['gcd_mock', ('fallback_mock', None)]) + + def test_explicit(self): + PROJECT = object() + project, callers = self._determine_default_helper( + project_called=PROJECT) + self.assertEqual(project, PROJECT) + self.assertEqual(callers, []) + + def test_gcd(self): + PROJECT = object() + project, callers = self._determine_default_helper(gcd=PROJECT) + self.assertEqual(project, PROJECT) + self.assertEqual(callers, ['gcd_mock']) + + def test_fallback(self): + PROJECT = object() + project, callers = self._determine_default_helper(fallback=PROJECT) + self.assertEqual(project, PROJECT) + self.assertEqual(callers, ['gcd_mock', ('fallback_mock', None)]) + + +class TestClient(unittest2.TestCase): + + PROJECT = 'PROJECT' + + def setUp(self): + KLASS = self._getTargetClass() + self.original_cnxn_class = KLASS._connection_class + KLASS._connection_class = _MockConnection + + def tearDown(self): + KLASS = self._getTargetClass() + KLASS._connection_class = self.original_cnxn_class + + def _getTargetClass(self): + from gcloud.datastore.client import Client + return Client + + def _makeOne(self, project=PROJECT, namespace=None, + credentials=None, http=None): + return self._getTargetClass()(project=project, + namespace=namespace, + credentials=credentials, + http=http) + + def test_ctor_w_project_no_environ(self): + from gcloud._testing import _Monkey + from gcloud.datastore import client as _MUT + + # Some environments (e.g. AppVeyor CI) run in GCE, so + # this test would fail artificially. 
+ with _Monkey(_MUT, _base_default_project=lambda project: None): + self.assertRaises(EnvironmentError, self._makeOne, None) + + def test_ctor_w_implicit_inputs(self): + from gcloud._testing import _Monkey + from gcloud.datastore import client as _MUT + from gcloud import client as _base_client + + OTHER = 'other' + creds = object() + default_called = [] + + def fallback_mock(project): + default_called.append(project) + return project or OTHER + + klass = self._getTargetClass() + with _Monkey(_MUT, + _determine_default_project=fallback_mock): + with _Monkey(_base_client, + get_credentials=lambda: creds): + client = klass() + self.assertEqual(client.project, OTHER) + self.assertEqual(client.namespace, None) + self.assertTrue(isinstance(client.connection, _MockConnection)) + self.assertTrue(client.connection.credentials is creds) + self.assertTrue(client.connection.http is None) + self.assertTrue(client.current_batch is None) + self.assertTrue(client.current_transaction is None) + self.assertEqual(default_called, [None]) + + def test_ctor_w_explicit_inputs(self): + OTHER = 'other' + NAMESPACE = 'namespace' + creds = object() + http = object() + client = self._makeOne(project=OTHER, + namespace=NAMESPACE, + credentials=creds, + http=http) + self.assertEqual(client.project, OTHER) + self.assertEqual(client.namespace, NAMESPACE) + self.assertTrue(isinstance(client.connection, _MockConnection)) + self.assertTrue(client.connection.credentials is creds) + self.assertTrue(client.connection.http is http) + self.assertTrue(client.current_batch is None) + self.assertEqual(list(client._batch_stack), []) + + def test__push_batch_and__pop_batch(self): + creds = object() + client = self._makeOne(credentials=creds) + batch = client.batch() + xact = client.transaction() + client._push_batch(batch) + self.assertEqual(list(client._batch_stack), [batch]) + self.assertTrue(client.current_batch is batch) + self.assertTrue(client.current_transaction is None) + client._push_batch(xact) + 
self.assertTrue(client.current_batch is xact) + self.assertTrue(client.current_transaction is xact) + # list(_LocalStack) returns in reverse order. + self.assertEqual(list(client._batch_stack), [xact, batch]) + self.assertTrue(client._pop_batch() is xact) + self.assertEqual(list(client._batch_stack), [batch]) + self.assertTrue(client._pop_batch() is batch) + self.assertEqual(list(client._batch_stack), []) + + def test_get_miss(self): + _called_with = [] + + def _get_multi(*args, **kw): + _called_with.append((args, kw)) + return [] + + creds = object() + client = self._makeOne(credentials=creds) + client.get_multi = _get_multi + + key = object() + + self.assertTrue(client.get(key) is None) + + self.assertEqual(_called_with[0][0], ()) + self.assertEqual(_called_with[0][1]['keys'], [key]) + self.assertTrue(_called_with[0][1]['missing'] is None) + self.assertTrue(_called_with[0][1]['deferred'] is None) + self.assertTrue(_called_with[0][1]['transaction'] is None) + + def test_get_hit(self): + TXN_ID = '123' + _called_with = [] + _entity = object() + + def _get_multi(*args, **kw): + _called_with.append((args, kw)) + return [_entity] + + creds = object() + client = self._makeOne(credentials=creds) + client.get_multi = _get_multi + + key, missing, deferred = object(), [], [] + + self.assertTrue(client.get(key, missing, deferred, TXN_ID) is _entity) + + self.assertEqual(_called_with[0][0], ()) + self.assertEqual(_called_with[0][1]['keys'], [key]) + self.assertTrue(_called_with[0][1]['missing'] is missing) + self.assertTrue(_called_with[0][1]['deferred'] is deferred) + self.assertEqual(_called_with[0][1]['transaction'], TXN_ID) + + def test_get_multi_no_keys(self): + creds = object() + client = self._makeOne(credentials=creds) + results = client.get_multi([]) + self.assertEqual(results, []) + + def test_get_multi_miss(self): + from gcloud.datastore.key import Key + + creds = object() + client = self._makeOne(credentials=creds) + client.connection._add_lookup_result() + key = 
Key('Kind', 1234, project=self.PROJECT) + results = client.get_multi([key]) + self.assertEqual(results, []) + + def test_get_multi_miss_w_missing(self): + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore.key import Key + + KIND = 'Kind' + ID = 1234 + + # Make a missing entity pb to be returned from mock backend. + missed = entity_pb2.Entity() + missed.key.partition_id.project_id = self.PROJECT + path_element = missed.key.path.add() + path_element.kind = KIND + path_element.id = ID + + creds = object() + client = self._makeOne(credentials=creds) + # Set missing entity on mock connection. + client.connection._add_lookup_result(missing=[missed]) + + key = Key(KIND, ID, project=self.PROJECT) + missing = [] + entities = client.get_multi([key], missing=missing) + self.assertEqual(entities, []) + self.assertEqual([missed.key.to_protobuf() for missed in missing], + [key.to_protobuf()]) + + def test_get_multi_w_missing_non_empty(self): + from gcloud.datastore.key import Key + + creds = object() + client = self._makeOne(credentials=creds) + key = Key('Kind', 1234, project=self.PROJECT) + + missing = ['this', 'list', 'is', 'not', 'empty'] + self.assertRaises(ValueError, client.get_multi, + [key], missing=missing) + + def test_get_multi_w_deferred_non_empty(self): + from gcloud.datastore.key import Key + + creds = object() + client = self._makeOne(credentials=creds) + key = Key('Kind', 1234, project=self.PROJECT) + + deferred = ['this', 'list', 'is', 'not', 'empty'] + self.assertRaises(ValueError, client.get_multi, + [key], deferred=deferred) + + def test_get_multi_miss_w_deferred(self): + from gcloud.datastore.key import Key + + key = Key('Kind', 1234, project=self.PROJECT) + + # Set deferred entity on mock connection. 
+ creds = object() + client = self._makeOne(credentials=creds) + client.connection._add_lookup_result(deferred=[key.to_protobuf()]) + + deferred = [] + entities = client.get_multi([key], deferred=deferred) + self.assertEqual(entities, []) + self.assertEqual([def_key.to_protobuf() for def_key in deferred], + [key.to_protobuf()]) + + def test_get_multi_w_deferred_from_backend_but_not_passed(self): + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore.entity import Entity + from gcloud.datastore.key import Key + + key1 = Key('Kind', project=self.PROJECT) + key1_pb = key1.to_protobuf() + key2 = Key('Kind', 2345, project=self.PROJECT) + key2_pb = key2.to_protobuf() + + entity1_pb = entity_pb2.Entity() + entity1_pb.key.CopyFrom(key1_pb) + entity2_pb = entity_pb2.Entity() + entity2_pb.key.CopyFrom(key2_pb) + + creds = object() + client = self._makeOne(credentials=creds) + # mock up two separate requests + client.connection._add_lookup_result([entity1_pb], deferred=[key2_pb]) + client.connection._add_lookup_result([entity2_pb]) + + missing = [] + found = client.get_multi([key1, key2], missing=missing) + self.assertEqual(len(found), 2) + self.assertEqual(len(missing), 0) + + # Check the actual contents on the response. 
+ self.assertTrue(isinstance(found[0], Entity)) + self.assertEqual(found[0].key.path, key1.path) + self.assertEqual(found[0].key.project, key1.project) + + self.assertTrue(isinstance(found[1], Entity)) + self.assertEqual(found[1].key.path, key2.path) + self.assertEqual(found[1].key.project, key2.project) + + cw = client.connection._lookup_cw + self.assertEqual(len(cw), 2) + + ds_id, k_pbs, eventual, tid = cw[0] + self.assertEqual(ds_id, self.PROJECT) + self.assertEqual(len(k_pbs), 2) + self.assertEqual(key1_pb, k_pbs[0]) + self.assertEqual(key2_pb, k_pbs[1]) + self.assertFalse(eventual) + self.assertTrue(tid is None) + + ds_id, k_pbs, eventual, tid = cw[1] + self.assertEqual(ds_id, self.PROJECT) + self.assertEqual(len(k_pbs), 1) + self.assertEqual(key2_pb, k_pbs[0]) + self.assertFalse(eventual) + self.assertTrue(tid is None) + + def test_get_multi_hit(self): + from gcloud.datastore.key import Key + + KIND = 'Kind' + ID = 1234 + PATH = [{'kind': KIND, 'id': ID}] + + # Make a found entity pb to be returned from mock backend. + entity_pb = _make_entity_pb(self.PROJECT, KIND, ID, 'foo', 'Foo') + + # Make a connection to return the entity pb. + creds = object() + client = self._makeOne(credentials=creds) + client.connection._add_lookup_result([entity_pb]) + + key = Key(KIND, ID, project=self.PROJECT) + result, = client.get_multi([key]) + new_key = result.key + + # Check the returned value is as expected. + self.assertFalse(new_key is key) + self.assertEqual(new_key.project, self.PROJECT) + self.assertEqual(new_key.path, PATH) + self.assertEqual(list(result), ['foo']) + self.assertEqual(result['foo'], 'Foo') + + def test_get_multi_hit_w_transaction(self): + from gcloud.datastore.key import Key + + TXN_ID = '123' + KIND = 'Kind' + ID = 1234 + PATH = [{'kind': KIND, 'id': ID}] + + # Make a found entity pb to be returned from mock backend. + entity_pb = _make_entity_pb(self.PROJECT, KIND, ID, 'foo', 'Foo') + + # Make a connection to return the entity pb. 
+ creds = object() + client = self._makeOne(credentials=creds) + client.connection._add_lookup_result([entity_pb]) + + key = Key(KIND, ID, project=self.PROJECT) + txn = client.transaction() + txn._id = TXN_ID + result, = client.get_multi([key], transaction=txn) + new_key = result.key + + # Check the returned value is as expected. + self.assertFalse(new_key is key) + self.assertEqual(new_key.project, self.PROJECT) + self.assertEqual(new_key.path, PATH) + self.assertEqual(list(result), ['foo']) + self.assertEqual(result['foo'], 'Foo') + + cw = client.connection._lookup_cw + self.assertEqual(len(cw), 1) + _, _, _, transaction_id = cw[0] + self.assertEqual(transaction_id, TXN_ID) + + def test_get_multi_hit_multiple_keys_same_project(self): + from gcloud.datastore.key import Key + + KIND = 'Kind' + ID1 = 1234 + ID2 = 2345 + + # Make a found entity pb to be returned from mock backend. + entity_pb1 = _make_entity_pb(self.PROJECT, KIND, ID1) + entity_pb2 = _make_entity_pb(self.PROJECT, KIND, ID2) + + # Make a connection to return the entity pbs. + creds = object() + client = self._makeOne(credentials=creds) + client.connection._add_lookup_result([entity_pb1, entity_pb2]) + + key1 = Key(KIND, ID1, project=self.PROJECT) + key2 = Key(KIND, ID2, project=self.PROJECT) + retrieved1, retrieved2 = client.get_multi([key1, key2]) + + # Check values match. + self.assertEqual(retrieved1.key.path, key1.path) + self.assertEqual(dict(retrieved1), {}) + self.assertEqual(retrieved2.key.path, key2.path) + self.assertEqual(dict(retrieved2), {}) + + def test_get_multi_hit_multiple_keys_different_project(self): + from gcloud.datastore.key import Key + + PROJECT1 = 'PROJECT' + PROJECT2 = 'PROJECT-ALT' + + # Make sure our IDs are actually different. 
+ self.assertNotEqual(PROJECT1, PROJECT2) + + key1 = Key('KIND', 1234, project=PROJECT1) + key2 = Key('KIND', 1234, project=PROJECT2) + + creds = object() + client = self._makeOne(credentials=creds) + + with self.assertRaises(ValueError): + client.get_multi([key1, key2]) + + def test_get_multi_max_loops(self): + from gcloud._testing import _Monkey + from gcloud.datastore import client as _MUT + from gcloud.datastore.key import Key + + KIND = 'Kind' + ID = 1234 + + # Make a found entity pb to be returned from mock backend. + entity_pb = _make_entity_pb(self.PROJECT, KIND, ID, 'foo', 'Foo') + + # Make a connection to return the entity pb. + creds = object() + client = self._makeOne(credentials=creds) + client.connection._add_lookup_result([entity_pb]) + + key = Key(KIND, ID, project=self.PROJECT) + deferred = [] + missing = [] + with _Monkey(_MUT, _MAX_LOOPS=-1): + result = client.get_multi([key], missing=missing, + deferred=deferred) + + # Make sure we have no results, even though the connection has been + # set up as in `test_hit` to return a single result. 
+ self.assertEqual(result, []) + self.assertEqual(missing, []) + self.assertEqual(deferred, []) + + def test_put(self): + _called_with = [] + + def _put_multi(*args, **kw): + _called_with.append((args, kw)) + + creds = object() + client = self._makeOne(credentials=creds) + client.put_multi = _put_multi + entity = object() + + client.put(entity) + + self.assertEqual(_called_with[0][0], ()) + self.assertEqual(_called_with[0][1]['entities'], [entity]) + + def test_put_multi_no_entities(self): + creds = object() + client = self._makeOne(credentials=creds) + self.assertEqual(client.put_multi([]), None) + + def test_put_multi_w_single_empty_entity(self): + # https://github.com/GoogleCloudPlatform/gcloud-python/issues/649 + from gcloud.datastore.entity import Entity + + creds = object() + client = self._makeOne(credentials=creds) + self.assertRaises(ValueError, client.put_multi, Entity()) + + def test_put_multi_no_batch_w_partial_key(self): + from gcloud.datastore.helpers import _property_tuples + from gcloud.datastore.test_batch import _Entity + from gcloud.datastore.test_batch import _Key + from gcloud.datastore.test_batch import _KeyPB + from gcloud.datastore.test_batch import _mutated_pb + + entity = _Entity(foo=u'bar') + key = entity.key = _Key(self.PROJECT) + key._id = None + + creds = object() + client = self._makeOne(credentials=creds) + client.connection._commit.append([_KeyPB(key)]) + + result = client.put_multi([entity]) + self.assertTrue(result is None) + + self.assertEqual(len(client.connection._commit_cw), 1) + (project, + commit_req, transaction_id) = client.connection._commit_cw[0] + self.assertEqual(project, self.PROJECT) + + mutated_entity = _mutated_pb(self, commit_req.mutations, 'insert') + self.assertEqual(mutated_entity.key, key.to_protobuf()) + + prop_list = list(_property_tuples(mutated_entity)) + self.assertTrue(len(prop_list), 1) + name, value_pb = prop_list[0] + self.assertEqual(name, 'foo') + self.assertEqual(value_pb.string_value, u'bar') + + 
self.assertTrue(transaction_id is None) + + def test_put_multi_existing_batch_w_completed_key(self): + from gcloud.datastore.helpers import _property_tuples + from gcloud.datastore.test_batch import _Entity + from gcloud.datastore.test_batch import _Key + from gcloud.datastore.test_batch import _mutated_pb + + creds = object() + client = self._makeOne(credentials=creds) + entity = _Entity(foo=u'bar') + key = entity.key = _Key(self.PROJECT) + + with _NoCommitBatch(client) as CURR_BATCH: + result = client.put_multi([entity]) + + self.assertEqual(result, None) + mutated_entity = _mutated_pb(self, CURR_BATCH.mutations, 'upsert') + self.assertEqual(mutated_entity.key, key.to_protobuf()) + + prop_list = list(_property_tuples(mutated_entity)) + self.assertTrue(len(prop_list), 1) + name, value_pb = prop_list[0] + self.assertEqual(name, 'foo') + self.assertEqual(value_pb.string_value, u'bar') + + def test_delete(self): + _called_with = [] + + def _delete_multi(*args, **kw): + _called_with.append((args, kw)) + + creds = object() + client = self._makeOne(credentials=creds) + client.delete_multi = _delete_multi + key = object() + + client.delete(key) + + self.assertEqual(_called_with[0][0], ()) + self.assertEqual(_called_with[0][1]['keys'], [key]) + + def test_delete_multi_no_keys(self): + creds = object() + client = self._makeOne(credentials=creds) + result = client.delete_multi([]) + self.assertEqual(result, None) + self.assertEqual(len(client.connection._commit_cw), 0) + + def test_delete_multi_no_batch(self): + from gcloud.datastore.test_batch import _Key + from gcloud.datastore.test_batch import _mutated_pb + + key = _Key(self.PROJECT) + + creds = object() + client = self._makeOne(credentials=creds) + client.connection._commit.append([]) + + result = client.delete_multi([key]) + self.assertEqual(result, None) + self.assertEqual(len(client.connection._commit_cw), 1) + (project, + commit_req, transaction_id) = client.connection._commit_cw[0] + self.assertEqual(project, 
self.PROJECT) + + mutated_key = _mutated_pb(self, commit_req.mutations, 'delete') + self.assertEqual(mutated_key, key.to_protobuf()) + self.assertTrue(transaction_id is None) + + def test_delete_multi_w_existing_batch(self): + from gcloud.datastore.test_batch import _Key + from gcloud.datastore.test_batch import _mutated_pb + + creds = object() + client = self._makeOne(credentials=creds) + key = _Key(self.PROJECT) + + with _NoCommitBatch(client) as CURR_BATCH: + result = client.delete_multi([key]) + + self.assertEqual(result, None) + mutated_key = _mutated_pb(self, CURR_BATCH.mutations, 'delete') + self.assertEqual(mutated_key, key._key) + self.assertEqual(len(client.connection._commit_cw), 0) + + def test_delete_multi_w_existing_transaction(self): + from gcloud.datastore.test_batch import _Key + from gcloud.datastore.test_batch import _mutated_pb + + creds = object() + client = self._makeOne(credentials=creds) + key = _Key(self.PROJECT) + + with _NoCommitTransaction(client) as CURR_XACT: + result = client.delete_multi([key]) + + self.assertEqual(result, None) + mutated_key = _mutated_pb(self, CURR_XACT.mutations, 'delete') + self.assertEqual(mutated_key, key._key) + self.assertEqual(len(client.connection._commit_cw), 0) + + def test_allocate_ids_w_partial_key(self): + from gcloud.datastore.test_batch import _Key + + NUM_IDS = 2 + + INCOMPLETE_KEY = _Key(self.PROJECT) + INCOMPLETE_KEY._id = None + + creds = object() + client = self._makeOne(credentials=creds) + + result = client.allocate_ids(INCOMPLETE_KEY, NUM_IDS) + + # Check the IDs returned. 
+ self.assertEqual([key._id for key in result], list(range(NUM_IDS))) + + def test_allocate_ids_with_completed_key(self): + from gcloud.datastore.test_batch import _Key + + creds = object() + client = self._makeOne(credentials=creds) + + COMPLETE_KEY = _Key(self.PROJECT) + self.assertRaises(ValueError, client.allocate_ids, COMPLETE_KEY, 2) + + def test_key_w_project(self): + KIND = 'KIND' + ID = 1234 + + creds = object() + client = self._makeOne(credentials=creds) + + self.assertRaises(TypeError, + client.key, KIND, ID, project=self.PROJECT) + + def test_key_wo_project(self): + from gcloud.datastore import client as MUT + from gcloud._testing import _Monkey + + KIND = 'KIND' + ID = 1234 + + creds = object() + client = self._makeOne(credentials=creds) + + with _Monkey(MUT, Key=_Dummy): + key = client.key(KIND, ID) + + self.assertTrue(isinstance(key, _Dummy)) + self.assertEqual(key.args, (KIND, ID)) + expected_kwargs = { + 'project': self.PROJECT, + 'namespace': None, + } + self.assertEqual(key.kwargs, expected_kwargs) + + def test_key_w_namespace(self): + from gcloud.datastore import client as MUT + from gcloud._testing import _Monkey + + KIND = 'KIND' + ID = 1234 + NAMESPACE = object() + + creds = object() + client = self._makeOne(namespace=NAMESPACE, credentials=creds) + + with _Monkey(MUT, Key=_Dummy): + key = client.key(KIND, ID) + + self.assertTrue(isinstance(key, _Dummy)) + expected_kwargs = { + 'project': self.PROJECT, + 'namespace': NAMESPACE, + } + self.assertEqual(key.kwargs, expected_kwargs) + + def test_key_w_namespace_collision(self): + from gcloud.datastore import client as MUT + from gcloud._testing import _Monkey + + KIND = 'KIND' + ID = 1234 + NAMESPACE1 = object() + NAMESPACE2 = object() + + creds = object() + client = self._makeOne(namespace=NAMESPACE1, credentials=creds) + + with _Monkey(MUT, Key=_Dummy): + key = client.key(KIND, ID, namespace=NAMESPACE2) + + self.assertTrue(isinstance(key, _Dummy)) + expected_kwargs = { + 'project': 
self.PROJECT, + 'namespace': NAMESPACE2, + } + self.assertEqual(key.kwargs, expected_kwargs) + + def test_batch(self): + from gcloud.datastore import client as MUT + from gcloud._testing import _Monkey + + creds = object() + client = self._makeOne(credentials=creds) + + with _Monkey(MUT, Batch=_Dummy): + batch = client.batch() + + self.assertTrue(isinstance(batch, _Dummy)) + self.assertEqual(batch.args, (client,)) + self.assertEqual(batch.kwargs, {}) + + def test_transaction_defaults(self): + from gcloud.datastore import client as MUT + from gcloud._testing import _Monkey + + creds = object() + client = self._makeOne(credentials=creds) + + with _Monkey(MUT, Transaction=_Dummy): + xact = client.transaction() + + self.assertTrue(isinstance(xact, _Dummy)) + self.assertEqual(xact.args, (client,)) + self.assertEqual(xact.kwargs, {}) + + def test_query_w_client(self): + KIND = 'KIND' + + creds = object() + client = self._makeOne(credentials=creds) + other = self._makeOne(credentials=object()) + + self.assertRaises(TypeError, client.query, kind=KIND, client=other) + + def test_query_w_project(self): + KIND = 'KIND' + + creds = object() + client = self._makeOne(credentials=creds) + + self.assertRaises(TypeError, + client.query, kind=KIND, project=self.PROJECT) + + def test_query_w_defaults(self): + from gcloud.datastore import client as MUT + from gcloud._testing import _Monkey + + creds = object() + client = self._makeOne(credentials=creds) + + with _Monkey(MUT, Query=_Dummy): + query = client.query() + + self.assertTrue(isinstance(query, _Dummy)) + self.assertEqual(query.args, (client,)) + expected_kwargs = { + 'project': self.PROJECT, + 'namespace': None, + } + self.assertEqual(query.kwargs, expected_kwargs) + + def test_query_explicit(self): + from gcloud.datastore import client as MUT + from gcloud._testing import _Monkey + + KIND = 'KIND' + NAMESPACE = 'NAMESPACE' + ANCESTOR = object() + FILTERS = [('PROPERTY', '==', 'VALUE')] + PROJECTION = ['__key__'] + ORDER = 
['PROPERTY'] + DISTINCT_ON = ['DISTINCT_ON'] + + creds = object() + client = self._makeOne(credentials=creds) + + with _Monkey(MUT, Query=_Dummy): + query = client.query( + kind=KIND, + namespace=NAMESPACE, + ancestor=ANCESTOR, + filters=FILTERS, + projection=PROJECTION, + order=ORDER, + distinct_on=DISTINCT_ON, + ) + + self.assertTrue(isinstance(query, _Dummy)) + self.assertEqual(query.args, (client,)) + kwargs = { + 'project': self.PROJECT, + 'kind': KIND, + 'namespace': NAMESPACE, + 'ancestor': ANCESTOR, + 'filters': FILTERS, + 'projection': PROJECTION, + 'order': ORDER, + 'distinct_on': DISTINCT_ON, + } + self.assertEqual(query.kwargs, kwargs) + + def test_query_w_namespace(self): + from gcloud.datastore import client as MUT + from gcloud._testing import _Monkey + + KIND = 'KIND' + NAMESPACE = object() + + creds = object() + client = self._makeOne(namespace=NAMESPACE, credentials=creds) + + with _Monkey(MUT, Query=_Dummy): + query = client.query(kind=KIND) + + self.assertTrue(isinstance(query, _Dummy)) + self.assertEqual(query.args, (client,)) + expected_kwargs = { + 'project': self.PROJECT, + 'namespace': NAMESPACE, + 'kind': KIND, + } + self.assertEqual(query.kwargs, expected_kwargs) + + def test_query_w_namespace_collision(self): + from gcloud.datastore import client as MUT + from gcloud._testing import _Monkey + + KIND = 'KIND' + NAMESPACE1 = object() + NAMESPACE2 = object() + + creds = object() + client = self._makeOne(namespace=NAMESPACE1, credentials=creds) + + with _Monkey(MUT, Query=_Dummy): + query = client.query(kind=KIND, namespace=NAMESPACE2) + + self.assertTrue(isinstance(query, _Dummy)) + self.assertEqual(query.args, (client,)) + expected_kwargs = { + 'project': self.PROJECT, + 'namespace': NAMESPACE2, + 'kind': KIND, + } + self.assertEqual(query.kwargs, expected_kwargs) + + +class _Dummy(object): + + def __init__(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + + +class _MockConnection(object): + + def __init__(self, 
credentials=None, http=None): + self.credentials = credentials + self.http = http + self._lookup_cw = [] + self._lookup = [] + self._commit_cw = [] + self._commit = [] + self._alloc_cw = [] + self._alloc = [] + self._index_updates = 0 + + def _add_lookup_result(self, results=(), missing=(), deferred=()): + self._lookup.append((list(results), list(missing), list(deferred))) + + def lookup(self, project, key_pbs, eventual=False, transaction_id=None): + self._lookup_cw.append((project, key_pbs, eventual, transaction_id)) + triple, self._lookup = self._lookup[0], self._lookup[1:] + results, missing, deferred = triple + return results, missing, deferred + + def commit(self, project, commit_request, transaction_id): + self._commit_cw.append((project, commit_request, transaction_id)) + response, self._commit = self._commit[0], self._commit[1:] + return self._index_updates, response + + def allocate_ids(self, project, key_pbs): + from gcloud.datastore.test_connection import _KeyProto + self._alloc_cw.append((project, key_pbs)) + num_pbs = len(key_pbs) + return [_KeyProto(i) for i in list(range(num_pbs))] + + +class _NoCommitBatch(object): + + def __init__(self, client): + from gcloud.datastore.batch import Batch + self._client = client + self._batch = Batch(client) + + def __enter__(self): + self._client._push_batch(self._batch) + return self._batch + + def __exit__(self, *args): + self._client._pop_batch() + + +class _NoCommitTransaction(object): + + def __init__(self, client, transaction_id='TRANSACTION'): + from gcloud.datastore.transaction import Transaction + self._client = client + xact = self._transaction = Transaction(client) + xact._id = transaction_id + + def __enter__(self): + self._client._push_batch(self._transaction) + return self._transaction + + def __exit__(self, *args): + self._client._pop_batch() diff --git a/env/Lib/site-packages/gcloud/datastore/test_connection.py b/env/Lib/site-packages/gcloud/datastore/test_connection.py new file mode 100644 index 
0000000..513c0cb --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/test_connection.py @@ -0,0 +1,873 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class TestConnection(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.datastore.connection import Connection + + return Connection + + def _make_key_pb(self, project, id_=1234): + from gcloud.datastore.key import Key + path_args = ('Kind',) + if id_ is not None: + path_args += (id_,) + return Key(*path_args, project=project).to_protobuf() + + def _make_query_pb(self, kind): + from gcloud.datastore._generated import query_pb2 + pb = query_pb2.Query() + pb.kind.add().name = kind + return pb + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def _verifyProtobufCall(self, called_with, URI, conn): + self.assertEqual(called_with['uri'], URI) + self.assertEqual(called_with['method'], 'POST') + self.assertEqual(called_with['headers']['Content-Type'], + 'application/x-protobuf') + self.assertEqual(called_with['headers']['User-Agent'], + conn.USER_AGENT) + + def test_default_url(self): + klass = self._getTargetClass() + conn = self._makeOne() + self.assertEqual(conn.api_base_url, klass.API_BASE_URL) + + def test_custom_url_from_env(self): + import os + from gcloud._testing import _Monkey + from gcloud.connection import API_BASE_URL + from gcloud.environment_vars import GCD_HOST + + HOST = 
'CURR_HOST' + fake_environ = {GCD_HOST: HOST} + + with _Monkey(os, environ=fake_environ): + conn = self._makeOne() + + self.assertNotEqual(conn.api_base_url, API_BASE_URL) + self.assertEqual(conn.api_base_url, HOST + '/datastore') + + def test_custom_url_from_constructor(self): + from gcloud.connection import API_BASE_URL + + HOST = object() + conn = self._makeOne(api_base_url=HOST) + self.assertNotEqual(conn.api_base_url, API_BASE_URL) + self.assertEqual(conn.api_base_url, HOST) + + def test_custom_url_constructor_and_env(self): + import os + from gcloud._testing import _Monkey + from gcloud.connection import API_BASE_URL + from gcloud.environment_vars import GCD_HOST + + HOST1 = object() + HOST2 = object() + fake_environ = {GCD_HOST: HOST1} + + with _Monkey(os, environ=fake_environ): + conn = self._makeOne(api_base_url=HOST2) + + self.assertNotEqual(conn.api_base_url, API_BASE_URL) + self.assertNotEqual(conn.api_base_url, HOST1) + self.assertEqual(conn.api_base_url, HOST2) + + def test_ctor_defaults(self): + conn = self._makeOne() + self.assertEqual(conn.credentials, None) + + def test_ctor_explicit(self): + class Creds(object): + + def create_scoped_required(self): + return False + + creds = Creds() + conn = self._makeOne(creds) + self.assertTrue(conn.credentials is creds) + + def test_http_w_existing(self): + conn = self._makeOne() + conn._http = http = object() + self.assertTrue(conn.http is http) + + def test_http_wo_creds(self): + import httplib2 + + conn = self._makeOne() + self.assertTrue(isinstance(conn.http, httplib2.Http)) + + def test_http_w_creds(self): + import httplib2 + + authorized = object() + + class Creds(object): + + def authorize(self, http): + self._called_with = http + return authorized + + def create_scoped_required(self): + return False + + creds = Creds() + conn = self._makeOne(creds) + self.assertTrue(conn.http is authorized) + self.assertTrue(isinstance(creds._called_with, httplib2.Http)) + + def test__request_w_200(self): + PROJECT = 
'PROJECT' + METHOD = 'METHOD' + DATA = b'DATA' + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':' + METHOD, + ]) + http = conn._http = Http({'status': '200'}, 'CONTENT') + self.assertEqual(conn._request(PROJECT, METHOD, DATA), 'CONTENT') + self._verifyProtobufCall(http._called_with, URI, conn) + self.assertEqual(http._called_with['body'], DATA) + + def test__request_not_200(self): + from gcloud.exceptions import BadRequest + from google.rpc import status_pb2 + + error = status_pb2.Status() + error.message = 'Entity value is indexed.' + error.code = 9 # FAILED_PRECONDITION + + PROJECT = 'PROJECT' + METHOD = 'METHOD' + DATA = 'DATA' + conn = self._makeOne() + conn._http = Http({'status': '400'}, error.SerializeToString()) + with self.assertRaises(BadRequest) as e: + conn._request(PROJECT, METHOD, DATA) + expected_message = '400 Entity value is indexed.' + self.assertEqual(str(e.exception), expected_message) + + def test__rpc(self): + + class ReqPB(object): + + def SerializeToString(self): + return REQPB + + class RspPB(object): + + def __init__(self, pb): + self._pb = pb + + @classmethod + def FromString(cls, pb): + return cls(pb) + + REQPB = b'REQPB' + PROJECT = 'PROJECT' + METHOD = 'METHOD' + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':' + METHOD, + ]) + http = conn._http = Http({'status': '200'}, 'CONTENT') + response = conn._rpc(PROJECT, METHOD, ReqPB(), RspPB) + self.assertTrue(isinstance(response, RspPB)) + self.assertEqual(response._pb, 'CONTENT') + self._verifyProtobufCall(http._called_with, URI, conn) + self.assertEqual(http._called_with['body'], REQPB) + + def test_build_api_url_w_default_base_version(self): + PROJECT = 'PROJECT' + METHOD = 'METHOD' + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':' + METHOD, + ]) + self.assertEqual(conn.build_api_url(PROJECT, 
METHOD), URI) + + def test_build_api_url_w_explicit_base_version(self): + BASE = 'http://example.com/' + VER = '3.1415926' + PROJECT = 'PROJECT' + METHOD = 'METHOD' + conn = self._makeOne() + URI = '/'.join([ + BASE, + VER, + 'projects', + PROJECT + ':' + METHOD, + ]) + self.assertEqual(conn.build_api_url(PROJECT, METHOD, BASE, VER), + URI) + + def test_lookup_single_key_empty_response(self): + from gcloud.datastore._generated import datastore_pb2 + + PROJECT = 'PROJECT' + key_pb = self._make_key_pb(PROJECT) + rsp_pb = datastore_pb2.LookupResponse() + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':lookup', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + found, missing, deferred = conn.lookup(PROJECT, [key_pb]) + self.assertEqual(len(found), 0) + self.assertEqual(len(missing), 0) + self.assertEqual(len(deferred), 0) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.LookupRequest + request = rq_class() + request.ParseFromString(cw['body']) + keys = list(request.keys) + self.assertEqual(len(keys), 1) + self.assertEqual(key_pb, keys[0]) + + def test_lookup_single_key_empty_response_w_eventual(self): + from gcloud.datastore._generated import datastore_pb2 + + PROJECT = 'PROJECT' + key_pb = self._make_key_pb(PROJECT) + rsp_pb = datastore_pb2.LookupResponse() + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':lookup', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + found, missing, deferred = conn.lookup(PROJECT, [key_pb], + eventual=True) + self.assertEqual(len(found), 0) + self.assertEqual(len(missing), 0) + self.assertEqual(len(deferred), 0) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.LookupRequest + request = rq_class() + request.ParseFromString(cw['body']) + keys = list(request.keys) + 
self.assertEqual(len(keys), 1) + self.assertEqual(key_pb, keys[0]) + self.assertEqual(request.read_options.read_consistency, + datastore_pb2.ReadOptions.EVENTUAL) + self.assertEqual(request.read_options.transaction, b'') + + def test_lookup_single_key_empty_response_w_eventual_and_transaction(self): + PROJECT = 'PROJECT' + TRANSACTION = b'TRANSACTION' + key_pb = self._make_key_pb(PROJECT) + conn = self._makeOne() + self.assertRaises(ValueError, conn.lookup, PROJECT, key_pb, + eventual=True, transaction_id=TRANSACTION) + + def test_lookup_single_key_empty_response_w_transaction(self): + from gcloud.datastore._generated import datastore_pb2 + + PROJECT = 'PROJECT' + TRANSACTION = b'TRANSACTION' + key_pb = self._make_key_pb(PROJECT) + rsp_pb = datastore_pb2.LookupResponse() + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':lookup', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + found, missing, deferred = conn.lookup(PROJECT, [key_pb], + transaction_id=TRANSACTION) + self.assertEqual(len(found), 0) + self.assertEqual(len(missing), 0) + self.assertEqual(len(deferred), 0) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.LookupRequest + request = rq_class() + request.ParseFromString(cw['body']) + keys = list(request.keys) + self.assertEqual(len(keys), 1) + self.assertEqual(key_pb, keys[0]) + self.assertEqual(request.read_options.transaction, TRANSACTION) + + def test_lookup_single_key_nonempty_response(self): + from gcloud.datastore._generated import datastore_pb2 + from gcloud.datastore._generated import entity_pb2 + + PROJECT = 'PROJECT' + key_pb = self._make_key_pb(PROJECT) + rsp_pb = datastore_pb2.LookupResponse() + entity = entity_pb2.Entity() + entity.key.CopyFrom(key_pb) + rsp_pb.found.add(entity=entity) + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':lookup', + 
]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + (found,), missing, deferred = conn.lookup(PROJECT, [key_pb]) + self.assertEqual(len(missing), 0) + self.assertEqual(len(deferred), 0) + self.assertEqual(found.key.path[0].kind, 'Kind') + self.assertEqual(found.key.path[0].id, 1234) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.LookupRequest + request = rq_class() + request.ParseFromString(cw['body']) + keys = list(request.keys) + self.assertEqual(len(keys), 1) + self.assertEqual(key_pb, keys[0]) + + def test_lookup_multiple_keys_empty_response(self): + from gcloud.datastore._generated import datastore_pb2 + + PROJECT = 'PROJECT' + key_pb1 = self._make_key_pb(PROJECT) + key_pb2 = self._make_key_pb(PROJECT, id_=2345) + rsp_pb = datastore_pb2.LookupResponse() + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':lookup', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + found, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2]) + self.assertEqual(len(found), 0) + self.assertEqual(len(missing), 0) + self.assertEqual(len(deferred), 0) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.LookupRequest + request = rq_class() + request.ParseFromString(cw['body']) + keys = list(request.keys) + self.assertEqual(len(keys), 2) + self.assertEqual(key_pb1, keys[0]) + self.assertEqual(key_pb2, keys[1]) + + def test_lookup_multiple_keys_w_missing(self): + from gcloud.datastore._generated import datastore_pb2 + + PROJECT = 'PROJECT' + key_pb1 = self._make_key_pb(PROJECT) + key_pb2 = self._make_key_pb(PROJECT, id_=2345) + rsp_pb = datastore_pb2.LookupResponse() + er_1 = rsp_pb.missing.add() + er_1.entity.key.CopyFrom(key_pb1) + er_2 = rsp_pb.missing.add() + er_2.entity.key.CopyFrom(key_pb2) + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + 
conn.API_VERSION, + 'projects', + PROJECT + ':lookup', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + result, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2]) + self.assertEqual(result, []) + self.assertEqual(len(deferred), 0) + self.assertEqual([missed.key for missed in missing], + [key_pb1, key_pb2]) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.LookupRequest + request = rq_class() + request.ParseFromString(cw['body']) + keys = list(request.keys) + self.assertEqual(len(keys), 2) + self.assertEqual(key_pb1, keys[0]) + self.assertEqual(key_pb2, keys[1]) + + def test_lookup_multiple_keys_w_deferred(self): + from gcloud.datastore._generated import datastore_pb2 + + PROJECT = 'PROJECT' + key_pb1 = self._make_key_pb(PROJECT) + key_pb2 = self._make_key_pb(PROJECT, id_=2345) + rsp_pb = datastore_pb2.LookupResponse() + rsp_pb.deferred.add().CopyFrom(key_pb1) + rsp_pb.deferred.add().CopyFrom(key_pb2) + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':lookup', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + result, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2]) + self.assertEqual(result, []) + self.assertEqual(len(missing), 0) + self.assertEqual([def_key for def_key in deferred], [key_pb1, key_pb2]) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + self.assertEqual(cw['uri'], URI) + self.assertEqual(cw['method'], 'POST') + self.assertEqual(cw['headers']['Content-Type'], + 'application/x-protobuf') + self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT) + rq_class = datastore_pb2.LookupRequest + request = rq_class() + request.ParseFromString(cw['body']) + keys = list(request.keys) + self.assertEqual(len(keys), 2) + self.assertEqual(key_pb1, keys[0]) + self.assertEqual(key_pb2, keys[1]) + + def test_run_query_w_eventual_no_transaction(self): + from 
gcloud.datastore._generated import datastore_pb2 + from gcloud.datastore._generated import query_pb2 + + PROJECT = 'PROJECT' + KIND = 'Nonesuch' + CURSOR = b'\x00' + q_pb = self._make_query_pb(KIND) + rsp_pb = datastore_pb2.RunQueryResponse() + rsp_pb.batch.end_cursor = CURSOR + no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS + rsp_pb.batch.more_results = no_more + rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':runQuery', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + pbs, end, more, skipped = conn.run_query(PROJECT, q_pb, + eventual=True) + self.assertEqual(pbs, []) + self.assertEqual(end, CURSOR) + self.assertTrue(more) + self.assertEqual(skipped, 0) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.RunQueryRequest + request = rq_class() + request.ParseFromString(cw['body']) + self.assertEqual(request.partition_id.namespace_id, '') + self.assertEqual(request.query, q_pb) + self.assertEqual(request.read_options.read_consistency, + datastore_pb2.ReadOptions.EVENTUAL) + self.assertEqual(request.read_options.transaction, b'') + + def test_run_query_wo_eventual_w_transaction(self): + from gcloud.datastore._generated import datastore_pb2 + from gcloud.datastore._generated import query_pb2 + + PROJECT = 'PROJECT' + KIND = 'Nonesuch' + CURSOR = b'\x00' + TRANSACTION = b'TRANSACTION' + q_pb = self._make_query_pb(KIND) + rsp_pb = datastore_pb2.RunQueryResponse() + rsp_pb.batch.end_cursor = CURSOR + no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS + rsp_pb.batch.more_results = no_more + rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':runQuery', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + pbs, end, 
more, skipped = conn.run_query( + PROJECT, q_pb, transaction_id=TRANSACTION) + self.assertEqual(pbs, []) + self.assertEqual(end, CURSOR) + self.assertTrue(more) + self.assertEqual(skipped, 0) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.RunQueryRequest + request = rq_class() + request.ParseFromString(cw['body']) + self.assertEqual(request.partition_id.namespace_id, '') + self.assertEqual(request.query, q_pb) + self.assertEqual( + request.read_options.read_consistency, + datastore_pb2.ReadOptions.READ_CONSISTENCY_UNSPECIFIED) + self.assertEqual(request.read_options.transaction, TRANSACTION) + + def test_run_query_w_eventual_and_transaction(self): + from gcloud.datastore._generated import datastore_pb2 + from gcloud.datastore._generated import query_pb2 + + PROJECT = 'PROJECT' + KIND = 'Nonesuch' + CURSOR = b'\x00' + TRANSACTION = b'TRANSACTION' + q_pb = self._make_query_pb(KIND) + rsp_pb = datastore_pb2.RunQueryResponse() + rsp_pb.batch.end_cursor = CURSOR + no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS + rsp_pb.batch.more_results = no_more + rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL + conn = self._makeOne() + self.assertRaises(ValueError, conn.run_query, PROJECT, q_pb, + eventual=True, transaction_id=TRANSACTION) + + def test_run_query_wo_namespace_empty_result(self): + from gcloud.datastore._generated import datastore_pb2 + from gcloud.datastore._generated import query_pb2 + + PROJECT = 'PROJECT' + KIND = 'Nonesuch' + CURSOR = b'\x00' + q_pb = self._make_query_pb(KIND) + rsp_pb = datastore_pb2.RunQueryResponse() + rsp_pb.batch.end_cursor = CURSOR + no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS + rsp_pb.batch.more_results = no_more + rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':runQuery', + ]) + http = conn._http = Http({'status': '200'}, 
rsp_pb.SerializeToString()) + pbs, end, more, skipped = conn.run_query(PROJECT, q_pb) + self.assertEqual(pbs, []) + self.assertEqual(end, CURSOR) + self.assertTrue(more) + self.assertEqual(skipped, 0) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.RunQueryRequest + request = rq_class() + request.ParseFromString(cw['body']) + self.assertEqual(request.partition_id.namespace_id, '') + self.assertEqual(request.query, q_pb) + + def test_run_query_w_namespace_nonempty_result(self): + from gcloud.datastore._generated import datastore_pb2 + from gcloud.datastore._generated import entity_pb2 + + PROJECT = 'PROJECT' + KIND = 'Kind' + entity = entity_pb2.Entity() + q_pb = self._make_query_pb(KIND) + rsp_pb = datastore_pb2.RunQueryResponse() + rsp_pb.batch.entity_results.add(entity=entity) + rsp_pb.batch.entity_result_type = 1 # FULL + rsp_pb.batch.more_results = 3 # NO_MORE_RESULTS + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':runQuery', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + pbs = conn.run_query(PROJECT, q_pb, 'NS')[0] + self.assertEqual(len(pbs), 1) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.RunQueryRequest + request = rq_class() + request.ParseFromString(cw['body']) + self.assertEqual(request.partition_id.namespace_id, 'NS') + self.assertEqual(request.query, q_pb) + + def test_begin_transaction(self): + from gcloud.datastore._generated import datastore_pb2 + + PROJECT = 'PROJECT' + TRANSACTION = b'TRANSACTION' + rsp_pb = datastore_pb2.BeginTransactionResponse() + rsp_pb.transaction = TRANSACTION + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':beginTransaction', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + self.assertEqual(conn.begin_transaction(PROJECT), TRANSACTION) + 
cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.BeginTransactionRequest + request = rq_class() + request.ParseFromString(cw['body']) + + def test_commit_wo_transaction(self): + from gcloud._testing import _Monkey + from gcloud.datastore._generated import datastore_pb2 + from gcloud.datastore import connection as MUT + from gcloud.datastore.helpers import _new_value_pb + + PROJECT = 'PROJECT' + key_pb = self._make_key_pb(PROJECT) + rsp_pb = datastore_pb2.CommitResponse() + req_pb = datastore_pb2.CommitRequest() + mutation = req_pb.mutations.add() + insert = mutation.upsert + insert.key.CopyFrom(key_pb) + value_pb = _new_value_pb(insert, 'foo') + value_pb.string_value = u'Foo' + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':commit', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + + # Set up mock for parsing the response. + expected_result = object() + _parsed = [] + + def mock_parse(response): + _parsed.append(response) + return expected_result + + with _Monkey(MUT, _parse_commit_response=mock_parse): + result = conn.commit(PROJECT, req_pb, None) + + self.assertTrue(result is expected_result) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.CommitRequest + request = rq_class() + request.ParseFromString(cw['body']) + self.assertEqual(request.transaction, b'') + self.assertEqual(list(request.mutations), [mutation]) + self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL) + self.assertEqual(_parsed, [rsp_pb]) + + def test_commit_w_transaction(self): + from gcloud._testing import _Monkey + from gcloud.datastore._generated import datastore_pb2 + from gcloud.datastore import connection as MUT + from gcloud.datastore.helpers import _new_value_pb + + PROJECT = 'PROJECT' + key_pb = self._make_key_pb(PROJECT) + rsp_pb = datastore_pb2.CommitResponse() + req_pb = 
datastore_pb2.CommitRequest() + mutation = req_pb.mutations.add() + insert = mutation.upsert + insert.key.CopyFrom(key_pb) + value_pb = _new_value_pb(insert, 'foo') + value_pb.string_value = u'Foo' + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':commit', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + + # Set up mock for parsing the response. + expected_result = object() + _parsed = [] + + def mock_parse(response): + _parsed.append(response) + return expected_result + + with _Monkey(MUT, _parse_commit_response=mock_parse): + result = conn.commit(PROJECT, req_pb, b'xact') + + self.assertTrue(result is expected_result) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.CommitRequest + request = rq_class() + request.ParseFromString(cw['body']) + self.assertEqual(request.transaction, b'xact') + self.assertEqual(list(request.mutations), [mutation]) + self.assertEqual(request.mode, rq_class.TRANSACTIONAL) + self.assertEqual(_parsed, [rsp_pb]) + + def test_rollback_ok(self): + from gcloud.datastore._generated import datastore_pb2 + PROJECT = 'PROJECT' + TRANSACTION = b'xact' + + rsp_pb = datastore_pb2.RollbackResponse() + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':rollback', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + self.assertEqual(conn.rollback(PROJECT, TRANSACTION), None) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.RollbackRequest + request = rq_class() + request.ParseFromString(cw['body']) + self.assertEqual(request.transaction, TRANSACTION) + + def test_allocate_ids_empty(self): + from gcloud.datastore._generated import datastore_pb2 + + PROJECT = 'PROJECT' + rsp_pb = datastore_pb2.AllocateIdsResponse() + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + 
conn.API_VERSION, + 'projects', + PROJECT + ':allocateIds', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + self.assertEqual(conn.allocate_ids(PROJECT, []), []) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.AllocateIdsRequest + request = rq_class() + request.ParseFromString(cw['body']) + self.assertEqual(list(request.keys), []) + + def test_allocate_ids_non_empty(self): + from gcloud.datastore._generated import datastore_pb2 + + PROJECT = 'PROJECT' + before_key_pbs = [ + self._make_key_pb(PROJECT, id_=None), + self._make_key_pb(PROJECT, id_=None), + ] + after_key_pbs = [ + self._make_key_pb(PROJECT), + self._make_key_pb(PROJECT, id_=2345), + ] + rsp_pb = datastore_pb2.AllocateIdsResponse() + rsp_pb.keys.add().CopyFrom(after_key_pbs[0]) + rsp_pb.keys.add().CopyFrom(after_key_pbs[1]) + conn = self._makeOne() + URI = '/'.join([ + conn.api_base_url, + conn.API_VERSION, + 'projects', + PROJECT + ':allocateIds', + ]) + http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) + self.assertEqual(conn.allocate_ids(PROJECT, before_key_pbs), + after_key_pbs) + cw = http._called_with + self._verifyProtobufCall(cw, URI, conn) + rq_class = datastore_pb2.AllocateIdsRequest + request = rq_class() + request.ParseFromString(cw['body']) + self.assertEqual(len(request.keys), len(before_key_pbs)) + for key_before, key_after in zip(before_key_pbs, request.keys): + self.assertEqual(key_before, key_after) + + +class Test__parse_commit_response(unittest2.TestCase): + + def _callFUT(self, commit_response_pb): + from gcloud.datastore.connection import _parse_commit_response + return _parse_commit_response(commit_response_pb) + + def test_it(self): + from gcloud.datastore._generated import datastore_pb2 + from gcloud.datastore._generated import entity_pb2 + + index_updates = 1337 + keys = [ + entity_pb2.Key( + path=[ + entity_pb2.Key.PathElement( + kind='Foo', + id=1234, + ), + ], + ), + 
entity_pb2.Key( + path=[ + entity_pb2.Key.PathElement( + kind='Bar', + name='baz', + ), + ], + ), + ] + response = datastore_pb2.CommitResponse( + mutation_results=[ + datastore_pb2.MutationResult(key=key) for key in keys + ], + index_updates=index_updates, + ) + result = self._callFUT(response) + self.assertEqual(result, (index_updates, keys)) + + +class Http(object): + + _called_with = None + + def __init__(self, headers, content): + from httplib2 import Response + self._response = Response(headers) + self._content = content + + def request(self, **kw): + self._called_with = kw + return self._response, self._content + + +class _PathElementProto(object): + + def __init__(self, _id): + self.id = _id + + +class _KeyProto(object): + + def __init__(self, id_): + self.path = [_PathElementProto(id_)] diff --git a/env/Lib/site-packages/gcloud/datastore/test_entity.py b/env/Lib/site-packages/gcloud/datastore/test_entity.py new file mode 100644 index 0000000..ce9e635 --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/test_entity.py @@ -0,0 +1,211 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest2 + +_PROJECT = 'PROJECT' +_KIND = 'KIND' +_ID = 1234 + + +class TestEntity(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.datastore.entity import Entity + return Entity + + def _makeOne(self, key=None, exclude_from_indexes=()): + klass = self._getTargetClass() + return klass(key=key, exclude_from_indexes=exclude_from_indexes) + + def test_ctor_defaults(self): + klass = self._getTargetClass() + entity = klass() + self.assertEqual(entity.key, None) + self.assertEqual(entity.kind, None) + self.assertEqual(sorted(entity.exclude_from_indexes), []) + + def test_ctor_explicit(self): + _EXCLUDE_FROM_INDEXES = ['foo', 'bar'] + key = _Key() + entity = self._makeOne( + key=key, exclude_from_indexes=_EXCLUDE_FROM_INDEXES) + self.assertEqual(sorted(entity.exclude_from_indexes), + sorted(_EXCLUDE_FROM_INDEXES)) + + def test_ctor_bad_exclude_from_indexes(self): + BAD_EXCLUDE_FROM_INDEXES = object() + key = _Key() + self.assertRaises(TypeError, self._makeOne, key=key, + exclude_from_indexes=BAD_EXCLUDE_FROM_INDEXES) + + def test___eq_____ne___w_non_entity(self): + from gcloud.datastore.key import Key + key = Key(_KIND, _ID, project=_PROJECT) + entity = self._makeOne(key=key) + self.assertFalse(entity == object()) + self.assertTrue(entity != object()) + + def test___eq_____ne___w_different_keys(self): + from gcloud.datastore.key import Key + _ID1 = 1234 + _ID2 = 2345 + key1 = Key(_KIND, _ID1, project=_PROJECT) + entity1 = self._makeOne(key=key1) + key2 = Key(_KIND, _ID2, project=_PROJECT) + entity2 = self._makeOne(key=key2) + self.assertFalse(entity1 == entity2) + self.assertTrue(entity1 != entity2) + + def test___eq_____ne___w_same_keys(self): + from gcloud.datastore.key import Key + + name = 'foo' + value = 42 + meaning = 9 + + key1 = Key(_KIND, _ID, project=_PROJECT) + entity1 = self._makeOne(key=key1, exclude_from_indexes=(name,)) + entity1[name] = value + entity1._meanings[name] = (meaning, value) + + key2 = Key(_KIND, _ID, 
project=_PROJECT) + entity2 = self._makeOne(key=key2, exclude_from_indexes=(name,)) + entity2[name] = value + entity2._meanings[name] = (meaning, value) + + self.assertTrue(entity1 == entity2) + self.assertFalse(entity1 != entity2) + + def test___eq_____ne___w_same_keys_different_props(self): + from gcloud.datastore.key import Key + key1 = Key(_KIND, _ID, project=_PROJECT) + entity1 = self._makeOne(key=key1) + entity1['foo'] = 'Foo' + key2 = Key(_KIND, _ID, project=_PROJECT) + entity2 = self._makeOne(key=key2) + entity1['bar'] = 'Bar' + self.assertFalse(entity1 == entity2) + self.assertTrue(entity1 != entity2) + + def test___eq_____ne___w_same_keys_props_w_equiv_keys_as_value(self): + from gcloud.datastore.key import Key + key1 = Key(_KIND, _ID, project=_PROJECT) + key2 = Key(_KIND, _ID, project=_PROJECT) + entity1 = self._makeOne(key=key1) + entity1['some_key'] = key1 + entity2 = self._makeOne(key=key1) + entity2['some_key'] = key2 + self.assertTrue(entity1 == entity2) + self.assertFalse(entity1 != entity2) + + def test___eq_____ne___w_same_keys_props_w_diff_keys_as_value(self): + from gcloud.datastore.key import Key + _ID1 = 1234 + _ID2 = 2345 + key1 = Key(_KIND, _ID1, project=_PROJECT) + key2 = Key(_KIND, _ID2, project=_PROJECT) + entity1 = self._makeOne(key=key1) + entity1['some_key'] = key1 + entity2 = self._makeOne(key=key1) + entity2['some_key'] = key2 + self.assertFalse(entity1 == entity2) + self.assertTrue(entity1 != entity2) + + def test___eq_____ne___w_same_keys_props_w_equiv_entities_as_value(self): + from gcloud.datastore.key import Key + key = Key(_KIND, _ID, project=_PROJECT) + entity1 = self._makeOne(key=key) + sub1 = self._makeOne() + sub1.update({'foo': 'Foo'}) + entity1['some_entity'] = sub1 + entity2 = self._makeOne(key=key) + sub2 = self._makeOne() + sub2.update({'foo': 'Foo'}) + entity2['some_entity'] = sub2 + self.assertTrue(entity1 == entity2) + self.assertFalse(entity1 != entity2) + + def 
test___eq_____ne___w_same_keys_props_w_diff_entities_as_value(self): + from gcloud.datastore.key import Key + key = Key(_KIND, _ID, project=_PROJECT) + entity1 = self._makeOne(key=key) + sub1 = self._makeOne() + sub1.update({'foo': 'Foo'}) + entity1['some_entity'] = sub1 + entity2 = self._makeOne(key=key) + sub2 = self._makeOne() + sub2.update({'foo': 'Bar'}) + entity2['some_entity'] = sub2 + self.assertFalse(entity1 == entity2) + self.assertTrue(entity1 != entity2) + + def test__eq__same_value_different_exclude(self): + from gcloud.datastore.key import Key + + name = 'foo' + value = 42 + key = Key(_KIND, _ID, project=_PROJECT) + + entity1 = self._makeOne(key=key, exclude_from_indexes=(name,)) + entity1[name] = value + + entity2 = self._makeOne(key=key, exclude_from_indexes=()) + entity2[name] = value + + self.assertFalse(entity1 == entity2) + + def test__eq__same_value_different_meanings(self): + from gcloud.datastore.key import Key + + name = 'foo' + value = 42 + meaning = 9 + key = Key(_KIND, _ID, project=_PROJECT) + + entity1 = self._makeOne(key=key, exclude_from_indexes=(name,)) + entity1[name] = value + + entity2 = self._makeOne(key=key, exclude_from_indexes=(name,)) + entity2[name] = value + entity2._meanings[name] = (meaning, value) + + self.assertFalse(entity1 == entity2) + + def test___repr___no_key_empty(self): + entity = self._makeOne() + self.assertEqual(repr(entity), '') + + def test___repr___w_key_non_empty(self): + key = _Key() + key._path = '/bar/baz' + entity = self._makeOne(key=key) + entity['foo'] = 'Foo' + self.assertEqual(repr(entity), "") + + +class _Key(object): + _MARKER = object() + _key = 'KEY' + _partial = False + _path = None + _id = None + _stored = None + + def __init__(self, project=_PROJECT): + self.project = project + + @property + def path(self): + return self._path diff --git a/env/Lib/site-packages/gcloud/datastore/test_helpers.py b/env/Lib/site-packages/gcloud/datastore/test_helpers.py new file mode 100644 index 
0000000..3cc7e6c --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/test_helpers.py @@ -0,0 +1,926 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class Test__new_value_pb(unittest2.TestCase): + + def _callFUT(self, entity_pb, name): + from gcloud.datastore.helpers import _new_value_pb + return _new_value_pb(entity_pb, name) + + def test_it(self): + from gcloud.datastore._generated import entity_pb2 + + entity_pb = entity_pb2.Entity() + name = 'foo' + result = self._callFUT(entity_pb, name) + + self.assertTrue(isinstance(result, entity_pb2.Value)) + self.assertEqual(len(entity_pb.properties), 1) + self.assertEqual(entity_pb.properties[name], result) + + +class Test__property_tuples(unittest2.TestCase): + + def _callFUT(self, entity_pb): + from gcloud.datastore.helpers import _property_tuples + return _property_tuples(entity_pb) + + def test_it(self): + import types + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore.helpers import _new_value_pb + + entity_pb = entity_pb2.Entity() + name1 = 'foo' + name2 = 'bar' + val_pb1 = _new_value_pb(entity_pb, name1) + val_pb2 = _new_value_pb(entity_pb, name2) + + result = self._callFUT(entity_pb) + self.assertTrue(isinstance(result, types.GeneratorType)) + self.assertEqual(sorted(result), + sorted([(name1, val_pb1), (name2, val_pb2)])) + + +class Test_entity_from_protobuf(unittest2.TestCase): + + def _callFUT(self, val): 
+ from gcloud.datastore.helpers import entity_from_protobuf + return entity_from_protobuf(val) + + def test_it(self): + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore.helpers import _new_value_pb + + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _ID = 1234 + entity_pb = entity_pb2.Entity() + entity_pb.key.partition_id.project_id = _PROJECT + entity_pb.key.path.add(kind=_KIND, id=_ID) + + value_pb = _new_value_pb(entity_pb, 'foo') + value_pb.string_value = 'Foo' + + unindexed_val_pb = _new_value_pb(entity_pb, 'bar') + unindexed_val_pb.integer_value = 10 + unindexed_val_pb.exclude_from_indexes = True + + array_val_pb1 = _new_value_pb(entity_pb, 'baz') + array_pb1 = array_val_pb1.array_value.values + + unindexed_array_val_pb = array_pb1.add() + unindexed_array_val_pb.integer_value = 11 + unindexed_array_val_pb.exclude_from_indexes = True + + array_val_pb2 = _new_value_pb(entity_pb, 'qux') + array_pb2 = array_val_pb2.array_value.values + + indexed_array_val_pb = array_pb2.add() + indexed_array_val_pb.integer_value = 12 + + entity = self._callFUT(entity_pb) + self.assertEqual(entity.kind, _KIND) + self.assertEqual(entity.exclude_from_indexes, + frozenset(['bar', 'baz'])) + entity_props = dict(entity) + self.assertEqual(entity_props, + {'foo': 'Foo', 'bar': 10, 'baz': [11], 'qux': [12]}) + + # Also check the key. 
+ key = entity.key + self.assertEqual(key.project, _PROJECT) + self.assertEqual(key.namespace, None) + self.assertEqual(key.kind, _KIND) + self.assertEqual(key.id, _ID) + + def test_mismatched_value_indexed(self): + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore.helpers import _new_value_pb + + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _ID = 1234 + entity_pb = entity_pb2.Entity() + entity_pb.key.partition_id.project_id = _PROJECT + entity_pb.key.path.add(kind=_KIND, id=_ID) + + array_val_pb = _new_value_pb(entity_pb, 'baz') + array_pb = array_val_pb.array_value.values + + unindexed_value_pb1 = array_pb.add() + unindexed_value_pb1.integer_value = 10 + unindexed_value_pb1.exclude_from_indexes = True + + unindexed_value_pb2 = array_pb.add() + unindexed_value_pb2.integer_value = 11 + + with self.assertRaises(ValueError): + self._callFUT(entity_pb) + + def test_entity_no_key(self): + from gcloud.datastore._generated import entity_pb2 + + entity_pb = entity_pb2.Entity() + entity = self._callFUT(entity_pb) + + self.assertEqual(entity.key, None) + self.assertEqual(dict(entity), {}) + + def test_entity_with_meaning(self): + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore.helpers import _new_value_pb + + entity_pb = entity_pb2.Entity() + name = 'hello' + value_pb = _new_value_pb(entity_pb, name) + value_pb.meaning = meaning = 9 + value_pb.string_value = val = u'something' + + entity = self._callFUT(entity_pb) + self.assertEqual(entity.key, None) + self.assertEqual(dict(entity), {name: val}) + self.assertEqual(entity._meanings, {name: (meaning, val)}) + + def test_nested_entity_no_key(self): + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore.helpers import _new_value_pb + + PROJECT = 'FOO' + KIND = 'KIND' + INSIDE_NAME = 'IFOO' + OUTSIDE_NAME = 'OBAR' + INSIDE_VALUE = 1337 + + entity_inside = entity_pb2.Entity() + inside_val_pb = _new_value_pb(entity_inside, INSIDE_NAME) + 
inside_val_pb.integer_value = INSIDE_VALUE + + entity_pb = entity_pb2.Entity() + entity_pb.key.partition_id.project_id = PROJECT + element = entity_pb.key.path.add() + element.kind = KIND + + outside_val_pb = _new_value_pb(entity_pb, OUTSIDE_NAME) + outside_val_pb.entity_value.CopyFrom(entity_inside) + + entity = self._callFUT(entity_pb) + self.assertEqual(entity.key.project, PROJECT) + self.assertEqual(entity.key.flat_path, (KIND,)) + self.assertEqual(len(entity), 1) + + inside_entity = entity[OUTSIDE_NAME] + self.assertEqual(inside_entity.key, None) + self.assertEqual(len(inside_entity), 1) + self.assertEqual(inside_entity[INSIDE_NAME], INSIDE_VALUE) + + +class Test_entity_to_protobuf(unittest2.TestCase): + + def _callFUT(self, entity): + from gcloud.datastore.helpers import entity_to_protobuf + return entity_to_protobuf(entity) + + def _compareEntityProto(self, entity_pb1, entity_pb2): + from gcloud.datastore.helpers import _property_tuples + + self.assertEqual(entity_pb1.key, entity_pb2.key) + value_list1 = sorted(_property_tuples(entity_pb1)) + value_list2 = sorted(_property_tuples(entity_pb2)) + self.assertEqual(len(value_list1), len(value_list2)) + for pair1, pair2 in zip(value_list1, value_list2): + name1, val1 = pair1 + name2, val2 = pair2 + self.assertEqual(name1, name2) + if val1.HasField('entity_value'): # Message field (Entity) + self.assertEqual(val1.meaning, val2.meaning) + self._compareEntityProto(val1.entity_value, + val2.entity_value) + else: + self.assertEqual(val1, val2) + + def test_empty(self): + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore.entity import Entity + + entity = Entity() + entity_pb = self._callFUT(entity) + self._compareEntityProto(entity_pb, entity_pb2.Entity()) + + def test_key_only(self): + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore.entity import Entity + from gcloud.datastore.key import Key + + kind, name = 'PATH', 'NAME' + project = 'PROJECT' + key = Key(kind, 
name, project=project) + entity = Entity(key=key) + entity_pb = self._callFUT(entity) + + expected_pb = entity_pb2.Entity() + expected_pb.key.partition_id.project_id = project + path_elt = expected_pb.key.path.add() + path_elt.kind = kind + path_elt.name = name + + self._compareEntityProto(entity_pb, expected_pb) + + def test_simple_fields(self): + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore.entity import Entity + from gcloud.datastore.helpers import _new_value_pb + + entity = Entity() + name1 = 'foo' + entity[name1] = value1 = 42 + name2 = 'bar' + entity[name2] = value2 = u'some-string' + entity_pb = self._callFUT(entity) + + expected_pb = entity_pb2.Entity() + val_pb1 = _new_value_pb(expected_pb, name1) + val_pb1.integer_value = value1 + val_pb2 = _new_value_pb(expected_pb, name2) + val_pb2.string_value = value2 + + self._compareEntityProto(entity_pb, expected_pb) + + def test_with_empty_list(self): + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore.entity import Entity + + entity = Entity() + entity['foo'] = [] + entity_pb = self._callFUT(entity) + + self._compareEntityProto(entity_pb, entity_pb2.Entity()) + + def test_inverts_to_protobuf(self): + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore.helpers import _new_value_pb + from gcloud.datastore.helpers import entity_from_protobuf + + original_pb = entity_pb2.Entity() + # Add a key. + original_pb.key.partition_id.project_id = project = 'PROJECT' + elem1 = original_pb.key.path.add() + elem1.kind = 'Family' + elem1.id = 1234 + elem2 = original_pb.key.path.add() + elem2.kind = 'King' + elem2.name = 'Spades' + + # Add an integer property. + val_pb1 = _new_value_pb(original_pb, 'foo') + val_pb1.integer_value = 1337 + val_pb1.exclude_from_indexes = True + # Add a string property. + val_pb2 = _new_value_pb(original_pb, 'bar') + val_pb2.string_value = u'hello' + + # Add a nested (entity) property. 
+ val_pb3 = _new_value_pb(original_pb, 'entity-baz') + sub_pb = entity_pb2.Entity() + sub_val_pb1 = _new_value_pb(sub_pb, 'x') + sub_val_pb1.double_value = 3.14 + sub_val_pb2 = _new_value_pb(sub_pb, 'y') + sub_val_pb2.double_value = 2.718281828 + val_pb3.meaning = 9 + val_pb3.entity_value.CopyFrom(sub_pb) + + # Add a list property. + val_pb4 = _new_value_pb(original_pb, 'list-quux') + array_val1 = val_pb4.array_value.values.add() + array_val1.exclude_from_indexes = False + array_val1.meaning = meaning = 22 + array_val1.blob_value = b'\xe2\x98\x83' + array_val2 = val_pb4.array_value.values.add() + array_val2.exclude_from_indexes = False + array_val2.meaning = meaning + array_val2.blob_value = b'\xe2\x98\x85' + + # Convert to the user-space Entity. + entity = entity_from_protobuf(original_pb) + # Convert the user-space Entity back to a protobuf. + new_pb = self._callFUT(entity) + + # NOTE: entity_to_protobuf() strips the project so we "cheat". + new_pb.key.partition_id.project_id = project + self._compareEntityProto(original_pb, new_pb) + + def test_meaning_with_change(self): + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore.entity import Entity + from gcloud.datastore.helpers import _new_value_pb + + entity = Entity() + name = 'foo' + entity[name] = value = 42 + entity._meanings[name] = (9, 1337) + entity_pb = self._callFUT(entity) + + expected_pb = entity_pb2.Entity() + value_pb = _new_value_pb(expected_pb, name) + value_pb.integer_value = value + # NOTE: No meaning is used since the value differs from the + # value stored. 
+ self._compareEntityProto(entity_pb, expected_pb) + + def test_variable_meanings(self): + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore.entity import Entity + from gcloud.datastore.helpers import _new_value_pb + + entity = Entity() + name = 'quux' + entity[name] = values = [1, 20, 300] + meaning = 9 + entity._meanings[name] = ([None, meaning, None], values) + entity_pb = self._callFUT(entity) + + # Construct the expected protobuf. + expected_pb = entity_pb2.Entity() + value_pb = _new_value_pb(expected_pb, name) + value0 = value_pb.array_value.values.add() + value0.integer_value = values[0] + # The only array entry with a meaning is the middle one. + value1 = value_pb.array_value.values.add() + value1.integer_value = values[1] + value1.meaning = meaning + value2 = value_pb.array_value.values.add() + value2.integer_value = values[2] + + self._compareEntityProto(entity_pb, expected_pb) + + +class Test_key_from_protobuf(unittest2.TestCase): + + def _callFUT(self, val): + from gcloud.datastore.helpers import key_from_protobuf + + return key_from_protobuf(val) + + def _makePB(self, project=None, namespace=None, path=()): + from gcloud.datastore._generated import entity_pb2 + pb = entity_pb2.Key() + if project is not None: + pb.partition_id.project_id = project + if namespace is not None: + pb.partition_id.namespace_id = namespace + for elem in path: + added = pb.path.add() + added.kind = elem['kind'] + if 'id' in elem: + added.id = elem['id'] + if 'name' in elem: + added.name = elem['name'] + return pb + + def test_wo_namespace_in_pb(self): + _PROJECT = 'PROJECT' + pb = self._makePB(path=[{'kind': 'KIND'}], project=_PROJECT) + key = self._callFUT(pb) + self.assertEqual(key.project, _PROJECT) + self.assertEqual(key.namespace, None) + + def test_w_namespace_in_pb(self): + _PROJECT = 'PROJECT' + _NAMESPACE = 'NAMESPACE' + pb = self._makePB(path=[{'kind': 'KIND'}], namespace=_NAMESPACE, + project=_PROJECT) + key = self._callFUT(pb) + 
self.assertEqual(key.project, _PROJECT) + self.assertEqual(key.namespace, _NAMESPACE) + + def test_w_nested_path_in_pb(self): + _PATH = [ + {'kind': 'PARENT', 'name': 'NAME'}, + {'kind': 'CHILD', 'id': 1234}, + {'kind': 'GRANDCHILD', 'id': 5678}, + ] + pb = self._makePB(path=_PATH, project='PROJECT') + key = self._callFUT(pb) + self.assertEqual(key.path, _PATH) + + def test_w_nothing_in_pb(self): + pb = self._makePB() + self.assertRaises(ValueError, self._callFUT, pb) + + +class Test__pb_attr_value(unittest2.TestCase): + + def _callFUT(self, val): + from gcloud.datastore.helpers import _pb_attr_value + + return _pb_attr_value(val) + + def test_datetime_naive(self): + import calendar + import datetime + from gcloud._helpers import UTC + + micros = 4375 + naive = datetime.datetime(2014, 9, 16, 10, 19, 32, micros) # No zone. + utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) + name, value = self._callFUT(naive) + self.assertEqual(name, 'timestamp_value') + self.assertEqual(value.seconds, calendar.timegm(utc.timetuple())) + self.assertEqual(value.nanos, 1000 * micros) + + def test_datetime_w_zone(self): + import calendar + import datetime + from gcloud._helpers import UTC + + micros = 4375 + utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) + name, value = self._callFUT(utc) + self.assertEqual(name, 'timestamp_value') + self.assertEqual(value.seconds, calendar.timegm(utc.timetuple())) + self.assertEqual(value.nanos, 1000 * micros) + + def test_key(self): + from gcloud.datastore.key import Key + + key = Key('PATH', 1234, project='PROJECT') + name, value = self._callFUT(key) + self.assertEqual(name, 'key_value') + self.assertEqual(value, key.to_protobuf()) + + def test_bool(self): + name, value = self._callFUT(False) + self.assertEqual(name, 'boolean_value') + self.assertEqual(value, False) + + def test_float(self): + name, value = self._callFUT(3.1415926) + self.assertEqual(name, 'double_value') + self.assertEqual(value, 3.1415926) + + def 
test_int(self): + name, value = self._callFUT(42) + self.assertEqual(name, 'integer_value') + self.assertEqual(value, 42) + + def test_long(self): + must_be_long = (1 << 63) - 1 + name, value = self._callFUT(must_be_long) + self.assertEqual(name, 'integer_value') + self.assertEqual(value, must_be_long) + + def test_native_str(self): + import six + name, value = self._callFUT('str') + if six.PY2: + self.assertEqual(name, 'blob_value') + else: # pragma: NO COVER Python 3 + self.assertEqual(name, 'string_value') + self.assertEqual(value, 'str') + + def test_bytes(self): + name, value = self._callFUT(b'bytes') + self.assertEqual(name, 'blob_value') + self.assertEqual(value, b'bytes') + + def test_unicode(self): + name, value = self._callFUT(u'str') + self.assertEqual(name, 'string_value') + self.assertEqual(value, u'str') + + def test_entity(self): + from gcloud.datastore.entity import Entity + entity = Entity() + name, value = self._callFUT(entity) + self.assertEqual(name, 'entity_value') + self.assertTrue(value is entity) + + def test_array(self): + values = ['a', 0, 3.14] + name, value = self._callFUT(values) + self.assertEqual(name, 'array_value') + self.assertTrue(value is values) + + def test_geo_point(self): + from google.type import latlng_pb2 + from gcloud.datastore.helpers import GeoPoint + + lat = 42.42 + lng = 99.0007 + geo_pt = GeoPoint(latitude=lat, longitude=lng) + geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) + name, value = self._callFUT(geo_pt) + self.assertEqual(name, 'geo_point_value') + self.assertEqual(value, geo_pt_pb) + + def test_null(self): + from google.protobuf import struct_pb2 + + name, value = self._callFUT(None) + self.assertEqual(name, 'null_value') + self.assertEqual(value, struct_pb2.NULL_VALUE) + + def test_object(self): + self.assertRaises(ValueError, self._callFUT, object()) + + +class Test__get_value_from_value_pb(unittest2.TestCase): + + def _callFUT(self, pb): + from gcloud.datastore.helpers import 
_get_value_from_value_pb + + return _get_value_from_value_pb(pb) + + def _makePB(self, attr_name, value): + from gcloud.datastore._generated import entity_pb2 + + pb = entity_pb2.Value() + setattr(pb, attr_name, value) + return pb + + def test_datetime(self): + import calendar + import datetime + from gcloud._helpers import UTC + from gcloud.datastore._generated import entity_pb2 + + micros = 4375 + utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) + pb = entity_pb2.Value() + pb.timestamp_value.seconds = calendar.timegm(utc.timetuple()) + pb.timestamp_value.nanos = 1000 * micros + self.assertEqual(self._callFUT(pb), utc) + + def test_key(self): + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore.key import Key + + pb = entity_pb2.Value() + expected = Key('KIND', 1234, project='PROJECT').to_protobuf() + pb.key_value.CopyFrom(expected) + found = self._callFUT(pb) + self.assertEqual(found.to_protobuf(), expected) + + def test_bool(self): + pb = self._makePB('boolean_value', False) + self.assertEqual(self._callFUT(pb), False) + + def test_float(self): + pb = self._makePB('double_value', 3.1415926) + self.assertEqual(self._callFUT(pb), 3.1415926) + + def test_int(self): + pb = self._makePB('integer_value', 42) + self.assertEqual(self._callFUT(pb), 42) + + def test_bytes(self): + pb = self._makePB('blob_value', b'str') + self.assertEqual(self._callFUT(pb), b'str') + + def test_unicode(self): + pb = self._makePB('string_value', u'str') + self.assertEqual(self._callFUT(pb), u'str') + + def test_entity(self): + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore.entity import Entity + from gcloud.datastore.helpers import _new_value_pb + + pb = entity_pb2.Value() + entity_pb = pb.entity_value + entity_pb.key.path.add(kind='KIND') + entity_pb.key.partition_id.project_id = 'PROJECT' + + value_pb = _new_value_pb(entity_pb, 'foo') + value_pb.string_value = 'Foo' + entity = self._callFUT(pb) + 
self.assertTrue(isinstance(entity, Entity)) + self.assertEqual(entity['foo'], 'Foo') + + def test_array(self): + from gcloud.datastore._generated import entity_pb2 + + pb = entity_pb2.Value() + array_pb = pb.array_value.values + item_pb = array_pb.add() + item_pb.string_value = 'Foo' + item_pb = array_pb.add() + item_pb.string_value = 'Bar' + items = self._callFUT(pb) + self.assertEqual(items, ['Foo', 'Bar']) + + def test_geo_point(self): + from google.type import latlng_pb2 + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore.helpers import GeoPoint + + lat = -3.14 + lng = 13.37 + geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) + pb = entity_pb2.Value(geo_point_value=geo_pt_pb) + result = self._callFUT(pb) + self.assertIsInstance(result, GeoPoint) + self.assertEqual(result.latitude, lat) + self.assertEqual(result.longitude, lng) + + def test_null(self): + from google.protobuf import struct_pb2 + from gcloud.datastore._generated import entity_pb2 + + pb = entity_pb2.Value(null_value=struct_pb2.NULL_VALUE) + result = self._callFUT(pb) + self.assertIsNone(result) + + def test_unknown(self): + from gcloud.datastore._generated import entity_pb2 + + pb = entity_pb2.Value() + with self.assertRaises(ValueError): + self._callFUT(pb) + + +class Test_set_protobuf_value(unittest2.TestCase): + + def _callFUT(self, value_pb, val): + from gcloud.datastore.helpers import _set_protobuf_value + + return _set_protobuf_value(value_pb, val) + + def _makePB(self): + from gcloud.datastore._generated import entity_pb2 + return entity_pb2.Value() + + def test_datetime(self): + import calendar + import datetime + from gcloud._helpers import UTC + + pb = self._makePB() + micros = 4375 + utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) + self._callFUT(pb, utc) + value = pb.timestamp_value + self.assertEqual(value.seconds, calendar.timegm(utc.timetuple())) + self.assertEqual(value.nanos, 1000 * micros) + + def test_key(self): + from 
gcloud.datastore.key import Key + + pb = self._makePB() + key = Key('KIND', 1234, project='PROJECT') + self._callFUT(pb, key) + value = pb.key_value + self.assertEqual(value, key.to_protobuf()) + + def test_none(self): + pb = self._makePB() + self._callFUT(pb, None) + self.assertEqual(pb.WhichOneof('value_type'), 'null_value') + + def test_bool(self): + pb = self._makePB() + self._callFUT(pb, False) + value = pb.boolean_value + self.assertEqual(value, False) + + def test_float(self): + pb = self._makePB() + self._callFUT(pb, 3.1415926) + value = pb.double_value + self.assertEqual(value, 3.1415926) + + def test_int(self): + pb = self._makePB() + self._callFUT(pb, 42) + value = pb.integer_value + self.assertEqual(value, 42) + + def test_long(self): + pb = self._makePB() + must_be_long = (1 << 63) - 1 + self._callFUT(pb, must_be_long) + value = pb.integer_value + self.assertEqual(value, must_be_long) + + def test_native_str(self): + import six + pb = self._makePB() + self._callFUT(pb, 'str') + if six.PY2: + value = pb.blob_value + else: # pragma: NO COVER Python 3 + value = pb.string_value + self.assertEqual(value, 'str') + + def test_bytes(self): + pb = self._makePB() + self._callFUT(pb, b'str') + value = pb.blob_value + self.assertEqual(value, b'str') + + def test_unicode(self): + pb = self._makePB() + self._callFUT(pb, u'str') + value = pb.string_value + self.assertEqual(value, u'str') + + def test_entity_empty_wo_key(self): + from gcloud.datastore.entity import Entity + from gcloud.datastore.helpers import _property_tuples + + pb = self._makePB() + entity = Entity() + self._callFUT(pb, entity) + value = pb.entity_value + self.assertEqual(value.key.SerializeToString(), b'') + self.assertEqual(len(list(_property_tuples(value))), 0) + + def test_entity_w_key(self): + from gcloud.datastore.entity import Entity + from gcloud.datastore.helpers import _property_tuples + from gcloud.datastore.key import Key + + name = 'foo' + value = u'Foo' + pb = self._makePB() + key = 
Key('KIND', 123, project='PROJECT') + entity = Entity(key=key) + entity[name] = value + self._callFUT(pb, entity) + entity_pb = pb.entity_value + self.assertEqual(entity_pb.key, key.to_protobuf()) + + prop_dict = dict(_property_tuples(entity_pb)) + self.assertEqual(len(prop_dict), 1) + self.assertEqual(list(prop_dict.keys()), [name]) + self.assertEqual(prop_dict[name].string_value, value) + + def test_array(self): + pb = self._makePB() + values = [u'a', 0, 3.14] + self._callFUT(pb, values) + marshalled = pb.array_value.values + self.assertEqual(len(marshalled), len(values)) + self.assertEqual(marshalled[0].string_value, values[0]) + self.assertEqual(marshalled[1].integer_value, values[1]) + self.assertEqual(marshalled[2].double_value, values[2]) + + def test_geo_point(self): + from google.type import latlng_pb2 + from gcloud.datastore.helpers import GeoPoint + + pb = self._makePB() + lat = 9.11 + lng = 3.337 + geo_pt = GeoPoint(latitude=lat, longitude=lng) + geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) + self._callFUT(pb, geo_pt) + self.assertEqual(pb.geo_point_value, geo_pt_pb) + + +class Test__get_meaning(unittest2.TestCase): + + def _callFUT(self, *args, **kwargs): + from gcloud.datastore.helpers import _get_meaning + return _get_meaning(*args, **kwargs) + + def test_no_meaning(self): + from gcloud.datastore._generated import entity_pb2 + + value_pb = entity_pb2.Value() + result = self._callFUT(value_pb) + self.assertEqual(result, None) + + def test_single(self): + from gcloud.datastore._generated import entity_pb2 + + value_pb = entity_pb2.Value() + value_pb.meaning = meaning = 22 + value_pb.string_value = u'hi' + result = self._callFUT(value_pb) + self.assertEqual(meaning, result) + + def test_empty_array_value(self): + from gcloud.datastore._generated import entity_pb2 + + value_pb = entity_pb2.Value() + value_pb.array_value.values.add() + value_pb.array_value.values.pop() + + result = self._callFUT(value_pb, is_list=True) + 
self.assertEqual(None, result) + + def test_array_value(self): + from gcloud.datastore._generated import entity_pb2 + + value_pb = entity_pb2.Value() + meaning = 9 + sub_value_pb1 = value_pb.array_value.values.add() + sub_value_pb2 = value_pb.array_value.values.add() + + sub_value_pb1.meaning = sub_value_pb2.meaning = meaning + sub_value_pb1.string_value = u'hi' + sub_value_pb2.string_value = u'bye' + + result = self._callFUT(value_pb, is_list=True) + self.assertEqual(meaning, result) + + def test_array_value_multiple_meanings(self): + from gcloud.datastore._generated import entity_pb2 + + value_pb = entity_pb2.Value() + meaning1 = 9 + meaning2 = 10 + sub_value_pb1 = value_pb.array_value.values.add() + sub_value_pb2 = value_pb.array_value.values.add() + + sub_value_pb1.meaning = meaning1 + sub_value_pb2.meaning = meaning2 + sub_value_pb1.string_value = u'hi' + sub_value_pb2.string_value = u'bye' + + result = self._callFUT(value_pb, is_list=True) + self.assertEqual(result, [meaning1, meaning2]) + + def test_array_value_meaning_partially_unset(self): + from gcloud.datastore._generated import entity_pb2 + + value_pb = entity_pb2.Value() + meaning1 = 9 + sub_value_pb1 = value_pb.array_value.values.add() + sub_value_pb2 = value_pb.array_value.values.add() + + sub_value_pb1.meaning = meaning1 + sub_value_pb1.string_value = u'hi' + sub_value_pb2.string_value = u'bye' + + result = self._callFUT(value_pb, is_list=True) + self.assertEqual(result, [meaning1, None]) + + +class TestGeoPoint(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.datastore.helpers import GeoPoint + return GeoPoint + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_constructor(self): + lat = 81.2 + lng = 359.9999 + geo_pt = self._makeOne(lat, lng) + self.assertEqual(geo_pt.latitude, lat) + self.assertEqual(geo_pt.longitude, lng) + + def test_to_protobuf(self): + from google.type import latlng_pb2 + + lat = 0.0001 + lng = 20.03 + geo_pt 
= self._makeOne(lat, lng) + result = geo_pt.to_protobuf() + geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) + self.assertEqual(result, geo_pt_pb) + + def test___eq__(self): + lat = 0.0001 + lng = 20.03 + geo_pt1 = self._makeOne(lat, lng) + geo_pt2 = self._makeOne(lat, lng) + self.assertEqual(geo_pt1, geo_pt2) + + def test___eq__type_differ(self): + lat = 0.0001 + lng = 20.03 + geo_pt1 = self._makeOne(lat, lng) + geo_pt2 = object() + self.assertNotEqual(geo_pt1, geo_pt2) + + def test___ne__same_value(self): + lat = 0.0001 + lng = 20.03 + geo_pt1 = self._makeOne(lat, lng) + geo_pt2 = self._makeOne(lat, lng) + comparison_val = (geo_pt1 != geo_pt2) + self.assertFalse(comparison_val) + + def test___ne__(self): + geo_pt1 = self._makeOne(0.0, 1.0) + geo_pt2 = self._makeOne(2.0, 3.0) + self.assertNotEqual(geo_pt1, geo_pt2) diff --git a/env/Lib/site-packages/gcloud/datastore/test_key.py b/env/Lib/site-packages/gcloud/datastore/test_key.py new file mode 100644 index 0000000..ce214e4 --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/test_key.py @@ -0,0 +1,431 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest2 + + +class TestKey(unittest2.TestCase): + + _DEFAULT_PROJECT = 'PROJECT' + + def _getTargetClass(self): + from gcloud.datastore.key import Key + return Key + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_ctor_empty(self): + self.assertRaises(ValueError, self._makeOne) + + def test_ctor_no_project(self): + klass = self._getTargetClass() + self.assertRaises(ValueError, klass, 'KIND') + + def test_ctor_w_explicit_project_empty_path(self): + _PROJECT = 'PROJECT' + self.assertRaises(ValueError, self._makeOne, project=_PROJECT) + + def test_ctor_parent(self): + _PARENT_KIND = 'KIND1' + _PARENT_ID = 1234 + _PARENT_PROJECT = 'PROJECT-ALT' + _PARENT_NAMESPACE = 'NAMESPACE' + _CHILD_KIND = 'KIND2' + _CHILD_ID = 2345 + _PATH = [ + {'kind': _PARENT_KIND, 'id': _PARENT_ID}, + {'kind': _CHILD_KIND, 'id': _CHILD_ID}, + ] + parent_key = self._makeOne(_PARENT_KIND, _PARENT_ID, + project=_PARENT_PROJECT, + namespace=_PARENT_NAMESPACE) + key = self._makeOne(_CHILD_KIND, _CHILD_ID, parent=parent_key) + self.assertEqual(key.project, parent_key.project) + self.assertEqual(key.namespace, parent_key.namespace) + self.assertEqual(key.kind, _CHILD_KIND) + self.assertEqual(key.path, _PATH) + self.assertTrue(key.parent is parent_key) + + def test_ctor_partial_parent(self): + parent_key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + with self.assertRaises(ValueError): + self._makeOne('KIND2', 1234, parent=parent_key) + + def test_ctor_parent_bad_type(self): + with self.assertRaises(AttributeError): + self._makeOne('KIND2', 1234, parent=('KIND1', 1234), + project=self._DEFAULT_PROJECT) + + def test_ctor_parent_bad_namespace(self): + parent_key = self._makeOne('KIND', 1234, namespace='FOO', + project=self._DEFAULT_PROJECT) + with self.assertRaises(ValueError): + self._makeOne('KIND2', 1234, namespace='BAR', parent=parent_key, + project=self._DEFAULT_PROJECT) + + def test_ctor_parent_bad_project(self): + 
parent_key = self._makeOne('KIND', 1234, project='FOO') + with self.assertRaises(ValueError): + self._makeOne('KIND2', 1234, parent=parent_key, + project='BAR') + + def test_ctor_parent_empty_path(self): + parent_key = self._makeOne('KIND', 1234, + project=self._DEFAULT_PROJECT) + with self.assertRaises(ValueError): + self._makeOne(parent=parent_key) + + def test_ctor_explicit(self): + _PROJECT = 'PROJECT-ALT' + _NAMESPACE = 'NAMESPACE' + _KIND = 'KIND' + _ID = 1234 + _PATH = [{'kind': _KIND, 'id': _ID}] + key = self._makeOne(_KIND, _ID, namespace=_NAMESPACE, + project=_PROJECT) + self.assertEqual(key.project, _PROJECT) + self.assertEqual(key.namespace, _NAMESPACE) + self.assertEqual(key.kind, _KIND) + self.assertEqual(key.path, _PATH) + + def test_ctor_bad_kind(self): + self.assertRaises(ValueError, self._makeOne, object(), + project=self._DEFAULT_PROJECT) + + def test_ctor_bad_id_or_name(self): + self.assertRaises(ValueError, self._makeOne, 'KIND', object(), + project=self._DEFAULT_PROJECT) + self.assertRaises(ValueError, self._makeOne, 'KIND', None, + project=self._DEFAULT_PROJECT) + self.assertRaises(ValueError, self._makeOne, 'KIND', 10, 'KIND2', None, + project=self._DEFAULT_PROJECT) + + def test__clone(self): + _PROJECT = 'PROJECT-ALT' + _NAMESPACE = 'NAMESPACE' + _KIND = 'KIND' + _ID = 1234 + _PATH = [{'kind': _KIND, 'id': _ID}] + key = self._makeOne(_KIND, _ID, namespace=_NAMESPACE, + project=_PROJECT) + clone = key._clone() + self.assertEqual(clone.project, _PROJECT) + self.assertEqual(clone.namespace, _NAMESPACE) + self.assertEqual(clone.kind, _KIND) + self.assertEqual(clone.path, _PATH) + + def test__clone_with_parent(self): + _PROJECT = 'PROJECT-ALT' + _NAMESPACE = 'NAMESPACE' + _KIND1 = 'PARENT' + _KIND2 = 'KIND' + _ID1 = 1234 + _ID2 = 2345 + _PATH = [{'kind': _KIND1, 'id': _ID1}, {'kind': _KIND2, 'id': _ID2}] + + parent = self._makeOne(_KIND1, _ID1, namespace=_NAMESPACE, + project=_PROJECT) + key = self._makeOne(_KIND2, _ID2, parent=parent) + 
self.assertTrue(key.parent is parent) + clone = key._clone() + self.assertTrue(clone.parent is key.parent) + self.assertEqual(clone.project, _PROJECT) + self.assertEqual(clone.namespace, _NAMESPACE) + self.assertEqual(clone.path, _PATH) + + def test___eq_____ne___w_non_key(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _NAME = 'one' + key = self._makeOne(_KIND, _NAME, project=_PROJECT) + self.assertFalse(key == object()) + self.assertTrue(key != object()) + + def test___eq_____ne___two_incomplete_keys_same_kind(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + key1 = self._makeOne(_KIND, project=_PROJECT) + key2 = self._makeOne(_KIND, project=_PROJECT) + self.assertFalse(key1 == key2) + self.assertTrue(key1 != key2) + + def test___eq_____ne___incomplete_key_w_complete_key_same_kind(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _ID = 1234 + key1 = self._makeOne(_KIND, project=_PROJECT) + key2 = self._makeOne(_KIND, _ID, project=_PROJECT) + self.assertFalse(key1 == key2) + self.assertTrue(key1 != key2) + + def test___eq_____ne___complete_key_w_incomplete_key_same_kind(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _ID = 1234 + key1 = self._makeOne(_KIND, _ID, project=_PROJECT) + key2 = self._makeOne(_KIND, project=_PROJECT) + self.assertFalse(key1 == key2) + self.assertTrue(key1 != key2) + + def test___eq_____ne___same_kind_different_ids(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _ID1 = 1234 + _ID2 = 2345 + key1 = self._makeOne(_KIND, _ID1, project=_PROJECT) + key2 = self._makeOne(_KIND, _ID2, project=_PROJECT) + self.assertFalse(key1 == key2) + self.assertTrue(key1 != key2) + + def test___eq_____ne___same_kind_and_id(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _ID = 1234 + key1 = self._makeOne(_KIND, _ID, project=_PROJECT) + key2 = self._makeOne(_KIND, _ID, project=_PROJECT) + self.assertTrue(key1 == key2) + self.assertFalse(key1 != key2) + + def test___eq_____ne___same_kind_and_id_different_project(self): + _PROJECT1 = 'PROJECT1' + _PROJECT2 = 
'PROJECT2' + _KIND = 'KIND' + _ID = 1234 + key1 = self._makeOne(_KIND, _ID, project=_PROJECT1) + key2 = self._makeOne(_KIND, _ID, project=_PROJECT2) + self.assertFalse(key1 == key2) + self.assertTrue(key1 != key2) + + def test___eq_____ne___same_kind_and_id_different_namespace(self): + _PROJECT = 'PROJECT' + _NAMESPACE1 = 'NAMESPACE1' + _NAMESPACE2 = 'NAMESPACE2' + _KIND = 'KIND' + _ID = 1234 + key1 = self._makeOne(_KIND, _ID, project=_PROJECT, + namespace=_NAMESPACE1) + key2 = self._makeOne(_KIND, _ID, project=_PROJECT, + namespace=_NAMESPACE2) + self.assertFalse(key1 == key2) + self.assertTrue(key1 != key2) + + def test___eq_____ne___same_kind_different_names(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _NAME1 = 'one' + _NAME2 = 'two' + key1 = self._makeOne(_KIND, _NAME1, project=_PROJECT) + key2 = self._makeOne(_KIND, _NAME2, project=_PROJECT) + self.assertFalse(key1 == key2) + self.assertTrue(key1 != key2) + + def test___eq_____ne___same_kind_and_name(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _NAME = 'one' + key1 = self._makeOne(_KIND, _NAME, project=_PROJECT) + key2 = self._makeOne(_KIND, _NAME, project=_PROJECT) + self.assertTrue(key1 == key2) + self.assertFalse(key1 != key2) + + def test___eq_____ne___same_kind_and_name_different_project(self): + _PROJECT1 = 'PROJECT1' + _PROJECT2 = 'PROJECT2' + _KIND = 'KIND' + _NAME = 'one' + key1 = self._makeOne(_KIND, _NAME, project=_PROJECT1) + key2 = self._makeOne(_KIND, _NAME, project=_PROJECT2) + self.assertFalse(key1 == key2) + self.assertTrue(key1 != key2) + + def test___eq_____ne___same_kind_and_name_different_namespace(self): + _PROJECT = 'PROJECT' + _NAMESPACE1 = 'NAMESPACE1' + _NAMESPACE2 = 'NAMESPACE2' + _KIND = 'KIND' + _NAME = 'one' + key1 = self._makeOne(_KIND, _NAME, project=_PROJECT, + namespace=_NAMESPACE1) + key2 = self._makeOne(_KIND, _NAME, project=_PROJECT, + namespace=_NAMESPACE2) + self.assertFalse(key1 == key2) + self.assertTrue(key1 != key2) + + def test___hash___incomplete(self): + 
_PROJECT = 'PROJECT' + _KIND = 'KIND' + key = self._makeOne(_KIND, project=_PROJECT) + self.assertNotEqual(hash(key), + hash(_KIND) + hash(_PROJECT) + hash(None)) + + def test___hash___completed_w_id(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _ID = 1234 + key = self._makeOne(_KIND, _ID, project=_PROJECT) + self.assertNotEqual(hash(key), + hash(_KIND) + hash(_ID) + + hash(_PROJECT) + hash(None)) + + def test___hash___completed_w_name(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _NAME = 'NAME' + key = self._makeOne(_KIND, _NAME, project=_PROJECT) + self.assertNotEqual(hash(key), + hash(_KIND) + hash(_NAME) + + hash(_PROJECT) + hash(None)) + + def test_completed_key_on_partial_w_id(self): + key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + _ID = 1234 + new_key = key.completed_key(_ID) + self.assertFalse(key is new_key) + self.assertEqual(new_key.id, _ID) + self.assertEqual(new_key.name, None) + + def test_completed_key_on_partial_w_name(self): + key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + _NAME = 'NAME' + new_key = key.completed_key(_NAME) + self.assertFalse(key is new_key) + self.assertEqual(new_key.id, None) + self.assertEqual(new_key.name, _NAME) + + def test_completed_key_on_partial_w_invalid(self): + key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + self.assertRaises(ValueError, key.completed_key, object()) + + def test_completed_key_on_complete(self): + key = self._makeOne('KIND', 1234, project=self._DEFAULT_PROJECT) + self.assertRaises(ValueError, key.completed_key, 5678) + + def test_to_protobuf_defaults(self): + from gcloud.datastore._generated import entity_pb2 + + _KIND = 'KIND' + key = self._makeOne(_KIND, project=self._DEFAULT_PROJECT) + pb = key.to_protobuf() + self.assertTrue(isinstance(pb, entity_pb2.Key)) + + # Check partition ID. + self.assertEqual(pb.partition_id.project_id, self._DEFAULT_PROJECT) + # Unset values are False-y. 
+ self.assertEqual(pb.partition_id.namespace_id, '') + + # Check the element PB matches the partial key and kind. + elem, = list(pb.path) + self.assertEqual(elem.kind, _KIND) + # Unset values are False-y. + self.assertEqual(elem.name, '') + # Unset values are False-y. + self.assertEqual(elem.id, 0) + + def test_to_protobuf_w_explicit_project(self): + _PROJECT = 'PROJECT-ALT' + key = self._makeOne('KIND', project=_PROJECT) + pb = key.to_protobuf() + self.assertEqual(pb.partition_id.project_id, _PROJECT) + + def test_to_protobuf_w_explicit_namespace(self): + _NAMESPACE = 'NAMESPACE' + key = self._makeOne('KIND', namespace=_NAMESPACE, + project=self._DEFAULT_PROJECT) + pb = key.to_protobuf() + self.assertEqual(pb.partition_id.namespace_id, _NAMESPACE) + + def test_to_protobuf_w_explicit_path(self): + _PARENT = 'PARENT' + _CHILD = 'CHILD' + _ID = 1234 + _NAME = 'NAME' + key = self._makeOne(_PARENT, _NAME, _CHILD, _ID, + project=self._DEFAULT_PROJECT) + pb = key.to_protobuf() + elems = list(pb.path) + self.assertEqual(len(elems), 2) + self.assertEqual(elems[0].kind, _PARENT) + self.assertEqual(elems[0].name, _NAME) + self.assertEqual(elems[1].kind, _CHILD) + self.assertEqual(elems[1].id, _ID) + + def test_to_protobuf_w_no_kind(self): + key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + # Force the 'kind' to be unset. Maybe `to_protobuf` should fail + # on this? The backend certainly will. + key._path[-1].pop('kind') + pb = key.to_protobuf() + # Unset values are False-y. 
+ self.assertEqual(pb.path[0].kind, '') + + def test_is_partial_no_name_or_id(self): + key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + self.assertTrue(key.is_partial) + + def test_is_partial_w_id(self): + _ID = 1234 + key = self._makeOne('KIND', _ID, project=self._DEFAULT_PROJECT) + self.assertFalse(key.is_partial) + + def test_is_partial_w_name(self): + _NAME = 'NAME' + key = self._makeOne('KIND', _NAME, project=self._DEFAULT_PROJECT) + self.assertFalse(key.is_partial) + + def test_id_or_name_no_name_or_id(self): + key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + self.assertEqual(key.id_or_name, None) + + def test_id_or_name_no_name_or_id_child(self): + key = self._makeOne('KIND1', 1234, 'KIND2', + project=self._DEFAULT_PROJECT) + self.assertEqual(key.id_or_name, None) + + def test_id_or_name_w_id_only(self): + _ID = 1234 + key = self._makeOne('KIND', _ID, project=self._DEFAULT_PROJECT) + self.assertEqual(key.id_or_name, _ID) + + def test_id_or_name_w_name_only(self): + _NAME = 'NAME' + key = self._makeOne('KIND', _NAME, project=self._DEFAULT_PROJECT) + self.assertEqual(key.id_or_name, _NAME) + + def test_parent_default(self): + key = self._makeOne('KIND', project=self._DEFAULT_PROJECT) + self.assertEqual(key.parent, None) + + def test_parent_explicit_top_level(self): + key = self._makeOne('KIND', 1234, project=self._DEFAULT_PROJECT) + self.assertEqual(key.parent, None) + + def test_parent_explicit_nested(self): + _PARENT_KIND = 'KIND1' + _PARENT_ID = 1234 + _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] + key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2', + project=self._DEFAULT_PROJECT) + self.assertEqual(key.parent.path, _PARENT_PATH) + + def test_parent_multiple_calls(self): + _PARENT_KIND = 'KIND1' + _PARENT_ID = 1234 + _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] + key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2', + project=self._DEFAULT_PROJECT) + parent = key.parent + self.assertEqual(parent.path, 
_PARENT_PATH) + new_parent = key.parent + self.assertTrue(parent is new_parent) diff --git a/env/Lib/site-packages/gcloud/datastore/test_query.py b/env/Lib/site-packages/gcloud/datastore/test_query.py new file mode 100644 index 0000000..f22f92e --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/test_query.py @@ -0,0 +1,759 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class TestQuery(unittest2.TestCase): + + _PROJECT = 'PROJECT' + + def _getTargetClass(self): + from gcloud.datastore.query import Query + return Query + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def _makeClient(self, connection=None): + if connection is None: + connection = _Connection() + return _Client(self._PROJECT, connection) + + def test_ctor_defaults(self): + client = self._makeClient() + query = self._makeOne(client) + self.assertTrue(query._client is client) + self.assertEqual(query.project, client.project) + self.assertEqual(query.kind, None) + self.assertEqual(query.namespace, client.namespace) + self.assertEqual(query.ancestor, None) + self.assertEqual(query.filters, []) + self.assertEqual(query.projection, []) + self.assertEqual(query.order, []) + self.assertEqual(query.distinct_on, []) + + def test_ctor_explicit(self): + from gcloud.datastore.key import Key + _PROJECT = 'OTHER_PROJECT' + _KIND = 'KIND' + _NAMESPACE = 'OTHER_NAMESPACE' + client = 
self._makeClient() + ancestor = Key('ANCESTOR', 123, project=_PROJECT) + FILTERS = [('foo', '=', 'Qux'), ('bar', '<', 17)] + PROJECTION = ['foo', 'bar', 'baz'] + ORDER = ['foo', 'bar'] + DISTINCT_ON = ['foo'] + query = self._makeOne( + client, + kind=_KIND, + project=_PROJECT, + namespace=_NAMESPACE, + ancestor=ancestor, + filters=FILTERS, + projection=PROJECTION, + order=ORDER, + distinct_on=DISTINCT_ON, + ) + self.assertTrue(query._client is client) + self.assertEqual(query.project, _PROJECT) + self.assertEqual(query.kind, _KIND) + self.assertEqual(query.namespace, _NAMESPACE) + self.assertEqual(query.ancestor.path, ancestor.path) + self.assertEqual(query.filters, FILTERS) + self.assertEqual(query.projection, PROJECTION) + self.assertEqual(query.order, ORDER) + self.assertEqual(query.distinct_on, DISTINCT_ON) + + def test_ctor_bad_projection(self): + BAD_PROJECTION = object() + self.assertRaises(TypeError, self._makeOne, self._makeClient(), + projection=BAD_PROJECTION) + + def test_ctor_bad_order(self): + BAD_ORDER = object() + self.assertRaises(TypeError, self._makeOne, self._makeClient(), + order=BAD_ORDER) + + def test_ctor_bad_distinct_on(self): + BAD_DISTINCT_ON = object() + self.assertRaises(TypeError, self._makeOne, self._makeClient(), + distinct_on=BAD_DISTINCT_ON) + + def test_ctor_bad_filters(self): + FILTERS_CANT_UNPACK = [('one', 'two')] + self.assertRaises(ValueError, self._makeOne, self._makeClient(), + filters=FILTERS_CANT_UNPACK) + + def test_namespace_setter_w_non_string(self): + query = self._makeOne(self._makeClient()) + + def _assign(val): + query.namespace = val + + self.assertRaises(ValueError, _assign, object()) + + def test_namespace_setter(self): + _NAMESPACE = 'OTHER_NAMESPACE' + query = self._makeOne(self._makeClient()) + query.namespace = _NAMESPACE + self.assertEqual(query.namespace, _NAMESPACE) + + def test_kind_setter_w_non_string(self): + query = self._makeOne(self._makeClient()) + + def _assign(val): + query.kind = val + + 
self.assertRaises(TypeError, _assign, object()) + + def test_kind_setter_wo_existing(self): + _KIND = 'KIND' + query = self._makeOne(self._makeClient()) + query.kind = _KIND + self.assertEqual(query.kind, _KIND) + + def test_kind_setter_w_existing(self): + _KIND_BEFORE = 'KIND_BEFORE' + _KIND_AFTER = 'KIND_AFTER' + query = self._makeOne(self._makeClient(), kind=_KIND_BEFORE) + self.assertEqual(query.kind, _KIND_BEFORE) + query.kind = _KIND_AFTER + self.assertEqual(query.project, self._PROJECT) + self.assertEqual(query.kind, _KIND_AFTER) + + def test_ancestor_setter_w_non_key(self): + query = self._makeOne(self._makeClient()) + + def _assign(val): + query.ancestor = val + + self.assertRaises(TypeError, _assign, object()) + self.assertRaises(TypeError, _assign, ['KIND', 'NAME']) + + def test_ancestor_setter_w_key(self): + from gcloud.datastore.key import Key + _NAME = u'NAME' + key = Key('KIND', 123, project=self._PROJECT) + query = self._makeOne(self._makeClient()) + query.add_filter('name', '=', _NAME) + query.ancestor = key + self.assertEqual(query.ancestor.path, key.path) + + def test_ancestor_deleter_w_key(self): + from gcloud.datastore.key import Key + key = Key('KIND', 123, project=self._PROJECT) + query = self._makeOne(client=self._makeClient(), ancestor=key) + del query.ancestor + self.assertTrue(query.ancestor is None) + + def test_add_filter_setter_w_unknown_operator(self): + query = self._makeOne(self._makeClient()) + self.assertRaises(ValueError, query.add_filter, + 'firstname', '~~', 'John') + + def test_add_filter_w_known_operator(self): + query = self._makeOne(self._makeClient()) + query.add_filter('firstname', '=', u'John') + self.assertEqual(query.filters, [('firstname', '=', u'John')]) + + def test_add_filter_w_all_operators(self): + query = self._makeOne(self._makeClient()) + query.add_filter('leq_prop', '<=', u'val1') + query.add_filter('geq_prop', '>=', u'val2') + query.add_filter('lt_prop', '<', u'val3') + query.add_filter('gt_prop', '>', 
u'val4') + query.add_filter('eq_prop', '=', u'val5') + self.assertEqual(len(query.filters), 5) + self.assertEqual(query.filters[0], ('leq_prop', '<=', u'val1')) + self.assertEqual(query.filters[1], ('geq_prop', '>=', u'val2')) + self.assertEqual(query.filters[2], ('lt_prop', '<', u'val3')) + self.assertEqual(query.filters[3], ('gt_prop', '>', u'val4')) + self.assertEqual(query.filters[4], ('eq_prop', '=', u'val5')) + + def test_add_filter_w_known_operator_and_entity(self): + from gcloud.datastore.entity import Entity + query = self._makeOne(self._makeClient()) + other = Entity() + other['firstname'] = u'John' + other['lastname'] = u'Smith' + query.add_filter('other', '=', other) + self.assertEqual(query.filters, [('other', '=', other)]) + + def test_add_filter_w_whitespace_property_name(self): + query = self._makeOne(self._makeClient()) + PROPERTY_NAME = ' property with lots of space ' + query.add_filter(PROPERTY_NAME, '=', u'John') + self.assertEqual(query.filters, [(PROPERTY_NAME, '=', u'John')]) + + def test_add_filter___key__valid_key(self): + from gcloud.datastore.key import Key + query = self._makeOne(self._makeClient()) + key = Key('Foo', project=self._PROJECT) + query.add_filter('__key__', '=', key) + self.assertEqual(query.filters, [('__key__', '=', key)]) + + def test_filter___key__not_equal_operator(self): + from gcloud.datastore.key import Key + key = Key('Foo', project=self._PROJECT) + query = self._makeOne(self._makeClient()) + query.add_filter('__key__', '<', key) + self.assertEqual(query.filters, [('__key__', '<', key)]) + + def test_filter___key__invalid_value(self): + query = self._makeOne(self._makeClient()) + self.assertRaises(ValueError, query.add_filter, '__key__', '=', None) + + def test_projection_setter_empty(self): + query = self._makeOne(self._makeClient()) + query.projection = [] + self.assertEqual(query.projection, []) + + def test_projection_setter_string(self): + query = self._makeOne(self._makeClient()) + query.projection = 'field1' 
+ self.assertEqual(query.projection, ['field1']) + + def test_projection_setter_non_empty(self): + query = self._makeOne(self._makeClient()) + query.projection = ['field1', 'field2'] + self.assertEqual(query.projection, ['field1', 'field2']) + + def test_projection_setter_multiple_calls(self): + _PROJECTION1 = ['field1', 'field2'] + _PROJECTION2 = ['field3'] + query = self._makeOne(self._makeClient()) + query.projection = _PROJECTION1 + self.assertEqual(query.projection, _PROJECTION1) + query.projection = _PROJECTION2 + self.assertEqual(query.projection, _PROJECTION2) + + def test_keys_only(self): + query = self._makeOne(self._makeClient()) + query.keys_only() + self.assertEqual(query.projection, ['__key__']) + + def test_key_filter_defaults(self): + from gcloud.datastore.key import Key + + client = self._makeClient() + query = self._makeOne(client) + self.assertEqual(query.filters, []) + key = Key('Kind', 1234, project='project') + query.key_filter(key) + self.assertEqual(query.filters, [('__key__', '=', key)]) + + def test_key_filter_explicit(self): + from gcloud.datastore.key import Key + + client = self._makeClient() + query = self._makeOne(client) + self.assertEqual(query.filters, []) + key = Key('Kind', 1234, project='project') + query.key_filter(key, operator='>') + self.assertEqual(query.filters, [('__key__', '>', key)]) + + def test_order_setter_empty(self): + query = self._makeOne(self._makeClient(), order=['foo', '-bar']) + query.order = [] + self.assertEqual(query.order, []) + + def test_order_setter_string(self): + query = self._makeOne(self._makeClient()) + query.order = 'field' + self.assertEqual(query.order, ['field']) + + def test_order_setter_single_item_list_desc(self): + query = self._makeOne(self._makeClient()) + query.order = ['-field'] + self.assertEqual(query.order, ['-field']) + + def test_order_setter_multiple(self): + query = self._makeOne(self._makeClient()) + query.order = ['foo', '-bar'] + self.assertEqual(query.order, ['foo', '-bar']) 
+ + def test_distinct_on_setter_empty(self): + query = self._makeOne(self._makeClient(), distinct_on=['foo', 'bar']) + query.distinct_on = [] + self.assertEqual(query.distinct_on, []) + + def test_distinct_on_setter_string(self): + query = self._makeOne(self._makeClient()) + query.distinct_on = 'field1' + self.assertEqual(query.distinct_on, ['field1']) + + def test_distinct_on_setter_non_empty(self): + query = self._makeOne(self._makeClient()) + query.distinct_on = ['field1', 'field2'] + self.assertEqual(query.distinct_on, ['field1', 'field2']) + + def test_distinct_on_multiple_calls(self): + _DISTINCT_ON1 = ['field1', 'field2'] + _DISTINCT_ON2 = ['field3'] + query = self._makeOne(self._makeClient()) + query.distinct_on = _DISTINCT_ON1 + self.assertEqual(query.distinct_on, _DISTINCT_ON1) + query.distinct_on = _DISTINCT_ON2 + self.assertEqual(query.distinct_on, _DISTINCT_ON2) + + def test_fetch_defaults_w_client_attr(self): + connection = _Connection() + client = self._makeClient(connection) + query = self._makeOne(client) + iterator = query.fetch() + self.assertTrue(iterator._query is query) + self.assertTrue(iterator._client is client) + self.assertEqual(iterator._limit, None) + self.assertEqual(iterator._offset, 0) + + def test_fetch_w_explicit_client(self): + connection = _Connection() + client = self._makeClient(connection) + other_client = self._makeClient(connection) + query = self._makeOne(client) + iterator = query.fetch(limit=7, offset=8, client=other_client) + self.assertTrue(iterator._query is query) + self.assertTrue(iterator._client is other_client) + self.assertEqual(iterator._limit, 7) + self.assertEqual(iterator._offset, 8) + + +class TestIterator(unittest2.TestCase): + _PROJECT = 'PROJECT' + _NAMESPACE = 'NAMESPACE' + _KIND = 'KIND' + _ID = 123 + _START = b'\x00' + _END = b'\xFF' + + def _getTargetClass(self): + from gcloud.datastore.query import Iterator + return Iterator + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, 
**kw) + + def _addQueryResults(self, connection, cursor=_END, more=False, + skipped_results=None, no_entity=False): + from gcloud.datastore._generated import entity_pb2 + from gcloud.datastore._generated import query_pb2 + from gcloud.datastore.helpers import _new_value_pb + + if more: + more_enum = query_pb2.QueryResultBatch.NOT_FINISHED + else: + more_enum = query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT + _ID = 123 + if no_entity: + entities = [] + else: + entity_pb = entity_pb2.Entity() + entity_pb.key.partition_id.project_id = self._PROJECT + path_element = entity_pb.key.path.add() + path_element.kind = self._KIND + path_element.id = _ID + value_pb = _new_value_pb(entity_pb, 'foo') + value_pb.string_value = u'Foo' + entities = [entity_pb] + + connection._results.append( + (entities, cursor, more_enum, skipped_results)) + + def _makeClient(self, connection=None): + if connection is None: + connection = _Connection() + return _Client(self._PROJECT, connection) + + def test_ctor_defaults(self): + connection = _Connection() + query = object() + iterator = self._makeOne(query, connection) + self.assertTrue(iterator._query is query) + self.assertEqual(iterator._limit, None) + self.assertEqual(iterator._offset, None) + self.assertEqual(iterator._skipped_results, None) + + def test_ctor_explicit(self): + client = self._makeClient() + query = _Query(client) + iterator = self._makeOne(query, client, 13, 29) + self.assertTrue(iterator._query is query) + self.assertEqual(iterator._limit, 13) + self.assertEqual(iterator._offset, 29) + + def test_next_page_no_cursors_no_more(self): + from gcloud.datastore.query import _pb_from_query + connection = _Connection() + client = self._makeClient(connection) + query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) + self._addQueryResults(connection, cursor=b'') + iterator = self._makeOne(query, client) + entities, more_results, cursor = iterator.next_page() + self.assertEqual(iterator._skipped_results, None) + + 
self.assertEqual(cursor, None) + self.assertFalse(more_results) + self.assertFalse(iterator._more_results) + self.assertEqual(len(entities), 1) + self.assertEqual(entities[0].key.path, + [{'kind': self._KIND, 'id': self._ID}]) + self.assertEqual(entities[0]['foo'], u'Foo') + qpb = _pb_from_query(query) + qpb.offset = 0 + EXPECTED = { + 'project': self._PROJECT, + 'query_pb': qpb, + 'namespace': self._NAMESPACE, + 'transaction_id': None, + } + self.assertEqual(connection._called_with, [EXPECTED]) + + def test_next_page_no_cursors_no_more_w_offset_and_limit(self): + from gcloud.datastore.query import _pb_from_query + connection = _Connection() + client = self._makeClient(connection) + query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) + skipped_results = object() + self._addQueryResults(connection, cursor=b'', + skipped_results=skipped_results) + iterator = self._makeOne(query, client, 13, 29) + entities, more_results, cursor = iterator.next_page() + + self.assertEqual(cursor, None) + self.assertFalse(more_results) + self.assertFalse(iterator._more_results) + self.assertEqual(iterator._skipped_results, skipped_results) + self.assertEqual(len(entities), 1) + self.assertEqual(entities[0].key.path, + [{'kind': self._KIND, 'id': self._ID}]) + self.assertEqual(entities[0]['foo'], u'Foo') + qpb = _pb_from_query(query) + qpb.limit.value = 13 + qpb.offset = 29 + EXPECTED = { + 'project': self._PROJECT, + 'query_pb': qpb, + 'namespace': self._NAMESPACE, + 'transaction_id': None, + } + self.assertEqual(connection._called_with, [EXPECTED]) + + def test_next_page_w_cursors_w_more(self): + from base64 import urlsafe_b64decode + from base64 import urlsafe_b64encode + from gcloud.datastore.query import _pb_from_query + connection = _Connection() + client = self._makeClient(connection) + query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) + self._addQueryResults(connection, cursor=self._END, more=True) + iterator = self._makeOne(query, client) + 
iterator._start_cursor = self._START + iterator._end_cursor = self._END + entities, more_results, cursor = iterator.next_page() + + self.assertEqual(cursor, urlsafe_b64encode(self._END)) + self.assertTrue(more_results) + self.assertTrue(iterator._more_results) + self.assertEqual(iterator._skipped_results, None) + self.assertEqual(iterator._end_cursor, None) + self.assertEqual(urlsafe_b64decode(iterator._start_cursor), self._END) + self.assertEqual(len(entities), 1) + self.assertEqual(entities[0].key.path, + [{'kind': self._KIND, 'id': self._ID}]) + self.assertEqual(entities[0]['foo'], u'Foo') + qpb = _pb_from_query(query) + qpb.offset = 0 + qpb.start_cursor = urlsafe_b64decode(self._START) + qpb.end_cursor = urlsafe_b64decode(self._END) + EXPECTED = { + 'project': self._PROJECT, + 'query_pb': qpb, + 'namespace': self._NAMESPACE, + 'transaction_id': None, + } + self.assertEqual(connection._called_with, [EXPECTED]) + + def test_next_page_w_cursors_w_bogus_more(self): + connection = _Connection() + client = self._makeClient(connection) + query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) + self._addQueryResults(connection, cursor=self._END, more=True) + epb, cursor, _, _ = connection._results.pop() + connection._results.append((epb, cursor, 5, None)) # invalid enum + iterator = self._makeOne(query, client) + self.assertRaises(ValueError, iterator.next_page) + + def test___iter___no_more(self): + from gcloud.datastore.query import _pb_from_query + connection = _Connection() + client = self._makeClient(connection) + query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) + self._addQueryResults(connection) + iterator = self._makeOne(query, client) + entities = list(iterator) + + self.assertFalse(iterator._more_results) + self.assertEqual(len(entities), 1) + self.assertEqual(entities[0].key.path, + [{'kind': self._KIND, 'id': self._ID}]) + self.assertEqual(entities[0]['foo'], u'Foo') + qpb = _pb_from_query(query) + qpb.offset = 0 + EXPECTED = { 
+ 'project': self._PROJECT, + 'query_pb': qpb, + 'namespace': self._NAMESPACE, + 'transaction_id': None, + } + self.assertEqual(connection._called_with, [EXPECTED]) + + def test___iter___w_more(self): + from gcloud.datastore.query import _pb_from_query + connection = _Connection() + client = self._makeClient(connection) + query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) + self._addQueryResults(connection, cursor=self._END, more=True) + self._addQueryResults(connection) + iterator = self._makeOne(query, client) + entities = list(iterator) + + self.assertFalse(iterator._more_results) + self.assertEqual(len(entities), 2) + for entity in entities: + self.assertEqual( + entity.key.path, + [{'kind': self._KIND, 'id': self._ID}]) + self.assertEqual(entities[1]['foo'], u'Foo') + qpb1 = _pb_from_query(query) + qpb2 = _pb_from_query(query) + qpb2.start_cursor = self._END + EXPECTED1 = { + 'project': self._PROJECT, + 'query_pb': qpb1, + 'namespace': self._NAMESPACE, + 'transaction_id': None, + } + EXPECTED2 = { + 'project': self._PROJECT, + 'query_pb': qpb2, + 'namespace': self._NAMESPACE, + 'transaction_id': None, + } + self.assertEqual(len(connection._called_with), 2) + self.assertEqual(connection._called_with[0], EXPECTED1) + self.assertEqual(connection._called_with[1], EXPECTED2) + + def test___iter___w_limit(self): + from gcloud.datastore.query import _pb_from_query + + connection = _Connection() + client = self._makeClient(connection) + query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE) + skip1 = 4 + skip2 = 9 + self._addQueryResults(connection, more=True, skipped_results=skip1, + no_entity=True) + self._addQueryResults(connection, more=True, skipped_results=skip2) + self._addQueryResults(connection) + offset = skip1 + skip2 + iterator = self._makeOne(query, client, limit=2, offset=offset) + entities = list(iterator) + + self.assertFalse(iterator._more_results) + self.assertEqual(len(entities), 2) + for entity in entities: + 
self.assertEqual( + entity.key.path, + [{'kind': self._KIND, 'id': self._ID}]) + qpb1 = _pb_from_query(query) + qpb1.limit.value = 2 + qpb1.offset = offset + qpb2 = _pb_from_query(query) + qpb2.start_cursor = self._END + qpb2.limit.value = 2 + qpb2.offset = offset - skip1 + qpb3 = _pb_from_query(query) + qpb3.start_cursor = self._END + qpb3.limit.value = 1 + EXPECTED1 = { + 'project': self._PROJECT, + 'query_pb': qpb1, + 'namespace': self._NAMESPACE, + 'transaction_id': None, + } + EXPECTED2 = { + 'project': self._PROJECT, + 'query_pb': qpb2, + 'namespace': self._NAMESPACE, + 'transaction_id': None, + } + EXPECTED3 = { + 'project': self._PROJECT, + 'query_pb': qpb3, + 'namespace': self._NAMESPACE, + 'transaction_id': None, + } + self.assertEqual(len(connection._called_with), 3) + self.assertEqual(connection._called_with[0], EXPECTED1) + self.assertEqual(connection._called_with[1], EXPECTED2) + self.assertEqual(connection._called_with[2], EXPECTED3) + + +class Test__pb_from_query(unittest2.TestCase): + + def _callFUT(self, query): + from gcloud.datastore.query import _pb_from_query + return _pb_from_query(query) + + def test_empty(self): + from gcloud.datastore._generated import query_pb2 + + pb = self._callFUT(_Query()) + self.assertEqual(list(pb.projection), []) + self.assertEqual(list(pb.kind), []) + self.assertEqual(list(pb.order), []) + self.assertEqual(list(pb.distinct_on), []) + self.assertEqual(pb.filter.property_filter.property.name, '') + cfilter = pb.filter.composite_filter + self.assertEqual(cfilter.op, + query_pb2.CompositeFilter.OPERATOR_UNSPECIFIED) + self.assertEqual(list(cfilter.filters), []) + self.assertEqual(pb.start_cursor, b'') + self.assertEqual(pb.end_cursor, b'') + self.assertEqual(pb.limit.value, 0) + self.assertEqual(pb.offset, 0) + + def test_projection(self): + pb = self._callFUT(_Query(projection=['a', 'b', 'c'])) + self.assertEqual([item.property.name for item in pb.projection], + ['a', 'b', 'c']) + + def test_kind(self): + pb = 
self._callFUT(_Query(kind='KIND')) + self.assertEqual([item.name for item in pb.kind], ['KIND']) + + def test_ancestor(self): + from gcloud.datastore.key import Key + from gcloud.datastore._generated import query_pb2 + + ancestor = Key('Ancestor', 123, project='PROJECT') + pb = self._callFUT(_Query(ancestor=ancestor)) + cfilter = pb.filter.composite_filter + self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND) + self.assertEqual(len(cfilter.filters), 1) + pfilter = cfilter.filters[0].property_filter + self.assertEqual(pfilter.property.name, '__key__') + ancestor_pb = ancestor.to_protobuf() + self.assertEqual(pfilter.value.key_value, ancestor_pb) + + def test_filter(self): + from gcloud.datastore._generated import query_pb2 + + query = _Query(filters=[('name', '=', u'John')]) + query.OPERATORS = { + '=': query_pb2.PropertyFilter.EQUAL, + } + pb = self._callFUT(query) + cfilter = pb.filter.composite_filter + self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND) + self.assertEqual(len(cfilter.filters), 1) + pfilter = cfilter.filters[0].property_filter + self.assertEqual(pfilter.property.name, 'name') + self.assertEqual(pfilter.value.string_value, u'John') + + def test_filter_key(self): + from gcloud.datastore.key import Key + from gcloud.datastore._generated import query_pb2 + + key = Key('Kind', 123, project='PROJECT') + query = _Query(filters=[('__key__', '=', key)]) + query.OPERATORS = { + '=': query_pb2.PropertyFilter.EQUAL, + } + pb = self._callFUT(query) + cfilter = pb.filter.composite_filter + self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND) + self.assertEqual(len(cfilter.filters), 1) + pfilter = cfilter.filters[0].property_filter + self.assertEqual(pfilter.property.name, '__key__') + key_pb = key.to_protobuf() + self.assertEqual(pfilter.value.key_value, key_pb) + + def test_order(self): + from gcloud.datastore._generated import query_pb2 + + pb = self._callFUT(_Query(order=['a', '-b', 'c'])) + self.assertEqual([item.property.name for 
item in pb.order], + ['a', 'b', 'c']) + self.assertEqual([item.direction for item in pb.order], + [query_pb2.PropertyOrder.ASCENDING, + query_pb2.PropertyOrder.DESCENDING, + query_pb2.PropertyOrder.ASCENDING]) + + def test_distinct_on(self): + pb = self._callFUT(_Query(distinct_on=['a', 'b', 'c'])) + self.assertEqual([item.name for item in pb.distinct_on], + ['a', 'b', 'c']) + + +class _Query(object): + + def __init__(self, + client=object(), + kind=None, + project=None, + namespace=None, + ancestor=None, + filters=(), + projection=(), + order=(), + distinct_on=()): + self._client = client + self.kind = kind + self.project = project + self.namespace = namespace + self.ancestor = ancestor + self.filters = filters + self.projection = projection + self.order = order + self.distinct_on = distinct_on + + +class _Connection(object): + + _called_with = None + _cursor = b'\x00' + _skipped = 0 + + def __init__(self): + self._results = [] + self._called_with = [] + + def run_query(self, **kw): + self._called_with.append(kw) + result, self._results = self._results[0], self._results[1:] + return result + + +class _Client(object): + + def __init__(self, project, connection, namespace=None): + self.project = project + self.connection = connection + self.namespace = namespace + + @property + def current_transaction(self): + pass diff --git a/env/Lib/site-packages/gcloud/datastore/test_transaction.py b/env/Lib/site-packages/gcloud/datastore/test_transaction.py new file mode 100644 index 0000000..5f780f6 --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/test_transaction.py @@ -0,0 +1,223 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class TestTransaction(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.datastore.transaction import Transaction + return Transaction + + def _makeOne(self, client, **kw): + return self._getTargetClass()(client, **kw) + + def test_ctor_defaults(self): + from gcloud.datastore._generated import datastore_pb2 + + _PROJECT = 'PROJECT' + connection = _Connection() + client = _Client(_PROJECT, connection) + xact = self._makeOne(client) + self.assertEqual(xact.project, _PROJECT) + self.assertEqual(xact.connection, connection) + self.assertEqual(xact.id, None) + self.assertEqual(xact._status, self._getTargetClass()._INITIAL) + self.assertTrue(isinstance(xact._commit_request, + datastore_pb2.CommitRequest)) + self.assertTrue(xact.mutations is xact._commit_request.mutations) + self.assertEqual(len(xact._partial_key_entities), 0) + + def test_current(self): + from gcloud.datastore.test_client import _NoCommitBatch + _PROJECT = 'PROJECT' + connection = _Connection() + client = _Client(_PROJECT, connection) + xact1 = self._makeOne(client) + xact2 = self._makeOne(client) + self.assertTrue(xact1.current() is None) + self.assertTrue(xact2.current() is None) + with xact1: + self.assertTrue(xact1.current() is xact1) + self.assertTrue(xact2.current() is xact1) + with _NoCommitBatch(client): + self.assertTrue(xact1.current() is None) + self.assertTrue(xact2.current() is None) + with xact2: + self.assertTrue(xact1.current() is xact2) + self.assertTrue(xact2.current() is xact2) + with _NoCommitBatch(client): + 
self.assertTrue(xact1.current() is None) + self.assertTrue(xact2.current() is None) + self.assertTrue(xact1.current() is xact1) + self.assertTrue(xact2.current() is xact1) + self.assertTrue(xact1.current() is None) + self.assertTrue(xact2.current() is None) + + def test_begin(self): + _PROJECT = 'PROJECT' + connection = _Connection(234) + client = _Client(_PROJECT, connection) + xact = self._makeOne(client) + xact.begin() + self.assertEqual(xact.id, 234) + self.assertEqual(connection._begun, _PROJECT) + + def test_begin_tombstoned(self): + _PROJECT = 'PROJECT' + connection = _Connection(234) + client = _Client(_PROJECT, connection) + xact = self._makeOne(client) + xact.begin() + self.assertEqual(xact.id, 234) + self.assertEqual(connection._begun, _PROJECT) + + xact.rollback() + self.assertEqual(xact.id, None) + + self.assertRaises(ValueError, xact.begin) + + def test_rollback(self): + _PROJECT = 'PROJECT' + connection = _Connection(234) + client = _Client(_PROJECT, connection) + xact = self._makeOne(client) + xact.begin() + xact.rollback() + self.assertEqual(xact.id, None) + self.assertEqual(connection._rolled_back, (_PROJECT, 234)) + + def test_commit_no_partial_keys(self): + _PROJECT = 'PROJECT' + connection = _Connection(234) + client = _Client(_PROJECT, connection) + xact = self._makeOne(client) + xact._commit_request = commit_request = object() + xact.begin() + xact.commit() + self.assertEqual(connection._committed, + (_PROJECT, commit_request, 234)) + self.assertEqual(xact.id, None) + + def test_commit_w_partial_keys(self): + _PROJECT = 'PROJECT' + _KIND = 'KIND' + _ID = 123 + connection = _Connection(234) + connection._completed_keys = [_make_key(_KIND, _ID, _PROJECT)] + client = _Client(_PROJECT, connection) + xact = self._makeOne(client) + entity = _Entity() + xact.put(entity) + xact._commit_request = commit_request = object() + xact.begin() + xact.commit() + self.assertEqual(connection._committed, + (_PROJECT, commit_request, 234)) + 
self.assertEqual(xact.id, None) + self.assertEqual(entity.key.path, [{'kind': _KIND, 'id': _ID}]) + + def test_context_manager_no_raise(self): + _PROJECT = 'PROJECT' + connection = _Connection(234) + client = _Client(_PROJECT, connection) + xact = self._makeOne(client) + xact._commit_request = commit_request = object() + with xact: + self.assertEqual(xact.id, 234) + self.assertEqual(connection._begun, _PROJECT) + self.assertEqual(connection._committed, + (_PROJECT, commit_request, 234)) + self.assertEqual(xact.id, None) + + def test_context_manager_w_raise(self): + + class Foo(Exception): + pass + + _PROJECT = 'PROJECT' + connection = _Connection(234) + client = _Client(_PROJECT, connection) + xact = self._makeOne(client) + xact._mutation = object() + try: + with xact: + self.assertEqual(xact.id, 234) + self.assertEqual(connection._begun, _PROJECT) + raise Foo() + except Foo: + self.assertEqual(xact.id, None) + self.assertEqual(connection._rolled_back, (_PROJECT, 234)) + self.assertEqual(connection._committed, None) + self.assertEqual(xact.id, None) + + +def _make_key(kind, id_, project): + from gcloud.datastore._generated import entity_pb2 + + key = entity_pb2.Key() + key.partition_id.project_id = project + elem = key.path.add() + elem.kind = kind + elem.id = id_ + return key + + +class _Connection(object): + _marker = object() + _begun = _rolled_back = _committed = None + + def __init__(self, xact_id=123): + self._xact_id = xact_id + self._completed_keys = [] + self._index_updates = 0 + + def begin_transaction(self, project): + self._begun = project + return self._xact_id + + def rollback(self, project, transaction_id): + self._rolled_back = project, transaction_id + + def commit(self, project, commit_request, transaction_id): + self._committed = (project, commit_request, transaction_id) + return self._index_updates, self._completed_keys + + +class _Entity(dict): + + def __init__(self): + super(_Entity, self).__init__() + from gcloud.datastore.key import Key + 
self.key = Key('KIND', project='PROJECT') + + +class _Client(object): + + def __init__(self, project, connection, namespace=None): + self.project = project + self.connection = connection + self.namespace = namespace + self._batches = [] + + def _push_batch(self, batch): + self._batches.insert(0, batch) + + def _pop_batch(self): + return self._batches.pop(0) + + @property + def current_batch(self): + return self._batches and self._batches[0] or None diff --git a/env/Lib/site-packages/gcloud/datastore/transaction.py b/env/Lib/site-packages/gcloud/datastore/transaction.py new file mode 100644 index 0000000..dc78c7b --- /dev/null +++ b/env/Lib/site-packages/gcloud/datastore/transaction.py @@ -0,0 +1,162 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Create / interact with gcloud datastore transactions.""" + +from gcloud.datastore.batch import Batch + + +class Transaction(Batch): + """An abstraction representing datastore Transactions. + + Transactions can be used to build up a bulk mutation and ensure all + or none succeed (transactionally). + + For example, the following snippet of code will put the two ``save`` + operations (either ``insert`` or ``upsert``) into the same + mutation, and execute those within a transaction:: + + >>> from gcloud import datastore + >>> client = datastore.Client() + >>> with client.transaction(): + ... 
client.put_multi([entity1, entity2]) + + Because it derives from :class:`Batch <.datastore.batch.Batch>`, + :class:`Transaction` also provides :meth:`put` and :meth:`delete` methods:: + + >>> with client.transaction() as xact: + ... xact.put(entity1) + ... xact.delete(entity2.key) + + By default, the transaction is rolled back if the transaction block + exits with an error:: + + >>> with client.transaction(): + ... do_some_work() + ... raise SomeException() # rolls back + + If the transaction block exists without an exception, it will commit + by default. + + .. warning:: Inside a transaction, automatically assigned IDs for + entities will not be available at save time! That means, if you + try:: + + >>> with client.transaction(): + ... entity = datastore.Entity(key=client.key('Thing')) + ... client.put(entity) + + ``entity`` won't have a complete key until the transaction is + committed. + + Once you exit the transaction (or call :meth:`commit`), the + automatically generated ID will be assigned to the entity:: + + >>> with client.transaction(): + ... entity = datastore.Entity(key=client.key('Thing')) + ... client.put(entity) + ... print(entity.key.is_partial) # There is no ID on this key. + ... + True + >>> print(entity.key.is_partial) # There *is* an ID. + False + + If you don't want to use the context manager you can initialize a + transaction manually:: + + >>> transaction = client.transaction() + >>> transaction.begin() + >>> + >>> entity = datastore.Entity(key=client.key('Thing')) + >>> transaction.put(entity) + >>> + >>> if error: + ... transaction.rollback() + ... else: + ... transaction.commit() + + :type client: :class:`gcloud.datastore.client.Client` + :param client: the client used to connect to datastore. + """ + + def __init__(self, client): + super(Transaction, self).__init__(client) + self._id = None + + @property + def id(self): + """Getter for the transaction ID. + + :rtype: string + :returns: The ID of the current transaction. 
+ """ + return self._id + + def current(self): + """Return the topmost transaction. + + .. note:: + + If the topmost element on the stack is not a transaction, + returns None. + + :rtype: :class:`gcloud.datastore.transaction.Transaction` or None + """ + top = super(Transaction, self).current() + if isinstance(top, Transaction): + return top + + def begin(self): + """Begins a transaction. + + This method is called automatically when entering a with + statement, however it can be called explicitly if you don't want + to use a context manager. + + :raises: :class:`ValueError` if the transaction has already begun. + """ + super(Transaction, self).begin() + self._id = self.connection.begin_transaction(self.project) + + def rollback(self): + """Rolls back the current transaction. + + This method has necessary side-effects: + + - Sets the current connection's transaction reference to None. + - Sets the current transaction's ID to None. + """ + try: + self.connection.rollback(self.project, self._id) + finally: + super(Transaction, self).rollback() + # Clear our own ID in case this gets accidentally reused. + self._id = None + + def commit(self): + """Commits the transaction. + + This is called automatically upon exiting a with statement, + however it can be called explicitly if you don't want to use a + context manager. + + This method has necessary side-effects: + + - Sets the current transaction's ID to None. + """ + try: + super(Transaction, self).commit() + finally: + # Clear our own ID in case this gets accidentally reused. + self._id = None diff --git a/env/Lib/site-packages/gcloud/dns/__init__.py b/env/Lib/site-packages/gcloud/dns/__init__.py new file mode 100644 index 0000000..f92c143 --- /dev/null +++ b/env/Lib/site-packages/gcloud/dns/__init__.py @@ -0,0 +1,33 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud DNS API wrapper. + +The main concepts with this API are: + +- :class:`gcloud.DNS.zone.ManagedZone` represents an collection of tables. +- :class:`gcloud.DNS.resource_record_set.ResourceRecordSet` represents a + single resource definition within a zone. +- :class:`gcloud.DNS.changes.Changes` represents a set of changes (adding/ + deleting resource record sets) to a zone. +""" + +from gcloud.dns.zone import Changes +from gcloud.dns.client import Client +from gcloud.dns.connection import Connection +from gcloud.dns.zone import ManagedZone +from gcloud.dns.resource_record_set import ResourceRecordSet + + +SCOPE = Connection.SCOPE diff --git a/env/Lib/site-packages/gcloud/dns/changes.py b/env/Lib/site-packages/gcloud/dns/changes.py new file mode 100644 index 0000000..e3e05e7 --- /dev/null +++ b/env/Lib/site-packages/gcloud/dns/changes.py @@ -0,0 +1,256 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Define API ResourceRecordSets.""" + +import six + +from gcloud._helpers import _rfc3339_to_datetime +from gcloud.exceptions import NotFound +from gcloud.dns.resource_record_set import ResourceRecordSet + + +class Changes(object): + """Changes are bundled additions / deletions of DNS resource records. + + Changes are owned by a :class:`gcloud.dns.zone.ManagedZone` instance. + + See: + https://cloud.google.com/dns/api/v1/changes + + :type zone: :class:`gcloud.dns.zone.ManagedZone` + :param zone: A zone which holds one or more record sets. + """ + + def __init__(self, zone): + self.zone = zone + self._properties = {} + self._additions = self._deletions = () + + @classmethod + def from_api_repr(cls, resource, zone): + """Factory: construct a change set given its API representation + + :type resource: dict + :param resource: change set representation returned from the API + + :type zone: :class:`gcloud.dns.zone.ManagedZone` + :param zone: A zone which holds zero or more change sets. + + :rtype: :class:`gcloud.dns.changes.Changes` + :returns: RRS parsed from ``resource``. + """ + changes = cls(zone=zone) + changes._set_properties(resource) + return changes + + def _set_properties(self, resource): + """Helper method for :meth:`from_api_repr`, :meth:`create`, etc. + + :type resource: dict + :param resource: change set representation returned from the API + """ + resource = resource.copy() + self._additions = tuple([ + ResourceRecordSet.from_api_repr(added_res, self.zone) + for added_res in resource.pop('additions', ())]) + self._deletions = tuple([ + ResourceRecordSet.from_api_repr(added_res, self.zone) + for added_res in resource.pop('deletions', ())]) + self._properties = resource + + @property + def path(self): + """URL path for change set APIs. + + :rtype: string + :returns: the path based on project, zone, and change set names. 
+ """ + return '/projects/%s/managedZones/%s/changes/%s' % ( + self.zone.project, self.zone.name, self.name) + + @property + def name(self): + """Name of the change set. + + :rtype: string or ``NoneType`` + :returns: Name, as set by the back-end, or None. + """ + return self._properties.get('id') + + @name.setter + def name(self, value): + """Update name of the change set. + + :type value: string + :param value: New name for the changeset. + """ + if not isinstance(value, six.string_types): + raise ValueError("Pass a string") + self._properties['id'] = value + + @property + def status(self): + """Status of the change set. + + :rtype: string or ``NoneType`` + :returns: Status, as set by the back-end, or None. + """ + return self._properties.get('status') + + @property + def started(self): + """Time when the change set was started. + + :rtype: ``datetime.datetime`` or ``NoneType`` + :returns: Time, as set by the back-end, or None. + """ + stamp = self._properties.get('startTime') + if stamp is not None: + return _rfc3339_to_datetime(stamp) + + @property + def additions(self): + """Resource record sets to be added to the zone. + + :rtype: sequence of + :class:`gcloud.dns.resource_record_set.ResourceRecordSet`. + :returns: record sets appended via :meth:`add_record_set` + """ + return self._additions + + @property + def deletions(self): + """Resource record sets to be deleted from the zone. + + :rtype: sequence of + :class:`gcloud.dns.resource_record_set.ResourceRecordSet`. + :returns: record sets appended via :meth:`delete_record_set` + """ + return self._deletions + + def add_record_set(self, record_set): + """Append a record set to the 'additions' for the change set. + + :type record_set: + :class:`gcloud.dns.resource_record_set.ResourceRecordSet` + :param record_set: the record set to append + + :raises: ``ValueError`` if ``record_set`` is not of the required type. 
+ """ + if not isinstance(record_set, ResourceRecordSet): + raise ValueError("Pass a ResourceRecordSet") + self._additions += (record_set,) + + def delete_record_set(self, record_set): + """Append a record set to the 'deletions' for the change set. + + :type record_set: + :class:`gcloud.dns.resource_record_set.ResourceRecordSet` + :param record_set: the record set to append + + :raises: ``ValueError`` if ``record_set`` is not of the required type. + """ + if not isinstance(record_set, ResourceRecordSet): + raise ValueError("Pass a ResourceRecordSet") + self._deletions += (record_set,) + + def _require_client(self, client): + """Check client or verify over-ride. + + :type client: :class:`gcloud.dns.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current zone. + + :rtype: :class:`gcloud.dns.client.Client` + :returns: The client passed in or the currently bound client. + """ + if client is None: + client = self.zone._client + return client + + def _build_resource(self): + """Generate a resource for ``create``.""" + additions = [{ + 'name': added.name, + 'type': added.record_type, + 'ttl': str(added.ttl), + 'rrdatas': added.rrdatas, + } for added in self.additions] + + deletions = [{ + 'name': deleted.name, + 'type': deleted.record_type, + 'ttl': str(deleted.ttl), + 'rrdatas': deleted.rrdatas, + } for deleted in self.deletions] + + return { + 'additions': additions, + 'deletions': deletions, + } + + def create(self, client=None): + """API call: create the change set via a POST request + + See: + https://cloud.google.com/dns/api/v1/changes/create + + :type client: :class:`gcloud.dns.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current zone. 
+ """ + if len(self.additions) == 0 and len(self.deletions) == 0: + raise ValueError("No record sets added or deleted") + client = self._require_client(client) + path = '/projects/%s/managedZones/%s/changes' % ( + self.zone.project, self.zone.name) + api_response = client.connection.api_request( + method='POST', path=path, data=self._build_resource()) + self._set_properties(api_response) + + def exists(self, client=None): + """API call: test for the existence of the change set via a GET request + + See + https://cloud.google.com/dns/api/v1/changes/get + + :type client: :class:`gcloud.dns.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current zone. + """ + client = self._require_client(client) + try: + client.connection.api_request(method='GET', path=self.path, + query_params={'fields': 'id'}) + except NotFound: + return False + else: + return True + + def reload(self, client=None): + """API call: refresh zone properties via a GET request + + See + https://cloud.google.com/dns/api/v1/changes/get + + :type client: :class:`gcloud.dns.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current zone. + """ + client = self._require_client(client) + + api_response = client.connection.api_request( + method='GET', path=self.path) + self._set_properties(api_response) diff --git a/env/Lib/site-packages/gcloud/dns/client.py b/env/Lib/site-packages/gcloud/dns/client.py new file mode 100644 index 0000000..c69d54a --- /dev/null +++ b/env/Lib/site-packages/gcloud/dns/client.py @@ -0,0 +1,116 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Client for interacting with the Google Cloud DNS API.""" + + +from gcloud.client import JSONClient +from gcloud.dns.connection import Connection +from gcloud.dns.zone import ManagedZone + + +class Client(JSONClient): + """Client to bundle configuration needed for API requests. + + :type project: string + :param project: the project which the client acts on behalf of. Will be + passed when creating a zone. If not passed, + falls back to the default inferred from the environment. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :param credentials: The OAuth2 Credentials to use for the connection + owned by this client. If not passed (and if no ``http`` + object is passed), falls back to the default inferred + from the environment. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: An optional HTTP object to make requests. If not passed, an + ``http`` object is created that is bound to the + ``credentials`` for the current object. + """ + + _connection_class = Connection + + def quotas(self): + """Return DNS quots for the project associated with this client. + + See: + https://cloud.google.com/dns/api/v1/projects/get + + :rtype: mapping + :returns: keys for the mapping correspond to those of the ``quota`` + sub-mapping of the project resource. 
+ """ + path = '/projects/%s' % (self.project,) + resp = self.connection.api_request(method='GET', path=path) + + return dict([(key, int(value)) + for key, value in resp['quota'].items() if key != 'kind']) + + def list_zones(self, max_results=None, page_token=None): + """List zones for the project associated with this client. + + See: + https://cloud.google.com/dns/api/v1/managedZones/list + + :type max_results: int + :param max_results: maximum number of zones to return, If not + passed, defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of zones. If + not passed, the API will return the first page of + zones. + + :rtype: tuple, (list, str) + :returns: list of :class:`gcloud.dns.zone.ManagedZone`, plus a + "next page token" string: if the token is not None, + indicates that more zones can be retrieved with another + call (pass that value as ``page_token``). + """ + params = {} + + if max_results is not None: + params['maxResults'] = max_results + + if page_token is not None: + params['pageToken'] = page_token + + path = '/projects/%s/managedZones' % (self.project,) + resp = self.connection.api_request(method='GET', path=path, + query_params=params) + zones = [ManagedZone.from_api_repr(resource, self) + for resource in resp['managedZones']] + return zones, resp.get('nextPageToken') + + def zone(self, name, dns_name=None, description=None): + """Construct a zone bound to this client. + + :type name: string + :param name: Name of the zone. + + :type dns_name: string or :class:`NoneType` + :param dns_name: DNS name of the zone. If not passed, then calls + to :meth:`zone.create` will fail. + + :type description: string or :class:`NoneType` + :param description: the description for the zone. If not passed, + defaults to the value of 'dns_name'. 
+ + :rtype: :class:`gcloud.dns.zone.ManagedZone` + :returns: a new ``ManagedZone`` instance + """ + return ManagedZone(name, dns_name, client=self, + description=description) diff --git a/env/Lib/site-packages/gcloud/dns/connection.py b/env/Lib/site-packages/gcloud/dns/connection.py new file mode 100644 index 0000000..e2b382f --- /dev/null +++ b/env/Lib/site-packages/gcloud/dns/connection.py @@ -0,0 +1,33 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Create / interact with gcloud dns connections.""" + +from gcloud import connection as base_connection + + +class Connection(base_connection.JSONConnection): + """A connection to Google Cloud DNS via the JSON REST API.""" + + API_BASE_URL = 'https://www.googleapis.com' + """The base of the API call URL.""" + + API_VERSION = 'v1' + """The version of the API, used in building the API call's URL.""" + + API_URL_TEMPLATE = '{api_base_url}/dns/{api_version}{path}' + """A template for the URL of a particular API call.""" + + SCOPE = ('https://www.googleapis.com/auth/ndev.clouddns.readwrite',) + """The scopes required for authenticating as a Cloud DNS consumer.""" diff --git a/env/Lib/site-packages/gcloud/dns/resource_record_set.py b/env/Lib/site-packages/gcloud/dns/resource_record_set.py new file mode 100644 index 0000000..dbd95b3 --- /dev/null +++ b/env/Lib/site-packages/gcloud/dns/resource_record_set.py @@ -0,0 +1,66 @@ +# Copyright 2015 Google Inc. 
All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define API ResourceRecordSets.""" + + +class ResourceRecordSet(object): + """ResourceRecordSets are DNS resource records. + + RRS are owned by a :class:`gcloud.dns.zone.ManagedZone` instance. + + See: + https://cloud.google.com/dns/api/v1/resourceRecordSets + + :type name: string + :param name: the name of the record set + + :type record_type: string + :param record_type: the RR type of the zone + + :type ttl: integer + :param ttl: TTL (in seconds) for caching the record sets + + :type rrdatas: list of string + :param rrdatas: one or more lines containing the resource data + + :type zone: :class:`gcloud.dns.zone.ManagedZone` + :param zone: A zone which holds one or more record sets. + """ + + def __init__(self, name, record_type, ttl, rrdatas, zone): + self.name = name + self.record_type = record_type + self.ttl = ttl + self.rrdatas = rrdatas + self.zone = zone + + @classmethod + def from_api_repr(cls, resource, zone): + """Factory: construct a record set given its API representation + + :type resource: dict + :param resource: record sets representation returned from the API + + :type zone: :class:`gcloud.dns.zone.ManagedZone` + :param zone: A zone which holds one or more record sets. + + :rtype: :class:`gcloud.dns.zone.ResourceRecordSet` + :returns: RRS parsed from ``resource``. 
+ """ + name = resource['name'] + record_type = resource['type'] + ttl = int(resource['ttl']) + rrdatas = resource['rrdatas'] + return cls(name, record_type, ttl, rrdatas, zone=zone) diff --git a/env/Lib/site-packages/gcloud/dns/test_changes.py b/env/Lib/site-packages/gcloud/dns/test_changes.py new file mode 100644 index 0000000..f7902a1 --- /dev/null +++ b/env/Lib/site-packages/gcloud/dns/test_changes.py @@ -0,0 +1,344 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest2 + + +class TestChanges(unittest2.TestCase): + PROJECT = 'project' + ZONE_NAME = 'example.com' + CHANGES_NAME = 'changeset_id' + + def _getTargetClass(self): + from gcloud.dns.changes import Changes + return Changes + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def _setUpConstants(self): + from gcloud._helpers import UTC + from gcloud._helpers import _NOW + self.WHEN = _NOW().replace(tzinfo=UTC) + + def _makeResource(self): + from gcloud._helpers import _datetime_to_rfc3339 + when_str = _datetime_to_rfc3339(self.WHEN) + return { + 'kind': 'dns#change', + 'id': self.CHANGES_NAME, + 'startTime': when_str, + 'status': 'done', + 'additions': [ + {'name': 'test.example.com', + 'type': 'CNAME', + 'ttl': '3600', + 'rrdatas': ['www.example.com']}, + ], + 'deletions': [ + {'name': 'test.example.com', + 'type': 'CNAME', + 'ttl': '86400', + 'rrdatas': ['other.example.com']}, + ], + } + + def _verifyResourceProperties(self, changes, resource, zone): + from gcloud._helpers import _rfc3339_to_datetime + from gcloud._helpers import UTC + self.assertEqual(changes.name, resource['id']) + started = _rfc3339_to_datetime(resource['startTime']) + self.assertEqual(changes.started, started) + self.assertEqual(changes.status, resource['status']) + + r_additions = resource.get('additions', ()) + self.assertEqual(len(changes.additions), len(r_additions)) + for found, expected in zip(changes.additions, r_additions): + self.assertEqual(found.name, expected['name']) + self.assertEqual(found.record_type, expected['type']) + self.assertEqual(found.ttl, int(expected['ttl'])) + self.assertEqual(found.rrdatas, expected['rrdatas']) + self.assertTrue(found.zone is zone) + + r_deletions = resource.get('deletions', ()) + self.assertEqual(len(changes.deletions), len(r_deletions)) + for found, expected in zip(changes.deletions, r_deletions): + self.assertEqual(found.name, expected['name']) + self.assertEqual(found.record_type, expected['type']) + 
self.assertEqual(found.ttl, int(expected['ttl'])) + self.assertEqual(found.rrdatas, expected['rrdatas']) + self.assertTrue(found.zone is zone) + + def test_ctor(self): + zone = _Zone() + + changes = self._makeOne(zone) + + self.assertTrue(changes.zone is zone) + self.assertEqual(changes.name, None) + self.assertEqual(changes.status, None) + self.assertEqual(changes.started, None) + self.assertEqual(list(changes.additions), []) + self.assertEqual(list(changes.deletions), []) + + def test_from_api_repr_missing_additions_deletions(self): + self._setUpConstants() + RESOURCE = self._makeResource() + del RESOURCE['additions'] + del RESOURCE['deletions'] + zone = _Zone() + klass = self._getTargetClass() + + changes = klass.from_api_repr(RESOURCE, zone=zone) + + self._verifyResourceProperties(changes, RESOURCE, zone) + + def test_from_api_repr(self): + self._setUpConstants() + RESOURCE = self._makeResource() + zone = _Zone() + klass = self._getTargetClass() + + changes = klass.from_api_repr(RESOURCE, zone=zone) + + self._verifyResourceProperties(changes, RESOURCE, zone) + + def test_name_setter_bad_value(self): + zone = _Zone() + changes = self._makeOne(zone) + with self.assertRaises(ValueError): + changes.name = 12345 + + def test_name_setter(self): + zone = _Zone() + changes = self._makeOne(zone) + changes.name = 'NAME' + self.assertEqual(changes.name, 'NAME') + + def test_add_record_set_invalid_value(self): + zone = _Zone() + changes = self._makeOne(zone) + + with self.assertRaises(ValueError): + changes.add_record_set(object()) + + def test_add_record_set(self): + from gcloud.dns.resource_record_set import ResourceRecordSet + zone = _Zone() + changes = self._makeOne(zone) + rrs = ResourceRecordSet('test.example.com', 'CNAME', 3600, + ['www.example.com'], zone) + changes.add_record_set(rrs) + self.assertEqual(list(changes.additions), [rrs]) + + def test_delete_record_set_invalid_value(self): + zone = _Zone() + changes = self._makeOne(zone) + + with 
self.assertRaises(ValueError): + changes.delete_record_set(object()) + + def test_delete_record_set(self): + from gcloud.dns.resource_record_set import ResourceRecordSet + zone = _Zone() + changes = self._makeOne(zone) + rrs = ResourceRecordSet('test.example.com', 'CNAME', 3600, + ['www.example.com'], zone) + changes.delete_record_set(rrs) + self.assertEqual(list(changes.deletions), [rrs]) + + def test_create_wo_additions_or_deletions(self): + self._setUpConstants() + RESOURCE = self._makeResource() + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + zone = _Zone(client) + changes = self._makeOne(zone) + + with self.assertRaises(ValueError): + changes.create() + + self.assertEqual(len(conn._requested), 0) + + def test_create_w_bound_client(self): + from gcloud.dns.resource_record_set import ResourceRecordSet + self._setUpConstants() + RESOURCE = self._makeResource() + PATH = 'projects/%s/managedZones/%s/changes' % ( + self.PROJECT, self.ZONE_NAME) + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + zone = _Zone(client) + changes = self._makeOne(zone) + changes.add_record_set(ResourceRecordSet( + 'test.example.com', 'CNAME', 3600, ['www.example.com'], zone)) + changes.delete_record_set(ResourceRecordSet( + 'test.example.com', 'CNAME', 86400, ['other.example.com'], zone)) + + changes.create() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'additions': RESOURCE['additions'], + 'deletions': RESOURCE['deletions'], + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(changes, RESOURCE, zone) + + def test_create_w_alternate_client(self): + from gcloud.dns.resource_record_set import ResourceRecordSet + self._setUpConstants() + RESOURCE = self._makeResource() + PATH = 'projects/%s/managedZones/%s/changes' % ( + self.PROJECT, self.ZONE_NAME) + conn1 = 
_Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + zone = _Zone(client1) + changes = self._makeOne(zone) + changes.add_record_set(ResourceRecordSet( + 'test.example.com', 'CNAME', 3600, ['www.example.com'], zone)) + changes.delete_record_set(ResourceRecordSet( + 'test.example.com', 'CNAME', 86400, ['other.example.com'], zone)) + + changes.create(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'additions': RESOURCE['additions'], + 'deletions': RESOURCE['deletions'], + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(changes, RESOURCE, zone) + + def test_exists_miss_w_bound_client(self): + PATH = 'projects/%s/managedZones/%s/changes/%s' % ( + self.PROJECT, self.ZONE_NAME, self.CHANGES_NAME) + self._setUpConstants() + conn = _Connection() + client = _Client(project=self.PROJECT, connection=conn) + zone = _Zone(client) + changes = self._makeOne(zone) + changes.name = self.CHANGES_NAME + + self.assertFalse(changes.exists()) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], {'fields': 'id'}) + + def test_exists_hit_w_alternate_client(self): + PATH = 'projects/%s/managedZones/%s/changes/%s' % ( + self.PROJECT, self.ZONE_NAME, self.CHANGES_NAME) + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({}) + client2 = _Client(project=self.PROJECT, connection=conn2) + zone = _Zone(client1) + changes = self._makeOne(zone) + changes.name = self.CHANGES_NAME + + self.assertTrue(changes.exists(client=client2)) + + self.assertEqual(len(conn1._requested), 0) + 
self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], {'fields': 'id'}) + + def test_reload_w_bound_client(self): + PATH = 'projects/%s/managedZones/%s/changes/%s' % ( + self.PROJECT, self.ZONE_NAME, self.CHANGES_NAME) + self._setUpConstants() + RESOURCE = self._makeResource() + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + zone = _Zone(client) + changes = self._makeOne(zone) + changes.name = self.CHANGES_NAME + + changes.reload() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(changes, RESOURCE, zone) + + def test_reload_w_alternate_client(self): + PATH = 'projects/%s/managedZones/%s/changes/%s' % ( + self.PROJECT, self.ZONE_NAME, self.CHANGES_NAME) + self._setUpConstants() + RESOURCE = self._makeResource() + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + zone = _Zone(client1) + changes = self._makeOne(zone) + changes.name = self.CHANGES_NAME + + changes.reload(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(changes, RESOURCE, zone) + + +class _Zone(object): + + def __init__(self, client=None, project=TestChanges.PROJECT, + name=TestChanges.ZONE_NAME): + self._client = client + self.project = project + self.name = name + + +class _Client(object): + + def __init__(self, project='project', connection=None): + self.project = project + self.connection = connection + + +class _Connection(object): + + def 
__init__(self, *responses): + self._responses = responses + self._requested = [] + + def api_request(self, **kw): + from gcloud.exceptions import NotFound + self._requested.append(kw) + + try: + response, self._responses = self._responses[0], self._responses[1:] + except: + raise NotFound('miss') + else: + return response diff --git a/env/Lib/site-packages/gcloud/dns/test_client.py b/env/Lib/site-packages/gcloud/dns/test_client.py new file mode 100644 index 0000000..e3b1190 --- /dev/null +++ b/env/Lib/site-packages/gcloud/dns/test_client.py @@ -0,0 +1,252 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest2 + + +class TestClient(unittest2.TestCase): + + PROJECT = 'PROJECT' + ZONE_NAME = 'zone-name' + + def _getTargetClass(self): + from gcloud.dns.client import Client + return Client + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + from gcloud.dns.connection import Connection + creds = _Credentials() + http = object() + client = self._makeOne(project=self.PROJECT, credentials=creds, + http=http) + self.assertTrue(isinstance(client.connection, Connection)) + self.assertTrue(client.connection.credentials is creds) + self.assertTrue(client.connection.http is http) + + def test_quotas_defaults(self): + PATH = 'projects/%s' % (self.PROJECT,) + MANAGED_ZONES = 1234 + RRS_PER_RRSET = 23 + RRSETS_PER_ZONE = 345 + RRSET_ADDITIONS = 456 + RRSET_DELETIONS = 567 + TOTAL_SIZE = 67890 + DATA = { + 'quota': { + 'managedZones': str(MANAGED_ZONES), + 'resourceRecordsPerRrset': str(RRS_PER_RRSET), + 'rrsetsPerManagedZone': str(RRSETS_PER_ZONE), + 'rrsetAdditionsPerChange': str(RRSET_ADDITIONS), + 'rrsetDeletionsPerChange': str(RRSET_DELETIONS), + 'totalRrdataSizePerChange': str(TOTAL_SIZE), + } + } + CONVERTED = dict([(key, int(value)) + for key, value in DATA['quota'].items()]) + creds = _Credentials() + client = self._makeOne(self.PROJECT, creds) + conn = client.connection = _Connection(DATA) + + quotas = client.quotas() + + self.assertEqual(quotas, CONVERTED) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + + def test_quotas_w_kind_key(self): + PATH = 'projects/%s' % (self.PROJECT,) + MANAGED_ZONES = 1234 + RRS_PER_RRSET = 23 + RRSETS_PER_ZONE = 345 + RRSET_ADDITIONS = 456 + RRSET_DELETIONS = 567 + TOTAL_SIZE = 67890 + DATA = { + 'quota': { + 'managedZones': str(MANAGED_ZONES), + 'resourceRecordsPerRrset': str(RRS_PER_RRSET), + 'rrsetsPerManagedZone': str(RRSETS_PER_ZONE), + 
'rrsetAdditionsPerChange': str(RRSET_ADDITIONS), + 'rrsetDeletionsPerChange': str(RRSET_DELETIONS), + 'totalRrdataSizePerChange': str(TOTAL_SIZE), + } + } + CONVERTED = dict([(key, int(value)) + for key, value in DATA['quota'].items()]) + WITH_KIND = {'quota': DATA['quota'].copy()} + WITH_KIND['quota']['kind'] = 'dns#quota' + creds = _Credentials() + client = self._makeOne(self.PROJECT, creds) + conn = client.connection = _Connection(WITH_KIND) + + quotas = client.quotas() + + self.assertEqual(quotas, CONVERTED) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + + def test_list_zones_defaults(self): + from gcloud.dns.zone import ManagedZone + ID_1 = '123' + ZONE_1 = 'zone_one' + DNS_1 = 'one.example.com' + ID_2 = '234' + ZONE_2 = 'zone_two' + DNS_2 = 'two.example.com' + PATH = 'projects/%s/managedZones' % (self.PROJECT,) + TOKEN = 'TOKEN' + DATA = { + 'nextPageToken': TOKEN, + 'managedZones': [ + {'kind': 'dns#managedZone', + 'id': ID_1, + 'name': ZONE_1, + 'dnsName': DNS_1}, + {'kind': 'dns#managedZone', + 'id': ID_2, + 'name': ZONE_2, + 'dnsName': DNS_2}, + ] + } + creds = _Credentials() + client = self._makeOne(self.PROJECT, creds) + conn = client.connection = _Connection(DATA) + + zones, token = client.list_zones() + + self.assertEqual(len(zones), len(DATA['managedZones'])) + for found, expected in zip(zones, DATA['managedZones']): + self.assertTrue(isinstance(found, ManagedZone)) + self.assertEqual(found.zone_id, expected['id']) + self.assertEqual(found.name, expected['name']) + self.assertEqual(found.dns_name, expected['dnsName']) + self.assertEqual(token, TOKEN) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + + def test_list_zones_explicit(self): + from gcloud.dns.zone import ManagedZone + ID_1 = '123' + ZONE_1 = 'zone_one' + DNS_1 = 
'one.example.com' + ID_2 = '234' + ZONE_2 = 'zone_two' + DNS_2 = 'two.example.com' + PATH = 'projects/%s/managedZones' % (self.PROJECT,) + TOKEN = 'TOKEN' + DATA = { + 'managedZones': [ + {'kind': 'dns#managedZone', + 'id': ID_1, + 'name': ZONE_1, + 'dnsName': DNS_1}, + {'kind': 'dns#managedZone', + 'id': ID_2, + 'name': ZONE_2, + 'dnsName': DNS_2}, + ] + } + creds = _Credentials() + client = self._makeOne(self.PROJECT, creds) + conn = client.connection = _Connection(DATA) + + zones, token = client.list_zones(max_results=3, page_token=TOKEN) + + self.assertEqual(len(zones), len(DATA['managedZones'])) + for found, expected in zip(zones, DATA['managedZones']): + self.assertTrue(isinstance(found, ManagedZone)) + self.assertEqual(found.zone_id, expected['id']) + self.assertEqual(found.name, expected['name']) + self.assertEqual(found.dns_name, expected['dnsName']) + self.assertEqual(token, None) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], + {'maxResults': 3, 'pageToken': TOKEN}) + + def test_zone_explicit(self): + from gcloud.dns.zone import ManagedZone + DESCRIPTION = 'DESCRIPTION' + DNS_NAME = 'test.example.com' + creds = _Credentials() + client = self._makeOne(self.PROJECT, creds) + zone = client.zone(self.ZONE_NAME, DNS_NAME, DESCRIPTION) + self.assertTrue(isinstance(zone, ManagedZone)) + self.assertEqual(zone.name, self.ZONE_NAME) + self.assertEqual(zone.dns_name, DNS_NAME) + self.assertEqual(zone.description, DESCRIPTION) + self.assertTrue(zone._client is client) + + def test_zone_w_dns_name_wo_description(self): + from gcloud.dns.zone import ManagedZone + DNS_NAME = 'test.example.com' + creds = _Credentials() + client = self._makeOne(self.PROJECT, creds) + zone = client.zone(self.ZONE_NAME, DNS_NAME) + self.assertTrue(isinstance(zone, ManagedZone)) + self.assertEqual(zone.name, self.ZONE_NAME) + 
self.assertEqual(zone.dns_name, DNS_NAME) + self.assertEqual(zone.description, DNS_NAME) + self.assertTrue(zone._client is client) + + def test_zone_wo_dns_name(self): + from gcloud.dns.zone import ManagedZone + creds = _Credentials() + client = self._makeOne(self.PROJECT, creds) + zone = client.zone(self.ZONE_NAME) + self.assertTrue(isinstance(zone, ManagedZone)) + self.assertEqual(zone.name, self.ZONE_NAME) + self.assertEqual(zone.dns_name, None) + self.assertEqual(zone.description, None) + self.assertTrue(zone._client is client) + + +class _Credentials(object): + + _scopes = None + + @staticmethod + def create_scoped_required(): + return True + + def create_scoped(self, scope): + self._scopes = scope + return self + + +class _Connection(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + def api_request(self, **kw): + self._requested.append(kw) + response, self._responses = self._responses[0], self._responses[1:] + return response diff --git a/env/Lib/site-packages/gcloud/dns/test_connection.py b/env/Lib/site-packages/gcloud/dns/test_connection.py new file mode 100644 index 0000000..1a3f777 --- /dev/null +++ b/env/Lib/site-packages/gcloud/dns/test_connection.py @@ -0,0 +1,47 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest2 + + +class TestConnection(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.dns.connection import Connection + return Connection + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_build_api_url_no_extra_query_params(self): + conn = self._makeOne() + URI = '/'.join([ + conn.API_BASE_URL, + 'dns', + conn.API_VERSION, + 'foo', + ]) + self.assertEqual(conn.build_api_url('/foo'), URI) + + def test_build_api_url_w_extra_query_params(self): + from six.moves.urllib.parse import parse_qsl + from six.moves.urllib.parse import urlsplit + conn = self._makeOne() + uri = conn.build_api_url('/foo', {'bar': 'baz'}) + scheme, netloc, path, qs, _ = urlsplit(uri) + self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) + self.assertEqual(path, + '/'.join(['', 'dns', conn.API_VERSION, 'foo'])) + parms = dict(parse_qsl(qs)) + self.assertEqual(parms['bar'], 'baz') diff --git a/env/Lib/site-packages/gcloud/dns/test_resource_record_set.py b/env/Lib/site-packages/gcloud/dns/test_resource_record_set.py new file mode 100644 index 0000000..8f4bc98 --- /dev/null +++ b/env/Lib/site-packages/gcloud/dns/test_resource_record_set.py @@ -0,0 +1,94 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest2 + + +class TestResourceRecordSet(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.dns.resource_record_set import ResourceRecordSet + return ResourceRecordSet + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + zone = _Zone() + + rrs = self._makeOne('test.example.com', 'CNAME', 3600, + ['www.example.com'], zone) + + self.assertEqual(rrs.name, 'test.example.com') + self.assertEqual(rrs.record_type, 'CNAME') + self.assertEqual(rrs.ttl, 3600) + self.assertEqual(rrs.rrdatas, ['www.example.com']) + self.assertTrue(rrs.zone is zone) + + def test_from_api_repr_missing_rrdatas(self): + zone = _Zone() + klass = self._getTargetClass() + + with self.assertRaises(KeyError): + klass.from_api_repr({'name': 'test.example.com', + 'type': 'CNAME', + 'ttl': 3600}, zone=zone) + + def test_from_api_repr_missing_ttl(self): + zone = _Zone() + klass = self._getTargetClass() + + with self.assertRaises(KeyError): + klass.from_api_repr({'name': 'test.example.com', + 'type': 'CNAME', + 'rrdatas': ['www.example.com']}, zone=zone) + + def test_from_api_repr_missing_type(self): + zone = _Zone() + klass = self._getTargetClass() + + with self.assertRaises(KeyError): + klass.from_api_repr({'name': 'test.example.com', + 'ttl': 3600, + 'rrdatas': ['www.example.com']}, zone=zone) + + def test_from_api_repr_missing_name(self): + zone = _Zone() + klass = self._getTargetClass() + + with self.assertRaises(KeyError): + klass.from_api_repr({'type': 'CNAME', + 'ttl': 3600, + 'rrdatas': ['www.example.com']}, zone=zone) + + def test_from_api_repr_bare(self): + zone = _Zone() + RESOURCE = { + 'kind': 'dns#resourceRecordSet', + 'name': 'test.example.com', + 'type': 'CNAME', + 'ttl': '3600', + 'rrdatas': ['www.example.com'], + } + klass = self._getTargetClass() + rrs = klass.from_api_repr(RESOURCE, zone=zone) + self.assertEqual(rrs.name, 'test.example.com') + self.assertEqual(rrs.record_type, 'CNAME') + 
self.assertEqual(rrs.ttl, 3600) + self.assertEqual(rrs.rrdatas, ['www.example.com']) + self.assertTrue(rrs.zone is zone) + + +class _Zone(object): + pass diff --git a/env/Lib/site-packages/gcloud/dns/test_zone.py b/env/Lib/site-packages/gcloud/dns/test_zone.py new file mode 100644 index 0000000..b0609c2 --- /dev/null +++ b/env/Lib/site-packages/gcloud/dns/test_zone.py @@ -0,0 +1,692 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class TestManagedZone(unittest2.TestCase): + PROJECT = 'project' + ZONE_NAME = 'zone-name' + DESCRIPTION = 'ZONE DESCRIPTION' + DNS_NAME = 'test.example.com' + + def _getTargetClass(self): + from gcloud.dns.zone import ManagedZone + return ManagedZone + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def _setUpConstants(self): + import datetime + from gcloud._helpers import UTC + + year = 2015 + month = 7 + day = 24 + hour = 19 + minute = 53 + seconds = 19 + micros = 6000 + + self.WHEN_STR = '%d-%02d-%02dT%02d:%02d:%02d.%06dZ' % ( + year, month, day, hour, minute, seconds, micros) + self.WHEN = datetime.datetime( + year, month, day, hour, minute, seconds, micros, tzinfo=UTC) + self.ZONE_ID = 12345 + + def _makeResource(self): + self._setUpConstants() + return { + 'name': self.ZONE_NAME, + 'dnsName': self.DNS_NAME, + 'description': self.DESCRIPTION, + 'id': self.ZONE_ID, + 'creationTime': self.WHEN_STR, + 'nameServers': [ + 
'ns-cloud1.googledomains.com', + 'ns-cloud2.googledomains.com', + ], + } + + def _verifyReadonlyResourceProperties(self, zone, resource): + + self.assertEqual(zone.zone_id, resource.get('id')) + + if 'creationTime' in resource: + self.assertEqual(zone.created, self.WHEN) + else: + self.assertEqual(zone.created, None) + + if 'nameServers' in resource: + self.assertEqual(zone.name_servers, resource['nameServers']) + else: + self.assertEqual(zone.name_servers, None) + + def _verifyResourceProperties(self, zone, resource): + + self._verifyReadonlyResourceProperties(zone, resource) + + self.assertEqual(zone.name, resource.get('name')) + self.assertEqual(zone.dns_name, resource.get('dnsName')) + self.assertEqual(zone.description, resource.get('description')) + self.assertEqual(zone.zone_id, resource.get('id')) + self.assertEqual(zone.name_server_set, resource.get('nameServerSet')) + + def test_ctor_defaults(self): + zone = self._makeOne(self.ZONE_NAME) + self.assertEqual(zone.name, self.ZONE_NAME) + self.assertEqual(zone.dns_name, None) + self.assertTrue(zone._client is None) + + with self.assertRaises(AttributeError): + _ = zone.project + + with self.assertRaises(AttributeError): + _ = zone.path + + self.assertEqual(zone.zone_id, None) + self.assertEqual(zone.created, None) + self.assertEqual(zone.description, None) + + def test_ctor_wo_description(self): + client = _Client(self.PROJECT) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client) + self.assertEqual(zone.name, self.ZONE_NAME) + self.assertEqual(zone.dns_name, self.DNS_NAME) + self.assertTrue(zone._client is client) + self.assertEqual(zone.project, client.project) + self.assertEqual( + zone.path, + '/projects/%s/managedZones/%s' % (self.PROJECT, self.ZONE_NAME)) + self.assertEqual(zone.zone_id, None) + self.assertEqual(zone.created, None) + self.assertEqual(zone.description, self.DNS_NAME) + + def test_ctor_explicit(self): + DESCRIPTION = 'DESCRIPTION' + client = _Client(self.PROJECT) + zone = 
self._makeOne( + self.ZONE_NAME, self.DNS_NAME, client, DESCRIPTION) + self.assertEqual(zone.name, self.ZONE_NAME) + self.assertEqual(zone.dns_name, self.DNS_NAME) + self.assertTrue(zone._client is client) + self.assertEqual(zone.project, client.project) + self.assertEqual( + zone.path, + '/projects/%s/managedZones/%s' % (self.PROJECT, self.ZONE_NAME)) + self.assertEqual(zone.zone_id, None) + self.assertEqual(zone.created, None) + self.assertEqual(zone.description, DESCRIPTION) + + def test_from_api_repr_missing_identity(self): + self._setUpConstants() + client = _Client(self.PROJECT) + RESOURCE = {} + klass = self._getTargetClass() + with self.assertRaises(KeyError): + klass.from_api_repr(RESOURCE, client=client) + + def test_from_api_repr_bare(self): + self._setUpConstants() + client = _Client(self.PROJECT) + RESOURCE = { + 'name': self.ZONE_NAME, + 'dnsName': self.DNS_NAME, + } + klass = self._getTargetClass() + zone = klass.from_api_repr(RESOURCE, client=client) + self.assertTrue(zone._client is client) + self._verifyResourceProperties(zone, RESOURCE) + + def test_from_api_repr_w_properties(self): + self._setUpConstants() + client = _Client(self.PROJECT) + RESOURCE = self._makeResource() + klass = self._getTargetClass() + zone = klass.from_api_repr(RESOURCE, client=client) + self.assertTrue(zone._client is client) + self._verifyResourceProperties(zone, RESOURCE) + + def test_description_setter_bad_value(self): + client = _Client(self.PROJECT) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client) + with self.assertRaises(ValueError): + zone.description = 12345 + + def test_description_setter(self): + client = _Client(self.PROJECT) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client) + zone.description = 'DESCRIPTION' + self.assertEqual(zone.description, 'DESCRIPTION') + + def test_name_server_set_setter_bad_value(self): + client = _Client(self.PROJECT) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client) + with 
self.assertRaises(ValueError): + zone.name_server_set = 12345 + + def test_name_server_set_setter(self): + client = _Client(self.PROJECT) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client) + zone.name_server_set = 'NAME_SERVER_SET' + self.assertEqual(zone.name_server_set, 'NAME_SERVER_SET') + + def test_resource_record_set(self): + from gcloud.dns.resource_record_set import ResourceRecordSet + RRS_NAME = 'other.example.com' + RRS_TYPE = 'CNAME' + TTL = 3600 + RRDATAS = ['www.example.com'] + client = _Client(self.PROJECT) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client) + rrs = zone.resource_record_set(RRS_NAME, RRS_TYPE, TTL, RRDATAS) + self.assertTrue(isinstance(rrs, ResourceRecordSet)) + self.assertEqual(rrs.name, RRS_NAME) + self.assertEqual(rrs.record_type, RRS_TYPE) + self.assertEqual(rrs.ttl, TTL) + self.assertEqual(rrs.rrdatas, RRDATAS) + self.assertTrue(rrs.zone is zone) + + def test_changes(self): + from gcloud.dns.changes import Changes + client = _Client(self.PROJECT) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client) + changes = zone.changes() + self.assertTrue(isinstance(changes, Changes)) + self.assertTrue(changes.zone is zone) + + def test_create_w_bound_client(self): + PATH = 'projects/%s/managedZones' % self.PROJECT + RESOURCE = self._makeResource() + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client) + + zone.create() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'name': self.ZONE_NAME, + 'dnsName': self.DNS_NAME, + 'description': self.DNS_NAME, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(zone, RESOURCE) + + def test_create_w_alternate_client(self): + PATH = 'projects/%s/managedZones' % self.PROJECT + DESCRIPTION = 'DESCRIPTION' + NAME_SERVER_SET = 
'NAME_SERVER_SET' + RESOURCE = self._makeResource() + RESOURCE['nameServerSet'] = NAME_SERVER_SET + RESOURCE['description'] = DESCRIPTION + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client1) + zone.name_server_set = NAME_SERVER_SET + zone.description = DESCRIPTION + + zone.create(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'name': self.ZONE_NAME, + 'dnsName': self.DNS_NAME, + 'nameServerSet': NAME_SERVER_SET, + 'description': DESCRIPTION, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(zone, RESOURCE) + + def test_create_wo_dns_name_or_description(self): + from gcloud.exceptions import BadRequest + PATH = 'projects/%s/managedZones' % self.PROJECT + + _requested = [] + + def _api_request(**kw): + _requested.append(kw) + raise BadRequest('missing dns_name / description') + + conn = _Connection() + conn.api_request = _api_request + client = _Client(project=self.PROJECT, connection=conn) + zone = self._makeOne(self.ZONE_NAME, client=client) + + with self.assertRaises(BadRequest): + zone.create() + + self.assertEqual(len(_requested), 1) + req = _requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'name': self.ZONE_NAME, + } + self.assertEqual(req['data'], SENT) + + def test_create_w_missing_output_properties(self): + # In the wild, the resource returned from 'zone.create' sometimes + # lacks 'creationTime' / 'lastModifiedTime' + PATH = 'projects/%s/managedZones' % (self.PROJECT,) + RESOURCE = self._makeResource() + del RESOURCE['creationTime'] + del RESOURCE['id'] + del RESOURCE['nameServers'] + self.WHEN = 
None + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client) + + zone.create() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % PATH) + SENT = { + 'name': self.ZONE_NAME, + 'dnsName': self.DNS_NAME, + 'description': self.DNS_NAME, + } + self.assertEqual(req['data'], SENT) + self._verifyResourceProperties(zone, RESOURCE) + + def test_exists_miss_w_bound_client(self): + PATH = 'projects/%s/managedZones/%s' % (self.PROJECT, self.ZONE_NAME) + conn = _Connection() + client = _Client(project=self.PROJECT, connection=conn) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client) + + self.assertFalse(zone.exists()) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], {'fields': 'id'}) + + def test_exists_hit_w_alternate_client(self): + PATH = 'projects/%s/managedZones/%s' % (self.PROJECT, self.ZONE_NAME) + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({}) + client2 = _Client(project=self.PROJECT, connection=conn2) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client1) + + self.assertTrue(zone.exists(client=client2)) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], {'fields': 'id'}) + + def test_reload_w_bound_client(self): + PATH = 'projects/%s/managedZones/%s' % (self.PROJECT, self.ZONE_NAME) + RESOURCE = self._makeResource() + conn = _Connection(RESOURCE) + client = _Client(project=self.PROJECT, connection=conn) + zone = self._makeOne(self.ZONE_NAME, client=client) 
+ + zone.reload() + + self.assertEqual(zone.dns_name, self.DNS_NAME) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(zone, RESOURCE) + + def test_reload_w_alternate_client(self): + PATH = 'projects/%s/managedZones/%s' % (self.PROJECT, self.ZONE_NAME) + RESOURCE = self._makeResource() + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + client2 = _Client(project=self.PROJECT, connection=conn2) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client1) + + zone.reload(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self._verifyResourceProperties(zone, RESOURCE) + + def test_delete_w_bound_client(self): + PATH = 'projects/%s/managedZones/%s' % (self.PROJECT, self.ZONE_NAME) + conn = _Connection({}) + client = _Client(project=self.PROJECT, connection=conn) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client) + + zone.delete() + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'DELETE') + self.assertEqual(req['path'], '/%s' % PATH) + + def test_delete_w_alternate_client(self): + PATH = 'projects/%s/managedZones/%s' % (self.PROJECT, self.ZONE_NAME) + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({}) + client2 = _Client(project=self.PROJECT, connection=conn2) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client1) + + zone.delete(client=client2) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'DELETE') + self.assertEqual(req['path'], '/%s' % PATH) + + def 
test_list_resource_record_sets_defaults(self): + from gcloud.dns.resource_record_set import ResourceRecordSet + PATH = 'projects/%s/managedZones/%s/rrsets' % ( + self.PROJECT, self.ZONE_NAME) + TOKEN = 'TOKEN' + NAME_1 = 'www.example.com' + TYPE_1 = 'A' + TTL_1 = '86400' + RRDATAS_1 = ['123.45.67.89'] + NAME_2 = 'alias.example.com' + TYPE_2 = 'CNAME' + TTL_2 = '3600' + RRDATAS_2 = ['www.example.com'] + DATA = { + 'nextPageToken': TOKEN, + 'rrsets': [ + {'kind': 'dns#resourceRecordSet', + 'name': NAME_1, + 'type': TYPE_1, + 'ttl': TTL_1, + 'rrdatas': RRDATAS_1}, + {'kind': 'dns#resourceRecordSet', + 'name': NAME_2, + 'type': TYPE_2, + 'ttl': TTL_2, + 'rrdatas': RRDATAS_2}, + ] + } + conn = _Connection(DATA) + client = _Client(project=self.PROJECT, connection=conn) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client) + + rrsets, token = zone.list_resource_record_sets() + + self.assertEqual(len(rrsets), len(DATA['rrsets'])) + for found, expected in zip(rrsets, DATA['rrsets']): + self.assertTrue(isinstance(found, ResourceRecordSet)) + self.assertEqual(found.name, expected['name']) + self.assertEqual(found.record_type, expected['type']) + self.assertEqual(found.ttl, int(expected['ttl'])) + self.assertEqual(found.rrdatas, expected['rrdatas']) + self.assertEqual(token, TOKEN) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + + def test_list_resource_record_sets_explicit(self): + from gcloud.dns.resource_record_set import ResourceRecordSet + PATH = 'projects/%s/managedZones/%s/rrsets' % ( + self.PROJECT, self.ZONE_NAME) + TOKEN = 'TOKEN' + NAME_1 = 'www.example.com' + TYPE_1 = 'A' + TTL_1 = '86400' + RRDATAS_1 = ['123.45.67.89'] + NAME_2 = 'alias.example.com' + TYPE_2 = 'CNAME' + TTL_2 = '3600' + RRDATAS_2 = ['www.example.com'] + DATA = { + 'rrsets': [ + {'kind': 'dns#resourceRecordSet', + 'name': NAME_1, + 'type': TYPE_1, + 'ttl': TTL_1, + 'rrdatas': 
RRDATAS_1}, + {'kind': 'dns#resourceRecordSet', + 'name': NAME_2, + 'type': TYPE_2, + 'ttl': TTL_2, + 'rrdatas': RRDATAS_2}, + ] + } + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(DATA) + client2 = _Client(project=self.PROJECT, connection=conn2) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client1) + + rrsets, token = zone.list_resource_record_sets( + max_results=3, page_token=TOKEN, client=client2) + + self.assertEqual(len(rrsets), len(DATA['rrsets'])) + for found, expected in zip(rrsets, DATA['rrsets']): + self.assertTrue(isinstance(found, ResourceRecordSet)) + self.assertEqual(found.name, expected['name']) + self.assertEqual(found.record_type, expected['type']) + self.assertEqual(found.ttl, int(expected['ttl'])) + self.assertEqual(found.rrdatas, expected['rrdatas']) + self.assertEqual(token, None) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], + {'maxResults': 3, 'pageToken': TOKEN}) + + def test_list_changes_defaults(self): + from gcloud._helpers import _datetime_to_rfc3339 + from gcloud.dns.changes import Changes + from gcloud.dns.resource_record_set import ResourceRecordSet + self._setUpConstants() + PATH = 'projects/%s/managedZones/%s/changes' % ( + self.PROJECT, self.ZONE_NAME) + TOKEN = 'TOKEN' + NAME_1 = 'www.example.com' + TYPE_1 = 'A' + TTL_1 = '86400' + RRDATAS_1 = ['123.45.67.89'] + NAME_2 = 'alias.example.com' + TYPE_2 = 'CNAME' + TTL_2 = '3600' + RRDATAS_2 = ['www.example.com'] + CHANGES_NAME = 'changeset_id' + DATA = { + 'nextPageToken': TOKEN, + 'changes': [{ + 'kind': 'dns#change', + 'id': CHANGES_NAME, + 'status': 'pending', + 'startTime': _datetime_to_rfc3339(self.WHEN), + 'additions': [ + {'kind': 'dns#resourceRecordSet', + 'name': NAME_1, + 'type': TYPE_1, + 'ttl': TTL_1, + 
'rrdatas': RRDATAS_1}], + 'deletions': [ + {'kind': 'dns#change', + 'name': NAME_2, + 'type': TYPE_2, + 'ttl': TTL_2, + 'rrdatas': RRDATAS_2}], + }] + } + conn = _Connection(DATA) + client = _Client(project=self.PROJECT, connection=conn) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client) + + changes, token = zone.list_changes() + + self.assertEqual(len(changes), len(DATA['changes'])) + for found, expected in zip(changes, DATA['changes']): + self.assertTrue(isinstance(found, Changes)) + self.assertEqual(found.name, CHANGES_NAME) + self.assertEqual(found.status, 'pending') + self.assertEqual(found.started, self.WHEN) + + self.assertEqual(len(found.additions), len(expected['additions'])) + for found_rr, expected_rr in zip(found.additions, + expected['additions']): + self.assertTrue(isinstance(found_rr, ResourceRecordSet)) + self.assertEqual(found_rr.name, expected_rr['name']) + self.assertEqual(found_rr.record_type, expected_rr['type']) + self.assertEqual(found_rr.ttl, int(expected_rr['ttl'])) + self.assertEqual(found_rr.rrdatas, expected_rr['rrdatas']) + + self.assertEqual(len(found.deletions), len(expected['deletions'])) + for found_rr, expected_rr in zip(found.deletions, + expected['deletions']): + self.assertTrue(isinstance(found_rr, ResourceRecordSet)) + self.assertEqual(found_rr.name, expected_rr['name']) + self.assertEqual(found_rr.record_type, expected_rr['type']) + self.assertEqual(found_rr.ttl, int(expected_rr['ttl'])) + self.assertEqual(found_rr.rrdatas, expected_rr['rrdatas']) + + self.assertEqual(token, TOKEN) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + + def test_list_changes_explicit(self): + from gcloud._helpers import _datetime_to_rfc3339 + from gcloud.dns.changes import Changes + from gcloud.dns.resource_record_set import ResourceRecordSet + self._setUpConstants() + PATH = 'projects/%s/managedZones/%s/changes' % ( + 
self.PROJECT, self.ZONE_NAME) + TOKEN = 'TOKEN' + NAME_1 = 'www.example.com' + TYPE_1 = 'A' + TTL_1 = '86400' + RRDATAS_1 = ['123.45.67.89'] + NAME_2 = 'alias.example.com' + TYPE_2 = 'CNAME' + TTL_2 = '3600' + RRDATAS_2 = ['www.example.com'] + CHANGES_NAME = 'changeset_id' + DATA = { + 'changes': [{ + 'kind': 'dns#change', + 'id': CHANGES_NAME, + 'status': 'pending', + 'startTime': _datetime_to_rfc3339(self.WHEN), + 'additions': [ + {'kind': 'dns#resourceRecordSet', + 'name': NAME_1, + 'type': TYPE_1, + 'ttl': TTL_1, + 'rrdatas': RRDATAS_1}], + 'deletions': [ + {'kind': 'dns#change', + 'name': NAME_2, + 'type': TYPE_2, + 'ttl': TTL_2, + 'rrdatas': RRDATAS_2}], + }] + } + conn1 = _Connection() + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(DATA) + client2 = _Client(project=self.PROJECT, connection=conn2) + zone = self._makeOne(self.ZONE_NAME, self.DNS_NAME, client1) + + changes, token = zone.list_changes( + max_results=3, page_token=TOKEN, client=client2) + + self.assertEqual(len(changes), len(DATA['changes'])) + for found, expected in zip(changes, DATA['changes']): + self.assertTrue(isinstance(found, Changes)) + self.assertEqual(found.name, CHANGES_NAME) + self.assertEqual(found.status, 'pending') + self.assertEqual(found.started, self.WHEN) + + self.assertEqual(len(found.additions), len(expected['additions'])) + for found_rr, expected_rr in zip(found.additions, + expected['additions']): + self.assertTrue(isinstance(found_rr, ResourceRecordSet)) + self.assertEqual(found_rr.name, expected_rr['name']) + self.assertEqual(found_rr.record_type, expected_rr['type']) + self.assertEqual(found_rr.ttl, int(expected_rr['ttl'])) + self.assertEqual(found_rr.rrdatas, expected_rr['rrdatas']) + + self.assertEqual(len(found.deletions), len(expected['deletions'])) + for found_rr, expected_rr in zip(found.deletions, + expected['deletions']): + self.assertTrue(isinstance(found_rr, ResourceRecordSet)) + self.assertEqual(found_rr.name, 
expected_rr['name']) + self.assertEqual(found_rr.record_type, expected_rr['type']) + self.assertEqual(found_rr.ttl, int(expected_rr['ttl'])) + self.assertEqual(found_rr.rrdatas, expected_rr['rrdatas']) + + self.assertEqual(token, None) + + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % PATH) + self.assertEqual(req['query_params'], + {'maxResults': 3, 'pageToken': TOKEN}) + + +class _Client(object): + + def __init__(self, project='project', connection=None): + self.project = project + self.connection = connection + + +class _Connection(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + def api_request(self, **kw): + from gcloud.exceptions import NotFound + self._requested.append(kw) + + try: + response, self._responses = self._responses[0], self._responses[1:] + except: + raise NotFound('miss') + else: + return response diff --git a/env/Lib/site-packages/gcloud/dns/zone.py b/env/Lib/site-packages/gcloud/dns/zone.py new file mode 100644 index 0000000..2a7df03 --- /dev/null +++ b/env/Lib/site-packages/gcloud/dns/zone.py @@ -0,0 +1,395 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Define API ManagedZones.""" +import six + +from gcloud._helpers import _rfc3339_to_datetime +from gcloud.exceptions import NotFound +from gcloud.dns.changes import Changes +from gcloud.dns.resource_record_set import ResourceRecordSet + + +class ManagedZone(object): + """ManagedZones are containers for DNS resource records. + + See: + https://cloud.google.com/dns/api/v1/managedZones + + :type name: string + :param name: the name of the zone + + :type dns_name: string or :class:`NoneType` + :param dns_name: the DNS name of the zone. If not passed, then calls + to :meth:`create` will fail. + + :type client: :class:`gcloud.dns.client.Client` + :param client: A client which holds credentials and project configuration + for the zone (which requires a project). + + :type description: string or :class:`NoneType` + :param description: the description for the zone. If not passed, defaults + to the value of 'dns_name'. + """ + + def __init__(self, name, dns_name=None, client=None, description=None): + self.name = name + self.dns_name = dns_name + self._client = client + self._properties = {} + if description is None: + description = dns_name + self.description = description + + @classmethod + def from_api_repr(cls, resource, client): + """Factory: construct a zone given its API representation + + :type resource: dict + :param resource: zone resource representation returned from the API + + :type client: :class:`gcloud.dns.client.Client` + :param client: Client which holds credentials and project + configuration for the zone. + + :rtype: :class:`gcloud.dns.zone.ManagedZone` + :returns: Zone parsed from ``resource``. + """ + name = resource.get('name') + dns_name = resource.get('dnsName') + if name is None or dns_name is None: + raise KeyError('Resource lacks required identity information:' + '["name"]["dnsName"]') + zone = cls(name, dns_name, client=client) + zone._set_properties(resource) + return zone + + @property + def project(self): + """Project bound to the zone. 
+ + :rtype: string + :returns: the project (derived from the client). + """ + return self._client.project + + @property + def path(self): + """URL path for the zone's APIs. + + :rtype: string + :returns: the path based on project and dataste name. + """ + return '/projects/%s/managedZones/%s' % (self.project, self.name) + + @property + def created(self): + """Datetime at which the zone was created. + + :rtype: ``datetime.datetime``, or ``NoneType`` + :returns: the creation time (None until set from the server). + """ + return self._properties.get('creationTime') + + @property + def name_servers(self): + """Datetime at which the zone was created. + + :rtype: list of strings, or ``NoneType``. + :returns: the assigned name servers (None until set from the server). + """ + return self._properties.get('nameServers') + + @property + def zone_id(self): + """ID for the zone resource. + + :rtype: string, or ``NoneType`` + :returns: the ID (None until set from the server). + """ + return self._properties.get('id') + + @property + def description(self): + """Description of the zone. + + :rtype: string, or ``NoneType`` + :returns: The description as set by the user, or None (the default). + """ + return self._properties.get('description') + + @description.setter + def description(self, value): + """Update description of the zone. + + :type value: string, or ``NoneType`` + :param value: new description + + :raises: ValueError for invalid value types. + """ + if not isinstance(value, six.string_types) and value is not None: + raise ValueError("Pass a string, or None") + self._properties['description'] = value + + @property + def name_server_set(self): + """Named set of DNS name servers that all host the same ManagedZones. + + Most users will leave this blank. + + See: + https://cloud.google.com/dns/api/v1/managedZones#nameServerSet + + :rtype: string, or ``NoneType`` + :returns: The name as set by the user, or None (the default). 
+ """ + return self._properties.get('nameServerSet') + + @name_server_set.setter + def name_server_set(self, value): + """Update named set of DNS name servers. + + :type value: string, or ``NoneType`` + :param value: new title + + :raises: ValueError for invalid value types. + """ + if not isinstance(value, six.string_types) and value is not None: + raise ValueError("Pass a string, or None") + self._properties['nameServerSet'] = value + + def resource_record_set(self, name, record_type, ttl, rrdatas): + """Construct a resource record set bound to this zone. + + :type name: string + :param name: Name of the record set. + + :type record_type: string + :param record_type: RR type + + :type ttl: integer + :param ttl: TTL for the RR, in seconds + + :type rrdatas: list of string + :param rrdatas: resource data for the RR + + :rtype: :class:`gcloud.dns.resource_record_set.ResourceRecordSet` + :returns: a new ``ResourceRecordSet`` instance + """ + return ResourceRecordSet(name, record_type, ttl, rrdatas, zone=self) + + def changes(self): + """Construct a change set bound to this zone. + + :rtype: :class:`gcloud.dns.changes.Changes` + :returns: a new ``Changes`` instance + """ + return Changes(zone=self) + + def _require_client(self, client): + """Check client or verify over-ride. + + :type client: :class:`gcloud.dns.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current zone. + + :rtype: :class:`gcloud.dns.client.Client` + :returns: The client passed in or the currently bound client. 
+ """ + if client is None: + client = self._client + return client + + def _set_properties(self, api_response): + """Update properties from resource in body of ``api_response`` + + :type api_response: httplib2.Response + :param api_response: response returned from an API call + """ + self._properties.clear() + cleaned = api_response.copy() + self.dns_name = cleaned.pop('dnsName', None) + if 'creationTime' in cleaned: + cleaned['creationTime'] = _rfc3339_to_datetime( + cleaned['creationTime']) + self._properties.update(cleaned) + + def _build_resource(self): + """Generate a resource for ``create`` or ``update``.""" + resource = { + 'name': self.name, + } + + if self.dns_name is not None: + resource['dnsName'] = self.dns_name + + if self.description is not None: + resource['description'] = self.description + + if self.name_server_set is not None: + resource['nameServerSet'] = self.name_server_set + + return resource + + def create(self, client=None): + """API call: create the zone via a PUT request + + See: + https://cloud.google.com/dns/api/v1/managedZones/create + + :type client: :class:`gcloud.dns.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current zone. + """ + client = self._require_client(client) + path = '/projects/%s/managedZones' % (self.project,) + api_response = client.connection.api_request( + method='POST', path=path, data=self._build_resource()) + self._set_properties(api_response) + + def exists(self, client=None): + """API call: test for the existence of the zone via a GET request + + See + https://cloud.google.com/dns/api/v1/managedZones/get + + :type client: :class:`gcloud.dns.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current zone. 
+ """ + client = self._require_client(client) + + try: + client.connection.api_request(method='GET', path=self.path, + query_params={'fields': 'id'}) + except NotFound: + return False + else: + return True + + def reload(self, client=None): + """API call: refresh zone properties via a GET request + + See + https://cloud.google.com/dns/api/v1/managedZones/get + + :type client: :class:`gcloud.dns.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current zone. + """ + client = self._require_client(client) + + api_response = client.connection.api_request( + method='GET', path=self.path) + self._set_properties(api_response) + + def delete(self, client=None): + """API call: delete the zone via a DELETE request + + See: + https://cloud.google.com/dns/api/v1/managedZones/delete + + :type client: :class:`gcloud.dns.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current zone. + """ + client = self._require_client(client) + client.connection.api_request(method='DELETE', path=self.path) + + def list_resource_record_sets(self, max_results=None, page_token=None, + client=None): + """List resource record sets for this zone. + + See: + https://cloud.google.com/dns/api/v1/resourceRecordSets/list + + :type max_results: int + :param max_results: maximum number of zones to return, If not + passed, defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of zones. If + not passed, the API will return the first page of + zones. + + :type client: :class:`gcloud.dns.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current zone. 
+ + :rtype: tuple, (list, str) + :returns: list of + :class:`gcloud.dns.resource_record_set.ResourceRecordSet`, + plus a "next page token" string: if the token is not None, + indicates that more zones can be retrieved with another + call (pass that value as ``page_token``). + """ + params = {} + + if max_results is not None: + params['maxResults'] = max_results + + if page_token is not None: + params['pageToken'] = page_token + + path = '/projects/%s/managedZones/%s/rrsets' % ( + self.project, self.name) + client = self._require_client(client) + conn = client.connection + resp = conn.api_request(method='GET', path=path, query_params=params) + zones = [ResourceRecordSet.from_api_repr(resource, self) + for resource in resp['rrsets']] + return zones, resp.get('nextPageToken') + + def list_changes(self, max_results=None, page_token=None, client=None): + """List change sets for this zone. + + See: + https://cloud.google.com/dns/api/v1/resourceRecordSets/list + + :type max_results: int + :param max_results: maximum number of zones to return, If not + passed, defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of zones. If + not passed, the API will return the first page of + zones. + + :type client: :class:`gcloud.dns.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current zone. + + :rtype: tuple, (list, str) + :returns: list of + :class:`gcloud.dns.resource_record_set.ResourceRecordSet`, + plus a "next page token" string: if the token is not None, + indicates that more zones can be retrieved with another + call (pass that value as ``page_token``). 
+ """ + params = {} + + if max_results is not None: + params['maxResults'] = max_results + + if page_token is not None: + params['pageToken'] = page_token + + path = '/projects/%s/managedZones/%s/changes' % ( + self.project, self.name) + client = self._require_client(client) + conn = client.connection + resp = conn.api_request(method='GET', path=path, query_params=params) + zones = [Changes.from_api_repr(resource, self) + for resource in resp['changes']] + return zones, resp.get('nextPageToken') diff --git a/env/Lib/site-packages/gcloud/environment_vars.py b/env/Lib/site-packages/gcloud/environment_vars.py new file mode 100644 index 0000000..344ffd3 --- /dev/null +++ b/env/Lib/site-packages/gcloud/environment_vars.py @@ -0,0 +1,37 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Comprehensive list of environment variables used in gcloud. + +These enable many types of implicit behavior in both production +and tests. 
+""" + +PROJECT = 'GCLOUD_PROJECT' +"""Environment variable defining default project.""" + +TESTS_PROJECT = 'GCLOUD_TESTS_PROJECT_ID' +"""Environment variable defining project for tests.""" + +GCD_DATASET = 'DATASTORE_DATASET' +"""Environment variable defining default dataset ID under GCD.""" + +GCD_HOST = 'DATASTORE_HOST' +"""Environment variable defining host for GCD dataset server.""" + +PUBSUB_EMULATOR = 'PUBSUB_EMULATOR_HOST' +"""Environment variable defining host for Pub/Sub emulator.""" + +CREDENTIALS = 'GOOGLE_APPLICATION_CREDENTIALS' +"""Environment variable defining location of Google credentials.""" diff --git a/env/Lib/site-packages/gcloud/exceptions.py b/env/Lib/site-packages/gcloud/exceptions.py new file mode 100644 index 0000000..fffdb1f --- /dev/null +++ b/env/Lib/site-packages/gcloud/exceptions.py @@ -0,0 +1,224 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Custom exceptions for :mod:`gcloud` package. + +See: https://cloud.google.com/storage/docs/json_api/v1/status-codes +""" + +import copy +import json +import six + +_HTTP_CODE_TO_EXCEPTION = {} # populated at end of module + + +class GCloudError(Exception): + """Base error class for gcloud errors (abstract). + + Each subclass represents a single type of HTTP error response. + """ + code = None + """HTTP status code. Concrete subclasses *must* define. 
+ + See: http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html + """ + + def __init__(self, message, errors=()): + super(GCloudError, self).__init__() + # suppress deprecation warning under 2.6.x + self.message = message + self._errors = errors + + def __str__(self): + return '%d %s' % (self.code, self.message) + + @property + def errors(self): + """Detailed error information. + + :rtype: list(dict) + :returns: a list of mappings describing each error. + """ + return [copy.deepcopy(error) for error in self._errors] + + +class Redirection(GCloudError): + """Base for 3xx responses + + This class is abstract. + """ + + +class MovedPermanently(Redirection): + """Exception mapping a '301 Moved Permanently' response.""" + code = 301 + + +class NotModified(Redirection): + """Exception mapping a '304 Not Modified' response.""" + code = 304 + + +class TemporaryRedirect(Redirection): + """Exception mapping a '307 Temporary Redirect' response.""" + code = 307 + + +class ResumeIncomplete(Redirection): + """Exception mapping a '308 Resume Incomplete' response.""" + code = 308 + + +class ClientError(GCloudError): + """Base for 4xx responses + + This class is abstract + """ + + +class BadRequest(ClientError): + """Exception mapping a '400 Bad Request' response.""" + code = 400 + + +class Unauthorized(ClientError): + """Exception mapping a '401 Unauthorized' response.""" + code = 401 + + +class Forbidden(ClientError): + """Exception mapping a '403 Forbidden' response.""" + code = 403 + + +class NotFound(ClientError): + """Exception mapping a '404 Not Found' response.""" + code = 404 + + +class MethodNotAllowed(ClientError): + """Exception mapping a '405 Method Not Allowed' response.""" + code = 405 + + +class Conflict(ClientError): + """Exception mapping a '409 Conflict' response.""" + code = 409 + + +class LengthRequired(ClientError): + """Exception mapping a '411 Length Required' response.""" + code = 411 + + +class PreconditionFailed(ClientError): + """Exception mapping a '412 
Precondition Failed' response.""" + code = 412 + + +class RequestRangeNotSatisfiable(ClientError): + """Exception mapping a '416 Request Range Not Satisfiable' response.""" + code = 416 + + +class TooManyRequests(ClientError): + """Exception mapping a '429 Too Many Requests' response.""" + code = 429 + + +class ServerError(GCloudError): + """Base for 5xx responses: (abstract)""" + + +class InternalServerError(ServerError): + """Exception mapping a '500 Internal Server Error' response.""" + code = 500 + + +class MethodNotImplemented(ServerError): + """Exception mapping a '501 Not Implemented' response.""" + code = 501 + + +class ServiceUnavailable(ServerError): + """Exception mapping a '503 Service Unavailable' response.""" + code = 503 + + +def make_exception(response, content, error_info=None, use_json=True): + """Factory: create exception based on HTTP response code. + + :type response: :class:`httplib2.Response` or other HTTP response object + :param response: A response object that defines a status code as the + status attribute. + + :type content: string or dictionary + :param content: The body of the HTTP error response. + + :type error_info: string + :param error_info: Optional string giving extra information about the + failed request. + + :type use_json: bool + :param use_json: Flag indicating if ``content`` is expected to be JSON. + + :rtype: instance of :class:`GCloudError`, or a concrete subclass. + :returns: Exception specific to the error response. + """ + if isinstance(content, six.binary_type): + content = content.decode('utf-8') + + if isinstance(content, six.string_types): + payload = None + if use_json: + try: + payload = json.loads(content) + except ValueError: + # Expected JSON but received something else. 
+ pass + if payload is None: + payload = {'error': {'message': content}} + else: + payload = content + + message = payload.get('error', {}).get('message', '') + errors = payload.get('error', {}).get('errors', ()) + + if error_info is not None: + message += ' (%s)' % (error_info,) + + try: + klass = _HTTP_CODE_TO_EXCEPTION[response.status] + except KeyError: + error = GCloudError(message, errors) + error.code = response.status + else: + error = klass(message, errors) + return error + + +def _walk_subclasses(klass): + """Recursively walk subclass tree.""" + for sub in klass.__subclasses__(): + yield sub + for subsub in _walk_subclasses(sub): + yield subsub + + +# Build the code->exception class mapping. +for _eklass in _walk_subclasses(GCloudError): + code = getattr(_eklass, 'code', None) + if code is not None: + _HTTP_CODE_TO_EXCEPTION[code] = _eklass diff --git a/env/Lib/site-packages/gcloud/iterator.py b/env/Lib/site-packages/gcloud/iterator.py new file mode 100644 index 0000000..f62d285 --- /dev/null +++ b/env/Lib/site-packages/gcloud/iterator.py @@ -0,0 +1,187 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Iterators for paging through API responses. + +These iterators simplify the process of paging through API responses +where the response is a list of results with a ``nextPageToken``. 
+ +To make an iterator work, just override the ``get_items_from_response`` +method so that given a response (containing a page of results) it parses +those results into an iterable of the actual objects you want:: + + class MyIterator(Iterator): + def get_items_from_response(self, response): + items = response.get('items', []) + for item in items: + my_item = MyItemClass(other_arg=True) + my_item._set_properties(item) + yield my_item + +You then can use this to get **all** the results from a resource:: + + >>> iterator = MyIterator(...) + >>> list(iterator) # Convert to a list (consumes all values). + +Or you can walk your way through items and call off the search early if +you find what you're looking for (resulting in possibly fewer +requests):: + + >>> for item in MyIterator(...): + >>> print item.name + >>> if not item.is_valid: + >>> break +""" + + +class Iterator(object): + """A generic class for iterating through Cloud JSON APIs list responses. + + :type client: :class:`gcloud.client.Client` + :param client: The client, which owns a connection to make requests. + + :type path: string + :param path: The path to query for the list of items. + + :type extra_params: dict or None + :param extra_params: Extra query string parameters for the API call. 
+ """ + + PAGE_TOKEN = 'pageToken' + RESERVED_PARAMS = frozenset([PAGE_TOKEN]) + + def __init__(self, client, path, extra_params=None): + self.client = client + self.path = path + self.page_number = 0 + self.next_page_token = None + self.extra_params = extra_params or {} + reserved_in_use = self.RESERVED_PARAMS.intersection( + self.extra_params) + if reserved_in_use: + raise ValueError(('Using a reserved parameter', + reserved_in_use)) + + def __iter__(self): + """Iterate through the list of items.""" + while self.has_next_page(): + response = self.get_next_page_response() + for item in self.get_items_from_response(response): + yield item + + def has_next_page(self): + """Determines whether or not this iterator has more pages. + + :rtype: boolean + :returns: Whether the iterator has more pages or not. + """ + if self.page_number == 0: + return True + + return self.next_page_token is not None + + def get_query_params(self): + """Getter for query parameters for the next request. + + :rtype: dict + :returns: A dictionary of query parameters. + """ + result = ({self.PAGE_TOKEN: self.next_page_token} + if self.next_page_token else {}) + result.update(self.extra_params) + return result + + def get_next_page_response(self): + """Requests the next page from the path provided. + + :rtype: dict + :returns: The parsed JSON response of the next page's contents. + """ + if not self.has_next_page(): + raise RuntimeError('No more pages. Try resetting the iterator.') + + response = self.client.connection.api_request( + method='GET', path=self.path, query_params=self.get_query_params()) + + self.page_number += 1 + self.next_page_token = response.get('nextPageToken') + + return response + + def reset(self): + """Resets the iterator to the beginning.""" + self.page_number = 0 + self.next_page_token = None + + def get_items_from_response(self, response): + """Factory method called while iterating. This should be overriden. + + This method should be overridden by a subclass. 
It should + accept the API response of a request for the next page of items, + and return a list (or other iterable) of items. + + Typically this method will construct a Bucket or a Blob from the + page of results in the response. + + :type response: dict + :param response: The response of asking for the next page of items. + + :rtype: iterable + :returns: Items that the iterator should yield. + """ + raise NotImplementedError + + +class MethodIterator(object): + """Method-based iterator iterating through Cloud JSON APIs list responses. + + :type method: instance method + :param method: ``list_foo`` method of a domain object, taking as arguments + ``page_token``, ``page_size``, and optional additional + keyword arguments. + + :type page_token: string or ``NoneType`` + :param page_token: Initial page token to pass. if ``None``, fetch the + first page from the ``method`` API call. + + :type page_size: integer or ``NoneType`` + :param page_size: Maximum number of items to return from the ``method`` + API call; if ``None``, uses the default for the API. + + :type max_calls: integer or ``NoneType`` + :param max_calls: Maximum number of times to make the ``method`` + API call; if ``None``, applies no limit. + + :type kw: dict + :param kw: optional keyword argments to be passed to ``method``. 
+ """ + def __init__(self, method, page_token=None, page_size=None, + max_calls=None, **kw): + self._method = method + self._token = page_token + self._page_size = page_size + self._kw = kw + self._max_calls = max_calls + self._page_num = 0 + + def __iter__(self): + while self._max_calls is None or self._page_num < self._max_calls: + items, new_token = self._method( + page_token=self._token, page_size=self._page_size, **self._kw) + for item in items: + yield item + if new_token is None: + return + self._page_num += 1 + self._token = new_token diff --git a/env/Lib/site-packages/gcloud/logging/__init__.py b/env/Lib/site-packages/gcloud/logging/__init__.py new file mode 100644 index 0000000..67b0386 --- /dev/null +++ b/env/Lib/site-packages/gcloud/logging/__init__.py @@ -0,0 +1,23 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Logging API wrapper.""" + +from gcloud.logging.client import Client +from gcloud.logging.connection import Connection + + +SCOPE = Connection.SCOPE +ASCENDING = 'timestamp asc' +DESCENDING = 'timestamp desc' diff --git a/env/Lib/site-packages/gcloud/logging/_gax.py b/env/Lib/site-packages/gcloud/logging/_gax.py new file mode 100644 index 0000000..52ec001 --- /dev/null +++ b/env/Lib/site-packages/gcloud/logging/_gax.py @@ -0,0 +1,575 @@ +# Copyright 2016 Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""GAX wrapper for Logging API requests.""" + +import json + +# pylint: disable=import-error +from google.gax import CallOptions +from google.gax import INITIAL_PAGE +from google.gax.errors import GaxError +from google.gax.grpc import exc_to_code +from google.logging.type.log_severity_pb2 import LogSeverity +from google.logging.v2.logging_config_pb2 import LogSink +from google.logging.v2.logging_metrics_pb2 import LogMetric +from google.logging.v2.log_entry_pb2 import LogEntry +from google.protobuf.json_format import Parse +from grpc.beta.interfaces import StatusCode +# pylint: enable=import-error + +from gcloud.exceptions import Conflict +from gcloud.exceptions import NotFound +from gcloud._helpers import _datetime_to_pb_timestamp +from gcloud._helpers import _datetime_to_rfc3339 +from gcloud._helpers import _pb_timestamp_to_datetime + + +class _LoggingAPI(object): + """Helper mapping logging-related APIs. + + :type gax_api: + :class:`google.logging.v2.logging_service_v2_api.LoggingServiceV2Api` + :param gax_api: API object used to make GAX requests. + """ + def __init__(self, gax_api): + self._gax_api = gax_api + + def list_entries(self, projects, filter_='', order_by='', + page_size=0, page_token=None): + """Return a page of log entry resources. + + :type projects: list of strings + :param projects: project IDs to include. If not passed, + defaults to the project bound to the API's client. 
+ + :type filter_: str + :param filter_: a filter expression. See: + https://cloud.google.com/logging/docs/view/advanced_filters + + :type order_by: str + :param order_by: One of :data:`gcloud.logging.ASCENDING` or + :data:`gcloud.logging.DESCENDING`. + + :type page_size: int + :param page_size: maximum number of entries to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of entries. If not + passed, the API will return the first page of + entries. + + :rtype: tuple, (list, str) + :returns: list of mappings, plus a "next page token" string: + if not None, indicates that more entries can be retrieved + with another call (pass that value as ``page_token``). + """ + options = _build_paging_options(page_token) + page_iter = self._gax_api.list_log_entries( + projects, filter_, order_by, page_size, options) + entries = [_log_entry_pb_to_mapping(entry_pb) + for entry_pb in page_iter.next()] + token = page_iter.page_token or None + return entries, token + + def write_entries(self, entries, logger_name=None, resource=None, + labels=None): + """API call: log an entry resource via a POST request + + :type entries: sequence of mapping + :param entries: the log entry resources to log. + + :type logger_name: string + :param logger_name: name of default logger to which to log the entries; + individual entries may override. + + :type resource: mapping + :param resource: default resource to associate with entries; + individual entries may override. + + :type labels: mapping + :param labels: default labels to associate with entries; + individual entries may override. 
+ """ + options = None + partial_success = False + entry_pbs = [_log_entry_mapping_to_pb(entry) for entry in entries] + self._gax_api.write_log_entries(entry_pbs, logger_name, resource, + labels, partial_success, options) + + def logger_delete(self, project, logger_name): + """API call: delete all entries in a logger via a DELETE request + + :type project: string + :param project: ID of project containing the log entries to delete + + :type logger_name: string + :param logger_name: name of logger containing the log entries to delete + """ + options = None + path = 'projects/%s/logs/%s' % (project, logger_name) + self._gax_api.delete_log(path, options) + + +class _SinksAPI(object): + """Helper mapping sink-related APIs. + + :type gax_api: + :class:`google.logging.v2.config_service_v2_api.ConfigServiceV2Api` + :param gax_api: API object used to make GAX requests. + """ + def __init__(self, gax_api): + self._gax_api = gax_api + + def list_sinks(self, project, page_size=0, page_token=None): + """List sinks for the project associated with this client. + + :type project: string + :param project: ID of the project whose sinks are to be listed. + + :type page_size: int + :param page_size: maximum number of sinks to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of sinks. If not + passed, the API will return the first page of + sinks. + + :rtype: tuple, (list, str) + :returns: list of mappings, plus a "next page token" string: + if not None, indicates that more sinks can be retrieved + with another call (pass that value as ``page_token``). 
+ """ + options = _build_paging_options(page_token) + page_iter = self._gax_api.list_sinks(project, page_size, options) + sinks = [_log_sink_pb_to_mapping(log_sink_pb) + for log_sink_pb in page_iter.next()] + token = page_iter.page_token or None + return sinks, token + + def sink_create(self, project, sink_name, filter_, destination): + """API call: create a sink resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create + + :type project: string + :param project: ID of the project in which to create the sink. + + :type sink_name: string + :param sink_name: the name of the sink + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the sink. + + :type destination: string + :param destination: destination URI for the entries exported by + the sink. + """ + options = None + parent = 'projects/%s' % (project,) + sink_pb = LogSink(name=sink_name, filter=filter_, + destination=destination) + try: + self._gax_api.create_sink(parent, sink_pb, options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: + path = 'projects/%s/sinks/%s' % (project, sink_name) + raise Conflict(path) + raise + + def sink_get(self, project, sink_name): + """API call: retrieve a sink resource. + + :type project: string + :param project: ID of the project containing the sink. + + :type sink_name: string + :param sink_name: the name of the sink + """ + options = None + path = 'projects/%s/sinks/%s' % (project, sink_name) + try: + sink_pb = self._gax_api.get_sink(path, options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(path) + raise + return _log_sink_pb_to_mapping(sink_pb) + + def sink_update(self, project, sink_name, filter_, destination): + """API call: update a sink resource. + + :type project: string + :param project: ID of the project containing the sink. 
+ + :type sink_name: string + :param sink_name: the name of the sink + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the sink. + + :type destination: string + :param destination: destination URI for the entries exported by + the sink. + """ + options = None + path = 'projects/%s/sinks/%s' % (project, sink_name) + sink_pb = LogSink(name=path, filter=filter_, destination=destination) + try: + self._gax_api.update_sink(path, sink_pb, options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(path) + raise + return _log_sink_pb_to_mapping(sink_pb) + + def sink_delete(self, project, sink_name): + """API call: delete a sink resource. + + :type project: string + :param project: ID of the project containing the sink. + + :type sink_name: string + :param sink_name: the name of the sink + """ + options = None + path = 'projects/%s/sinks/%s' % (project, sink_name) + try: + self._gax_api.delete_sink(path, options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(path) + raise + + +class _MetricsAPI(object): + """Helper mapping sink-related APIs. + + :type gax_api: + :class:`google.logging.v2.metrics_service_v2_api.MetricsServiceV2Api` + :param gax_api: API object used to make GAX requests. + """ + def __init__(self, gax_api): + self._gax_api = gax_api + + def list_metrics(self, project, page_size=0, page_token=None): + """List metrics for the project associated with this client. + + :type project: string + :param project: ID of the project whose metrics are to be listed. + + :type page_size: int + :param page_size: maximum number of metrics to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of metrics. If not + passed, the API will return the first page of + metrics. 
+ + :rtype: tuple, (list, str) + :returns: list of mappings, plus a "next page token" string: + if not None, indicates that more metrics can be retrieved + with another call (pass that value as ``page_token``). + """ + options = _build_paging_options(page_token) + page_iter = self._gax_api.list_log_metrics(project, page_size, options) + metrics = [_log_metric_pb_to_mapping(log_metric_pb) + for log_metric_pb in page_iter.next()] + token = page_iter.page_token or None + return metrics, token + + def metric_create(self, project, metric_name, filter_, description): + """API call: create a metric resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create + + :type project: string + :param project: ID of the project in which to create the metric. + + :type metric_name: string + :param metric_name: the name of the metric + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the metric. + + :type description: string + :param description: description of the metric. + """ + options = None + parent = 'projects/%s' % (project,) + metric_pb = LogMetric(name=metric_name, filter=filter_, + description=description) + try: + self._gax_api.create_log_metric(parent, metric_pb, options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: + path = 'projects/%s/metrics/%s' % (project, metric_name) + raise Conflict(path) + raise + + def metric_get(self, project, metric_name): + """API call: retrieve a metric resource. + + :type project: string + :param project: ID of the project containing the metric. 
+ + :type metric_name: string + :param metric_name: the name of the metric + """ + options = None + path = 'projects/%s/metrics/%s' % (project, metric_name) + try: + metric_pb = self._gax_api.get_log_metric(path, options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(path) + raise + return _log_metric_pb_to_mapping(metric_pb) + + def metric_update(self, project, metric_name, filter_, description): + """API call: update a metric resource. + + :type project: string + :param project: ID of the project containing the metric. + + :type metric_name: string + :param metric_name: the name of the metric + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the metric. + + :type description: string + :param description: description of the metric. + """ + options = None + path = 'projects/%s/metrics/%s' % (project, metric_name) + metric_pb = LogMetric(name=path, filter=filter_, + description=description) + try: + self._gax_api.update_log_metric(path, metric_pb, options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(path) + raise + return _log_metric_pb_to_mapping(metric_pb) + + def metric_delete(self, project, metric_name): + """API call: delete a metric resource. + + :type project: string + :param project: ID of the project containing the metric. 
+ + :type metric_name: string + :param metric_name: the name of the metric + """ + options = None + path = 'projects/%s/metrics/%s' % (project, metric_name) + try: + self._gax_api.delete_log_metric(path, options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(path) + raise + + +def _build_paging_options(page_token=None): + """Helper for :meth:'_PublisherAPI.list_topics' et aliae.""" + if page_token is None: + page_token = INITIAL_PAGE + options = {'page_token': page_token} + return CallOptions(**options) + + +def _mon_resource_pb_to_mapping(resource_pb): + """Helper for :func:_log_entry_pb_to_mapping""" + mapping = { + 'type': resource_pb.type, + } + if resource_pb.labels: + mapping['labels'] = resource_pb.labels + return mapping + + +def _pb_timestamp_to_rfc3339(timestamp_pb): + """Helper for :func:_log_entry_pb_to_mapping""" + timestamp = _pb_timestamp_to_datetime(timestamp_pb) + return _datetime_to_rfc3339(timestamp) + + +def _log_entry_pb_to_mapping(entry_pb): + """Helper for :meth:`list_entries`, et aliae + + Ideally, would use a function from :mod:`protobuf.json_format`, but + the right one isn't public. 
See: + https://github.com/google/protobuf/issues/1351 + """ + mapping = { + 'logName': entry_pb.log_name, + 'resource': _mon_resource_pb_to_mapping(entry_pb.resource), + 'severity': entry_pb.severity, + 'insertId': entry_pb.insert_id, + 'timestamp': _pb_timestamp_to_rfc3339(entry_pb.timestamp), + 'labels': entry_pb.labels, + 'textPayload': entry_pb.text_payload, + 'jsonPayload': entry_pb.json_payload, + 'protoPayload': entry_pb.proto_payload, + } + + if entry_pb.http_request: + request = entry_pb.http_request + mapping['httpRequest'] = { + 'request_method': request.request_method, + 'request_url': request.request_url, + 'status': request.status, + 'referer': request.referer, + 'user_agent': request.user_agent, + 'cache_hit': request.cache_hit, + 'request_size': request.request_size, + 'response_size': request.response_size, + 'remote_ip': request.remote_ip, + } + + if entry_pb.operation: + operation = entry_pb.operation + mapping['operation'] = { + 'producer': operation.producer, + 'id': operation.id, + 'first': operation.first, + 'last': operation.last, + } + + return mapping + + +def _http_request_mapping_to_pb(info, request): + """Helper for _log_entry_mapping_to_pb""" + optional_request_keys = { + 'requestMethod': 'request_method', + 'requestUrl': 'request_url', + 'status': 'status', + 'referer': 'referer', + 'userAgent': 'user_agent', + 'cacheHit': 'cache_hit', + 'requestSize': 'request_size', + 'responseSize': 'response_size', + 'remoteIp': 'remote_ip', + } + for key, pb_name in optional_request_keys.items(): + if key in info: + setattr(request, pb_name, info[key]) + + +def _log_operation_mapping_to_pb(info, operation): + """Helper for _log_entry_mapping_to_pb""" + operation.producer = info['producer'] + operation.id = info['id'] + + if 'first' in info: + operation.first = info['first'] + + if 'last' in info: + operation.last = info['last'] + + +def _log_entry_mapping_to_pb(mapping): + """Helper for :meth:`write_entries`, et aliae + + Ideally, would use a 
function from :mod:`protobuf.json_format`, but + the right one isn't public. See: + https://github.com/google/protobuf/issues/1351 + """ + # pylint: disable=too-many-branches + entry_pb = LogEntry() + + optional_scalar_keys = { + 'logName': 'log_name', + 'insertId': 'insert_id', + 'textPayload': 'text_payload', + } + + for key, pb_name in optional_scalar_keys.items(): + if key in mapping: + setattr(entry_pb, pb_name, mapping[key]) + + if 'resource' in mapping: + entry_pb.resource.type = mapping['resource']['type'] + + if 'severity' in mapping: + severity = mapping['severity'] + if isinstance(severity, str): + severity = LogSeverity.Value(severity) + entry_pb.severity = severity + + if 'timestamp' in mapping: + timestamp = _datetime_to_pb_timestamp(mapping['timestamp']) + entry_pb.timestamp.CopyFrom(timestamp) + + if 'labels' in mapping: + for key, value in mapping['labels'].items(): + entry_pb.labels[key] = value + + if 'jsonPayload' in mapping: + for key, value in mapping['jsonPayload'].items(): + entry_pb.json_payload[key] = value + + if 'protoPayload' in mapping: + Parse(json.dumps(mapping['protoPayload']), entry_pb.proto_payload) + + if 'httpRequest' in mapping: + _http_request_mapping_to_pb( + mapping['httpRequest'], entry_pb.http_request) + + if 'operation' in mapping: + _log_operation_mapping_to_pb( + mapping['operation'], entry_pb.operation) + + return entry_pb + # pylint: enable=too-many-branches + + +def _log_sink_pb_to_mapping(sink_pb): + """Helper for :meth:`list_sinks`, et aliae + + Ideally, would use a function from :mod:`protobuf.json_format`, but + the right one isn't public. See: + https://github.com/google/protobuf/issues/1351 + """ + return { + 'name': sink_pb.name, + 'destination': sink_pb.destination, + 'filter': sink_pb.filter, + } + + +def _log_metric_pb_to_mapping(metric_pb): + """Helper for :meth:`list_metrics`, et aliae + + Ideally, would use a function from :mod:`protobuf.json_format`, but + the right one isn't public. 
See: + https://github.com/google/protobuf/issues/1351 + """ + return { + 'name': metric_pb.name, + 'description': metric_pb.description, + 'filter': metric_pb.filter, + } diff --git a/env/Lib/site-packages/gcloud/logging/client.py b/env/Lib/site-packages/gcloud/logging/client.py new file mode 100644 index 0000000..8b4aae0 --- /dev/null +++ b/env/Lib/site-packages/gcloud/logging/client.py @@ -0,0 +1,300 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Client for interacting with the Google Cloud Logging API.""" + +import os + +try: + from google.logging.v2.config_service_v2_api import ( + ConfigServiceV2Api as GeneratedSinksAPI) + from google.logging.v2.logging_service_v2_api import ( + LoggingServiceV2Api as GeneratedLoggingAPI) + from google.logging.v2.metrics_service_v2_api import ( + MetricsServiceV2Api as GeneratedMetricsAPI) + from gcloud.logging._gax import _LoggingAPI as GAXLoggingAPI + from gcloud.logging._gax import _MetricsAPI as GAXMetricsAPI + from gcloud.logging._gax import _SinksAPI as GAXSinksAPI +except ImportError: # pragma: NO COVER + _HAVE_GAX = False + GeneratedLoggingAPI = GAXLoggingAPI = None + GeneratedMetricsAPI = GAXMetricsAPI = None + GeneratedSinksAPI = GAXSinksAPI = None +else: + _HAVE_GAX = True + +from gcloud.client import JSONClient +from gcloud.logging.connection import Connection +from gcloud.logging.connection import _LoggingAPI as JSONLoggingAPI +from gcloud.logging.connection import _MetricsAPI as JSONMetricsAPI +from gcloud.logging.connection import _SinksAPI as JSONSinksAPI +from gcloud.logging.entries import ProtobufEntry +from gcloud.logging.entries import StructEntry +from gcloud.logging.entries import TextEntry +from gcloud.logging.logger import Logger +from gcloud.logging.metric import Metric +from gcloud.logging.sink import Sink + + +_USE_GAX = _HAVE_GAX and (os.environ.get('GCLOUD_ENABLE_GAX') is not None) + + +class Client(JSONClient): + """Client to bundle configuration needed for API requests. + + :type project: str + :param project: the project which the client acts on behalf of. + If not passed, falls back to the default inferred + from the environment. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :param credentials: The OAuth2 Credentials to use for the connection + owned by this client. If not passed (and if no ``http`` + object is passed), falls back to the default inferred + from the environment. 
+ + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: An optional HTTP object to make requests. If not passed, an + ``http`` object is created that is bound to the + ``credentials`` for the current object. + """ + + _connection_class = Connection + _logging_api = _sinks_api = _metrics_api = None + + @property + def logging_api(self): + """Helper for logging-related API calls. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs + """ + if self._logging_api is None: + if _USE_GAX: + generated = GeneratedLoggingAPI() + self._logging_api = GAXLoggingAPI(generated) + else: + self._logging_api = JSONLoggingAPI(self.connection) + return self._logging_api + + @property + def sinks_api(self): + """Helper for log sink-related API calls. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks + """ + if self._sinks_api is None: + if _USE_GAX: + generated = GeneratedSinksAPI() + self._sinks_api = GAXSinksAPI(generated) + else: + self._sinks_api = JSONSinksAPI(self.connection) + return self._sinks_api + + @property + def metrics_api(self): + """Helper for log metric-related API calls. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics + """ + if self._metrics_api is None: + if _USE_GAX: + generated = GeneratedMetricsAPI() + self._metrics_api = GAXMetricsAPI(generated) + else: + self._metrics_api = JSONMetricsAPI(self.connection) + return self._metrics_api + + def logger(self, name): + """Creates a logger bound to the current client. + + :type name: str + :param name: the name of the logger to be constructed. + + :rtype: :class:`gcloud.logging.logger.Logger` + :returns: Logger created with the current client. 
+ """ + return Logger(name, client=self) + + def _entry_from_resource(self, resource, loggers): + """Detect correct entry type from resource and instantiate. + + :type resource: dict + :param resource: one entry resource from API response + + :type loggers: dict or None + :param loggers: A mapping of logger fullnames -> loggers. If not + passed, the entry will have a newly-created logger. + + :rtype: One of: + :class:`gcloud.logging.entries.TextEntry`, + :class:`gcloud.logging.entries.StructEntry`, + :class:`gcloud.logging.entries.ProtobufEntry` + :returns: the entry instance, constructed via the resource + """ + if 'textPayload' in resource: + return TextEntry.from_api_repr(resource, self, loggers) + elif 'jsonPayload' in resource: + return StructEntry.from_api_repr(resource, self, loggers) + elif 'protoPayload' in resource: + return ProtobufEntry.from_api_repr(resource, self, loggers) + raise ValueError('Cannot parse log entry resource') + + def list_entries(self, projects=None, filter_=None, order_by=None, + page_size=None, page_token=None): + """Return a page of log entries. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list + + :type projects: list of strings + :param projects: project IDs to include. If not passed, + defaults to the project bound to the client. + + :type filter_: str + :param filter_: a filter expression. See: + https://cloud.google.com/logging/docs/view/advanced_filters + + :type order_by: str + :param order_by: One of :data:`gcloud.logging.ASCENDING` or + :data:`gcloud.logging.DESCENDING`. + + :type page_size: int + :param page_size: maximum number of entries to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of entries. If not + passed, the API will return the first page of + entries. 
+ + :rtype: tuple, (list, str) + :returns: list of :class:`gcloud.logging.entry.TextEntry`, plus a + "next page token" string: if not None, indicates that + more entries can be retrieved with another call (pass that + value as ``page_token``). + """ + if projects is None: + projects = [self.project] + + resources, token = self.logging_api.list_entries( + projects=projects, filter_=filter_, order_by=order_by, + page_size=page_size, page_token=page_token) + loggers = {} + entries = [self._entry_from_resource(resource, loggers) + for resource in resources] + return entries, token + + def sink(self, name, filter_=None, destination=None): + """Creates a sink bound to the current client. + + :type name: str + :param name: the name of the sink to be constructed. + + :type filter_: str + :param filter_: (optional) the advanced logs filter expression + defining the entries exported by the sink. If not + passed, the instance should already exist, to be + refreshed via :meth:`Sink.reload`. + + :type destination: str + :param destination: destination URI for the entries exported by + the sink. If not passed, the instance should + already exist, to be refreshed via + :meth:`Sink.reload`. + + :rtype: :class:`gcloud.logging.sink.Sink` + :returns: Sink created with the current client. + """ + return Sink(name, filter_, destination, client=self) + + def list_sinks(self, page_size=None, page_token=None): + """List sinks for the project associated with this client. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/list + + :type page_size: int + :param page_size: maximum number of sinks to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of sinks. If not + passed, the API will return the first page of + sinks. 
+ + :rtype: tuple, (list, str) + :returns: list of :class:`gcloud.logging.sink.Sink`, plus a + "next page token" string: if not None, indicates that + more sinks can be retrieved with another call (pass that + value as ``page_token``). + """ + resources, token = self.sinks_api.list_sinks( + self.project, page_size, page_token) + sinks = [Sink.from_api_repr(resource, self) + for resource in resources] + return sinks, token + + def metric(self, name, filter_=None, description=''): + """Creates a metric bound to the current client. + + :type name: str + :param name: the name of the metric to be constructed. + + :type filter_: str + :param filter_: the advanced logs filter expression defining the + entries tracked by the metric. If not + passed, the instance should already exist, to be + refreshed via :meth:`Metric.reload`. + + :type description: str + :param description: the description of the metric to be constructed. + If not passed, the instance should already exist, + to be refreshed via :meth:`Metric.reload`. + + :rtype: :class:`gcloud.logging.metric.Metric` + :returns: Metric created with the current client. + """ + return Metric(name, filter_, client=self, description=description) + + def list_metrics(self, page_size=None, page_token=None): + """List metrics for the project associated with this client. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/list + + :type page_size: int + :param page_size: maximum number of metrics to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of metrics. If not + passed, the API will return the first page of + metrics. + + :rtype: tuple, (list, str) + :returns: list of :class:`gcloud.logging.metric.Metric`, plus a + "next page token" string: if not None, indicates that + more metrics can be retrieved with another call (pass that + value as ``page_token``). 
+ """ + resources, token = self.metrics_api.list_metrics( + self.project, page_size, page_token) + metrics = [Metric.from_api_repr(resource, self) + for resource in resources] + return metrics, token diff --git a/env/Lib/site-packages/gcloud/logging/connection.py b/env/Lib/site-packages/gcloud/logging/connection.py new file mode 100644 index 0000000..83e1ead --- /dev/null +++ b/env/Lib/site-packages/gcloud/logging/connection.py @@ -0,0 +1,435 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Create / interact with gcloud logging connections.""" + +from gcloud import connection as base_connection + + +class Connection(base_connection.JSONConnection): + """A connection to Google Cloud Logging via the JSON REST API. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + connection. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: (Optional) HTTP object to make requests. + + :type api_base_url: string + :param api_base_url: The base of the API call URL. Defaults to the value + :attr:`Connection.API_BASE_URL`. 
+ """ + + API_BASE_URL = 'https://logging.googleapis.com' + """The base of the API call URL.""" + + API_VERSION = 'v2beta1' + """The version of the API, used in building the API call's URL.""" + + API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}' + """A template for the URL of a particular API call.""" + + SCOPE = ('https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/cloud-platform') + """The scopes required for authenticating as a Cloud Logging consumer.""" + + +class _LoggingAPI(object): + """Helper mapping logging-related APIs. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs + + :type connection: :class:`gcloud.logging.connection.Connection` + :param connection: the connection used to make API requests. + """ + def __init__(self, connection): + self._connection = connection + + def list_entries(self, projects, filter_=None, order_by=None, + page_size=None, page_token=None): + """Return a page of log entry resources. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list + + :type projects: list of strings + :param projects: project IDs to include. If not passed, + defaults to the project bound to the client. + + :type filter_: str + :param filter_: a filter expression. See: + https://cloud.google.com/logging/docs/view/advanced_filters + + :type order_by: str + :param order_by: One of :data:`gcloud.logging.ASCENDING` or + :data:`gcloud.logging.DESCENDING`. + + :type page_size: int + :param page_size: maximum number of entries to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of entries. If not + passed, the API will return the first page of + entries. 
+ + :rtype: tuple, (list, str) + :returns: list of mappings, plus a "next page token" string: + if not None, indicates that more entries can be retrieved + with another call (pass that value as ``page_token``). + """ + params = {'projectIds': projects} + + if filter_ is not None: + params['filter'] = filter_ + + if order_by is not None: + params['orderBy'] = order_by + + if page_size is not None: + params['pageSize'] = page_size + + if page_token is not None: + params['pageToken'] = page_token + + resp = self._connection.api_request( + method='POST', path='/entries:list', data=params) + + return resp.get('entries', ()), resp.get('nextPageToken') + + def write_entries(self, entries, logger_name=None, resource=None, + labels=None): + """API call: log an entry resource via a POST request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + + :type entries: sequence of mapping + :param entries: the log entry resources to log. + + :type logger_name: string + :param logger_name: name of default logger to which to log the entries; + individual entries may override. + + :type resource: mapping + :param resource: default resource to associate with entries; + individual entries may override. + + :type labels: mapping + :param labels: default labels to associate with entries; + individual entries may override. 
+ """ + data = {'entries': list(entries)} + + if logger_name is not None: + data['logName'] = logger_name + + if resource is not None: + data['resource'] = resource + + if labels is not None: + data['labels'] = labels + + self._connection.api_request(method='POST', path='/entries:write', + data=data) + + def logger_delete(self, project, logger_name): + """API call: delete all entries in a logger via a DELETE request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs/delete + + :type project: string + :param project: ID of project containing the log entries to delete + + :type logger_name: string + :param logger_name: name of logger containing the log entries to delete + """ + path = '/projects/%s/logs/%s' % (project, logger_name) + self._connection.api_request(method='DELETE', path=path) + + +class _SinksAPI(object): + """Helper mapping sink-related APIs. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks + + :type connection: :class:`gcloud.logging.connection.Connection` + :param connection: the connection used to make API requests. + """ + def __init__(self, connection): + self._connection = connection + + def list_sinks(self, project, page_size=None, page_token=None): + """List sinks for the project associated with this client. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/list + + :type project: string + :param project: ID of the project whose sinks are to be listed. + + :type page_size: int + :param page_size: maximum number of sinks to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of sinks. If not + passed, the API will return the first page of + sinks. 
+ + :rtype: tuple, (list, str) + :returns: list of mappings, plus a "next page token" string: + if not None, indicates that more sinks can be retrieved + with another call (pass that value as ``page_token``). + """ + params = {} + + if page_size is not None: + params['pageSize'] = page_size + + if page_token is not None: + params['pageToken'] = page_token + + path = '/projects/%s/sinks' % (project,) + resp = self._connection.api_request( + method='GET', path=path, query_params=params) + sinks = resp.get('sinks', ()) + return sinks, resp.get('nextPageToken') + + def sink_create(self, project, sink_name, filter_, destination): + """API call: create a sink resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create + + :type project: string + :param project: ID of the project in which to create the sink. + + :type sink_name: string + :param sink_name: the name of the sink + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the sink. + + :type destination: string + :param destination: destination URI for the entries exported by + the sink. + """ + target = '/projects/%s/sinks' % (project,) + data = { + 'name': sink_name, + 'filter': filter_, + 'destination': destination, + } + self._connection.api_request(method='POST', path=target, data=data) + + def sink_get(self, project, sink_name): + """API call: retrieve a sink resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get + + :type project: string + :param project: ID of the project containing the sink. + + :type sink_name: string + :param sink_name: the name of the sink + """ + target = '/projects/%s/sinks/%s' % (project, sink_name) + return self._connection.api_request(method='GET', path=target) + + def sink_update(self, project, sink_name, filter_, destination): + """API call: update a sink resource. 
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/update
+
+ :type project: string
+ :param project: ID of the project containing the sink.
+
+ :type sink_name: string
+ :param sink_name: the name of the sink
+
+ :type filter_: string
+ :param filter_: the advanced logs filter expression defining the
+ entries exported by the sink.
+
+ :type destination: string
+ :param destination: destination URI for the entries exported by
+ the sink.
+ """
+ target = '/projects/%s/sinks/%s' % (project, sink_name)
+ data = {
+ 'name': sink_name,
+ 'filter': filter_,
+ 'destination': destination,
+ }
+ self._connection.api_request(method='PUT', path=target, data=data)
+
+ def sink_delete(self, project, sink_name):
+ """API call: delete a sink resource.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/delete
+
+ :type project: string
+ :param project: ID of the project containing the sink.
+
+ :type sink_name: string
+ :param sink_name: the name of the sink
+ """
+ target = '/projects/%s/sinks/%s' % (project, sink_name)
+ self._connection.api_request(method='DELETE', path=target)
+
+
+class _MetricsAPI(object):
+ """Helper mapping metric-related APIs.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics
+
+ :type connection: :class:`gcloud.logging.connection.Connection`
+ :param connection: the connection used to make API requests.
+ """
+ def __init__(self, connection):
+ self._connection = connection
+
+ def list_metrics(self, project, page_size=None, page_token=None):
+ """List metrics for the project associated with this client.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/list
+
+ :type project: string
+ :param project: ID of the project whose metrics are to be listed.
+
+ :type page_size: int
+ :param page_size: maximum number of metrics to return, If not passed,
+ defaults to a value set by the API. 
+ + :type page_token: str + :param page_token: opaque marker for the next "page" of metrics. If not + passed, the API will return the first page of + metrics. + + :rtype: tuple, (list, str) + :returns: list of mappings, plus a "next page token" string: + if not None, indicates that more metrics can be retrieved + with another call (pass that value as ``page_token``). + """ + params = {} + + if page_size is not None: + params['pageSize'] = page_size + + if page_token is not None: + params['pageToken'] = page_token + + path = '/projects/%s/metrics' % (project,) + resp = self._connection.api_request( + method='GET', path=path, query_params=params) + metrics = resp.get('metrics', ()) + return metrics, resp.get('nextPageToken') + + def metric_create(self, project, metric_name, filter_, description=None): + """API call: create a metric resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create + + :type project: string + :param project: ID of the project in which to create the metric. + + :type metric_name: string + :param metric_name: the name of the metric + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the metric. + + :type description: string + :param description: description of the metric. + """ + target = '/projects/%s/metrics' % (project,) + data = { + 'name': metric_name, + 'filter': filter_, + 'description': description, + } + self._connection.api_request(method='POST', path=target, data=data) + + def metric_get(self, project, metric_name): + """API call: retrieve a metric resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get + + :type project: string + :param project: ID of the project containing the metric. 
+ + :type metric_name: string + :param metric_name: the name of the metric + """ + target = '/projects/%s/metrics/%s' % (project, metric_name) + return self._connection.api_request(method='GET', path=target) + + def metric_update(self, project, metric_name, filter_, description): + """API call: update a metric resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/update + + :type project: string + :param project: ID of the project containing the metric. + + :type metric_name: string + :param metric_name: the name of the metric + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the metric. + + :type description: string + :param description: description of the metric. + """ + target = '/projects/%s/metrics/%s' % (project, metric_name) + data = { + 'name': metric_name, + 'filter': filter_, + 'description': description, + } + self._connection.api_request(method='PUT', path=target, data=data) + + def metric_delete(self, project, metric_name): + """API call: delete a metric resource. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/delete + + :type project: string + :param project: ID of the project containing the metric. + + :type metric_name: string + :param metric_name: the name of the metric + """ + target = '/projects/%s/metrics/%s' % (project, metric_name) + self._connection.api_request(method='DELETE', path=target) diff --git a/env/Lib/site-packages/gcloud/logging/entries.py b/env/Lib/site-packages/gcloud/logging/entries.py new file mode 100644 index 0000000..ed492b8 --- /dev/null +++ b/env/Lib/site-packages/gcloud/logging/entries.py @@ -0,0 +1,157 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Log entries within the Google Cloud Logging API."""
+
+import json
+import re
+
+from google.protobuf.json_format import Parse
+
+from gcloud._helpers import _name_from_project_path
+from gcloud._helpers import _rfc3339_nanos_to_datetime
+
+
+_LOGGER_TEMPLATE = re.compile(r"""
+ projects/ # static prefix
+ (?P<project>[^/]+) # initial letter, wordchars + hyphen
+ /logs/ # static midfix
+ (?P<name>[^/]+) # initial letter, wordchars + allowed punc
+""", re.VERBOSE)
+
+
+def logger_name_from_path(path):
+ """Validate a logger URI path and get the logger name.
+
+ :type path: str
+ :param path: URI path for a logger API request.
+
+ :rtype: str
+ :returns: Logger name parsed from ``path``.
+ :raises: :class:`ValueError` if the ``path`` is ill-formed or if
+ the project from the ``path`` does not agree with the
+ ``project`` passed in.
+ """
+ return _name_from_project_path(path, None, _LOGGER_TEMPLATE)
+
+
+class _BaseEntry(object):
+ """Base class for TextEntry, StructEntry.
+
+ :type payload: text or dict
+ :param payload: The payload passed as ``textPayload``, ``jsonPayload``,
+ or ``protoPayload``.
+
+ :type logger: :class:`gcloud.logging.logger.Logger`
+ :param logger: the logger used to write the entry.
+
+ :type insert_id: text, or :class:`NoneType`
+ :param insert_id: (optional) the ID used to identify an entry uniquely. 
+ + :type timestamp: :class:`datetime.datetime`, or :class:`NoneType` + :param timestamp: (optional) timestamp for the entry + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry + """ + def __init__(self, payload, logger, insert_id=None, timestamp=None, + labels=None, severity=None, http_request=None): + self.payload = payload + self.logger = logger + self.insert_id = insert_id + self.timestamp = timestamp + self.labels = labels + self.severity = severity + self.http_request = http_request + + @classmethod + def from_api_repr(cls, resource, client, loggers=None): + """Factory: construct an entry given its API representation + + :type resource: dict + :param resource: text entry resource representation returned from + the API + + :type client: :class:`gcloud.logging.client.Client` + :param client: Client which holds credentials and project + configuration. + + :type loggers: dict or None + :param loggers: A mapping of logger fullnames -> loggers. If not + passed, the entry will have a newly-created logger. + + :rtype: :class:`gcloud.logging.entries.TextEntry` + :returns: Text entry parsed from ``resource``. 
+ """ + if loggers is None: + loggers = {} + logger_fullname = resource['logName'] + logger = loggers.get(logger_fullname) + if logger is None: + logger_name = logger_name_from_path(logger_fullname) + logger = loggers[logger_fullname] = client.logger(logger_name) + payload = resource[cls._PAYLOAD_KEY] + insert_id = resource.get('insertId') + timestamp = resource.get('timestamp') + if timestamp is not None: + timestamp = _rfc3339_nanos_to_datetime(timestamp) + labels = resource.get('labels') + severity = resource.get('severity') + http_request = resource.get('httpRequest') + return cls(payload, logger, insert_id=insert_id, timestamp=timestamp, + labels=labels, severity=severity, http_request=http_request) + + +class TextEntry(_BaseEntry): + """Entry created with ``textPayload``. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry + """ + _PAYLOAD_KEY = 'textPayload' + + +class StructEntry(_BaseEntry): + """Entry created with ``jsonPayload``. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry + """ + _PAYLOAD_KEY = 'jsonPayload' + + +class ProtobufEntry(_BaseEntry): + """Entry created with ``protoPayload``. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry + """ + _PAYLOAD_KEY = 'protoPayload' + + def parse_message(self, message): + """Parse payload into a protobuf message. + + Mutates the passed-in ``message`` in place. + + :type message: Protobuf message + :param message: the message to be logged + """ + Parse(json.dumps(self.payload), message) diff --git a/env/Lib/site-packages/gcloud/logging/logger.py b/env/Lib/site-packages/gcloud/logging/logger.py new file mode 100644 index 0000000..ad2d4b7 --- /dev/null +++ b/env/Lib/site-packages/gcloud/logging/logger.py @@ -0,0 +1,443 @@ +# Copyright 2016 Google Inc. All rights reserved. 
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Define API Loggers."""
+
+import json
+
+from google.protobuf.json_format import MessageToJson
+
+
+class Logger(object):
+ """Loggers represent named targets for log entries.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs
+
+ :type name: string
+ :param name: the name of the logger
+
+ :type client: :class:`gcloud.logging.client.Client`
+ :param client: A client which holds credentials and project configuration
+ for the logger (which requires a project).
+
+ :type labels: dict or :class:`NoneType`
+ :param labels: (optional) mapping of default labels for entries written
+ via this logger.
+ """
+ def __init__(self, name, client, labels=None):
+ self.name = name
+ self._client = client
+ self.labels = labels
+
+ @property
+ def client(self):
+ """Client bound to the logger."""
+ return self._client
+
+ @property
+ def project(self):
+ """Project bound to the logger."""
+ return self._client.project
+
+ @property
+ def full_name(self):
+ """Fully-qualified name used in logging APIs"""
+ return 'projects/%s/logs/%s' % (self.project, self.name)
+
+ @property
+ def path(self):
+ """URI path for use in logging APIs"""
+ return '/%s' % (self.full_name,)
+
+ def _require_client(self, client):
+ """Check client or verify over-ride.
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. 
If not passed, falls back to the + ``client`` stored on the current logger. + + :rtype: :class:`gcloud.logging.client.Client` + :returns: The client passed in or the currently bound client. + """ + if client is None: + client = self._client + return client + + def batch(self, client=None): + """Return a batch to use as a context manager. + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current topic. + + :rtype: :class:`Batch` + :returns: A batch to use as a context manager. + """ + client = self._require_client(client) + return Batch(self, client) + + def _make_entry_resource(self, text=None, info=None, message=None, + labels=None, insert_id=None, severity=None, + http_request=None): + """Return a log entry resource of the appropriate type. + + Helper for :meth:`log_text`, :meth:`log_struct`, and :meth:`log_proto`. + + Only one of ``text``, ``info``, or ``message`` should be passed. + + :type text: string or :class:`NoneType` + :param text: text payload + + :type info: dict or :class:`NoneType` + :param info: struct payload + + :type message: Protobuf message or :class:`NoneType` + :param message: protobuf payload + + :type labels: dict or :class:`NoneType` + :param labels: labels passed in to calling method. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. 
+ + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry + """ + resource = { + 'logName': self.full_name, + 'resource': {'type': 'global'}, + } + + if text is not None: + resource['textPayload'] = text + + if info is not None: + resource['jsonPayload'] = info + + if message is not None: + as_json_str = MessageToJson(message) + as_json = json.loads(as_json_str) + resource['protoPayload'] = as_json + + if labels is None: + labels = self.labels + + if labels is not None: + resource['labels'] = labels + + if insert_id is not None: + resource['insertId'] = insert_id + + if severity is not None: + resource['severity'] = severity + + if http_request is not None: + resource['httpRequest'] = http_request + + return resource + + def log_text(self, text, client=None, labels=None, insert_id=None, + severity=None, http_request=None): + """API call: log a text message via a POST request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + + :type text: text + :param text: the log message. + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. 
+ + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry + """ + client = self._require_client(client) + entry_resource = self._make_entry_resource( + text=text, labels=labels, insert_id=insert_id, severity=severity, + http_request=http_request) + client.logging_api.write_entries([entry_resource]) + + def log_struct(self, info, client=None, labels=None, insert_id=None, + severity=None, http_request=None): + """API call: log a structured message via a POST request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + + :type info: dict + :param info: the log entry information + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. 
+ + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry + """ + client = self._require_client(client) + entry_resource = self._make_entry_resource( + info=info, labels=labels, insert_id=insert_id, severity=severity, + http_request=http_request) + client.logging_api.write_entries([entry_resource]) + + def log_proto(self, message, client=None, labels=None, insert_id=None, + severity=None, http_request=None): + """API call: log a protobuf message via a POST request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + + :type message: Protobuf message + :param message: the message to be logged + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry + """ + client = self._require_client(client) + entry_resource = self._make_entry_resource( + message=message, labels=labels, insert_id=insert_id, + severity=severity, http_request=http_request) + client.logging_api.write_entries([entry_resource]) + + def delete(self, client=None): + """API call: delete all entries in a logger via a DELETE request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs/delete + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. 
+ """ + client = self._require_client(client) + client.logging_api.logger_delete(self.project, self.name) + + def list_entries(self, projects=None, filter_=None, order_by=None, + page_size=None, page_token=None): + """Return a page of log entries. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list + + :type projects: list of strings + :param projects: project IDs to include. If not passed, + defaults to the project bound to the client. + + :type filter_: string + :param filter_: a filter expression. See: + https://cloud.google.com/logging/docs/view/advanced_filters + + :type order_by: string + :param order_by: One of :data:`gcloud.logging.ASCENDING` or + :data:`gcloud.logging.DESCENDING`. + + :type page_size: int + :param page_size: maximum number of entries to return, If not passed, + defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of entries. If not + passed, the API will return the first page of + entries. + + :rtype: tuple, (list, str) + :returns: list of :class:`gcloud.logging.entry.TextEntry`, plus a + "next page token" string: if not None, indicates that + more entries can be retrieved with another call (pass that + value as ``page_token``). + """ + log_filter = 'logName=%s' % (self.full_name,) + if filter_ is not None: + filter_ = '%s AND %s' % (filter_, log_filter) + else: + filter_ = log_filter + return self.client.list_entries( + projects=projects, filter_=filter_, order_by=order_by, + page_size=page_size, page_token=page_token) + + +class Batch(object): + """Context manager: collect entries to log via a single API call. + + Helper returned by :meth:`Logger.batch` + + :type logger: :class:`gcloud.logging.logger.Logger` + :param logger: the logger to which entries will be logged. + + :type client: :class:`gcloud.logging.client.Client` + :param client: The client to use. 
+ """ + def __init__(self, logger, client): + self.logger = logger + self.entries = [] + self.client = client + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_type is None: + self.commit() + + def log_text(self, text, labels=None, insert_id=None, severity=None, + http_request=None): + """Add a text entry to be logged during :meth:`commit`. + + :type text: string + :param text: the text entry + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry. + """ + self.entries.append( + ('text', text, labels, insert_id, severity, http_request)) + + def log_struct(self, info, labels=None, insert_id=None, severity=None, + http_request=None): + """Add a struct entry to be logged during :meth:`commit`. + + :type info: dict + :param info: the struct entry + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry. + """ + self.entries.append( + ('struct', info, labels, insert_id, severity, http_request)) + + def log_proto(self, message, labels=None, insert_id=None, severity=None, + http_request=None): + """Add a protobuf entry to be logged during :meth:`commit`. 
+ + :type message: protobuf message + :param message: the protobuf entry + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry. + """ + self.entries.append( + ('proto', message, labels, insert_id, severity, http_request)) + + def commit(self, client=None): + """Send saved log entries as a single API call. + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current batch. + """ + if client is None: + client = self.client + + kwargs = { + 'logger_name': self.logger.path, + 'resource': {'type': 'global'}, + } + if self.logger.labels is not None: + kwargs['labels'] = self.logger.labels + + entries = [] + for entry_type, entry, labels, iid, severity, http_req in self.entries: + if entry_type == 'text': + info = {'textPayload': entry} + elif entry_type == 'struct': + info = {'jsonPayload': entry} + elif entry_type == 'proto': + as_json_str = MessageToJson(entry) + as_json = json.loads(as_json_str) + info = {'protoPayload': as_json} + else: + raise ValueError('Unknown entry type: %s' % (entry_type,)) + if labels is not None: + info['labels'] = labels + if iid is not None: + info['insertId'] = iid + if severity is not None: + info['severity'] = severity + if http_req is not None: + info['httpRequest'] = http_req + entries.append(info) + + client.logging_api.write_entries(entries, **kwargs) + del self.entries[:] diff --git a/env/Lib/site-packages/gcloud/logging/metric.py b/env/Lib/site-packages/gcloud/logging/metric.py new file mode 100644 index 0000000..b22ced4 --- 
/dev/null +++ b/env/Lib/site-packages/gcloud/logging/metric.py @@ -0,0 +1,174 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define Logging API Metrics.""" + +from gcloud.exceptions import NotFound + + +class Metric(object): + """Metrics represent named filters for log entries. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics + + :type name: string + :param name: the name of the metric + + :type filter_: string + :param filter_: the advanced logs filter expression defining the entries + tracked by the metric. If not passed, the instance should + already exist, to be refreshed via :meth:`reload`. + + :type client: :class:`gcloud.logging.client.Client` + :param client: A client which holds credentials and project configuration + for the metric (which requires a project). + + :type description: string + :param description: an optional description of the metric. 
+ """ + def __init__(self, name, filter_=None, client=None, description=''): + self.name = name + self._client = client + self.filter_ = filter_ + self.description = description + + @property + def client(self): + """Clent bound to the logger.""" + return self._client + + @property + def project(self): + """Project bound to the logger.""" + return self._client.project + + @property + def full_name(self): + """Fully-qualified name used in metric APIs""" + return 'projects/%s/metrics/%s' % (self.project, self.name) + + @property + def path(self): + """URL path for the metric's APIs""" + return '/%s' % (self.full_name,) + + @classmethod + def from_api_repr(cls, resource, client): + """Factory: construct a metric given its API representation + + :type resource: dict + :param resource: metric resource representation returned from the API + + :type client: :class:`gcloud.logging.client.Client` + :param client: Client which holds credentials and project + configuration for the metric. + + :rtype: :class:`gcloud.logging.metric.Metric` + :returns: Metric parsed from ``resource``. + """ + metric_name = resource['name'] + filter_ = resource['filter'] + description = resource.get('description', '') + return cls(metric_name, filter_, client=client, + description=description) + + def _require_client(self, client): + """Check client or verify over-ride. + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. + + :rtype: :class:`gcloud.logging.client.Client` + :returns: The client passed in or the currently bound client. 
+ """ + if client is None: + client = self._client + return client + + def create(self, client=None): + """API call: create the metric via a PUT request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. + """ + client = self._require_client(client) + client.metrics_api.metric_create( + self.project, self.name, self.filter_, self.description) + + def exists(self, client=None): + """API call: test for the existence of the metric via a GET request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. + """ + client = self._require_client(client) + + try: + client.metrics_api.metric_get(self.project, self.name) + except NotFound: + return False + else: + return True + + def reload(self, client=None): + """API call: sync local metric configuration via a GET request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. 
+ """ + client = self._require_client(client) + data = client.metrics_api.metric_get(self.project, self.name) + self.description = data.get('description', '') + self.filter_ = data['filter'] + + def update(self, client=None): + """API call: update metric configuration via a PUT request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/update + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. + """ + client = self._require_client(client) + client.metrics_api.metric_update( + self.project, self.name, self.filter_, self.description) + + def delete(self, client=None): + """API call: delete a metric via a DELETE request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/delete + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. + """ + client = self._require_client(client) + client.metrics_api.metric_delete(self.project, self.name) diff --git a/env/Lib/site-packages/gcloud/logging/sink.py b/env/Lib/site-packages/gcloud/logging/sink.py new file mode 100644 index 0000000..b590967 --- /dev/null +++ b/env/Lib/site-packages/gcloud/logging/sink.py @@ -0,0 +1,178 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define Logging API Sinks.""" + +from gcloud.exceptions import NotFound + + +class Sink(object): + """Sinks represent filtered exports for log entries. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks + + :type name: string + :param name: the name of the sink + + :type filter_: string + :param filter_: the advanced logs filter expression defining the entries + exported by the sink. If not passed, the instance should + already exist, to be refreshed via :meth:`reload`. + + :type destination: string + :param destination: destination URI for the entries exported by the sink. + If not passed, the instance should already exist, to + be refreshed via :meth:`reload`. + + :type client: :class:`gcloud.logging.client.Client` + :param client: A client which holds credentials and project configuration + for the sink (which requires a project). + """ + def __init__(self, name, filter_=None, destination=None, client=None): + self.name = name + self.filter_ = filter_ + self.destination = destination + self._client = client + + @property + def client(self): + """Clent bound to the sink.""" + return self._client + + @property + def project(self): + """Project bound to the sink.""" + return self._client.project + + @property + def full_name(self): + """Fully-qualified name used in sink APIs""" + return 'projects/%s/sinks/%s' % (self.project, self.name) + + @property + def path(self): + """URL path for the sink's APIs""" + return '/%s' % (self.full_name) + + @classmethod + def from_api_repr(cls, resource, client): + """Factory: construct a sink given its API representation + + :type resource: dict + :param resource: sink resource representation returned from the API + + :type client: :class:`gcloud.logging.client.Client` + :param client: Client which holds credentials and project + configuration for the sink. 
+ + :rtype: :class:`gcloud.logging.sink.Sink` + :returns: Sink parsed from ``resource``. + :raises: :class:`ValueError` if ``client`` is not ``None`` and the + project from the resource does not agree with the project + from the client. + """ + sink_name = resource['name'] + filter_ = resource['filter'] + destination = resource['destination'] + return cls(sink_name, filter_, destination, client=client) + + def _require_client(self, client): + """Check client or verify over-ride. + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. + + :rtype: :class:`gcloud.logging.client.Client` + :returns: The client passed in or the currently bound client. + """ + if client is None: + client = self._client + return client + + def create(self, client=None): + """API call: create the sink via a PUT request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. + """ + client = self._require_client(client) + client.sinks_api.sink_create( + self.project, self.name, self.filter_, self.destination) + + def exists(self, client=None): + """API call: test for the existence of the sink via a GET request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. 
+ """ + client = self._require_client(client) + + try: + client.sinks_api.sink_get(self.project, self.name) + except NotFound: + return False + else: + return True + + def reload(self, client=None): + """API call: sync local sink configuration via a GET request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. + """ + client = self._require_client(client) + data = client.sinks_api.sink_get(self.project, self.name) + self.filter_ = data['filter'] + self.destination = data['destination'] + + def update(self, client=None): + """API call: update sink configuration via a PUT request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/update + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. + """ + client = self._require_client(client) + client.sinks_api.sink_update( + self.project, self.name, self.filter_, self.destination) + + def delete(self, client=None): + """API call: delete a sink via a DELETE request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/delete + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. + """ + client = self._require_client(client) + client.sinks_api.sink_delete(self.project, self.name) diff --git a/env/Lib/site-packages/gcloud/logging/test__gax.py b/env/Lib/site-packages/gcloud/logging/test__gax.py new file mode 100644 index 0000000..3174771 --- /dev/null +++ b/env/Lib/site-packages/gcloud/logging/test__gax.py @@ -0,0 +1,1012 @@ +# Copyright 2016 Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +try: + # pylint: disable=unused-import + import gcloud.pubsub._gax + # pylint: enable=unused-import +except ImportError: # pragma: NO COVER + _HAVE_GAX = False +else: + _HAVE_GAX = True + + +class _Base(object): + PROJECT = 'PROJECT' + PROJECT_PATH = 'projects/%s' % (PROJECT,) + FILTER = 'logName:syslog AND severity>=ERROR' + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + +@unittest2.skipUnless(_HAVE_GAX, 'No gax-python') +class Test_LoggingAPI(_Base, unittest2.TestCase): + LOG_NAME = 'log_name' + + def _getTargetClass(self): + from gcloud.logging._gax import _LoggingAPI + return _LoggingAPI + + def test_ctor(self): + gax_api = _GAXLoggingAPI() + api = self._makeOne(gax_api) + self.assertTrue(api._gax_api is gax_api) + + def test_list_entries_no_paging(self): + from google.gax import INITIAL_PAGE + from gcloud.logging import DESCENDING + from gcloud._testing import _GAXPageIterator + TOKEN = 'TOKEN' + TEXT = 'TEXT' + response = _GAXPageIterator( + [_LogEntryPB(self.LOG_NAME, text_payload=TEXT)], TOKEN) + gax_api = _GAXLoggingAPI(_list_log_entries_response=response) + api = self._makeOne(gax_api) + + entries, next_token = api.list_entries( + [self.PROJECT], self.FILTER, DESCENDING) + + self.assertEqual(len(entries), 1) + entry = entries[0] + self.assertIsInstance(entry, dict) + self.assertEqual(entry['logName'], self.LOG_NAME) + 
self.assertEqual(entry['resource'], {'type': 'global'}) + self.assertEqual(entry['textPayload'], TEXT) + self.assertEqual(next_token, TOKEN) + + projects, filter_, order_by, page_size, options = ( + gax_api._list_log_entries_called_with) + self.assertEqual(projects, [self.PROJECT]) + self.assertEqual(filter_, self.FILTER) + self.assertEqual(order_by, DESCENDING) + self.assertEqual(page_size, 0) + self.assertTrue(options.page_token is INITIAL_PAGE) + + def test_list_entries_with_paging(self): + from gcloud._testing import _GAXPageIterator + SIZE = 23 + TOKEN = 'TOKEN' + NEW_TOKEN = 'NEW_TOKEN' + PAYLOAD = {'message': 'MESSAGE', 'weather': 'sunny'} + response = _GAXPageIterator( + [_LogEntryPB(self.LOG_NAME, json_payload=PAYLOAD)], NEW_TOKEN) + gax_api = _GAXLoggingAPI(_list_log_entries_response=response) + api = self._makeOne(gax_api) + + entries, next_token = api.list_entries( + [self.PROJECT], page_size=SIZE, page_token=TOKEN) + + self.assertEqual(len(entries), 1) + entry = entries[0] + self.assertIsInstance(entry, dict) + self.assertEqual(entry['logName'], self.LOG_NAME) + self.assertEqual(entry['resource'], {'type': 'global'}) + self.assertEqual(entry['jsonPayload'], PAYLOAD) + self.assertEqual(next_token, NEW_TOKEN) + + projects, filter_, order_by, page_size, options = ( + gax_api._list_log_entries_called_with) + self.assertEqual(projects, [self.PROJECT]) + self.assertEqual(filter_, '') + self.assertEqual(order_by, '') + self.assertEqual(page_size, SIZE) + self.assertEqual(options.page_token, TOKEN) + + def test_list_entries_with_extra_properties(self): + from datetime import datetime + from gcloud._testing import _GAXPageIterator + from gcloud._helpers import UTC + from gcloud._helpers import _datetime_to_rfc3339 + from gcloud._helpers import _datetime_to_pb_timestamp + NOW = datetime.utcnow().replace(tzinfo=UTC) + SIZE = 23 + TOKEN = 'TOKEN' + NEW_TOKEN = 'NEW_TOKEN' + PAYLOAD = {'message': 'MESSAGE', 'weather': 'sunny'} + SEVERITY = 'WARNING' + LABELS = { + 
'foo': 'bar', + } + IID = 'IID' + request = _HTTPRequestPB() + operation = _LogEntryOperationPB() + EXTRAS = { + 'severity': SEVERITY, + 'labels': LABELS, + 'insert_id': IID, + 'http_request': request, + 'operation': operation, + } + ENTRY = _LogEntryPB(self.LOG_NAME, proto_payload=PAYLOAD, **EXTRAS) + ENTRY.resource.labels['foo'] = 'bar' + ENTRY.timestamp = _datetime_to_pb_timestamp(NOW) + response = _GAXPageIterator([ENTRY], NEW_TOKEN) + gax_api = _GAXLoggingAPI(_list_log_entries_response=response) + api = self._makeOne(gax_api) + + entries, next_token = api.list_entries( + [self.PROJECT], page_size=SIZE, page_token=TOKEN) + + self.assertEqual(len(entries), 1) + entry = entries[0] + self.assertIsInstance(entry, dict) + self.assertEqual(entry['logName'], self.LOG_NAME) + self.assertEqual(entry['resource'], + {'type': 'global', 'labels': {'foo': 'bar'}}) + self.assertEqual(entry['protoPayload'], PAYLOAD) + self.assertEqual(entry['severity'], SEVERITY) + self.assertEqual(entry['labels'], LABELS) + self.assertEqual(entry['insertId'], IID) + self.assertEqual(entry['timestamp'], _datetime_to_rfc3339(NOW)) + EXPECTED_REQUEST = { + 'request_method': request.request_method, + 'request_url': request.request_url, + 'status': request.status, + 'request_size': request.request_size, + 'response_size': request.response_size, + 'referer': request.referer, + 'user_agent': request.user_agent, + 'remote_ip': request.remote_ip, + 'cache_hit': request.cache_hit, + } + self.assertEqual(entry['httpRequest'], EXPECTED_REQUEST) + EXPECTED_OPERATION = { + 'producer': operation.producer, + 'id': operation.id, + 'first': operation.first, + 'last': operation.last, + } + self.assertEqual(entry['operation'], EXPECTED_OPERATION) + self.assertEqual(next_token, NEW_TOKEN) + + projects, filter_, order_by, page_size, options = ( + gax_api._list_log_entries_called_with) + self.assertEqual(projects, [self.PROJECT]) + self.assertEqual(filter_, '') + self.assertEqual(order_by, '') + 
self.assertEqual(page_size, SIZE) + self.assertEqual(options.page_token, TOKEN) + + def test_write_entries_single(self): + from google.logging.v2.log_entry_pb2 import LogEntry + TEXT = 'TEXT' + LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME) + ENTRY = { + 'logName': LOG_PATH, + 'resource': {'type': 'global'}, + 'textPayload': TEXT, + } + gax_api = _GAXLoggingAPI() + api = self._makeOne(gax_api) + + api.write_entries([ENTRY]) + + entries, log_name, resource, labels, partial_success, options = ( + gax_api._write_log_entries_called_with) + self.assertEqual(len(entries), 1) + + entry = entries[0] + self.assertTrue(isinstance(entry, LogEntry)) + self.assertEqual(entry.log_name, LOG_PATH) + self.assertEqual(entry.resource.type, 'global') + self.assertEqual(entry.labels, {}) + self.assertEqual(entry.text_payload, TEXT) + + self.assertEqual(log_name, None) + self.assertEqual(resource, None) + self.assertEqual(labels, None) + self.assertEqual(partial_success, False) + self.assertEqual(options, None) + + def test_write_entries_w_extra_properties(self): + # pylint: disable=too-many-statements + from datetime import datetime + from google.logging.type.log_severity_pb2 import WARNING + from google.logging.v2.log_entry_pb2 import LogEntry + from gcloud._helpers import UTC, _pb_timestamp_to_datetime + NOW = datetime.utcnow().replace(tzinfo=UTC) + TEXT = 'TEXT' + LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME) + SEVERITY = 'WARNING' + LABELS = { + 'foo': 'bar', + } + IID = 'IID' + REQUEST_METHOD = 'GET' + REQUEST_URL = 'http://example.com/requested' + STATUS = 200 + REQUEST_SIZE = 256 + RESPONSE_SIZE = 1024 + REFERRER_URL = 'http://example.com/referer' + USER_AGENT = 'Agent/1.0' + REMOTE_IP = '1.2.3.4' + REQUEST = { + 'requestMethod': REQUEST_METHOD, + 'requestUrl': REQUEST_URL, + 'status': STATUS, + 'requestSize': REQUEST_SIZE, + 'responseSize': RESPONSE_SIZE, + 'referer': REFERRER_URL, + 'userAgent': USER_AGENT, + 'remoteIp': REMOTE_IP, + 
'cacheHit': False, + } + PRODUCER = 'PRODUCER' + OPID = 'OPID' + OPERATION = { + 'producer': PRODUCER, + 'id': OPID, + 'first': False, + 'last': True, + } + ENTRY = { + 'logName': LOG_PATH, + 'resource': {'type': 'global'}, + 'textPayload': TEXT, + 'severity': SEVERITY, + 'labels': LABELS, + 'insertId': IID, + 'timestamp': NOW, + 'httpRequest': REQUEST, + 'operation': OPERATION, + } + gax_api = _GAXLoggingAPI() + api = self._makeOne(gax_api) + + api.write_entries([ENTRY]) + + entries, log_name, resource, labels, partial_success, options = ( + gax_api._write_log_entries_called_with) + self.assertEqual(len(entries), 1) + + entry = entries[0] + self.assertTrue(isinstance(entry, LogEntry)) + self.assertEqual(entry.log_name, LOG_PATH) + self.assertEqual(entry.resource.type, 'global') + self.assertEqual(entry.text_payload, TEXT) + self.assertEqual(entry.severity, WARNING) + self.assertEqual(entry.labels, LABELS) + self.assertEqual(entry.insert_id, IID) + stamp = _pb_timestamp_to_datetime(entry.timestamp) + self.assertEqual(stamp, NOW) + + request = entry.http_request + self.assertEqual(request.request_method, REQUEST_METHOD) + self.assertEqual(request.request_url, REQUEST_URL) + self.assertEqual(request.status, STATUS) + self.assertEqual(request.request_size, REQUEST_SIZE) + self.assertEqual(request.response_size, RESPONSE_SIZE) + self.assertEqual(request.referer, REFERRER_URL) + self.assertEqual(request.user_agent, USER_AGENT) + self.assertEqual(request.remote_ip, REMOTE_IP) + self.assertEqual(request.cache_hit, False) + + operation = entry.operation + self.assertEqual(operation.producer, PRODUCER) + self.assertEqual(operation.id, OPID) + self.assertFalse(operation.first) + self.assertTrue(operation.last) + + self.assertEqual(log_name, None) + self.assertEqual(resource, None) + self.assertEqual(labels, None) + self.assertEqual(partial_success, False) + self.assertEqual(options, None) + # pylint: enable=too-many-statements + + def test_write_entries_multiple(self): + # 
pylint: disable=too-many-statements + import datetime + from google.logging.type.log_severity_pb2 import WARNING + from google.logging.v2.log_entry_pb2 import LogEntry + from google.protobuf.any_pb2 import Any + from google.protobuf.struct_pb2 import Struct + from gcloud._helpers import _datetime_to_rfc3339, UTC + TEXT = 'TEXT' + NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) + TIMESTAMP_TYPE_URL = 'type.googleapis.com/google.protobuf.Timestamp' + JSON = {'payload': 'PAYLOAD', 'type': 'json'} + PROTO = { + '@type': TIMESTAMP_TYPE_URL, + 'value': _datetime_to_rfc3339(NOW), + } + PRODUCER = 'PRODUCER' + OPID = 'OPID' + URL = 'http://example.com/' + ENTRIES = [ + {'textPayload': TEXT, + 'severity': WARNING}, + {'jsonPayload': JSON, + 'operation': {'producer': PRODUCER, 'id': OPID}}, + {'protoPayload': PROTO, + 'httpRequest': {'requestUrl': URL}}, + ] + LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME) + RESOURCE = { + 'type': 'global', + } + LABELS = { + 'foo': 'bar', + } + gax_api = _GAXLoggingAPI() + api = self._makeOne(gax_api) + + api.write_entries(ENTRIES, LOG_PATH, RESOURCE, LABELS) + + entries, log_name, resource, labels, partial_success, options = ( + gax_api._write_log_entries_called_with) + self.assertEqual(len(entries), len(ENTRIES)) + + entry = entries[0] + self.assertTrue(isinstance(entry, LogEntry)) + self.assertEqual(entry.log_name, '') + self.assertEqual(entry.resource.type, '') + self.assertEqual(entry.labels, {}) + self.assertEqual(entry.text_payload, TEXT) + self.assertEqual(entry.severity, WARNING) + + entry = entries[1] + self.assertTrue(isinstance(entry, LogEntry)) + self.assertEqual(entry.log_name, '') + self.assertEqual(entry.resource.type, '') + self.assertEqual(entry.labels, {}) + json_struct = entry.json_payload + self.assertTrue(isinstance(json_struct, Struct)) + self.assertEqual(json_struct.fields['payload'].string_value, + JSON['payload']) + operation = entry.operation + self.assertEqual(operation.producer, PRODUCER) 
+ self.assertEqual(operation.id, OPID) + + entry = entries[2] + self.assertTrue(isinstance(entry, LogEntry)) + self.assertEqual(entry.log_name, '') + self.assertEqual(entry.resource.type, '') + self.assertEqual(entry.labels, {}) + proto = entry.proto_payload + self.assertTrue(isinstance(proto, Any)) + self.assertEqual(proto.type_url, TIMESTAMP_TYPE_URL) + request = entry.http_request + self.assertEqual(request.request_url, URL) + + self.assertEqual(log_name, LOG_PATH) + self.assertEqual(resource, RESOURCE) + self.assertEqual(labels, LABELS) + self.assertEqual(partial_success, False) + self.assertEqual(options, None) + # pylint: enable=too-many-statements + + def test_logger_delete(self): + LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME) + gax_api = _GAXLoggingAPI() + api = self._makeOne(gax_api) + + api.logger_delete(self.PROJECT, self.LOG_NAME) + + log_name, options = gax_api._delete_log_called_with + self.assertEqual(log_name, LOG_PATH) + self.assertEqual(options, None) + + +@unittest2.skipUnless(_HAVE_GAX, 'No gax-python') +class Test_SinksAPI(_Base, unittest2.TestCase): + SINK_NAME = 'sink_name' + SINK_PATH = 'projects/%s/sinks/%s' % (_Base.PROJECT, SINK_NAME) + DESTINATION_URI = 'faux.googleapis.com/destination' + + def _getTargetClass(self): + from gcloud.logging._gax import _SinksAPI + return _SinksAPI + + def test_ctor(self): + gax_api = _GAXSinksAPI() + api = self._makeOne(gax_api) + self.assertTrue(api._gax_api is gax_api) + + def test_list_sinks_no_paging(self): + from google.gax import INITIAL_PAGE + from gcloud._testing import _GAXPageIterator + TOKEN = 'TOKEN' + SINKS = [{ + 'name': self.SINK_PATH, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + }] + response = _GAXPageIterator( + [_LogSinkPB(self.SINK_PATH, self.DESTINATION_URI, self.FILTER)], + TOKEN) + gax_api = _GAXSinksAPI(_list_sinks_response=response) + api = self._makeOne(gax_api) + + sinks, token = api.list_sinks(self.PROJECT) + + self.assertEqual(sinks, 
SINKS) + self.assertEqual(token, TOKEN) + + project, page_size, options = gax_api._list_sinks_called_with + self.assertEqual(project, self.PROJECT) + self.assertEqual(page_size, 0) + self.assertEqual(options.page_token, INITIAL_PAGE) + + def test_list_sinks_w_paging(self): + from gcloud._testing import _GAXPageIterator + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + SINKS = [{ + 'name': self.SINK_PATH, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + }] + response = _GAXPageIterator( + [_LogSinkPB(self.SINK_PATH, self.DESTINATION_URI, self.FILTER)], + None) + gax_api = _GAXSinksAPI(_list_sinks_response=response) + api = self._makeOne(gax_api) + + sinks, token = api.list_sinks( + self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) + + self.assertEqual(sinks, SINKS) + self.assertEqual(token, None) + + project, page_size, options = gax_api._list_sinks_called_with + self.assertEqual(project, self.PROJECT) + self.assertEqual(page_size, PAGE_SIZE) + self.assertEqual(options.page_token, TOKEN) + + def test_sink_create_error(self): + from google.gax.errors import GaxError + gax_api = _GAXSinksAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.sink_create( + self.PROJECT, self.SINK_NAME, self.FILTER, + self.DESTINATION_URI) + + def test_sink_create_conflict(self): + from gcloud.exceptions import Conflict + gax_api = _GAXSinksAPI(_create_sink_conflict=True) + api = self._makeOne(gax_api) + + with self.assertRaises(Conflict): + api.sink_create( + self.PROJECT, self.SINK_NAME, self.FILTER, + self.DESTINATION_URI) + + def test_sink_create_ok(self): + from google.logging.v2.logging_config_pb2 import LogSink + gax_api = _GAXSinksAPI() + api = self._makeOne(gax_api) + + api.sink_create( + self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + + parent, sink, options = ( + gax_api._create_sink_called_with) + self.assertEqual(parent, self.PROJECT_PATH) + self.assertTrue(isinstance(sink, LogSink)) + 
self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertEqual(options, None) + + def test_sink_get_error(self): + from gcloud.exceptions import NotFound + gax_api = _GAXSinksAPI() + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.sink_get(self.PROJECT, self.SINK_NAME) + + def test_sink_get_miss(self): + from google.gax.errors import GaxError + gax_api = _GAXSinksAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.sink_get(self.PROJECT, self.SINK_NAME) + + def test_sink_get_hit(self): + RESPONSE = { + 'name': self.SINK_PATH, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + sink_pb = _LogSinkPB( + self.SINK_PATH, self.DESTINATION_URI, self.FILTER) + gax_api = _GAXSinksAPI(_get_sink_response=sink_pb) + api = self._makeOne(gax_api) + + response = api.sink_get(self.PROJECT, self.SINK_NAME) + + self.assertEqual(response, RESPONSE) + + sink_name, options = gax_api._get_sink_called_with + self.assertEqual(sink_name, self.SINK_PATH) + self.assertEqual(options, None) + + def test_sink_update_error(self): + from google.gax.errors import GaxError + gax_api = _GAXSinksAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.sink_update( + self.PROJECT, self.SINK_NAME, self.FILTER, + self.DESTINATION_URI) + + def test_sink_update_miss(self): + from gcloud.exceptions import NotFound + gax_api = _GAXSinksAPI() + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.sink_update( + self.PROJECT, self.SINK_NAME, self.FILTER, + self.DESTINATION_URI) + + def test_sink_update_hit(self): + from google.logging.v2.logging_config_pb2 import LogSink + response = _LogSinkPB( + self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + gax_api = _GAXSinksAPI(_update_sink_response=response) + api = self._makeOne(gax_api) + + 
api.sink_update( + self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + + sink_name, sink, options = ( + gax_api._update_sink_called_with) + self.assertEqual(sink_name, self.SINK_PATH) + self.assertTrue(isinstance(sink, LogSink)) + self.assertEqual(sink.name, self.SINK_PATH) + self.assertEqual(sink.filter, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertEqual(options, None) + + def test_sink_delete_error(self): + from google.gax.errors import GaxError + gax_api = _GAXSinksAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.sink_delete(self.PROJECT, self.SINK_NAME) + + def test_sink_delete_miss(self): + from gcloud.exceptions import NotFound + gax_api = _GAXSinksAPI(_sink_not_found=True) + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.sink_delete(self.PROJECT, self.SINK_NAME) + + def test_sink_delete_hit(self): + gax_api = _GAXSinksAPI() + api = self._makeOne(gax_api) + + api.sink_delete(self.PROJECT, self.SINK_NAME) + + sink_name, options = gax_api._delete_sink_called_with + self.assertEqual(sink_name, self.SINK_PATH) + self.assertEqual(options, None) + + +@unittest2.skipUnless(_HAVE_GAX, 'No gax-python') +class Test_MetricsAPI(_Base, unittest2.TestCase): + METRIC_NAME = 'metric_name' + METRIC_PATH = 'projects/%s/metrics/%s' % (_Base.PROJECT, METRIC_NAME) + DESCRIPTION = 'Description' + + def _getTargetClass(self): + from gcloud.logging._gax import _MetricsAPI + return _MetricsAPI + + def test_ctor(self): + gax_api = _GAXMetricsAPI() + api = self._makeOne(gax_api) + self.assertTrue(api._gax_api is gax_api) + + def test_list_metrics_no_paging(self): + from google.gax import INITIAL_PAGE + from gcloud._testing import _GAXPageIterator + TOKEN = 'TOKEN' + METRICS = [{ + 'name': self.METRIC_PATH, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + }] + response = _GAXPageIterator( + [_LogMetricPB(self.METRIC_PATH, self.DESCRIPTION, 
self.FILTER)], + TOKEN) + gax_api = _GAXMetricsAPI(_list_log_metrics_response=response) + api = self._makeOne(gax_api) + + metrics, token = api.list_metrics(self.PROJECT) + + self.assertEqual(metrics, METRICS) + self.assertEqual(token, TOKEN) + + project, page_size, options = gax_api._list_log_metrics_called_with + self.assertEqual(project, self.PROJECT) + self.assertEqual(page_size, 0) + self.assertEqual(options.page_token, INITIAL_PAGE) + + def test_list_metrics_w_paging(self): + from gcloud._testing import _GAXPageIterator + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + METRICS = [{ + 'name': self.METRIC_PATH, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + }] + response = _GAXPageIterator( + [_LogMetricPB(self.METRIC_PATH, self.DESCRIPTION, self.FILTER)], + None) + gax_api = _GAXMetricsAPI(_list_log_metrics_response=response) + api = self._makeOne(gax_api) + + metrics, token = api.list_metrics( + self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) + + self.assertEqual(metrics, METRICS) + self.assertEqual(token, None) + + project, page_size, options = gax_api._list_log_metrics_called_with + self.assertEqual(project, self.PROJECT) + self.assertEqual(page_size, PAGE_SIZE) + self.assertEqual(options.page_token, TOKEN) + + def test_metric_create_error(self): + from google.gax.errors import GaxError + gax_api = _GAXMetricsAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.metric_create( + self.PROJECT, self.METRIC_NAME, self.FILTER, + self.DESCRIPTION) + + def test_metric_create_conflict(self): + from gcloud.exceptions import Conflict + gax_api = _GAXMetricsAPI(_create_log_metric_conflict=True) + api = self._makeOne(gax_api) + + with self.assertRaises(Conflict): + api.metric_create( + self.PROJECT, self.METRIC_NAME, self.FILTER, + self.DESCRIPTION) + + def test_metric_create_ok(self): + from google.logging.v2.logging_metrics_pb2 import LogMetric + gax_api = _GAXMetricsAPI() + api = self._makeOne(gax_api) + + 
api.metric_create( + self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) + + parent, metric, options = ( + gax_api._create_log_metric_called_with) + self.assertEqual(parent, self.PROJECT_PATH) + self.assertTrue(isinstance(metric, LogMetric)) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertEqual(options, None) + + def test_metric_get_error(self): + from gcloud.exceptions import NotFound + gax_api = _GAXMetricsAPI() + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.metric_get(self.PROJECT, self.METRIC_NAME) + + def test_metric_get_miss(self): + from google.gax.errors import GaxError + gax_api = _GAXMetricsAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.metric_get(self.PROJECT, self.METRIC_NAME) + + def test_metric_get_hit(self): + RESPONSE = { + 'name': self.METRIC_PATH, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + } + metric_pb = _LogMetricPB( + self.METRIC_PATH, self.DESCRIPTION, self.FILTER) + gax_api = _GAXMetricsAPI(_get_log_metric_response=metric_pb) + api = self._makeOne(gax_api) + + response = api.metric_get(self.PROJECT, self.METRIC_NAME) + + self.assertEqual(response, RESPONSE) + + metric_name, options = gax_api._get_log_metric_called_with + self.assertEqual(metric_name, self.METRIC_PATH) + self.assertEqual(options, None) + + def test_metric_update_error(self): + from google.gax.errors import GaxError + gax_api = _GAXMetricsAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.metric_update( + self.PROJECT, self.METRIC_NAME, self.FILTER, + self.DESCRIPTION) + + def test_metric_update_miss(self): + from gcloud.exceptions import NotFound + gax_api = _GAXMetricsAPI() + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.metric_update( + self.PROJECT, self.METRIC_NAME, 
self.FILTER, + self.DESCRIPTION) + + def test_metric_update_hit(self): + from google.logging.v2.logging_metrics_pb2 import LogMetric + response = _LogMetricPB( + self.METRIC_NAME, self.FILTER, self.DESCRIPTION) + gax_api = _GAXMetricsAPI(_update_log_metric_response=response) + api = self._makeOne(gax_api) + + api.metric_update( + self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION) + + metric_name, metric, options = ( + gax_api._update_log_metric_called_with) + self.assertEqual(metric_name, self.METRIC_PATH) + self.assertTrue(isinstance(metric, LogMetric)) + self.assertEqual(metric.name, self.METRIC_PATH) + self.assertEqual(metric.filter, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertEqual(options, None) + + def test_metric_delete_error(self): + from google.gax.errors import GaxError + gax_api = _GAXMetricsAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.metric_delete(self.PROJECT, self.METRIC_NAME) + + def test_metric_delete_miss(self): + from gcloud.exceptions import NotFound + gax_api = _GAXMetricsAPI(_log_metric_not_found=True) + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.metric_delete(self.PROJECT, self.METRIC_NAME) + + def test_metric_delete_hit(self): + gax_api = _GAXMetricsAPI() + api = self._makeOne(gax_api) + + api.metric_delete(self.PROJECT, self.METRIC_NAME) + + metric_name, options = gax_api._delete_log_metric_called_with + self.assertEqual(metric_name, self.METRIC_PATH) + self.assertEqual(options, None) + + +class _GAXBaseAPI(object): + + _random_gax_error = False + + def __init__(self, **kw): + self.__dict__.update(kw) + + def _make_grpc_error(self, status_code): + from grpc.framework.interfaces.face.face import AbortionError + + class _DummyException(AbortionError): + code = status_code + + def __init__(self): + pass + + return _DummyException() + + def _make_grpc_not_found(self): + from grpc.beta.interfaces import 
StatusCode + return self._make_grpc_error(StatusCode.NOT_FOUND) + + def _make_grpc_failed_precondition(self): + from grpc.beta.interfaces import StatusCode + return self._make_grpc_error(StatusCode.FAILED_PRECONDITION) + + +class _GAXLoggingAPI(_GAXBaseAPI): + + def list_log_entries( + self, projects, filter_, order_by, page_size, options): + self._list_log_entries_called_with = ( + projects, filter_, order_by, page_size, options) + return self._list_log_entries_response + + def write_log_entries(self, entries, log_name, resource, labels, + partial_success, options): + self._write_log_entries_called_with = ( + entries, log_name, resource, labels, partial_success, options) + + def delete_log(self, log_name, options): + self._delete_log_called_with = log_name, options + + +class _GAXSinksAPI(_GAXBaseAPI): + + _create_sink_conflict = False + _sink_not_found = False + + def list_sinks(self, parent, page_size, options): + self._list_sinks_called_with = parent, page_size, options + return self._list_sinks_response + + def create_sink(self, parent, sink, options): + from google.gax.errors import GaxError + self._create_sink_called_with = parent, sink, options + if self._random_gax_error: + raise GaxError('error') + if self._create_sink_conflict: + raise GaxError('conflict', self._make_grpc_failed_precondition()) + + def get_sink(self, sink_name, options): + from google.gax.errors import GaxError + self._get_sink_called_with = sink_name, options + if self._random_gax_error: + raise GaxError('error') + try: + return self._get_sink_response + except AttributeError: + raise GaxError('notfound', self._make_grpc_not_found()) + + def update_sink(self, sink_name, sink, options=None): + from google.gax.errors import GaxError + self._update_sink_called_with = sink_name, sink, options + if self._random_gax_error: + raise GaxError('error') + try: + return self._update_sink_response + except AttributeError: + raise GaxError('notfound', self._make_grpc_not_found()) + + def 
delete_sink(self, sink_name, options=None): + from google.gax.errors import GaxError + self._delete_sink_called_with = sink_name, options + if self._random_gax_error: + raise GaxError('error') + if self._sink_not_found: + raise GaxError('notfound', self._make_grpc_not_found()) + + +class _GAXMetricsAPI(_GAXBaseAPI): + + _create_log_metric_conflict = False + _log_metric_not_found = False + + def list_log_metrics(self, parent, page_size, options): + self._list_log_metrics_called_with = parent, page_size, options + return self._list_log_metrics_response + + def create_log_metric(self, parent, metric, options): + from google.gax.errors import GaxError + self._create_log_metric_called_with = parent, metric, options + if self._random_gax_error: + raise GaxError('error') + if self._create_log_metric_conflict: + raise GaxError('conflict', self._make_grpc_failed_precondition()) + + def get_log_metric(self, metric_name, options): + from google.gax.errors import GaxError + self._get_log_metric_called_with = metric_name, options + if self._random_gax_error: + raise GaxError('error') + try: + return self._get_log_metric_response + except AttributeError: + raise GaxError('notfound', self._make_grpc_not_found()) + + def update_log_metric(self, metric_name, metric, options=None): + from google.gax.errors import GaxError + self._update_log_metric_called_with = metric_name, metric, options + if self._random_gax_error: + raise GaxError('error') + try: + return self._update_log_metric_response + except AttributeError: + raise GaxError('notfound', self._make_grpc_not_found()) + + def delete_log_metric(self, metric_name, options=None): + from google.gax.errors import GaxError + self._delete_log_metric_called_with = metric_name, options + if self._random_gax_error: + raise GaxError('error') + if self._log_metric_not_found: + raise GaxError('notfound', self._make_grpc_not_found()) + + +class _HTTPRequestPB(object): + + request_url = 'http://example.com/requested' + request_method = 'GET' 
+ status = 200 + referer = 'http://example.com/referer' + user_agent = 'AGENT' + cache_hit = False + request_size = 256 + response_size = 1024 + remote_ip = '1.2.3.4' + + +class _LogEntryOperationPB(object): + + producer = 'PRODUCER' + first = last = False + id = 'OPID' + + +class _ResourcePB(object): + + def __init__(self, type_='global', **labels): + self.type = type_ + self.labels = labels + + +class _LogEntryPB(object): + + severity = 'DEFAULT' + http_request = operation = insert_id = None + text_payload = json_payload = proto_payload = None + + def __init__(self, log_name, **kw): + self.log_name = log_name + self.resource = _ResourcePB() + self.timestamp = self._make_timestamp() + self.labels = kw.pop('labels', {}) + self.__dict__.update(kw) + + @staticmethod + def _make_timestamp(): + from datetime import datetime + from gcloud._helpers import UTC + from gcloud._helpers import _datetime_to_pb_timestamp + NOW = datetime.utcnow().replace(tzinfo=UTC) + return _datetime_to_pb_timestamp(NOW) + + +class _LogSinkPB(object): + + def __init__(self, name, destination, filter_): + self.name = name + self.destination = destination + self.filter = filter_ + + +class _LogMetricPB(object): + + def __init__(self, name, description, filter_): + self.name = name + self.description = description + self.filter = filter_ diff --git a/env/Lib/site-packages/gcloud/logging/test_client.py b/env/Lib/site-packages/gcloud/logging/test_client.py new file mode 100644 index 0000000..4d42a6b --- /dev/null +++ b/env/Lib/site-packages/gcloud/logging/test_client.py @@ -0,0 +1,495 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class TestClient(unittest2.TestCase): + + PROJECT = 'PROJECT' + LOGGER_NAME = 'LOGGER_NAME' + SINK_NAME = 'SINK_NAME' + FILTER = 'logName:syslog AND severity>=ERROR' + DESTINATION_URI = 'faux.googleapis.com/destination' + METRIC_NAME = 'metric_name' + FILTER = 'logName:syslog AND severity>=ERROR' + DESCRIPTION = 'DESCRIPTION' + + def _getTargetClass(self): + from gcloud.logging.client import Client + return Client + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + self.assertEqual(client.project, self.PROJECT) + + def test_logging_api_wo_gax(self): + from gcloud.logging.connection import _LoggingAPI + from gcloud.logging import client as MUT + from gcloud._testing import _Monkey + client = self._makeOne(self.PROJECT, credentials=_Credentials()) + conn = client.connection = object() + + with _Monkey(MUT, _USE_GAX=False): + api = client.logging_api + + self.assertTrue(isinstance(api, _LoggingAPI)) + self.assertTrue(api._connection is conn) + # API instance is cached + again = client.logging_api + self.assertTrue(again is api) + + def test_logging_api_w_gax(self): + from gcloud.logging import client as MUT + from gcloud._testing import _Monkey + + wrapped = object() + _called_with = [] + + def _generated_api(*args, **kw): + _called_with.append((args, kw)) + return wrapped + + class _GaxLoggingAPI(object): + + def __init__(self, _wrapped): + self._wrapped = _wrapped + + creds = 
_Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + + with _Monkey(MUT, + _USE_GAX=True, + GeneratedLoggingAPI=_generated_api, + GAXLoggingAPI=_GaxLoggingAPI): + api = client.logging_api + + self.assertIsInstance(api, _GaxLoggingAPI) + self.assertTrue(api._wrapped is wrapped) + # API instance is cached + again = client.logging_api + self.assertTrue(again is api) + + def test_sinks_api_wo_gax(self): + from gcloud.logging.connection import _SinksAPI + from gcloud.logging import client as MUT + from gcloud._testing import _Monkey + client = self._makeOne(self.PROJECT, credentials=_Credentials()) + conn = client.connection = object() + + with _Monkey(MUT, _USE_GAX=False): + api = client.sinks_api + + self.assertTrue(isinstance(api, _SinksAPI)) + self.assertTrue(api._connection is conn) + # API instance is cached + again = client.sinks_api + self.assertTrue(again is api) + + def test_sinks_api_w_gax(self): + from gcloud.logging import client as MUT + from gcloud._testing import _Monkey + + wrapped = object() + _called_with = [] + + def _generated_api(*args, **kw): + _called_with.append((args, kw)) + return wrapped + + class _GaxSinksAPI(object): + + def __init__(self, _wrapped): + self._wrapped = _wrapped + + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + + with _Monkey(MUT, + _USE_GAX=True, + GeneratedSinksAPI=_generated_api, + GAXSinksAPI=_GaxSinksAPI): + api = client.sinks_api + + self.assertIsInstance(api, _GaxSinksAPI) + self.assertTrue(api._wrapped is wrapped) + # API instance is cached + again = client.sinks_api + self.assertTrue(again is api) + + def test_metrics_api_wo_gax(self): + from gcloud.logging.connection import _MetricsAPI + from gcloud.logging import client as MUT + from gcloud._testing import _Monkey + client = self._makeOne(self.PROJECT, credentials=_Credentials()) + conn = client.connection = object() + + with _Monkey(MUT, _USE_GAX=False): + api = client.metrics_api + + 
self.assertTrue(isinstance(api, _MetricsAPI)) + self.assertTrue(api._connection is conn) + # API instance is cached + again = client.metrics_api + self.assertTrue(again is api) + + def test_metrics_api_w_gax(self): + from gcloud.logging import client as MUT + from gcloud._testing import _Monkey + + wrapped = object() + _called_with = [] + + def _generated_api(*args, **kw): + _called_with.append((args, kw)) + return wrapped + + class _GaxMetricsAPI(object): + + def __init__(self, _wrapped): + self._wrapped = _wrapped + + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + + with _Monkey(MUT, + _USE_GAX=True, + GeneratedMetricsAPI=_generated_api, + GAXMetricsAPI=_GaxMetricsAPI): + api = client.metrics_api + + self.assertIsInstance(api, _GaxMetricsAPI) + self.assertTrue(api._wrapped is wrapped) + # API instance is cached + again = client.metrics_api + self.assertTrue(again is api) + + def test_logger(self): + from gcloud.logging.logger import Logger + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + logger = client.logger(self.LOGGER_NAME) + self.assertTrue(isinstance(logger, Logger)) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertTrue(logger.client is client) + self.assertEqual(logger.project, self.PROJECT) + + def test__entry_from_resource_unknown_type(self): + PROJECT = 'PROJECT' + creds = _Credentials() + client = self._makeOne(PROJECT, creds) + loggers = {} + with self.assertRaises(ValueError): + client._entry_from_resource({'unknownPayload': {}}, loggers) + + def test_list_entries_defaults(self): + from gcloud.logging.entries import TextEntry + IID = 'IID' + TEXT = 'TEXT' + TOKEN = 'TOKEN' + ENTRIES = [{ + 'textPayload': TEXT, + 'insertId': IID, + 'resource': { + 'type': 'global', + }, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }] + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + api = 
client._logging_api = _DummyLoggingAPI() + api._list_entries_response = ENTRIES, TOKEN + + entries, token = client.list_entries() + + self.assertEqual(len(entries), 1) + entry = entries[0] + self.assertTrue(isinstance(entry, TextEntry)) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.payload, TEXT) + logger = entry.logger + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertTrue(logger.client is client) + self.assertEqual(logger.project, self.PROJECT) + self.assertEqual(token, TOKEN) + + self.assertEqual( + api._list_entries_called_with, + ([self.PROJECT], None, None, None, None)) + + def test_list_entries_explicit(self): + from gcloud.logging import DESCENDING + from gcloud.logging.entries import ProtobufEntry + from gcloud.logging.entries import StructEntry + from gcloud.logging.logger import Logger + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + FILTER = 'logName:LOGNAME' + IID1 = 'IID1' + IID2 = 'IID2' + PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} + PROTO_PAYLOAD = PAYLOAD.copy() + PROTO_PAYLOAD['@type'] = 'type.googleapis.com/testing.example' + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + ENTRIES = [{ + 'jsonPayload': PAYLOAD, + 'insertId': IID1, + 'resource': { + 'type': 'global', + }, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }, { + 'protoPayload': PROTO_PAYLOAD, + 'insertId': IID2, + 'resource': { + 'type': 'global', + }, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }] + client = self._makeOne(self.PROJECT, credentials=_Credentials()) + api = client._logging_api = _DummyLoggingAPI() + api._list_entries_response = ENTRIES, None + + entries, token = client.list_entries( + projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING, + page_size=PAGE_SIZE, page_token=TOKEN) + self.assertEqual(len(entries), 2) + + entry = entries[0] + self.assertTrue(isinstance(entry, StructEntry)) + self.assertEqual(entry.insert_id, IID1) + 
self.assertEqual(entry.payload, PAYLOAD) + logger = entry.logger + self.assertTrue(isinstance(logger, Logger)) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertTrue(logger.client is client) + self.assertEqual(logger.project, self.PROJECT) + + entry = entries[1] + self.assertTrue(isinstance(entry, ProtobufEntry)) + self.assertEqual(entry.insert_id, IID2) + self.assertEqual(entry.payload, PROTO_PAYLOAD) + logger = entry.logger + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertTrue(logger.client is client) + self.assertEqual(logger.project, self.PROJECT) + + self.assertTrue(entries[0].logger is entries[1].logger) + + self.assertEqual(token, None) + self.assertEqual( + api._list_entries_called_with, + ([PROJECT1, PROJECT2], FILTER, DESCENDING, PAGE_SIZE, TOKEN)) + + def test_sink_defaults(self): + from gcloud.logging.sink import Sink + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + sink = client.sink(self.SINK_NAME) + self.assertTrue(isinstance(sink, Sink)) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, None) + self.assertEqual(sink.destination, None) + self.assertTrue(sink.client is client) + self.assertEqual(sink.project, self.PROJECT) + + def test_sink_explicit(self): + from gcloud.logging.sink import Sink + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + sink = client.sink(self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + self.assertTrue(isinstance(sink, Sink)) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertTrue(sink.client is client) + self.assertEqual(sink.project, self.PROJECT) + + def test_list_sinks_no_paging(self): + from gcloud.logging.sink import Sink + PROJECT = 'PROJECT' + TOKEN = 'TOKEN' + SINK_NAME = 'sink_name' + FILTER = 'logName:syslog AND severity>=ERROR' + SINKS = [{ + 'name': SINK_NAME, 
+ 'filter': FILTER, + 'destination': self.DESTINATION_URI, + }] + client = self._makeOne(project=PROJECT, credentials=_Credentials()) + api = client._sinks_api = _DummySinksAPI() + api._list_sinks_response = SINKS, TOKEN + + sinks, token = client.list_sinks() + + self.assertEqual(len(sinks), 1) + sink = sinks[0] + self.assertTrue(isinstance(sink, Sink)) + self.assertEqual(sink.name, SINK_NAME) + self.assertEqual(sink.filter_, FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + + self.assertEqual(token, TOKEN) + self.assertEqual(api._list_sinks_called_with, + (PROJECT, None, None)) + + def test_list_sinks_with_paging(self): + from gcloud.logging.sink import Sink + PROJECT = 'PROJECT' + SINK_NAME = 'sink_name' + FILTER = 'logName:syslog AND severity>=ERROR' + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + SINKS = [{ + 'name': SINK_NAME, + 'filter': FILTER, + 'destination': self.DESTINATION_URI, + }] + client = self._makeOne(project=PROJECT, credentials=_Credentials()) + api = client._sinks_api = _DummySinksAPI() + api._list_sinks_response = SINKS, None + + sinks, token = client.list_sinks(PAGE_SIZE, TOKEN) + + self.assertEqual(len(sinks), 1) + sink = sinks[0] + self.assertTrue(isinstance(sink, Sink)) + self.assertEqual(sink.name, SINK_NAME) + self.assertEqual(sink.filter_, FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertEqual(token, None) + self.assertEqual(api._list_sinks_called_with, + (PROJECT, PAGE_SIZE, TOKEN)) + + def test_metric_defaults(self): + from gcloud.logging.metric import Metric + creds = _Credentials() + + client_obj = self._makeOne(project=self.PROJECT, credentials=creds) + metric = client_obj.metric(self.METRIC_NAME) + self.assertTrue(isinstance(metric, Metric)) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, None) + self.assertEqual(metric.description, '') + self.assertTrue(metric.client is client_obj) + self.assertEqual(metric.project, self.PROJECT) + + def 
test_metric_explicit(self): + from gcloud.logging.metric import Metric + creds = _Credentials() + + client_obj = self._makeOne(project=self.PROJECT, credentials=creds) + metric = client_obj.metric(self.METRIC_NAME, self.FILTER, + description=self.DESCRIPTION) + self.assertTrue(isinstance(metric, Metric)) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertTrue(metric.client is client_obj) + self.assertEqual(metric.project, self.PROJECT) + + def test_list_metrics_no_paging(self): + from gcloud.logging.metric import Metric + PROJECT = 'PROJECT' + TOKEN = 'TOKEN' + METRICS = [{ + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + }] + client = self._makeOne(project=PROJECT, credentials=_Credentials()) + api = client._metrics_api = _DummyMetricsAPI() + api._list_metrics_response = METRICS, TOKEN + + metrics, token = client.list_metrics() + + self.assertEqual(len(metrics), 1) + metric = metrics[0] + self.assertTrue(isinstance(metric, Metric)) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertEqual(token, TOKEN) + self.assertEqual(api._list_metrics_called_with, + (PROJECT, None, None)) + + def test_list_metrics_with_paging(self): + from gcloud.logging.metric import Metric + PROJECT = 'PROJECT' + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + METRICS = [{ + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + }] + client = self._makeOne(project=PROJECT, credentials=_Credentials()) + api = client._metrics_api = _DummyMetricsAPI() + api._list_metrics_response = METRICS, None + + # Execute request. + metrics, token = client.list_metrics(PAGE_SIZE, TOKEN) + # Test values are correct. 
+ self.assertEqual(len(metrics), 1) + metric = metrics[0] + self.assertTrue(isinstance(metric, Metric)) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertEqual(token, None) + self.assertEqual(api._list_metrics_called_with, + (PROJECT, PAGE_SIZE, TOKEN)) + + +class _Credentials(object): + + _scopes = None + + @staticmethod + def create_scoped_required(): + return True + + def create_scoped(self, scope): + self._scopes = scope + return self + + +class _DummyLoggingAPI(object): + + def list_entries(self, projects, filter_, order_by, page_size, page_token): + self._list_entries_called_with = ( + projects, filter_, order_by, page_size, page_token) + return self._list_entries_response + + +class _DummySinksAPI(object): + + def list_sinks(self, project, page_size, page_token): + self._list_sinks_called_with = (project, page_size, page_token) + return self._list_sinks_response + + +class _DummyMetricsAPI(object): + + def list_metrics(self, project, page_size, page_token): + self._list_metrics_called_with = (project, page_size, page_token) + return self._list_metrics_response diff --git a/env/Lib/site-packages/gcloud/logging/test_connection.py b/env/Lib/site-packages/gcloud/logging/test_connection.py new file mode 100644 index 0000000..89cda74 --- /dev/null +++ b/env/Lib/site-packages/gcloud/logging/test_connection.py @@ -0,0 +1,633 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class TestConnection(unittest2.TestCase): + + PROJECT = 'project' + FILTER = 'logName:syslog AND severity>=ERROR' + + def _getTargetClass(self): + from gcloud.logging.connection import Connection + return Connection + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_default_url(self): + creds = _Credentials() + conn = self._makeOne(creds) + klass = self._getTargetClass() + self.assertEqual(conn.credentials._scopes, klass.SCOPE) + + +class Test_LoggingAPI(unittest2.TestCase): + + PROJECT = 'project' + LIST_ENTRIES_PATH = 'entries:list' + WRITE_ENTRIES_PATH = 'entries:write' + LOGGER_NAME = 'LOGGER_NAME' + FILTER = 'logName:syslog AND severity>=ERROR' + + def _getTargetClass(self): + from gcloud.logging.connection import _LoggingAPI + return _LoggingAPI + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + connection = object() + api = self._makeOne(connection) + self.assertTrue(api._connection is connection) + + @staticmethod + def _make_timestamp(): + from datetime import datetime + from gcloud._helpers import UTC + from gcloud.logging.test_entries import _datetime_to_rfc3339_w_nanos + NOW = datetime.utcnow().replace(tzinfo=UTC) + return _datetime_to_rfc3339_w_nanos(NOW) + + def test_list_entries_no_paging(self): + TIMESTAMP = self._make_timestamp() + IID = 'IID' + TEXT = 'TEXT' + SENT = { + 'projectIds': [self.PROJECT], + } + TOKEN = 'TOKEN' + RETURNED = { + 'entries': [{ + 'textPayload': TEXT, + 'insertId': IID, + 'resource': { + 'type': 'global', + }, + 'timestamp': TIMESTAMP, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }], + 'nextPageToken': TOKEN, + } + conn = _Connection(RETURNED) + api = self._makeOne(conn) + + entries, token = api.list_entries([self.PROJECT]) + + self.assertEqual(entries, 
RETURNED['entries']) + self.assertEqual(token, TOKEN) + + self.assertEqual(conn._called_with['method'], 'POST') + path = '/%s' % self.LIST_ENTRIES_PATH + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_list_entries_w_paging(self): + from gcloud.logging import DESCENDING + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + TIMESTAMP = self._make_timestamp() + IID1 = 'IID1' + IID2 = 'IID2' + PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} + PROTO_PAYLOAD = PAYLOAD.copy() + PROTO_PAYLOAD['@type'] = 'type.googleapis.com/testing.example' + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + SENT = { + 'projectIds': [PROJECT1, PROJECT2], + 'filter': self.FILTER, + 'orderBy': DESCENDING, + 'pageSize': PAGE_SIZE, + 'pageToken': TOKEN, + } + RETURNED = { + 'entries': [{ + 'jsonPayload': PAYLOAD, + 'insertId': IID1, + 'resource': { + 'type': 'global', + }, + 'timestamp': TIMESTAMP, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }, { + 'protoPayload': PROTO_PAYLOAD, + 'insertId': IID2, + 'resource': { + 'type': 'global', + }, + 'timestamp': TIMESTAMP, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }], + } + conn = _Connection(RETURNED) + api = self._makeOne(conn) + + entries, token = api.list_entries( + projects=[PROJECT1, PROJECT2], filter_=self.FILTER, + order_by=DESCENDING, page_size=PAGE_SIZE, page_token=TOKEN) + + self.assertEqual(entries, RETURNED['entries']) + self.assertEqual(token, None) + + self.assertEqual(conn._called_with['method'], 'POST') + path = '/%s' % self.LIST_ENTRIES_PATH + self.assertEqual(conn._called_with['path'], path) + self.assertEqual(conn._called_with['data'], SENT) + + def test_write_entries_single(self): + TEXT = 'TEXT' + ENTRY = { + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + }, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + } + SENT = { + 'entries': [ENTRY], + } + conn = 
class Test_SinksAPI(unittest2.TestCase):
    """Tests for ``gcloud.logging.connection._SinksAPI``.

    Each test drives the wrapper against a fake ``_Connection`` and then
    asserts on the HTTP method, path, and payload it recorded.
    """

    PROJECT = 'project'
    FILTER = 'logName:syslog AND severity>=ERROR'
    LIST_SINKS_PATH = 'projects/%s/sinks' % (PROJECT,)
    SINK_NAME = 'sink_name'
    SINK_PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME)
    DESTINATION_URI = 'faux.googleapis.com/destination'

    def _getTargetClass(self):
        # Deferred import: the class under test.
        from gcloud.logging.connection import _SinksAPI
        return _SinksAPI

    def _makeOne(self, *args, **kw):
        # Instantiate the class under test.
        return self._getTargetClass()(*args, **kw)

    def test_ctor(self):
        connection = object()
        api = self._makeOne(connection)
        self.assertTrue(api._connection is connection)

    def test_list_sinks_no_paging(self):
        TOKEN = 'TOKEN'
        RETURNED = {
            'sinks': [{
                'name': self.SINK_PATH,
                'filter': self.FILTER,
                'destination': self.DESTINATION_URI,
            }],
            'nextPageToken': TOKEN,
        }
        conn = _Connection(RETURNED)
        api = self._makeOne(conn)

        sinks, token = api.list_sinks(self.PROJECT)

        self.assertEqual(sinks, RETURNED['sinks'])
        self.assertEqual(token, TOKEN)

        self.assertEqual(conn._called_with['method'], 'GET')
        path = '/%s' % (self.LIST_SINKS_PATH,)
        self.assertEqual(conn._called_with['path'], path)
        # Without paging args, no query parameters are sent.
        self.assertEqual(conn._called_with['query_params'], {})

    def test_list_sinks_w_paging(self):
        TOKEN = 'TOKEN'
        PAGE_SIZE = 42
        RETURNED = {
            'sinks': [{
                'name': self.SINK_PATH,
                'filter': self.FILTER,
                'destination': self.DESTINATION_URI,
            }],
        }
        conn = _Connection(RETURNED)
        api = self._makeOne(conn)

        sinks, token = api.list_sinks(
            self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN)

        self.assertEqual(sinks, RETURNED['sinks'])
        # Response had no 'nextPageToken', so token comes back None.
        self.assertEqual(token, None)

        self.assertEqual(conn._called_with['method'], 'GET')
        path = '/%s' % (self.LIST_SINKS_PATH,)
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['query_params'],
                         {'pageSize': PAGE_SIZE, 'pageToken': TOKEN})

    def test_sink_create_conflict(self):
        from gcloud.exceptions import Conflict
        SENT = {
            'name': self.SINK_NAME,
            'filter': self.FILTER,
            'destination': self.DESTINATION_URI,
        }
        conn = _Connection()
        conn._raise_conflict = True
        api = self._makeOne(conn)

        with self.assertRaises(Conflict):
            api.sink_create(
                self.PROJECT, self.SINK_NAME, self.FILTER,
                self.DESTINATION_URI)

        self.assertEqual(conn._called_with['method'], 'POST')
        path = '/projects/%s/sinks' % (self.PROJECT,)
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)

    def test_sink_create_ok(self):
        SENT = {
            'name': self.SINK_NAME,
            'filter': self.FILTER,
            'destination': self.DESTINATION_URI,
        }
        conn = _Connection({})
        api = self._makeOne(conn)

        api.sink_create(
            self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)

        self.assertEqual(conn._called_with['method'], 'POST')
        path = '/projects/%s/sinks' % (self.PROJECT,)
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)

    def test_sink_get_miss(self):
        from gcloud.exceptions import NotFound
        # Empty _Connection: any request raises NotFound.
        conn = _Connection()
        api = self._makeOne(conn)

        with self.assertRaises(NotFound):
            api.sink_get(self.PROJECT, self.SINK_NAME)

        self.assertEqual(conn._called_with['method'], 'GET')
        path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
        self.assertEqual(conn._called_with['path'], path)

    def test_sink_get_hit(self):
        RESPONSE = {
            'name': self.SINK_PATH,
            'filter': self.FILTER,
            'destination': self.DESTINATION_URI,
        }
        conn = _Connection(RESPONSE)
        api = self._makeOne(conn)

        response = api.sink_get(self.PROJECT, self.SINK_NAME)

        self.assertEqual(response, RESPONSE)
        self.assertEqual(conn._called_with['method'], 'GET')
        path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
        self.assertEqual(conn._called_with['path'], path)

    def test_sink_update_miss(self):
        from gcloud.exceptions import NotFound
        SENT = {
            'name': self.SINK_NAME,
            'filter': self.FILTER,
            'destination': self.DESTINATION_URI,
        }
        conn = _Connection()
        api = self._makeOne(conn)

        with self.assertRaises(NotFound):
            api.sink_update(
                self.PROJECT, self.SINK_NAME, self.FILTER,
                self.DESTINATION_URI)

        self.assertEqual(conn._called_with['method'], 'PUT')
        path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)

    def test_sink_update_hit(self):
        SENT = {
            'name': self.SINK_NAME,
            'filter': self.FILTER,
            'destination': self.DESTINATION_URI,
        }
        conn = _Connection({})
        api = self._makeOne(conn)

        api.sink_update(
            self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)

        self.assertEqual(conn._called_with['method'], 'PUT')
        path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)

    def test_sink_delete_miss(self):
        from gcloud.exceptions import NotFound
        conn = _Connection()
        api = self._makeOne(conn)

        with self.assertRaises(NotFound):
            api.sink_delete(self.PROJECT, self.SINK_NAME)

        self.assertEqual(conn._called_with['method'], 'DELETE')
        path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
        self.assertEqual(conn._called_with['path'], path)

    def test_sink_delete_hit(self):
        conn = _Connection({})
        api = self._makeOne(conn)

        api.sink_delete(self.PROJECT, self.SINK_NAME)

        self.assertEqual(conn._called_with['method'], 'DELETE')
        path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
        self.assertEqual(conn._called_with['path'], path)
    # Methods of Test_MetricsAPI (class header precedes this span).
    # Same fake-connection pattern as Test_SinksAPI: drive _MetricsAPI,
    # then assert on the recorded HTTP method, path, and payload.

    def test_list_metrics_w_paging(self):
        TOKEN = 'TOKEN'
        PAGE_SIZE = 42
        RETURNED = {
            'metrics': [{
                'name': self.METRIC_PATH,
                'filter': self.FILTER,
            }],
        }
        conn = _Connection(RETURNED)
        api = self._makeOne(conn)

        metrics, token = api.list_metrics(
            self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN)

        self.assertEqual(metrics, RETURNED['metrics'])
        # No 'nextPageToken' in the canned response -> token is None.
        self.assertEqual(token, None)

        self.assertEqual(conn._called_with['method'], 'GET')
        path = '/%s' % (self.LIST_METRICS_PATH,)
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['query_params'],
                         {'pageSize': PAGE_SIZE, 'pageToken': TOKEN})

    def test_metric_create_conflict(self):
        from gcloud.exceptions import Conflict
        SENT = {
            'name': self.METRIC_NAME,
            'filter': self.FILTER,
            'description': self.DESCRIPTION,
        }
        conn = _Connection()
        conn._raise_conflict = True
        api = self._makeOne(conn)

        with self.assertRaises(Conflict):
            api.metric_create(
                self.PROJECT, self.METRIC_NAME, self.FILTER,
                self.DESCRIPTION)

        self.assertEqual(conn._called_with['method'], 'POST')
        path = '/projects/%s/metrics' % (self.PROJECT,)
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)

    def test_metric_create_ok(self):
        SENT = {
            'name': self.METRIC_NAME,
            'filter': self.FILTER,
            'description': self.DESCRIPTION,
        }
        conn = _Connection({})
        api = self._makeOne(conn)

        api.metric_create(
            self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION)

        self.assertEqual(conn._called_with['method'], 'POST')
        path = '/projects/%s/metrics' % (self.PROJECT,)
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)

    def test_metric_get_miss(self):
        from gcloud.exceptions import NotFound
        # Empty _Connection: any request raises NotFound.
        conn = _Connection()
        api = self._makeOne(conn)

        with self.assertRaises(NotFound):
            api.metric_get(self.PROJECT, self.METRIC_NAME)

        self.assertEqual(conn._called_with['method'], 'GET')
        path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME)
        self.assertEqual(conn._called_with['path'], path)

    def test_metric_get_hit(self):
        RESPONSE = {
            'name': self.METRIC_NAME,
            'filter': self.FILTER,
            'description': self.DESCRIPTION,
        }
        conn = _Connection(RESPONSE)
        api = self._makeOne(conn)

        response = api.metric_get(self.PROJECT, self.METRIC_NAME)

        self.assertEqual(response, RESPONSE)
        self.assertEqual(conn._called_with['method'], 'GET')
        path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME)
        self.assertEqual(conn._called_with['path'], path)

    def test_metric_update_miss(self):
        from gcloud.exceptions import NotFound
        SENT = {
            'name': self.METRIC_NAME,
            'filter': self.FILTER,
            'description': self.DESCRIPTION,
        }
        conn = _Connection()
        api = self._makeOne(conn)

        with self.assertRaises(NotFound):
            api.metric_update(
                self.PROJECT, self.METRIC_NAME, self.FILTER,
                self.DESCRIPTION)

        self.assertEqual(conn._called_with['method'], 'PUT')
        path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME)
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)

    def test_metric_update_hit(self):
        SENT = {
            'name': self.METRIC_NAME,
            'filter': self.FILTER,
            'description': self.DESCRIPTION,
        }
        conn = _Connection({})
        api = self._makeOne(conn)

        api.metric_update(
            self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION)

        self.assertEqual(conn._called_with['method'], 'PUT')
        path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME)
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)
self.assertRaises(NotFound): + api.metric_delete(self.PROJECT, self.METRIC_NAME) + + self.assertEqual(conn._called_with['method'], 'DELETE') + path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with['path'], path) + + def test_metric_delete_hit(self): + conn = _Connection({}) + api = self._makeOne(conn) + + api.metric_delete(self.PROJECT, self.METRIC_NAME) + + self.assertEqual(conn._called_with['method'], 'DELETE') + path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + self.assertEqual(conn._called_with['path'], path) + + +class _Credentials(object): + + _scopes = None + + @staticmethod + def create_scoped_required(): + return True + + def create_scoped(self, scope): + self._scopes = scope + return self + + +class _Connection(object): + + _called_with = None + _raise_conflict = False + + def __init__(self, *responses): + self._responses = responses + + def api_request(self, **kw): + from gcloud.exceptions import Conflict + from gcloud.exceptions import NotFound + self._called_with = kw + if self._raise_conflict: + raise Conflict('oops') + try: + response, self._responses = self._responses[0], self._responses[1:] + except IndexError: + raise NotFound('miss') + return response diff --git a/env/Lib/site-packages/gcloud/logging/test_entries.py b/env/Lib/site-packages/gcloud/logging/test_entries.py new file mode 100644 index 0000000..cde7d1f --- /dev/null +++ b/env/Lib/site-packages/gcloud/logging/test_entries.py @@ -0,0 +1,235 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class Test_logger_name_from_path(unittest2.TestCase): + + def _callFUT(self, path): + from gcloud.logging.entries import logger_name_from_path + return logger_name_from_path(path) + + def test_w_simple_name(self): + LOGGER_NAME = 'LOGGER_NAME' + PROJECT = 'my-project-1234' + PATH = 'projects/%s/logs/%s' % (PROJECT, LOGGER_NAME) + logger_name = self._callFUT(PATH) + self.assertEqual(logger_name, LOGGER_NAME) + + def test_w_name_w_all_extras(self): + LOGGER_NAME = 'LOGGER_NAME-part.one~part.two%part-three' + PROJECT = 'my-project-1234' + PATH = 'projects/%s/logs/%s' % (PROJECT, LOGGER_NAME) + logger_name = self._callFUT(PATH) + self.assertEqual(logger_name, LOGGER_NAME) + + +class Test_BaseEntry(unittest2.TestCase): + + PROJECT = 'PROJECT' + LOGGER_NAME = 'LOGGER_NAME' + + def _getTargetClass(self): + from gcloud.logging.entries import _BaseEntry + + class _Dummy(_BaseEntry): + _PAYLOAD_KEY = 'dummyPayload' + + return _Dummy + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + PAYLOAD = 'PAYLOAD' + logger = _Logger(self.LOGGER_NAME, self.PROJECT) + entry = self._makeOne(PAYLOAD, logger) + self.assertEqual(entry.payload, PAYLOAD) + self.assertTrue(entry.logger is logger) + self.assertTrue(entry.insert_id is None) + self.assertTrue(entry.timestamp is None) + self.assertTrue(entry.labels is None) + self.assertTrue(entry.severity is None) + self.assertTrue(entry.http_request is None) + + def test_ctor_explicit(self): + import datetime + PAYLOAD = 'PAYLOAD' + IID = 'IID' + 
    # from_api_repr() methods of Test_BaseEntry (class header precedes
    # this span).  They exercise the resource-dict -> entry conversion.

    def test_from_api_repr_missing_data_no_loggers(self):
        """Optional fields default to None; a logger is built via the client."""
        client = _Client(self.PROJECT)
        PAYLOAD = 'PAYLOAD'
        LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME)
        API_REPR = {
            'dummyPayload': PAYLOAD,
            'logName': LOG_NAME,
        }
        klass = self._getTargetClass()
        entry = klass.from_api_repr(API_REPR, client)
        self.assertEqual(entry.payload, PAYLOAD)
        self.assertTrue(entry.insert_id is None)
        self.assertTrue(entry.timestamp is None)
        self.assertTrue(entry.severity is None)
        self.assertTrue(entry.http_request is None)
        logger = entry.logger
        self.assertTrue(isinstance(logger, _Logger))
        self.assertTrue(logger.client is client)
        self.assertEqual(logger.name, self.LOGGER_NAME)

    def test_from_api_repr_w_loggers_no_logger_match(self):
        """A freshly-built logger is cached into the passed ``loggers`` map."""
        from datetime import datetime
        from gcloud._helpers import UTC
        klass = self._getTargetClass()
        client = _Client(self.PROJECT)
        PAYLOAD = 'PAYLOAD'
        SEVERITY = 'CRITICAL'
        IID = 'IID'
        NOW = datetime.utcnow().replace(tzinfo=UTC)
        TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW)
        LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME)
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        API_REPR = {
            'dummyPayload': PAYLOAD,
            'logName': LOG_NAME,
            'insertId': IID,
            'timestamp': TIMESTAMP,
            'labels': LABELS,
            'severity': SEVERITY,
            'httpRequest': {
                'requestMethod': METHOD,
                'requestUrl': URI,
                'status': STATUS,
            },
        }
        loggers = {}
        entry = klass.from_api_repr(API_REPR, client, loggers=loggers)
        self.assertEqual(entry.payload, PAYLOAD)
        self.assertEqual(entry.insert_id, IID)
        # RFC 3339 timestamp round-trips back to the original datetime.
        self.assertEqual(entry.timestamp, NOW)
        self.assertEqual(entry.labels, LABELS)
        self.assertEqual(entry.severity, SEVERITY)
        self.assertEqual(entry.http_request['requestMethod'], METHOD)
        self.assertEqual(entry.http_request['requestUrl'], URI)
        self.assertEqual(entry.http_request['status'], STATUS)
        logger = entry.logger
        self.assertTrue(isinstance(logger, _Logger))
        self.assertTrue(logger.client is client)
        self.assertEqual(logger.name, self.LOGGER_NAME)
        self.assertEqual(loggers, {LOG_NAME: logger})

    def test_from_api_repr_w_loggers_w_logger_match(self):
        """An existing entry in ``loggers`` is reused, not rebuilt."""
        from datetime import datetime
        from gcloud._helpers import UTC
        client = _Client(self.PROJECT)
        PAYLOAD = 'PAYLOAD'
        IID = 'IID'
        NOW = datetime.utcnow().replace(tzinfo=UTC)
        TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW)
        LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME)
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        API_REPR = {
            'dummyPayload': PAYLOAD,
            'logName': LOG_NAME,
            'insertId': IID,
            'timestamp': TIMESTAMP,
            'labels': LABELS,
        }
        LOGGER = object()
        loggers = {LOG_NAME: LOGGER}
        klass = self._getTargetClass()
        entry = klass.from_api_repr(API_REPR, client, loggers=loggers)
        self.assertEqual(entry.payload, PAYLOAD)
        self.assertEqual(entry.insert_id, IID)
        self.assertEqual(entry.timestamp, NOW)
        self.assertEqual(entry.labels, LABELS)
        self.assertTrue(entry.logger is LOGGER)
TestProtobufEntry(unittest2.TestCase): + + PROJECT = 'PROJECT' + LOGGER_NAME = 'LOGGER_NAME' + + def _getTargetClass(self): + from gcloud.logging.entries import ProtobufEntry + return ProtobufEntry + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_parse_message(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + LOGGER = object() + message = Struct(fields={'foo': Value(bool_value=False)}) + with_true = Struct(fields={'foo': Value(bool_value=True)}) + PAYLOAD = json.loads(MessageToJson(with_true)) + entry = self._makeOne(PAYLOAD, LOGGER) + entry.parse_message(message) + self.assertTrue(message.fields['foo']) + + +def _datetime_to_rfc3339_w_nanos(value): + from gcloud._helpers import _RFC3339_NO_FRACTION + no_fraction = value.strftime(_RFC3339_NO_FRACTION) + return '%s.%09dZ' % (no_fraction, value.microsecond * 1000) + + +class _Logger(object): + + def __init__(self, name, client): + self.name = name + self.client = client + + +class _Client(object): + + def __init__(self, project): + self.project = project + + def logger(self, name): + return _Logger(name, self) diff --git a/env/Lib/site-packages/gcloud/logging/test_logger.py b/env/Lib/site-packages/gcloud/logging/test_logger.py new file mode 100644 index 0000000..fd7938f --- /dev/null +++ b/env/Lib/site-packages/gcloud/logging/test_logger.py @@ -0,0 +1,704 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
class TestLogger(unittest2.TestCase):
    """Tests for ``gcloud.logging.logger.Logger``.

    Uses the module-level fakes ``_Client``, ``_Logger`` and
    ``_DummyLoggingAPI`` (defined elsewhere in this file — not visible
    in this chunk) to capture what the logger sends to the API layer.
    """

    PROJECT = 'test-project'
    LOGGER_NAME = 'logger-name'

    def _getTargetClass(self):
        # Deferred import: the class under test.
        from gcloud.logging.logger import Logger
        return Logger

    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)

    def test_ctor_defaults(self):
        conn = object()
        client = _Client(self.PROJECT, conn)
        logger = self._makeOne(self.LOGGER_NAME, client=client)
        self.assertEqual(logger.name, self.LOGGER_NAME)
        self.assertTrue(logger.client is client)
        self.assertEqual(logger.project, self.PROJECT)
        self.assertEqual(logger.full_name, 'projects/%s/logs/%s'
                         % (self.PROJECT, self.LOGGER_NAME))
        self.assertEqual(logger.path, '/projects/%s/logs/%s'
                         % (self.PROJECT, self.LOGGER_NAME))
        self.assertEqual(logger.labels, None)

    def test_ctor_explicit(self):
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        conn = object()
        client = _Client(self.PROJECT, conn)
        logger = self._makeOne(self.LOGGER_NAME, client=client, labels=LABELS)
        self.assertEqual(logger.name, self.LOGGER_NAME)
        self.assertTrue(logger.client is client)
        self.assertEqual(logger.project, self.PROJECT)
        self.assertEqual(logger.full_name, 'projects/%s/logs/%s'
                         % (self.PROJECT, self.LOGGER_NAME))
        self.assertEqual(logger.path, '/projects/%s/logs/%s'
                         % (self.PROJECT, self.LOGGER_NAME))
        self.assertEqual(logger.labels, LABELS)

    def test_batch_w_bound_client(self):
        from gcloud.logging.logger import Batch
        conn = object()
        client = _Client(self.PROJECT, conn)
        logger = self._makeOne(self.LOGGER_NAME, client=client)
        batch = logger.batch()
        self.assertTrue(isinstance(batch, Batch))
        self.assertTrue(batch.logger is logger)
        self.assertTrue(batch.client is client)

    def test_batch_w_alternate_client(self):
        from gcloud.logging.logger import Batch
        conn1 = object()
        conn2 = object()
        client1 = _Client(self.PROJECT, conn1)
        client2 = _Client(self.PROJECT, conn2)
        logger = self._makeOne(self.LOGGER_NAME, client=client1)
        batch = logger.batch(client2)
        self.assertTrue(isinstance(batch, Batch))
        self.assertTrue(batch.logger is logger)
        # An explicitly passed client overrides the bound one.
        self.assertTrue(batch.client is client2)

    def test_log_text_w_str_implicit_client(self):
        TEXT = 'TEXT'
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'textPayload': TEXT,
            'resource': {
                'type': 'global',
            },
        }]
        client = _Client(self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client)

        logger.log_text(TEXT)

        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))

    def test_log_text_w_default_labels(self):
        TEXT = 'TEXT'
        DEFAULT_LABELS = {'foo': 'spam'}
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'textPayload': TEXT,
            'resource': {
                'type': 'global',
            },
            'labels': DEFAULT_LABELS,
        }]
        client = _Client(self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client,
                               labels=DEFAULT_LABELS)

        logger.log_text(TEXT)

        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))

    def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self):
        TEXT = u'TEXT'
        DEFAULT_LABELS = {'foo': 'spam'}
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        # Per-call labels override the logger's default labels.
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'textPayload': TEXT,
            'resource': {
                'type': 'global',
            },
            'labels': LABELS,
            'insertId': IID,
            'severity': SEVERITY,
            'httpRequest': REQUEST,
        }]
        client1 = _Client(self.PROJECT)
        client2 = _Client(self.PROJECT)
        api = client2.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client1,
                               labels=DEFAULT_LABELS)

        logger.log_text(TEXT, client=client2, labels=LABELS,
                        insert_id=IID, severity=SEVERITY, http_request=REQUEST)

        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))

    def test_log_struct_w_implicit_client(self):
        STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'}
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'jsonPayload': STRUCT,
            'resource': {
                'type': 'global',
            },
        }]
        client = _Client(self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client)

        logger.log_struct(STRUCT)

        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))

    def test_log_struct_w_default_labels(self):
        STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'}
        DEFAULT_LABELS = {'foo': 'spam'}
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'jsonPayload': STRUCT,
            'resource': {
                'type': 'global',
            },
            'labels': DEFAULT_LABELS,
        }]
        client = _Client(self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client,
                               labels=DEFAULT_LABELS)

        logger.log_struct(STRUCT)

        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))

    def test_log_struct_w_explicit_client_labels_severity_httpreq(self):
        STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'}
        DEFAULT_LABELS = {'foo': 'spam'}
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'jsonPayload': STRUCT,
            'resource': {
                'type': 'global',
            },
            'labels': LABELS,
            'insertId': IID,
            'severity': SEVERITY,
            'httpRequest': REQUEST,
        }]
        client1 = _Client(self.PROJECT)
        client2 = _Client(self.PROJECT)
        api = client2.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client1,
                               labels=DEFAULT_LABELS)

        logger.log_struct(STRUCT, client=client2, labels=LABELS,
                          insert_id=IID, severity=SEVERITY,
                          http_request=REQUEST)

        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))

    def test_log_proto_w_implicit_client(self):
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        message = Struct(fields={'foo': Value(bool_value=True)})
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'protoPayload': json.loads(MessageToJson(message)),
            'resource': {
                'type': 'global',
            },
        }]
        client = _Client(self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client)

        logger.log_proto(message)

        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))

    def test_log_proto_w_default_labels(self):
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        message = Struct(fields={'foo': Value(bool_value=True)})
        DEFAULT_LABELS = {'foo': 'spam'}
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'protoPayload': json.loads(MessageToJson(message)),
            'resource': {
                'type': 'global',
            },
            'labels': DEFAULT_LABELS,
        }]
        client = _Client(self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client,
                               labels=DEFAULT_LABELS)

        logger.log_proto(message)

        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))

    def test_log_proto_w_explicit_client_labels_severity_httpreq(self):
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        message = Struct(fields={'foo': Value(bool_value=True)})
        DEFAULT_LABELS = {'foo': 'spam'}
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'protoPayload': json.loads(MessageToJson(message)),
            'resource': {
                'type': 'global',
            },
            'labels': LABELS,
            'insertId': IID,
            'severity': SEVERITY,
            'httpRequest': REQUEST,
        }]
        client1 = _Client(self.PROJECT)
        client2 = _Client(self.PROJECT)
        api = client2.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client1,
                               labels=DEFAULT_LABELS)

        logger.log_proto(message, client=client2, labels=LABELS,
                         insert_id=IID, severity=SEVERITY,
                         http_request=REQUEST)

        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))

    def test_delete_w_bound_client(self):
        client = _Client(project=self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client)

        logger.delete()

        self.assertEqual(api._logger_delete_called_with,
                         (self.PROJECT, self.LOGGER_NAME))

    def test_delete_w_alternate_client(self):
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client1)

        logger.delete(client=client2)

        self.assertEqual(api._logger_delete_called_with,
                         (self.PROJECT, self.LOGGER_NAME))

    def test_list_entries_defaults(self):
        # The logger's own name is injected as a filter.
        LISTED = {
            'projects': None,
            'filter_': 'logName=projects/%s/logs/%s' %
                       (self.PROJECT, self.LOGGER_NAME),
            'order_by': None,
            'page_size': None,
            'page_token': None,
        }
        TOKEN = 'TOKEN'
        client = _Client(self.PROJECT)
        client._token = TOKEN
        logger = self._makeOne(self.LOGGER_NAME, client=client)
        entries, token = logger.list_entries()
        self.assertEqual(len(entries), 0)
        self.assertEqual(token, TOKEN)
        self.assertEqual(client._listed, LISTED)

    def test_list_entries_explicit(self):
        from gcloud.logging import DESCENDING
        PROJECT1 = 'PROJECT1'
        PROJECT2 = 'PROJECT2'
        FILTER = 'resource.type:global'
        TOKEN = 'TOKEN'
        PAGE_SIZE = 42
        # A user filter is ANDed with the logName filter.
        LISTED = {
            'projects': ['PROJECT1', 'PROJECT2'],
            'filter_': '%s AND logName=projects/%s/logs/%s' %
                       (FILTER, self.PROJECT, self.LOGGER_NAME),
            'order_by': DESCENDING,
            'page_size': PAGE_SIZE,
            'page_token': TOKEN,
        }
        client = _Client(self.PROJECT)
        logger = self._makeOne(self.LOGGER_NAME, client=client)
        entries, token = logger.list_entries(
            projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING,
            page_size=PAGE_SIZE, page_token=TOKEN)
        self.assertEqual(len(entries), 0)
        self.assertEqual(token, None)
        self.assertEqual(client._listed, LISTED)
    # Methods of TestBatch (class header and earlier methods precede
    # this span).  Batch buffers (kind, payload, labels, insert_id,
    # severity, http_request) tuples and flushes them on commit().

    def test_log_text_explicit(self):
        TEXT = 'This is the entry text'
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        client = _Client(project=self.PROJECT, connection=object())
        logger = _Logger()
        batch = self._makeOne(logger, client=client)
        batch.log_text(TEXT, labels=LABELS, insert_id=IID, severity=SEVERITY,
                       http_request=REQUEST)
        self.assertEqual(batch.entries,
                         [('text', TEXT, LABELS, IID, SEVERITY, REQUEST)])

    def test_log_struct_defaults(self):
        STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'}
        client = _Client(project=self.PROJECT, connection=object())
        logger = _Logger()
        batch = self._makeOne(logger, client=client)
        batch.log_struct(STRUCT)
        self.assertEqual(batch.entries,
                         [('struct', STRUCT, None, None, None, None)])

    def test_log_struct_explicit(self):
        STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'}
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        client = _Client(project=self.PROJECT, connection=object())
        logger = _Logger()
        batch = self._makeOne(logger, client=client)
        batch.log_struct(STRUCT, labels=LABELS, insert_id=IID,
                         severity=SEVERITY, http_request=REQUEST)
        self.assertEqual(batch.entries,
                         [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST)])

    def test_log_proto_defaults(self):
        from google.protobuf.struct_pb2 import Struct, Value
        message = Struct(fields={'foo': Value(bool_value=True)})
        client = _Client(project=self.PROJECT, connection=object())
        logger = _Logger()
        batch = self._makeOne(logger, client=client)
        batch.log_proto(message)
        self.assertEqual(batch.entries,
                         [('proto', message, None, None, None, None)])

    def test_log_proto_explicit(self):
        from google.protobuf.struct_pb2 import Struct, Value
        message = Struct(fields={'foo': Value(bool_value=True)})
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        client = _Client(project=self.PROJECT, connection=object())
        logger = _Logger()
        batch = self._makeOne(logger, client=client)
        batch.log_proto(message, labels=LABELS, insert_id=IID,
                        severity=SEVERITY, http_request=REQUEST)
        self.assertEqual(batch.entries,
                         [('proto', message, LABELS, IID, SEVERITY, REQUEST)])

    def test_commit_w_invalid_entry_type(self):
        """Unknown entry kinds make commit() raise ValueError."""
        logger = _Logger()
        client = _Client(project=self.PROJECT, connection=object())
        batch = self._makeOne(logger, client)
        batch.entries.append(('bogus', 'BOGUS', None, None, None, None))
        with self.assertRaises(ValueError):
            batch.commit()

    def test_commit_w_bound_client(self):
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        TEXT = 'This is the entry text'
        STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
        message = Struct(fields={'foo': Value(bool_value=True)})
        IID1 = 'IID1'
        IID2 = 'IID2'
        IID3 = 'IID3'
        RESOURCE = {
            'type': 'global',
        }
        ENTRIES = [
            {'textPayload': TEXT, 'insertId': IID1},
            {'jsonPayload': STRUCT, 'insertId': IID2},
            {'protoPayload': json.loads(MessageToJson(message)),
             'insertId': IID3},
        ]
        client = _Client(project=self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = _Logger()
        batch = self._makeOne(logger, client=client)

        batch.log_text(TEXT, insert_id=IID1)
        batch.log_struct(STRUCT, insert_id=IID2)
        batch.log_proto(message, insert_id=IID3)
        batch.commit()

        # commit() drains the buffer and writes all entries in one call.
        self.assertEqual(list(batch.entries), [])
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, logger.path, RESOURCE, None))

    def test_commit_w_alternate_client(self):
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        from gcloud.logging.logger import Logger
        TEXT = 'This is the entry text'
        STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
        message = Struct(fields={'foo': Value(bool_value=True)})
        DEFAULT_LABELS = {'foo': 'spam'}
        LABELS = {
            'foo': 'bar',
            'baz': 'qux',
        }
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.logging_api = _DummyLoggingAPI()
        logger = Logger('logger_name', client1, labels=DEFAULT_LABELS)
        RESOURCE = {'type': 'global'}
        ENTRIES = [
            {'textPayload': TEXT, 'labels': LABELS},
            {'jsonPayload': STRUCT, 'severity': SEVERITY},
            {'protoPayload': json.loads(MessageToJson(message)),
             'httpRequest': REQUEST},
        ]
        batch = self._makeOne(logger, client=client1)

        batch.log_text(TEXT, labels=LABELS)
        batch.log_struct(STRUCT, severity=SEVERITY)
        batch.log_proto(message, http_request=REQUEST)
        batch.commit(client=client2)

        # The explicit client's API gets the write, with default labels.
        self.assertEqual(list(batch.entries), [])
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, logger.path, RESOURCE, DEFAULT_LABELS))
'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + client = _Client(project=self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = Logger('logger_name', client, labels=DEFAULT_LABELS) + RESOURCE = { + 'type': 'global', + } + ENTRIES = [ + {'textPayload': TEXT, 'httpRequest': REQUEST}, + {'jsonPayload': STRUCT, 'labels': LABELS}, + {'protoPayload': json.loads(MessageToJson(message)), + 'severity': SEVERITY}, + ] + batch = self._makeOne(logger, client=client) + + with batch as other: + other.log_text(TEXT, http_request=REQUEST) + other.log_struct(STRUCT, labels=LABELS) + other.log_proto(message, severity=SEVERITY) + + self.assertEqual(list(batch.entries), []) + self.assertEqual(api._write_entries_called_with, + (ENTRIES, logger.path, RESOURCE, DEFAULT_LABELS)) + + def test_context_mgr_failure(self): + from google.protobuf.struct_pb2 import Struct, Value + TEXT = 'This is the entry text' + STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} + LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } + message = Struct(fields={'foo': Value(bool_value=True)}) + client = _Client(project=self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = _Logger() + UNSENT = [ + ('text', TEXT, None, IID, None, None), + ('struct', STRUCT, None, None, SEVERITY, None), + ('proto', message, LABELS, None, None, REQUEST), + ] + batch = self._makeOne(logger, client=client) + + try: + with batch as other: + other.log_text(TEXT, insert_id=IID) + other.log_struct(STRUCT, severity=SEVERITY) + other.log_proto(message, labels=LABELS, http_request=REQUEST) + raise _Bugout() + except _Bugout: + pass + + self.assertEqual(list(batch.entries), UNSENT) + 
self.assertEqual(api._write_entries_called_with, None) + + +class _Logger(object): + + labels = None + + def __init__(self, name="NAME", project="PROJECT"): + self.path = '/projects/%s/logs/%s' % (project, name) + + +class _DummyLoggingAPI(object): + + _write_entries_called_with = None + + def write_entries(self, entries, logger_name=None, resource=None, + labels=None): + self._write_entries_called_with = ( + entries, logger_name, resource, labels) + + def logger_delete(self, project, logger_name): + self._logger_delete_called_with = (project, logger_name) + + +class _Client(object): + + _listed = _token = None + _entries = () + + def __init__(self, project, connection=None): + self.project = project + self.connection = connection + + def list_entries(self, **kw): + self._listed = kw + return self._entries, self._token + + +class _Bugout(Exception): + pass diff --git a/env/Lib/site-packages/gcloud/logging/test_metric.py b/env/Lib/site-packages/gcloud/logging/test_metric.py new file mode 100644 index 0000000..f4558d0 --- /dev/null +++ b/env/Lib/site-packages/gcloud/logging/test_metric.py @@ -0,0 +1,251 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest2 + + +class TestMetric(unittest2.TestCase): + + PROJECT = 'test-project' + METRIC_NAME = 'metric-name' + FILTER = 'logName:syslog AND severity>=ERROR' + DESCRIPTION = 'DESCRIPTION' + + def _getTargetClass(self): + from gcloud.logging.metric import Metric + return Metric + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + client = _Client(self.PROJECT) + metric = self._makeOne(self.METRIC_NAME, client=client) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, None) + self.assertEqual(metric.description, '') + self.assertTrue(metric.client is client) + self.assertEqual(metric.project, self.PROJECT) + self.assertEqual(metric.full_name, FULL) + self.assertEqual(metric.path, '/%s' % (FULL,)) + + def test_ctor_explicit(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + client = _Client(self.PROJECT) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, + client=client, description=self.DESCRIPTION) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertTrue(metric.client is client) + self.assertEqual(metric.project, self.PROJECT) + self.assertEqual(metric.full_name, FULL) + self.assertEqual(metric.path, '/%s' % (FULL,)) + + def test_from_api_repr_minimal(self): + client = _Client(project=self.PROJECT) + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + RESOURCE = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + } + klass = self._getTargetClass() + metric = klass.from_api_repr(RESOURCE, client=client) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, '') + self.assertTrue(metric._client is client) + self.assertEqual(metric.project, 
self.PROJECT) + self.assertEqual(metric.full_name, FULL) + + def test_from_api_repr_w_description(self): + client = _Client(project=self.PROJECT) + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + DESCRIPTION = 'DESCRIPTION' + RESOURCE = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': DESCRIPTION, + } + klass = self._getTargetClass() + metric = klass.from_api_repr(RESOURCE, client=client) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, DESCRIPTION) + self.assertTrue(metric._client is client) + self.assertEqual(metric.project, self.PROJECT) + self.assertEqual(metric.full_name, FULL) + + def test_create_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + + metric.create() + + self.assertEqual( + api._metric_create_called_with, + (self.PROJECT, self.METRIC_NAME, self.FILTER, '')) + + def test_create_w_alternate_client(self): + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1, + description=self.DESCRIPTION) + + metric.create(client=client2) + + self.assertEqual( + api._metric_create_called_with, + (self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION)) + + def test_exists_miss_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + + self.assertFalse(metric.exists()) + + self.assertEqual(api._metric_get_called_with, + (self.PROJECT, self.METRIC_NAME)) + + def test_exists_hit_w_alternate_client(self): + RESOURCE = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + } + client1 = _Client(project=self.PROJECT) + client2 = 
_Client(project=self.PROJECT) + api = client2.metrics_api = _DummyMetricsAPI() + api._metric_get_response = RESOURCE + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1) + + self.assertTrue(metric.exists(client=client2)) + + self.assertEqual(api._metric_get_called_with, + (self.PROJECT, self.METRIC_NAME)) + + def test_reload_w_bound_client(self): + NEW_FILTER = 'logName:syslog AND severity>=INFO' + RESOURCE = { + 'name': self.METRIC_NAME, + 'filter': NEW_FILTER, + } + client = _Client(project=self.PROJECT) + api = client.metrics_api = _DummyMetricsAPI() + api._metric_get_response = RESOURCE + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client, + description=self.DESCRIPTION) + + metric.reload() + + self.assertEqual(metric.filter_, NEW_FILTER) + self.assertEqual(metric.description, '') + self.assertEqual(api._metric_get_called_with, + (self.PROJECT, self.METRIC_NAME)) + + def test_reload_w_alternate_client(self): + NEW_FILTER = 'logName:syslog AND severity>=INFO' + RESOURCE = { + 'name': self.METRIC_NAME, + 'description': self.DESCRIPTION, + 'filter': NEW_FILTER, + } + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.metrics_api = _DummyMetricsAPI() + api._metric_get_response = RESOURCE + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1) + + metric.reload(client=client2) + + self.assertEqual(metric.filter_, NEW_FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertEqual(api._metric_get_called_with, + (self.PROJECT, self.METRIC_NAME)) + + def test_update_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + + metric.update() + + self.assertEqual( + api._metric_update_called_with, + (self.PROJECT, self.METRIC_NAME, self.FILTER, '')) + + def test_update_w_alternate_client(self): + client1 = 
_Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1, + description=self.DESCRIPTION) + + metric.update(client=client2) + + self.assertEqual( + api._metric_update_called_with, + (self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION)) + + def test_delete_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + + metric.delete() + + self.assertEqual(api._metric_delete_called_with, + (self.PROJECT, self.METRIC_NAME)) + + def test_delete_w_alternate_client(self): + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.metrics_api = _DummyMetricsAPI() + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1) + + metric.delete(client=client2) + + self.assertEqual(api._metric_delete_called_with, + (self.PROJECT, self.METRIC_NAME)) + + +class _Client(object): + + def __init__(self, project): + self.project = project + + +class _DummyMetricsAPI(object): + + def metric_create(self, project, metric_name, filter_, description): + self._metric_create_called_with = ( + project, metric_name, filter_, description) + + def metric_get(self, project, metric_name): + from gcloud.exceptions import NotFound + self._metric_get_called_with = (project, metric_name) + try: + return self._metric_get_response + except AttributeError: + raise NotFound('miss') + + def metric_update(self, project, metric_name, filter_, description): + self._metric_update_called_with = ( + project, metric_name, filter_, description) + + def metric_delete(self, project, metric_name): + self._metric_delete_called_with = (project, metric_name) diff --git a/env/Lib/site-packages/gcloud/logging/test_sink.py b/env/Lib/site-packages/gcloud/logging/test_sink.py new file mode 100644 index 
0000000..2a04561 --- /dev/null +++ b/env/Lib/site-packages/gcloud/logging/test_sink.py @@ -0,0 +1,262 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class TestSink(unittest2.TestCase): + + PROJECT = 'test-project' + SINK_NAME = 'sink-name' + FILTER = 'logName:syslog AND severity>=INFO' + DESTINATION_URI = 'faux.googleapis.com/destination' + + def _getTargetClass(self): + from gcloud.logging.sink import Sink + return Sink + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + client = _Client(self.PROJECT) + sink = self._makeOne(self.SINK_NAME, client=client) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, None) + self.assertEqual(sink.destination, None) + self.assertTrue(sink.client is client) + self.assertEqual(sink.project, self.PROJECT) + self.assertEqual(sink.full_name, FULL) + self.assertEqual(sink.path, '/%s' % (FULL,)) + + def test_ctor_explicit(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + client = _Client(self.PROJECT) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertTrue(sink.client is client) + 
self.assertEqual(sink.project, self.PROJECT) + self.assertEqual(sink.full_name, FULL) + self.assertEqual(sink.path, '/%s' % (FULL,)) + + def test_from_api_repr_minimal(self): + client = _Client(project=self.PROJECT) + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + klass = self._getTargetClass() + sink = klass.from_api_repr(RESOURCE, client=client) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertTrue(sink._client is client) + self.assertEqual(sink.project, self.PROJECT) + self.assertEqual(sink.full_name, FULL) + + def test_from_api_repr_w_description(self): + client = _Client(project=self.PROJECT) + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + klass = self._getTargetClass() + sink = klass.from_api_repr(RESOURCE, client=client) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertTrue(sink._client is client) + self.assertEqual(sink.project, self.PROJECT) + self.assertEqual(sink.full_name, FULL) + + def test_create_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.sinks_api = _DummySinksAPI() + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client) + + sink.create() + + self.assertEqual( + api._sink_create_called_with, + (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)) + + def test_create_w_alternate_client(self): + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client1) + api = client2.sinks_api = 
_DummySinksAPI() + + sink.create(client=client2) + + self.assertEqual( + api._sink_create_called_with, + (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)) + + def test_exists_miss_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.sinks_api = _DummySinksAPI() + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client) + + self.assertFalse(sink.exists()) + + self.assertEqual(api._sink_get_called_with, + (self.PROJECT, self.SINK_NAME)) + + def test_exists_hit_w_alternate_client(self): + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.sinks_api = _DummySinksAPI() + api._sink_get_response = RESOURCE + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client1) + + self.assertTrue(sink.exists(client=client2)) + + self.assertEqual(api._sink_get_called_with, + (self.PROJECT, self.SINK_NAME)) + + def test_reload_w_bound_client(self): + NEW_FILTER = 'logName:syslog AND severity>=INFO' + NEW_DESTINATION_URI = 'faux.googleapis.com/other' + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': NEW_FILTER, + 'destination': NEW_DESTINATION_URI, + } + client = _Client(project=self.PROJECT) + api = client.sinks_api = _DummySinksAPI() + api._sink_get_response = RESOURCE + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client) + + sink.reload() + + self.assertEqual(sink.filter_, NEW_FILTER) + self.assertEqual(sink.destination, NEW_DESTINATION_URI) + self.assertEqual(api._sink_get_called_with, + (self.PROJECT, self.SINK_NAME)) + + def test_reload_w_alternate_client(self): + NEW_FILTER = 'logName:syslog AND severity>=INFO' + NEW_DESTINATION_URI = 'faux.googleapis.com/other' + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': NEW_FILTER, + 'destination': NEW_DESTINATION_URI, + } + client1 = 
_Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.sinks_api = _DummySinksAPI() + api._sink_get_response = RESOURCE + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client1) + + sink.reload(client=client2) + + self.assertEqual(sink.filter_, NEW_FILTER) + self.assertEqual(sink.destination, NEW_DESTINATION_URI) + self.assertEqual(api._sink_get_called_with, + (self.PROJECT, self.SINK_NAME)) + + def test_update_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.sinks_api = _DummySinksAPI() + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client) + + sink.update() + + self.assertEqual( + api._sink_update_called_with, + (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)) + + def test_update_w_alternate_client(self): + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.sinks_api = _DummySinksAPI() + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client1) + + sink.update(client=client2) + + self.assertEqual( + api._sink_update_called_with, + (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)) + + def test_delete_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.sinks_api = _DummySinksAPI() + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client) + + sink.delete() + + self.assertEqual(api._sink_delete_called_with, + (self.PROJECT, self.SINK_NAME)) + + def test_delete_w_alternate_client(self): + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.sinks_api = _DummySinksAPI() + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client1) + + sink.delete(client=client2) + + self.assertEqual(api._sink_delete_called_with, + (self.PROJECT, self.SINK_NAME)) + + +class _Client(object): + + def __init__(self, 
project): + self.project = project + + +class _DummySinksAPI(object): + + def sink_create(self, project, sink_name, filter_, destination): + self._sink_create_called_with = ( + project, sink_name, filter_, destination) + + def sink_get(self, project, sink_name): + from gcloud.exceptions import NotFound + self._sink_get_called_with = (project, sink_name) + try: + return self._sink_get_response + except AttributeError: + raise NotFound('miss') + + def sink_update(self, project, sink_name, filter_, destination): + self._sink_update_called_with = ( + project, sink_name, filter_, destination) + + def sink_delete(self, project, sink_name): + self._sink_delete_called_with = (project, sink_name) diff --git a/env/Lib/site-packages/gcloud/monitoring/__init__.py b/env/Lib/site-packages/gcloud/monitoring/__init__.py new file mode 100644 index 0000000..31191d1 --- /dev/null +++ b/env/Lib/site-packages/gcloud/monitoring/__init__.py @@ -0,0 +1,45 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Google Monitoring API wrapper.""" + +from gcloud.monitoring.client import Client +from gcloud.monitoring.connection import Connection +from gcloud.monitoring.label import LabelDescriptor +from gcloud.monitoring.label import LabelValueType +from gcloud.monitoring.metric import Metric +from gcloud.monitoring.metric import MetricDescriptor +from gcloud.monitoring.metric import MetricKind +from gcloud.monitoring.metric import ValueType +from gcloud.monitoring.query import Aligner +from gcloud.monitoring.query import Query +from gcloud.monitoring.query import Reducer +from gcloud.monitoring.resource import Resource +from gcloud.monitoring.resource import ResourceDescriptor +from gcloud.monitoring.timeseries import Point +from gcloud.monitoring.timeseries import TimeSeries + +__all__ = ( + 'Client', + 'Connection', + 'LabelDescriptor', 'LabelValueType', + 'Metric', 'MetricDescriptor', 'MetricKind', 'ValueType', + 'Aligner', 'Query', 'Reducer', + 'Resource', 'ResourceDescriptor', + 'Point', 'TimeSeries', + 'SCOPE', +) + + +SCOPE = Connection.SCOPE diff --git a/env/Lib/site-packages/gcloud/monitoring/_dataframe.py b/env/Lib/site-packages/gcloud/monitoring/_dataframe.py new file mode 100644 index 0000000..1b5b194 --- /dev/null +++ b/env/Lib/site-packages/gcloud/monitoring/_dataframe.py @@ -0,0 +1,116 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Time series as :mod:`pandas` dataframes.""" + +import itertools + +TOP_RESOURCE_LABELS = ( + 'project_id', + 'aws_account', + 'location', + 'region', + 'zone', +) + + +def _build_dataframe(time_series_iterable, + label=None, labels=None): # pragma: NO COVER + """Build a :mod:`pandas` dataframe out of time series. + + :type time_series_iterable: + iterable over :class:`~gcloud.monitoring.timeseries.TimeSeries` + :param time_series_iterable: + An iterable (e.g., a query object) yielding time series. + + :type label: string or None + :param label: + The label name to use for the dataframe header. This can be the name + of a resource label or metric label (e.g., ``"instance_name"``), or + the string ``"resource_type"``. + + :type labels: list of strings, or None + :param labels: + A list or tuple of label names to use for the dataframe header. + If more than one label name is provided, the resulting dataframe + will have a multi-level column header. + + Specifying neither ``label`` or ``labels`` results in a dataframe + with a multi-level column header including the resource type and + all available resource and metric labels. + + Specifying both ``label`` and ``labels`` is an error. + + :rtype: :class:`pandas.DataFrame` + :returns: A dataframe where each column represents one time series. + """ + import pandas # pylint: disable=import-error + + if labels is not None: + if label is not None: + raise ValueError('Cannot specify both "label" and "labels".') + elif not labels: + raise ValueError('"labels" must be non-empty or None.') + + columns = [] + headers = [] + for time_series in time_series_iterable: + pandas_series = pandas.Series( + data=[point.value for point in time_series.points], + index=[point.end_time for point in time_series.points], + ) + columns.append(pandas_series) + headers.append(time_series.header()) + + # Implement a smart default of using all available labels. 
+ if label is None and labels is None: + resource_labels = set(itertools.chain.from_iterable( + header.resource.labels for header in headers)) + metric_labels = set(itertools.chain.from_iterable( + header.metric.labels for header in headers)) + labels = (['resource_type'] + + _sorted_resource_labels(resource_labels) + + sorted(metric_labels)) + + # Assemble the columns into a DataFrame. + dataframe = pandas.DataFrame.from_records(columns).T + + # Convert the timestamp strings into a DatetimeIndex. + dataframe.index = pandas.to_datetime(dataframe.index) + + # Build a multi-level stack of column headers. Some labels may + # be undefined for some time series. + levels = [] + for key in labels or [label]: + level = [header.labels.get(key, '') for header in headers] + levels.append(level) + + # Build a column Index or MultiIndex. Do not include level names + # in the column header if the user requested a single-level header + # by specifying "label". + dataframe.columns = pandas.MultiIndex.from_arrays( + levels, + names=labels or None) + + # Sort the rows just in case (since the API doesn't guarantee the + # ordering), and sort the columns lexicographically. + return dataframe.sort_index(axis=0).sort_index(axis=1) + + +def _sorted_resource_labels(labels): + """Sort label names, putting well-known resource labels first.""" + head = [label for label in TOP_RESOURCE_LABELS if label in labels] + tail = sorted(label for label in labels + if label not in TOP_RESOURCE_LABELS) + return head + tail diff --git a/env/Lib/site-packages/gcloud/monitoring/client.py b/env/Lib/site-packages/gcloud/monitoring/client.py new file mode 100644 index 0000000..fb7a06d --- /dev/null +++ b/env/Lib/site-packages/gcloud/monitoring/client.py @@ -0,0 +1,280 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Client for interacting with the `Google Monitoring API (V3)`_. + +Example:: + + >>> from gcloud import monitoring + >>> client = monitoring.Client() + >>> query = client.query(minutes=5) + >>> print(query.as_dataframe()) # Requires pandas. + +At present, the client supports querying of time series, metric descriptors, +and monitored resource descriptors. + +.. _Google Monitoring API (V3): https://cloud.google.com/monitoring/api/v3/ +""" + +from gcloud.client import JSONClient +from gcloud.monitoring.connection import Connection +from gcloud.monitoring.metric import MetricDescriptor +from gcloud.monitoring.metric import MetricKind +from gcloud.monitoring.metric import ValueType +from gcloud.monitoring.query import Query +from gcloud.monitoring.resource import ResourceDescriptor + + +class Client(JSONClient): + """Client to bundle configuration needed for API requests. + + :type project: string + :param project: The target project. If not passed, falls back to the + default inferred from the environment. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :param credentials: The OAuth2 Credentials to use for the connection + owned by this client. If not passed (and if no ``http`` + object is passed), falls back to the default inferred + from the environment. + + :type http: :class:`httplib2.Http` or class that defines ``request()`` + :param http: An optional HTTP object to make requests. If not passed, an + ``http`` object is created that is bound to the + ``credentials`` for the current object. 
+ """ + + _connection_class = Connection + + def query(self, + metric_type=Query.DEFAULT_METRIC_TYPE, + end_time=None, + days=0, hours=0, minutes=0): + """Construct a query object for retrieving metric data. + + Example:: + + >>> query = client.query(minutes=5) + >>> print(query.as_dataframe()) # Requires pandas. + + :type metric_type: string + :param metric_type: The metric type name. The default value is + :data:`Query.DEFAULT_METRIC_TYPE + `, + but please note that this default value is provided only for + demonstration purposes and is subject to change. See the + `supported metrics`_. + + :type end_time: :class:`datetime.datetime` or None + :param end_time: The end time (inclusive) of the time interval + for which results should be returned, as a datetime object. + The default is the start of the current minute. + + The start time (exclusive) is determined by combining the + values of ``days``, ``hours``, and ``minutes``, and + subtracting the resulting duration from the end time. + + It is also allowed to omit the end time and duration here, + in which case + :meth:`~gcloud.monitoring.query.Query.select_interval` + must be called before the query is executed. + + :type days: integer + :param days: The number of days in the time interval. + + :type hours: integer + :param hours: The number of hours in the time interval. + + :type minutes: integer + :param minutes: The number of minutes in the time interval. + + :rtype: :class:`~gcloud.monitoring.query.Query` + :returns: The query object. + + :raises: :exc:`ValueError` if ``end_time`` is specified but + ``days``, ``hours``, and ``minutes`` are all zero. + If you really want to specify a point in time, use + :meth:`~gcloud.monitoring.query.Query.select_interval`. + + .. 
_supported metrics: https://cloud.google.com/monitoring/api/metrics + """ + return Query(self, metric_type, + end_time=end_time, + days=days, hours=hours, minutes=minutes) + + def metric_descriptor(self, type_, + metric_kind=MetricKind.METRIC_KIND_UNSPECIFIED, + value_type=ValueType.VALUE_TYPE_UNSPECIFIED, + labels=(), unit='', description='', display_name=''): + """Construct a metric descriptor object. + + Metric descriptors specify the schema for a particular metric type. + + This factory method is used most often in conjunction with the metric + descriptor :meth:`~gcloud.monitoring.metric.MetricDescriptor.create` + method to define custom metrics:: + + >>> descriptor = client.metric_descriptor( + ... 'custom.googleapis.com/my_metric', + ... metric_kind=MetricKind.GAUGE, + ... value_type=ValueType.DOUBLE, + ... description='This is a simple example of a custom metric.') + >>> descriptor.create() + + Here is an example where the custom metric is parameterized by a + metric label:: + + >>> label = LabelDescriptor('response_code', LabelValueType.INT64, + ... description='HTTP status code') + >>> descriptor = client.metric_descriptor( + ... 'custom.googleapis.com/my_app/response_count', + ... metric_kind=MetricKind.CUMULATIVE, + ... value_type=ValueType.INT64, + ... labels=[label], + ... description='Cumulative count of HTTP responses.') + >>> descriptor.create() + + :type type_: string + :param type_: + The metric type including a DNS name prefix. For example: + ``"custom.googleapis.com/my_metric"`` + + :type metric_kind: string + :param metric_kind: + The kind of measurement. It must be one of + :data:`MetricKind.GAUGE`, :data:`MetricKind.DELTA`, + or :data:`MetricKind.CUMULATIVE`. + See :class:`~gcloud.monitoring.metric.MetricKind`. + + :type value_type: string + :param value_type: + The value type of the metric. 
It must be one of + :data:`ValueType.BOOL`, :data:`ValueType.INT64`, + :data:`ValueType.DOUBLE`, :data:`ValueType.STRING`, + or :data:`ValueType.DISTRIBUTION`. + See :class:`ValueType`. + + :type labels: list of :class:`~gcloud.monitoring.label.LabelDescriptor` + :param labels: + A sequence of zero or more label descriptors specifying the labels + used to identify a specific instance of this metric. + + :type unit: string + :param unit: An optional unit in which the metric value is reported. + + :type description: string + :param description: An optional detailed description of the metric. + + :type display_name: string + :param display_name: An optional concise name for the metric. + """ + return MetricDescriptor( + self, type_, + metric_kind=metric_kind, + value_type=value_type, + labels=labels, + unit=unit, + description=description, + display_name=display_name, + ) + + def fetch_metric_descriptor(self, metric_type): + """Look up a metric descriptor by type. + + Example:: + + >>> METRIC = 'compute.googleapis.com/instance/cpu/utilization' + >>> print(client.fetch_metric_descriptor(METRIC)) + + :type metric_type: string + :param metric_type: The metric type name. + + :rtype: :class:`~gcloud.monitoring.metric.MetricDescriptor` + :returns: The metric descriptor instance. + + :raises: :class:`gcloud.exceptions.NotFound` if the metric descriptor + is not found. + """ + return MetricDescriptor._fetch(self, metric_type) + + def list_metric_descriptors(self, filter_string=None, type_prefix=None): + """List all metric descriptors for the project. + + Examples:: + + >>> for descriptor in client.list_metric_descriptors(): + ... print(descriptor.type) + + >>> for descriptor in client.list_metric_descriptors( + ... type_prefix='custom.'): + ... print(descriptor.type) + + :type filter_string: string or None + :param filter_string: + An optional filter expression describing the metric descriptors + to be returned. See the `filter documentation`_. 
+ + :type type_prefix: string or None + :param type_prefix: An optional prefix constraining the selected + metric types. This adds ``metric.type = starts_with("")`` + to the filter. + + :rtype: list of :class:`~gcloud.monitoring.metric.MetricDescriptor` + :returns: A list of metric descriptor instances. + + .. _filter documentation: + https://cloud.google.com/monitoring/api/v3/filters + """ + return MetricDescriptor._list(self, filter_string, + type_prefix=type_prefix) + + def fetch_resource_descriptor(self, resource_type): + """Look up a monitored resource descriptor by type. + + Example:: + + >>> print(client.fetch_resource_descriptor('gce_instance')) + + :type resource_type: string + :param resource_type: The resource type name. + + :rtype: :class:`~gcloud.monitoring.resource.ResourceDescriptor` + :returns: The resource descriptor instance. + + :raises: :class:`gcloud.exceptions.NotFound` if the resource descriptor + is not found. + """ + return ResourceDescriptor._fetch(self, resource_type) + + def list_resource_descriptors(self, filter_string=None): + """List all monitored resource descriptors for the project. + + Example:: + + >>> for descriptor in client.list_resource_descriptors(): + ... print(descriptor.type) + + :type filter_string: string or None + :param filter_string: + An optional filter expression describing the resource descriptors + to be returned. See the `filter documentation`_. + + :rtype: list of :class:`~gcloud.monitoring.resource.ResourceDescriptor` + :returns: A list of resource descriptor instances. + + .. _filter documentation: + https://cloud.google.com/monitoring/api/v3/filters + """ + return ResourceDescriptor._list(self, filter_string) diff --git a/env/Lib/site-packages/gcloud/monitoring/connection.py b/env/Lib/site-packages/gcloud/monitoring/connection.py new file mode 100644 index 0000000..5887da6 --- /dev/null +++ b/env/Lib/site-packages/gcloud/monitoring/connection.py @@ -0,0 +1,47 @@ +# Copyright 2016 Google Inc. 
All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Create / interact with gcloud monitoring connections.""" + +from gcloud import connection as base_connection + + +class Connection(base_connection.JSONConnection): + """A connection to Google Monitoring via the JSON REST API. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + connection. + + :type http: :class:`httplib2.Http` or class that defines ``request()`` + :param http: (Optional) HTTP object to make requests. + + :type api_base_url: string + :param api_base_url: The base of the API call URL. Defaults to the value + :attr:`Connection.API_BASE_URL`. 
+ """ + + API_BASE_URL = 'https://monitoring.googleapis.com' + """The base of the API call URL.""" + + API_VERSION = 'v3' + """The version of the API, used in building the API call's URL.""" + + API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}' + """A template for the URL of a particular API call.""" + + SCOPE = ('https://www.googleapis.com/auth/monitoring.read', + 'https://www.googleapis.com/auth/monitoring', + 'https://www.googleapis.com/auth/cloud-platform') + """The scopes required for authenticating as a Monitoring consumer.""" diff --git a/env/Lib/site-packages/gcloud/monitoring/label.py b/env/Lib/site-packages/gcloud/monitoring/label.py new file mode 100644 index 0000000..3be74ee --- /dev/null +++ b/env/Lib/site-packages/gcloud/monitoring/label.py @@ -0,0 +1,99 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Label Descriptors for the `Google Monitoring API (V3)`_. + +.. _Google Monitoring API (V3): + https://cloud.google.com/monitoring/api/ref_v3/rest/v3/LabelDescriptor +""" + + +class LabelValueType(object): + """Allowed values for the `type of a label`_. + + .. _type of a label: + https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\ + LabelDescriptor#ValueType + """ + + STRING = 'STRING' + BOOL = 'BOOL' + INT64 = 'INT64' + + +class LabelDescriptor(object): + """Schema specification and documentation for a single label. + + :type key: string + :param key: The name of the label. 
+ + :type value_type: string + :param value_type: + The type of the label. It must be one of :data:`LabelValueType.STRING`, + :data:`LabelValueType.BOOL`, or :data:`LabelValueType.INT64`. + See :class:`LabelValueType`. + + :type description: string + :param description: A human-readable description for the label. + """ + + def __init__(self, key, value_type=LabelValueType.STRING, description=''): + self.key = key + self.value_type = value_type + self.description = description + + @classmethod + def _from_dict(cls, info): + """Construct a label descriptor from the parsed JSON representation. + + :type info: dict + :param info: + A ``dict`` parsed from the JSON wire-format representation. + + :rtype: :class:`LabelDescriptor` + :returns: A label descriptor. + """ + return cls( + info['key'], + info.get('valueType', LabelValueType.STRING), + info.get('description', ''), + ) + + def _to_dict(self): + """Build a dictionary ready to be serialized to the JSON wire format. + + :rtype: dict + :returns: A dictionary. + """ + info = { + 'key': self.key, + 'valueType': self.value_type, + } + + if self.description: + info['description'] = self.description + + return info + + def __eq__(self, other): + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + return self.__dict__ != other.__dict__ + + def __repr__(self): + return ( + 'LabelDescriptor(key={key!r}, value_type={value_type!r},' + ' description={description!r})' + ).format(**self.__dict__) diff --git a/env/Lib/site-packages/gcloud/monitoring/metric.py b/env/Lib/site-packages/gcloud/monitoring/metric.py new file mode 100644 index 0000000..87e2a18 --- /dev/null +++ b/env/Lib/site-packages/gcloud/monitoring/metric.py @@ -0,0 +1,345 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Metric Descriptors for the `Google Monitoring API (V3)`_. + +.. _Google Monitoring API (V3): + https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\ + projects.metricDescriptors +""" + +import collections + +from gcloud.monitoring.label import LabelDescriptor + + +class MetricKind(object): + """Choices for the `kind of measurement`_. + + .. _kind of measurement: + https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\ + projects.metricDescriptors#MetricKind + """ + + METRIC_KIND_UNSPECIFIED = 'METRIC_KIND_UNSPECIFIED' + """.. note:: An unspecified kind is not allowed in metric descriptors.""" + + GAUGE = 'GAUGE' + DELTA = 'DELTA' + CUMULATIVE = 'CUMULATIVE' + + +class ValueType(object): + """Choices for the `metric value type`_. + + .. _metric value type: + https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\ + projects.metricDescriptors#ValueType + """ + + VALUE_TYPE_UNSPECIFIED = 'VALUE_TYPE_UNSPECIFIED' + """.. note:: An unspecified type is not allowed in metric descriptors.""" + + BOOL = 'BOOL' + INT64 = 'INT64' + DOUBLE = 'DOUBLE' + STRING = 'STRING' + DISTRIBUTION = 'DISTRIBUTION' + + +class MetricDescriptor(object): + """Specification of a metric type and its schema. + + The preferred way to construct a metric descriptor object is using the + :meth:`~gcloud.monitoring.client.Client.metric_descriptor` factory method + of the :class:`~gcloud.monitoring.client.Client` class. + + :type client: :class:`gcloud.monitoring.client.Client` + :param client: A client for operating on the metric descriptor. 
+
+    :type type_: string
+    :param type_:
+        The metric type including a DNS name prefix. For example:
+        ``"compute.googleapis.com/instance/cpu/utilization"``
+
+    :type metric_kind: string
+    :param metric_kind:
+        The kind of measurement. It must be one of
+        :data:`MetricKind.GAUGE`, :data:`MetricKind.DELTA`,
+        or :data:`MetricKind.CUMULATIVE`. See :class:`MetricKind`.
+
+    :type value_type: string
+    :param value_type:
+        The value type of the metric. It must be one of
+        :data:`ValueType.BOOL`, :data:`ValueType.INT64`,
+        :data:`ValueType.DOUBLE`, :data:`ValueType.STRING`,
+        or :data:`ValueType.DISTRIBUTION`.
+        See :class:`ValueType`.
+
+    :type labels: list of :class:`~gcloud.monitoring.label.LabelDescriptor`
+    :param labels:
+        A sequence of zero or more label descriptors specifying the labels
+        used to identify a specific instance of this metric.
+
+    :type unit: string
+    :param unit: An optional unit in which the metric value is reported.
+
+    :type description: string
+    :param description: An optional detailed description of the metric.
+
+    :type display_name: string
+    :param display_name: An optional concise name for the metric.
+
+    :type name: string or None
+    :param name:
+        The "resource name" of the metric descriptor. For example:
+        ``"projects/<project_id>/metricDescriptors/<type>"``. As
+        retrieved from the service, this will always be specified.
+        You can and should omit it when constructing an instance for
+        the purpose of creating a new metric descriptor.
+    """
+
+    def __init__(self, client, type_,
+                 metric_kind=MetricKind.METRIC_KIND_UNSPECIFIED,
+                 value_type=ValueType.VALUE_TYPE_UNSPECIFIED,
+                 labels=(),
+                 unit='', description='', display_name='',
+                 name=None):
+        self.client = client
+        self.name = name
+        self.type = type_
+        self.labels = labels
+        self.metric_kind = metric_kind
+        self.value_type = value_type
+        self.unit = unit
+        self.description = description
+        self.display_name = display_name
+
+    def create(self):
+        """Create a new metric descriptor based on this object.
+ + Example:: + + >>> descriptor = client.metric_descriptor( + ... 'custom.googleapis.com/my_metric', + ... metric_kind=MetricKind.GAUGE, + ... value_type=ValueType.DOUBLE, + ... description='This is a simple example of a custom metric.') + >>> descriptor.create() + + The metric kind must not be :data:`MetricKind.METRIC_KIND_UNSPECIFIED`, + and the value type must not be + :data:`ValueType.VALUE_TYPE_UNSPECIFIED`. + + The ``name`` attribute is ignored in preparing the creation request. + All attributes are overwritten by the values received in the response + (normally affecting only ``name``). + """ + path = '/projects/{project}/metricDescriptors/'.format( + project=self.client.project) + response = self.client.connection.api_request(method='POST', path=path, + data=self._to_dict()) + self._init_from_dict(response) + + def delete(self): + """Delete the metric descriptor identified by this object. + + Example:: + + >>> descriptor = client.metric_descriptor( + ... 'custom.googleapis.com/my_metric') + >>> descriptor.delete() + + Only the ``client`` and ``type`` attributes are used. + """ + path = '/projects/{project}/metricDescriptors/{type}'.format( + project=self.client.project, + type=self.type) + self.client.connection.api_request(method='DELETE', path=path) + + @classmethod + def _fetch(cls, client, metric_type): + """Look up a metric descriptor by type. + + :type client: :class:`gcloud.monitoring.client.Client` + :param client: The client to use. + + :type metric_type: string + :param metric_type: The metric type name. + + :rtype: :class:`MetricDescriptor` + :returns: The metric descriptor instance. + + :raises: :class:`gcloud.exceptions.NotFound` if the metric descriptor + is not found. 
+ """ + path = '/projects/{project}/metricDescriptors/{type}'.format( + project=client.project, + type=metric_type) + info = client.connection.api_request(method='GET', path=path) + return cls._from_dict(client, info) + + @classmethod + def _list(cls, client, filter_string=None, type_prefix=None): + """List all metric descriptors for the project. + + :type client: :class:`gcloud.monitoring.client.Client` + :param client: The client to use. + + :type filter_string: string or None + :param filter_string: + An optional filter expression describing the metric descriptors + to be returned. See the `filter documentation`_. + + :type type_prefix: string or None + :param type_prefix: An optional prefix constraining the selected + metric types. This adds ``metric.type = starts_with("")`` + to the filter. + + :rtype: list of :class:`MetricDescriptor` + :returns: A list of metric descriptor instances. + + .. _filter documentation: + https://cloud.google.com/monitoring/api/v3/filters + """ + path = '/projects/{project}/metricDescriptors/'.format( + project=client.project) + + filters = [] + if filter_string is not None: + filters.append(filter_string) + + if type_prefix is not None: + filters.append('metric.type = starts_with("{prefix}")'.format( + prefix=type_prefix)) + + descriptors = [] + page_token = None + while True: + params = {} + + if filters: + params['filter'] = ' AND '.join(filters) + + if page_token is not None: + params['pageToken'] = page_token + + response = client.connection.api_request( + method='GET', path=path, query_params=params) + for info in response.get('metricDescriptors', ()): + descriptors.append(cls._from_dict(client, info)) + + page_token = response.get('nextPageToken') + if not page_token: + break + + return descriptors + + @classmethod + def _from_dict(cls, client, info): + """Construct a metric descriptor from the parsed JSON representation. 
+
+        :type client: :class:`gcloud.monitoring.client.Client`
+        :param client: A client to be included in the returned object.
+
+        :type info: dict
+        :param info:
+            A ``dict`` parsed from the JSON wire-format representation.
+
+        :rtype: :class:`MetricDescriptor`
+        :returns: A metric descriptor.
+        """
+        descriptor = cls(client, None)
+        descriptor._init_from_dict(info)
+        return descriptor
+
+    def _init_from_dict(self, info):
+        """Initialize attributes from the parsed JSON representation.
+
+        :type info: dict
+        :param info:
+            A ``dict`` parsed from the JSON wire-format representation.
+        """
+        self.name = info['name']
+        self.type = info['type']
+        self.labels = tuple(LabelDescriptor._from_dict(label)
+                            for label in info.get('labels', []))
+        self.metric_kind = info['metricKind']
+        self.value_type = info['valueType']
+        self.unit = info.get('unit', '')
+        self.description = info.get('description', '')
+        self.display_name = info.get('displayName', '')
+
+    def _to_dict(self):
+        """Build a dictionary ready to be serialized to the JSON wire format.
+
+        :rtype: dict
+        :returns: A dictionary.
+        """
+        info = {
+            'type': self.type,
+            'metricKind': self.metric_kind,
+            'valueType': self.value_type,
+        }
+
+        if self.labels:
+            info['labels'] = [label._to_dict() for label in self.labels]
+        if self.unit:
+            info['unit'] = self.unit
+        if self.description:
+            info['description'] = self.description
+        if self.display_name:
+            info['displayName'] = self.display_name
+
+        return info
+
+    def __repr__(self):
+        return (
+            '<MetricDescriptor:\n'
+            ' name={name!r},\n'
+            ' type={type!r},\n'
+            ' metric_kind={metric_kind!r}, value_type={value_type!r},\n'
+            ' labels={labels!r},\n'
+            ' display_name={display_name!r}, unit={unit!r},\n'
+            ' description={description!r}>'
+        ).format(**self.__dict__)
+
+
+class Metric(collections.namedtuple('Metric', 'type labels')):
+    """A specific metric identified by specifying values for all labels.
+
+    :type type: string
+    :param type: The metric type name.
+
+    :type labels: dict
+    :param labels: A mapping from label names to values for all labels
+                   enumerated in the associated :class:`MetricDescriptor`.
+ """ + __slots__ = () + + @classmethod + def _from_dict(cls, info): + """Construct a metric object from the parsed JSON representation. + + :type info: dict + :param info: + A ``dict`` parsed from the JSON wire-format representation. + + :rtype: :class:`Metric` + :returns: A metric object. + """ + return cls( + type=info['type'], + labels=info.get('labels', {}), + ) diff --git a/env/Lib/site-packages/gcloud/monitoring/query.py b/env/Lib/site-packages/gcloud/monitoring/query.py new file mode 100644 index 0000000..91838b6 --- /dev/null +++ b/env/Lib/site-packages/gcloud/monitoring/query.py @@ -0,0 +1,673 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Time series query for the `Google Monitoring API (V3)`_. + +.. _Google Monitoring API (V3): + https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\ + projects.timeSeries/list +""" + +import copy +import datetime +import itertools + +import six + +from gcloud.monitoring._dataframe import _build_dataframe +from gcloud.monitoring.timeseries import TimeSeries + +_UTCNOW = datetime.datetime.utcnow # To be replaced by tests. 
+ + +class Aligner(object): + """Allowed values for the `supported aligners`_.""" + + ALIGN_NONE = 'ALIGN_NONE' + ALIGN_DELTA = 'ALIGN_DELTA' + ALIGN_RATE = 'ALIGN_RATE' + ALIGN_INTERPOLATE = 'ALIGN_INTERPOLATE' + ALIGN_NEXT_OLDER = 'ALIGN_NEXT_OLDER' + ALIGN_MIN = 'ALIGN_MIN' + ALIGN_MAX = 'ALIGN_MAX' + ALIGN_MEAN = 'ALIGN_MEAN' + ALIGN_COUNT = 'ALIGN_COUNT' + ALIGN_SUM = 'ALIGN_SUM' + ALIGN_STDDEV = 'ALIGN_STDDEV' + ALIGN_COUNT_TRUE = 'ALIGN_COUNT_TRUE' + ALIGN_FRACTION_TRUE = 'ALIGN_FRACTION_TRUE' + + +class Reducer(object): + """Allowed values for the `supported reducers`_.""" + + REDUCE_NONE = 'REDUCE_NONE' + REDUCE_MEAN = 'REDUCE_MEAN' + REDUCE_MIN = 'REDUCE_MIN' + REDUCE_MAX = 'REDUCE_MAX' + REDUCE_SUM = 'REDUCE_SUM' + REDUCE_STDDEV = 'REDUCE_STDDEV' + REDUCE_COUNT = 'REDUCE_COUNT' + REDUCE_COUNT_TRUE = 'REDUCE_COUNT_TRUE' + REDUCE_FRACTION_TRUE = 'REDUCE_FRACTION_TRUE' + REDUCE_PERCENTILE_99 = 'REDUCE_PERCENTILE_99' + REDUCE_PERCENTILE_95 = 'REDUCE_PERCENTILE_95' + REDUCE_PERCENTILE_50 = 'REDUCE_PERCENTILE_50' + REDUCE_PERCENTILE_05 = 'REDUCE_PERCENTILE_05' + + +class Query(object): + """Query object for retrieving metric data. + + The preferred way to construct a query object is using the + :meth:`~gcloud.monitoring.client.Client.query` method + of the :class:`~gcloud.monitoring.client.Client` class. + + :type client: :class:`gcloud.monitoring.client.Client` + :param client: The client to use. + + :type metric_type: string + :param metric_type: The metric type name. The default value is + :data:`Query.DEFAULT_METRIC_TYPE + `, + but please note that this default value is provided only for + demonstration purposes and is subject to change. See the + `supported metrics`_. + + :type end_time: :class:`datetime.datetime` or None + :param end_time: The end time (inclusive) of the time interval + for which results should be returned, as a datetime object. + The default is the start of the current minute. 
+ + The start time (exclusive) is determined by combining the + values of ``days``, ``hours``, and ``minutes``, and + subtracting the resulting duration from the end time. + + It is also allowed to omit the end time and duration here, + in which case + :meth:`~gcloud.monitoring.query.Query.select_interval` + must be called before the query is executed. + + :type days: integer + :param days: The number of days in the time interval. + + :type hours: integer + :param hours: The number of hours in the time interval. + + :type minutes: integer + :param minutes: The number of minutes in the time interval. + + :raises: :exc:`ValueError` if ``end_time`` is specified but + ``days``, ``hours``, and ``minutes`` are all zero. + If you really want to specify a point in time, use + :meth:`~gcloud.monitoring.query.Query.select_interval`. + + .. _supported metrics: https://cloud.google.com/monitoring/api/metrics + """ + + DEFAULT_METRIC_TYPE = 'compute.googleapis.com/instance/cpu/utilization' + + def __init__(self, client, + metric_type=DEFAULT_METRIC_TYPE, + end_time=None, days=0, hours=0, minutes=0): + start_time = None + if days or hours or minutes: + if end_time is None: + end_time = _UTCNOW().replace(second=0, microsecond=0) + start_time = end_time - datetime.timedelta(days=days, + hours=hours, + minutes=minutes) + elif end_time is not None: + raise ValueError('Non-zero duration required for time interval.') + + self._client = client + self._end_time = end_time + self._start_time = start_time + self._filter = _Filter(metric_type) + + self._per_series_aligner = None + self._alignment_period_seconds = None + self._cross_series_reducer = None + self._group_by_fields = () + + def __iter__(self): + return self.iter() + + @property + def metric_type(self): + """The metric type name.""" + return self._filter.metric_type + + @property + def filter(self): + """The filter string. 
+ + This is constructed from the metric type, the resource type, and + selectors for the group ID, monitored projects, resource labels, + and metric labels. + """ + return str(self._filter) + + def select_interval(self, end_time, start_time=None): + """Copy the query and set the query time interval. + + Example:: + + import datetime + + now = datetime.datetime.utcnow() + query = query.select_interval( + end_time=now, + start_time=now - datetime.timedelta(minutes=5)) + + As a convenience, you can alternatively specify the end time and + an interval duration when you create the query initially. + + :type end_time: :class:`datetime.datetime` + :param end_time: The end time (inclusive) of the time interval + for which results should be returned, as a datetime object. + + :type start_time: :class:`datetime.datetime` or None + :param start_time: The start time (exclusive) of the time interval + for which results should be returned, as a datetime object. + If not specified, the interval is a point in time. + + :rtype: :class:`Query` + :returns: The new query object. + """ + new_query = self.copy() + new_query._end_time = end_time + new_query._start_time = start_time + return new_query + + def select_group(self, group_id): + """Copy the query and add filtering by group. + + Example:: + + query = query.select_group('1234567') + + :type group_id: string + :param group_id: The ID of a group to filter by. + + :rtype: :class:`Query` + :returns: The new query object. + """ + new_query = self.copy() + new_query._filter.group_id = group_id + return new_query + + def select_projects(self, *args): + """Copy the query and add filtering by monitored projects. + + This is only useful if the target project represents a Stackdriver + account containing the specified monitored projects. 
+ + Examples:: + + query = query.select_projects('project-1') + query = query.select_projects('project-1', 'project-2') + + :type args: tuple + :param args: Project IDs limiting the resources to be included + in the query. + + :rtype: :class:`Query` + :returns: The new query object. + """ + new_query = self.copy() + new_query._filter.projects = args + return new_query + + def select_resources(self, *args, **kwargs): + """Copy the query and add filtering by resource labels. + + Examples:: + + query = query.select_resources(zone='us-central1-a') + query = query.select_resources(zone_prefix='europe-') + query = query.select_resources(resource_type='gce_instance') + + A keyword argument ``