diff --git a/botocore-1.19.25.tar.gz b/botocore-1.19.25.tar.gz deleted file mode 100644 index ec225d9..0000000 --- a/botocore-1.19.25.tar.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ccaf3979590b72625b3699d93dabf48f350f9a3304c127fc6830e8ac842b0d96 -size 7125010 diff --git a/botocore-1.20.9.tar.gz b/botocore-1.20.9.tar.gz new file mode 100644 index 0000000..22a5528 --- /dev/null +++ b/botocore-1.20.9.tar.gz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c8614c230e7a8e042a8c07d47caea50ad21cb51415289bd34fa6d0382beddad7 +size 7461420 diff --git a/python-botocore.changes b/python-botocore.changes index 3965ef1..30c2df9 100644 --- a/python-botocore.changes +++ b/python-botocore.changes @@ -1,3 +1,365 @@ +------------------------------------------------------------------- +Thu Feb 18 14:10:23 UTC 2021 - John Paul Adrian Glaubitz + +- Version update to 1.20.9 (bsc#1182421, bsc#1182422, jsc#ECO-3352, jsc#PM-2485) + * api-change:``devops-guru``: Update devops-guru client to latest version + * api-change:``codebuild``: Update codebuild client to latest version +- from version 1.20.8 + * api-change:``lightsail``: Update lightsail client to latest version + * api-change:``medialive``: Update medialive client to latest version + * api-change:``kinesis-video-archived-media``: Update kinesis-video-archived-media + client to latest version + * api-change:``config``: Update config client to latest version + * api-change:``pinpoint``: Update pinpoint client to latest version + * api-change:``redshift-data``: Update redshift-data client to latest version + * api-change:``workmailmessageflow``: Update workmailmessageflow client to latest version + * api-change:``mediatailor``: Update mediatailor client to latest version +- from version 1.20.7 + * api-change:``personalize-events``: Update personalize-events client to latest version + * api-change:``eks``: Update eks client to latest version + * api-change:``iam``: Update iam client to latest version + * api-change:``codepipeline``: Update codepipeline client to latest version + * api-change:``detective``: Update detective client to latest version + * api-change:``macie2``: Update macie2 client to latest version + * api-change:``wafv2``: Update wafv2 client to latest version + * api-change:``elbv2``: Update elbv2 client to latest version + * api-change:``appsync``: Update appsync client to latest version + * api-change:``rds``: Update rds client to latest version +- from version 1.20.6 + * api-change:``databrew``: Update databrew client to latest version + * api-change:``rds``: Update rds client to latest version +- from version 1.20.5 + * api-change:``quicksight``: Update quicksight client to latest version + * api-change:``mediaconvert``: Update mediaconvert client to latest version + * api-change:``qldb-session``: Update qldb-session client to latest version + * api-change:``sagemaker``: Update sagemaker client to latest version + * api-change:``gamelift``: Update gamelift client to latest version +- from version 1.20.4 + * api-change:``dataexchange``: Update dataexchange client to latest version + * api-change:``cloudtrail``: Update cloudtrail client to latest version + * api-change:``elbv2``: Update elbv2 client to latest version + * api-change:``ivs``: Update ivs client to latest version + * api-change:``macie2``: Update macie2 client to latest version + * api-change:``globalaccelerator``: Update globalaccelerator client to latest version + * api-change:``iotsitewise``: Update iotsitewise 
client to latest version + * api-change:``elasticache``: Update elasticache client to latest version +- from version 1.20.3 + * api-change:``macie``: Update macie client to latest version + * api-change:``elbv2``: Update elbv2 client to latest version + * api-change:``organizations``: Update organizations client to latest version +- from version 1.20.2 + * api-change:``quicksight``: Update quicksight client to latest version + * api-change:``appflow``: Update appflow client to latest version + * api-change:``emr-containers``: Update emr-containers client to latest version + * api-change:``dlm``: Update dlm client to latest version + * api-change:``athena``: Update athena client to latest version + * api-change:``ec2``: Update ec2 client to latest version +- from version 1.20.1 + * api-change:``lambda``: Update lambda client to latest version + * api-change:``codebuild``: Update codebuild client to latest version + * api-change:``ec2``: Update ec2 client to latest version + * api-change:``ce``: Update ce client to latest version + * api-change:``databrew``: Update databrew client to latest version + * api-change:``securityhub``: Update securityhub client to latest version + * api-change:``workmail``: Update workmail client to latest version + * api-change:``auditmanager``: Update auditmanager client to latest version + * api-change:``compute-optimizer``: Update compute-optimizer client to latest version + * api-change:``iotsitewise``: Update iotsitewise client to latest version +- from version 1.20.0 + * api-change:``appmesh``: Update appmesh client to latest version + * api-change:``application-autoscaling``: Update application-autoscaling client to latest version + * api-change:``lookoutvision``: Update lookoutvision client to latest version + * api-change:``organizations``: Update organizations client to latest version + * feature:Python: Dropped support for Python 3.4 and 3.5 + * api-change:``s3control``: Update s3control client to latest version + * api-change:``rds-data``: Update rds-data client to latest version + * api-change:``medialive``: Update medialive client to latest version + * api-change:``route53``: Update route53 client to latest version + * api-change:``location``: Update location client to latest version + * enhancement:s3: Amazon S3 now supports AWS PrivateLink, providing direct + access to S3 via a private endpoint within your virtual private network. 
+ * api-change:``iotwireless``: Update iotwireless client to latest version +- from version 1.19.63 + * api-change:``macie2``: Update macie2 client to latest version + * api-change:``connect``: Update connect client to latest version + * api-change:``medialive``: Update medialive client to latest version +- from version 1.19.62 + * api-change:``wellarchitected``: Update wellarchitected client to latest version + * api-change:``managedblockchain``: Update managedblockchain client to latest version + * api-change:``cloudwatch``: Update cloudwatch client to latest version + * api-change:``databrew``: Update databrew client to latest version + * bugfix:Validator: Fix showing incorrect max-value in error message for + range and length value validation + * api-change:``iot``: Update iot client to latest version + * api-change:``robomaker``: Update robomaker client to latest version +- from version 1.19.61 + * api-change:``elasticache``: Update elasticache client to latest version + * api-change:``customer-profiles``: Update customer-profiles client to latest version + * api-change:``sesv2``: Update sesv2 client to latest version + * api-change:``accessanalyzer``: Update accessanalyzer client to latest version + * api-change:``lightsail``: Update lightsail client to latest version + * api-change:``es``: Update es client to latest version +- from version 1.19.60 + * api-change:``backup``: Update backup client to latest version +- from version 1.19.59 + * api-change:``greengrassv2``: Update greengrassv2 client to latest version + * api-change:``redshift``: Update redshift client to latest version + * api-change:``lexv2-runtime``: Update lexv2-runtime client to latest version + * api-change:``rds``: Update rds client to latest version + * api-change:``lexv2-models``: Update lexv2-models client to latest version + * api-change:``ssm``: Update ssm client to latest version + * api-change:``ec2``: Update ec2 client to latest version +- from version 1.19.58 + * api-change:``kafka``: Update kafka client to latest version + * api-change:``resourcegroupstaggingapi``: Update resourcegroupstaggingapi + client to latest version +- from version 1.19.57 + * api-change:``acm-pca``: Update acm-pca client to latest version + * api-change:``chime``: Update chime client to latest version + * api-change:``ecs``: Update ecs client to latest version +- from version 1.19.56 + * api-change:``sns``: Update sns client to latest version +- from version 1.19.55 + * api-change:``pinpoint``: Update pinpoint client to latest version + * api-change:``cognito-identity``: Update cognito-identity client to latest version + * api-change:``s3control``: Update s3control client to latest version + * api-change:``sagemaker``: Update sagemaker client to latest version +- from version 1.19.54 + * api-change:``frauddetector``: Update frauddetector client to latest version + * api-change:``personalize``: Update personalize client to latest version +- from version 1.19.53 + * api-change:``appstream``: Update appstream client to latest version + * api-change:``auditmanager``: Update auditmanager client to latest version + * api-change:``ssm``: Update ssm client to latest version + * api-change:``elasticache``: Update elasticache client to latest version + * api-change:``lightsail``: Update lightsail client to latest version +- from version 1.19.52 + * api-change:``rds``: Update rds client to latest version + * api-change:``kms``: Update kms client to latest version +- from version 1.19.51 + * api-change:``devops-guru``: Update devops-guru 
client to latest version + * api-change:``codepipeline``: Update codepipeline client to latest version + * api-change:``mediaconvert``: Update mediaconvert client to latest version +- from version 1.19.50 + * api-change:``autoscaling``: Update autoscaling client to latest version + * api-change:``transfer``: Update transfer client to latest version + * api-change:``autoscaling-plans``: Update autoscaling-plans client to latest version +- from version 1.19.49 + * api-change:``ce``: Update ce client to latest version + * api-change:``application-autoscaling``: Update application-autoscaling + client to latest version +- from version 1.19.48 + * api-change:``healthlake``: Update healthlake client to latest version + * api-change:``cloudsearch``: Update cloudsearch client to latest version +- Add python-nose to BuildRequires +- Drop python-pytest from BuildRequires +- Drop patch which no longer applies but hasn't been merged upstream yet + + remove_nose.patch +- Switch testsuite invocation back to python-nose + +------------------------------------------------------------------- +Sat Jan 2 15:41:25 UTC 2021 - Benjamin Greiner + +- Update to 1.19.47 + * api-change:servicecatalog: Update servicecatalog client to + latest version +- Changes in 1.19.46 + * api-change:macie2: Update macie2 client to latest version + * api-change:elasticache: Update elasticache client to latest + version +- Changes in 1.19.45 + * api-change:acm-pca: Update acm-pca client to latest version + * api-change:apigatewayv2: Update apigatewayv2 client to latest + version +- Changes in 1.19.44 + * api-change:cloudfront: Update cloudfront client to latest + version +- Changes in 1.19.43 + * api-change:compute-optimizer: Update compute-optimizer client + to latest version + * api-change:resource-groups: Update resource-groups client to + latest version + * api-change:dms: Update dms client to latest version +- Changes in 1.19.42 + * api-change:ssm: Update ssm client to latest version + * api-change:iotwireless: Update iotwireless client to latest + version + * api-change:rds: Update rds client to latest version + * api-change:glue: Update glue client to latest version + * api-change:ce: Update ce client to latest version + * api-change:connect: Update connect client to latest version + * api-change:elasticache: Update elasticache client to latest + version +- Changes in 1.19.41 + * api-change:config: Update config client to latest version + * api-change:ec2: Update ec2 client to latest version + * api-change:glue: Update glue client to latest version + * api-change:batch: Update batch client to latest version + * api-change:managedblockchain: Update managedblockchain client + to latest version + * api-change:service-quotas: Update service-quotas client to + latest version + * api-change:s3: Update s3 client to latest version + * api-change:connectparticipant: Update connectparticipant + client to latest version + * api-change:securityhub: Update securityhub client to latest + version + * api-change:qldb-session: Update qldb-session client to latest + version + * api-change:outposts: Update outposts client to latest version + * api-change:servicecatalog-appregistry: Update servicecatalog- + appregistry client to latest version + * api-change:dms: Update dms client to latest version + * api-change:apigateway: Update apigateway client to latest + version +- Changes in 1.19.40 + * api-change:rds: Update rds client to latest version + * bugfix:SSO: Fixed timestamp format for SSO credential + expirations + * 
api-change:personalize-runtime: Update personalize-runtime + client to latest version + * api-change:ec2: Update ec2 client to latest version +- Changes in 1.19.39 + * api-change:ec2: Update ec2 client to latest version + * api-change:servicecatalog: Update servicecatalog client to + latest version + * api-change:dlm: Update dlm client to latest version + * api-change:kms: Update kms client to latest version + * api-change:route53resolver: Update route53resolver client to + latest version + * api-change:sqs: Update sqs client to latest version + * api-change:config: Update config client to latest version + * api-change:imagebuilder: Update imagebuilder client to latest + version + * api-change:route53: Update route53 client to latest version +- Changes in 1.19.38 + * api-change:ce: Update ce client to latest version + * api-change:amp: Update amp client to latest version + * api-change:location: Update location client to latest version + * api-change:wellarchitected: Update wellarchitected client to + latest version + * api-change:quicksight: Update quicksight client to latest + version +- Changes in 1.19.37 + * api-change:iotwireless: Update iotwireless client to latest + version + * api-change:lambda: Update lambda client to latest version + * api-change:greengrassv2: Update greengrassv2 client to latest + version + * api-change:ssm: Update ssm client to latest version + * api-change:iotdeviceadvisor: Update iotdeviceadvisor client to + latest version + * api-change:iot: Update iot client to latest version + * api-change:iotanalytics: Update iotanalytics client to latest + version + * api-change:amp: Update amp client to latest version + * api-change:iotfleethub: Update iotfleethub client to latest + version +- Changes in 1.19.36 + * api-change:ec2: Update ec2 client to latest version + * api-change:globalaccelerator: Update globalaccelerator client + to latest version + * api-change:devops-guru: Update devops-guru client to latest + version +- Changes in 1.19.35 + * api-change:guardduty: Update guardduty client to latest + version + * api-change:iotsitewise: Update iotsitewise client to latest + version + * api-change:autoscaling: Update autoscaling client to latest + version + * api-change:cloudwatch: Update cloudwatch client to latest + version + * api-change:pi: Update pi client to latest version + * api-change:cloudtrail: Update cloudtrail client to latest + version +- Changes in 1.19.34 + * api-change:networkmanager: Update networkmanager client to + latest version + * api-change:kendra: Update kendra client to latest version + * api-change:ec2: Update ec2 client to latest version +- Changes in 1.19.33 + * api-change:globalaccelerator: Update globalaccelerator client + to latest version + * api-change:ec2: Update ec2 client to latest version + * api-change:redshift: Update redshift client to latest version +- Changes in 1.19.32 + * api-change:ecr: Update ecr client to latest version + * api-change:sagemaker: Update sagemaker client to latest + version + * api-change:kendra: Update kendra client to latest version + * api-change:quicksight: Update quicksight client to latest + version + * api-change:auditmanager: Update auditmanager client to latest + version + * api-change:sagemaker-runtime: Update sagemaker-runtime client + to latest version + * api-change:sagemaker-edge: Update sagemaker-edge client to + latest version + * api-change:forecast: Update forecast client to latest version + * api-change:healthlake: Update healthlake client to latest + version + * 
api-change:emr-containers: Update emr-containers client to + latest version +- Changes in 1.19.31 + * api-change:dms: Update dms client to latest version + * api-change:servicecatalog-appregistry: Update servicecatalog- + appregistry client to latest version +- Changes in 1.19.30 + * api-change:ssm: Update ssm client to latest version + * api-change:ec2: Update ec2 client to latest version + * api-change:workspaces: Update workspaces client to latest + version + * api-change:license-manager: Update license-manager client to + latest version + * api-change:lambda: Update lambda client to latest version + * api-change:ds: Update ds client to latest version + * api-change:kafka: Update kafka client to latest version + * api-change:medialive: Update medialive client to latest + version + * api-change:rds: Update rds client to latest version +- Changes in 1.19.29 + * api-change:license-manager: Update license-manager client to + latest version + * api-change:compute-optimizer: Update compute-optimizer client + to latest version + * api-change:amplifybackend: Update amplifybackend client to + latest version + * api-change:batch: Update batch client to latest version +- Changes in 1.19.28 + * api-change:customer-profiles: Update customer-profiles client + to latest version +- Changes in 1.19.27 + * api-change:sagemaker-featurestore-runtime: Update sagemaker- + featurestore-runtime client to latest version + * api-change:ecr-public: Update ecr-public client to latest + version + * api-change:honeycode: Update honeycode client to latest + version + * api-change:eks: Update eks client to latest version + * api-change:amplifybackend: Update amplifybackend client to + latest version + * api-change:lambda: Update lambda client to latest version + * api-change:sagemaker: Update sagemaker client to latest + version + * api-change:lookoutvision: Update lookoutvision client to + latest version + * api-change:ec2: Update ec2 client to latest version + * api-change:connect: Update connect client to latest version + * api-change:connect-contact-lens: Update connect-contact-lens + client to latest version + * api-change:profile: Update profile client to latest version + * api-change:s3: Update s3 client to latest version + * api-change:appintegrations: Update appintegrations client to + latest version + * api-change:ds: Update ds client to latest version + * api-change:devops-guru: Update devops-guru client to latest + version +- Changes in 1.19.26 + * api-change:ec2: Update ec2 client to latest version +- Unpin upper versions +- Refresh remove_nose.patch + ------------------------------------------------------------------- Mon Nov 30 11:10:10 UTC 2020 - John Paul Adrian Glaubitz diff --git a/python-botocore.spec b/python-botocore.spec index f854901..d10570d 100644 --- a/python-botocore.spec +++ b/python-botocore.spec @@ -1,7 +1,7 @@ # # spec file for package python-botocore # -# Copyright (c) 2020 SUSE LLC +# Copyright (c) 2021 SUSE LLC # # All modifications and additions to the file contributed by third parties # remain the property of their copyright owners, unless otherwise agreed @@ -21,18 +21,15 @@ %define skip_python2 1 %endif Name: python-botocore -Version: 1.19.25 +Version: 1.20.9 Release: 0 Summary: Python interface for AWS License: Apache-2.0 URL: https://github.com/boto/botocore Source: https://files.pythonhosted.org/packages/source/b/botocore/botocore-%{version}.tar.gz -# PATCH-FEATURE-UPSTREAM remove_nose.patch gh#boto/botocore#2134 mcepl@suse.com -# Port test suite from nose to pytest (mostly 
just plain unittest) -Patch0: remove_nose.patch BuildRequires: %{python_module jmespath < 1.0.0} BuildRequires: %{python_module jmespath >= 0.7.1} -BuildRequires: %{python_module python-dateutil <= 3.0.0} +BuildRequires: %{python_module python-dateutil < 3.0.0} BuildRequires: %{python_module python-dateutil >= 2.1} BuildRequires: %{python_module setuptools} BuildRequires: %{python_module urllib3 < 1.27} @@ -41,7 +38,7 @@ BuildRequires: fdupes BuildRequires: python-rpm-macros Requires: python-jmespath < 1.0.0 Requires: python-jmespath >= 0.7.1 -Requires: python-python-dateutil <= 3.0.0 +Requires: python-python-dateutil < 3.0.0 Requires: python-python-dateutil >= 2.1 Requires: python-requests Requires: python-six @@ -49,14 +46,14 @@ Requires: python-urllib3 < 1.27 Requires: python-urllib3 >= 1.25.4 BuildArch: noarch %if 0%{?suse_version} <= 1315 -# We need the ssl module, which is delivers by python and not python-base +# We need the ssl module, which is provided by python and not python-base BuildRequires: python %endif # SECTION Testing requirements BuildRequires: %{python_module mock >= 1.3.0} +BuildRequires: %{python_module nose} BuildRequires: %{python_module pluggy >= 0.7} BuildRequires: %{python_module py >= 1.5.0} -BuildRequires: %{python_module pytest >= 4.6} BuildRequires: %{python_module requests} BuildRequires: %{python_module six} # /SECTION @@ -67,7 +64,6 @@ A low-level interface to a growing number of Amazon Web Services. %prep %setup -q -n botocore-%{version} -%autopatch -p1 # remove bundled cacert.pem rm botocore/cacert.pem @@ -86,7 +82,7 @@ sed -i 's/botocore\.vendored\.//' botocore/*.py tests/functional/*.py tests/inte %python_expand %fdupes %{buildroot}%{$python_sitelib} %check -%pytest tests/unit +%python_expand nosetests-%{$python_bin_suffix} -v tests/unit %files %{python_files} %doc README.rst diff --git a/remove_nose.patch b/remove_nose.patch deleted file mode 100644 index 4cdeb1a..0000000 --- a/remove_nose.patch +++ /dev/null @@ -1,6409 +0,0 @@ -diff -Nru botocore-1.18.15.orig/requirements.txt botocore-1.18.15/requirements.txt ---- botocore-1.18.15.orig/requirements.txt 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/requirements.txt 2020-10-09 10:18:32.608259889 +0200 -@@ -1,6 +1,7 @@ - tox>=2.5.0,<3.0.0 --nose==1.3.7 -+pytest>=4.6 -+pluggy>=0.7 -+py>=1.5.0 -+pytest-cov - mock==1.3.0 - wheel==0.24.0 --behave==1.2.5 --jsonschema==2.5.1 -diff -Nru botocore-1.18.15.orig/setup.cfg botocore-1.18.15/setup.cfg ---- botocore-1.18.15.orig/setup.cfg 2020-10-08 20:10:09.000000000 +0200 -+++ botocore-1.18.15/setup.cfg 2020-10-09 10:13:49.504471764 +0200 -@@ -12,3 +12,5 @@ - tag_build = - tag_date = 0 - -+[tool:pytest] -+markers = slow: marks tests as slow -\ No newline at end of file -diff -Nru botocore-1.18.15.orig/setup.cfg.orig botocore-1.18.15/setup.cfg.orig ---- botocore-1.18.15.orig/setup.cfg.orig 1970-01-01 01:00:00.000000000 +0100 -+++ botocore-1.18.15/setup.cfg.orig 2020-10-08 20:10:09.000000000 +0200 -@@ -0,0 +1,14 @@ -+[bdist_wheel] -+universal = 1 -+ -+[metadata] -+requires-dist = -+ python-dateutil>=2.1,<3.0.0 -+ jmespath>=0.7.1,<1.0.0 -+ urllib3>=1.20,<1.25.8; python_version=='3.4' -+ urllib3>=1.20,<1.26; python_version!='3.4' -+ -+[egg_info] -+tag_build = -+tag_date = 0 -+ -diff -Nru botocore-1.18.15.orig/tests/acceptance/features/steps/base.py botocore-1.18.15/tests/acceptance/features/steps/base.py ---- botocore-1.18.15.orig/tests/acceptance/features/steps/base.py 2020-10-08 20:05:10.000000000 +0200 -+++ 
botocore-1.18.15/tests/acceptance/features/steps/base.py 2020-10-09 10:13:49.504471764 +0200 -@@ -4,7 +4,6 @@ - from botocore.exceptions import ClientError - - from behave import when, then --from nose.tools import assert_equal - - - def _params_from_table(table): -@@ -72,7 +71,7 @@ - - @then(u'I expect the response error code to be "{}"') - def then_expected_error(context, code): -- assert_equal(context.error_response.response['Error']['Code'], code) -+ assert context.error_response.response['Error']['Code'] == code - - - @then(u'the value at "{}" should be a list') -diff -Nru botocore-1.18.15.orig/tests/functional/csm/test_monitoring.py botocore-1.18.15/tests/functional/csm/test_monitoring.py ---- botocore-1.18.15.orig/tests/functional/csm/test_monitoring.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/csm/test_monitoring.py 2020-10-09 10:13:49.504471764 +0200 -@@ -18,8 +18,7 @@ - import socket - import threading - --import mock --from nose.tools import assert_equal -+from tests import mock - - from tests import temporary_file - from tests import ClientHTTPStubber -@@ -50,7 +49,7 @@ - def test_client_monitoring(): - test_cases = _load_test_cases() - for case in test_cases: -- yield _run_test_case, case -+ _run_test_case(case) - - - def _load_test_cases(): -@@ -121,8 +120,7 @@ - case['configuration'], listener.port) as session: - for api_call in case['apiCalls']: - _make_api_call(session, api_call) -- assert_equal( -- listener.received_events, case['expectedMonitoringEvents']) -+ assert listener.received_events == case['expectedMonitoringEvents'] - - - def _make_api_call(session, api_call): -diff -Nru botocore-1.18.15.orig/tests/functional/docs/test_shared_example_config.py botocore-1.18.15/tests/functional/docs/test_shared_example_config.py ---- botocore-1.18.15.orig/tests/functional/docs/test_shared_example_config.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/docs/test_shared_example_config.py 2020-10-09 10:13:49.544472317 +0200 -@@ -27,7 +27,7 @@ - examples = example_config.get("examples", {}) - for operation, operation_examples in examples.items(): - for example in operation_examples: -- yield _lint_single_example, operation, example, service_model -+ _lint_single_example(operation, example, service_model) - - - def _lint_single_example(operation_name, example_config, service_model): -diff -Nru botocore-1.18.15.orig/tests/functional/test_alias.py botocore-1.18.15/tests/functional/test_alias.py ---- botocore-1.18.15.orig/tests/functional/test_alias.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_alias.py 2020-10-09 10:13:49.544472317 +0200 -@@ -49,13 +49,13 @@ - def test_can_use_alias(): - session = botocore.session.get_session() - for case in ALIAS_CASES: -- yield _can_use_parameter_in_client_call, session, case -+ _can_use_parameter_in_client_call(session, case) - - - def test_can_use_original_name(): - session = botocore.session.get_session() - for case in ALIAS_CASES: -- yield _can_use_parameter_in_client_call, session, case, False -+ _can_use_parameter_in_client_call(session, case, False) - - - def _can_use_parameter_in_client_call(session, case, use_alias=True): -diff -Nru botocore-1.18.15.orig/tests/functional/test_apigateway.py botocore-1.18.15/tests/functional/test_apigateway.py ---- botocore-1.18.15.orig/tests/functional/test_apigateway.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_apigateway.py 2020-10-09 10:13:49.548472372 +0200 -@@ -10,7 
+10,7 @@ - # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF - # ANY KIND, either express or implied. See the License for the specific - # language governing permissions and limitations under the License. --import mock -+from tests import mock - - from tests import BaseSessionTest, ClientHTTPStubber - -diff -Nru botocore-1.18.15.orig/tests/functional/test_client_class_names.py botocore-1.18.15/tests/functional/test_client_class_names.py ---- botocore-1.18.15.orig/tests/functional/test_client_class_names.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_client_class_names.py 2020-10-09 10:13:49.504471764 +0200 -@@ -10,11 +10,9 @@ - # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF - # ANY KIND, either express or implied. See the License for the specific - # language governing permissions and limitations under the License. --from nose.tools import assert_equal -- -+from tests import unittest - import botocore.session - -- - REGION = 'us-east-1' - - SERVICE_TO_CLASS_NAME = { -@@ -69,13 +67,10 @@ - } - - --def test_client_has_correct_class_name(): -- session = botocore.session.get_session() -- for service_name in SERVICE_TO_CLASS_NAME: -- client = session.create_client(service_name, REGION) -- yield (_assert_class_name_matches_ref_class_name, client, -- SERVICE_TO_CLASS_NAME[service_name]) -- -- --def _assert_class_name_matches_ref_class_name(client, ref_class_name): -- assert_equal(client.__class__.__name__, ref_class_name) -+class TestClientClassNames(unittest.TestCase): -+ def test_client_has_correct_class_name(self): -+ session = botocore.session.get_session() -+ for service_name in SERVICE_TO_CLASS_NAME: -+ client = session.create_client(service_name, REGION) -+ self.assertEqual(client.__class__.__name__, -+ SERVICE_TO_CLASS_NAME[service_name]) -diff -Nru botocore-1.18.15.orig/tests/functional/test_cloudsearchdomain.py botocore-1.18.15/tests/functional/test_cloudsearchdomain.py ---- botocore-1.18.15.orig/tests/functional/test_cloudsearchdomain.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_cloudsearchdomain.py 2020-10-09 10:13:49.548472372 +0200 -@@ -10,7 +10,7 @@ - # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF - # ANY KIND, either express or implied. See the License for the specific - # language governing permissions and limitations under the License. --import mock -+from tests import mock - - from tests import BaseSessionTest, ClientHTTPStubber - -diff -Nru botocore-1.18.15.orig/tests/functional/test_cognito_idp.py botocore-1.18.15/tests/functional/test_cognito_idp.py ---- botocore-1.18.15.orig/tests/functional/test_cognito_idp.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_cognito_idp.py 2020-10-09 10:13:49.504471764 +0200 -@@ -10,9 +10,7 @@ - # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF - # ANY KIND, either express or implied. See the License for the specific - # language governing permissions and limitations under the License. 
--import mock -- --from nose.tools import assert_false -+from tests import mock - - from tests import create_session, ClientHTTPStubber - -@@ -95,8 +93,7 @@ - client = session.create_client('cognito-idp', 'us-west-2') - - for operation, params in operation_params.items(): -- test_case = UnsignedOperationTestCase(client, operation, params) -- yield test_case.run -+ UnsignedOperationTestCase(client, operation, params).run() - - - class UnsignedOperationTestCase(object): -@@ -114,7 +111,5 @@ - operation(**self._parameters) - request = self._http_stubber.requests[0] - -- assert_false( -- 'authorization' in request.headers, -+ assert 'authorization' not in request.headers, \ - 'authorization header found in unsigned operation' -- ) -diff -Nru botocore-1.18.15.orig/tests/functional/test_credentials.py botocore-1.18.15/tests/functional/test_credentials.py ---- botocore-1.18.15.orig/tests/functional/test_credentials.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_credentials.py 2020-10-09 10:13:49.528472096 +0200 -@@ -15,7 +15,7 @@ - import os - import math - import time --import mock -+from tests import mock - import tempfile - import shutil - from datetime import datetime, timedelta -@@ -41,7 +41,7 @@ - from botocore.exceptions import InvalidConfigError, InfiniteLoopConfigError - from botocore.stub import Stubber - from botocore.utils import datetime2timestamp -- -+from botocore.compat import six - - class TestCredentialRefreshRaces(unittest.TestCase): - def assert_consistent_credentials_seen(self, creds, func): -@@ -826,7 +826,7 @@ - # Finally `(?s)` at the beginning makes dots match newlines so - # we can handle a multi-line string. - reg = r"(?s)^((?!b').)*$" -- with self.assertRaisesRegexp(CredentialRetrievalError, reg): -+ with six.assertRaisesRegex(self, CredentialRetrievalError, reg): - session.get_credentials() - - -diff -Nru botocore-1.18.15.orig/tests/functional/test_docdb.py botocore-1.18.15/tests/functional/test_docdb.py ---- botocore-1.18.15.orig/tests/functional/test_docdb.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_docdb.py 2020-10-09 10:13:49.548472372 +0200 -@@ -10,7 +10,7 @@ - # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF - # ANY KIND, either express or implied. See the License for the specific - # language governing permissions and limitations under the License. --import mock -+from tests import mock - from contextlib import contextmanager - - import botocore.session -diff -Nru botocore-1.18.15.orig/tests/functional/test_ec2.py botocore-1.18.15/tests/functional/test_ec2.py ---- botocore-1.18.15.orig/tests/functional/test_ec2.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_ec2.py 2020-10-09 10:13:49.548472372 +0200 -@@ -11,7 +11,7 @@ - # ANY KIND, either express or implied. See the License for the specific - # language governing permissions and limitations under the License. 
- import datetime --import mock -+from tests import mock - - from tests import unittest, ClientHTTPStubber, BaseSessionTest - from botocore.compat import parse_qs, urlparse -diff -Nru botocore-1.18.15.orig/tests/functional/test_endpoints.py botocore-1.18.15/tests/functional/test_endpoints.py ---- botocore-1.18.15.orig/tests/functional/test_endpoints.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_endpoints.py 2020-10-09 10:13:49.504471764 +0200 -@@ -10,7 +10,6 @@ - # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF - # ANY KIND, either express or implied. See the License for the specific - # language governing permissions and limitations under the License. --from nose.tools import assert_equal - from botocore.session import get_session - - -@@ -134,9 +133,7 @@ - # prefix. - endpoint_prefix = ENDPOINT_PREFIX_OVERRIDE.get(endpoint_prefix, - endpoint_prefix) -- yield (_assert_known_endpoint_prefix, -- endpoint_prefix, -- known_endpoint_prefixes) -+ _assert_known_endpoint_prefix(endpoint_prefix, known_endpoint_prefixes) - - - def _assert_known_endpoint_prefix(endpoint_prefix, known_endpoint_prefixes): -@@ -156,7 +153,7 @@ - services = loader.list_available_services('service-2') - - for service in services: -- yield _assert_service_name_matches_endpoint_prefix, session, service -+ _assert_service_name_matches_endpoint_prefix(session, service) - - - def _assert_service_name_matches_endpoint_prefix(session, service_name): -@@ -166,8 +163,6 @@ - # Handle known exceptions where we have renamed the service directory - # for one reason or another. - actual_service_name = SERVICE_RENAMES.get(service_name, service_name) -- assert_equal( -- computed_name, actual_service_name, -- "Actual service name `%s` does not match expected service name " -- "we computed: `%s`" % ( -- actual_service_name, computed_name)) -+ assert computed_name == actual_service_name, \ -+ ("Actual service name `%s` does not match expected service name " + -+ "we computed: `%s`") % (actual_service_name, computed_name) -diff -Nru botocore-1.18.15.orig/tests/functional/test_event_alias.py botocore-1.18.15/tests/functional/test_event_alias.py ---- botocore-1.18.15.orig/tests/functional/test_event_alias.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_event_alias.py 2020-10-09 10:13:49.544472317 +0200 -@@ -584,8 +584,8 @@ - service_id = SERVICES[client_name]['service_id'] - if endpoint_prefix is not None: - yield _assert_handler_called, client_name, endpoint_prefix -- yield _assert_handler_called, client_name, service_id -- yield _assert_handler_called, client_name, client_name -+ _assert_handler_called(client_name, service_id) -+ _assert_handler_called(client_name, client_name) - - - def _assert_handler_called(client_name, event_part): -diff -Nru botocore-1.18.15.orig/tests/functional/test_h2_required.py botocore-1.18.15/tests/functional/test_h2_required.py ---- botocore-1.18.15.orig/tests/functional/test_h2_required.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_h2_required.py 2020-10-09 10:13:49.544472317 +0200 -@@ -29,12 +29,12 @@ - service_model = session.get_service_model(service) - h2_config = service_model.metadata.get('protocolSettings', {}).get('h2') - if h2_config == 'required': -- yield _assert_h2_service_is_known, service -+ _assert_h2_service_is_known(service) - elif h2_config == 'eventstream': - for operation in service_model.operation_names: - operation_model = 
service_model.operation_model(operation) - if operation_model.has_event_stream_output: -- yield _assert_h2_operation_is_known, service, operation -+ _assert_h2_operation_is_known(service, operation) - - - def _assert_h2_service_is_known(service): -diff -Nru botocore-1.18.15.orig/tests/functional/test_history.py botocore-1.18.15/tests/functional/test_history.py ---- botocore-1.18.15.orig/tests/functional/test_history.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_history.py 2020-10-09 10:13:49.528472096 +0200 -@@ -1,6 +1,6 @@ - from contextlib import contextmanager - --import mock -+from tests import mock - - from tests import BaseSessionTest, ClientHTTPStubber - from botocore.history import BaseHistoryHandler -@@ -87,10 +87,10 @@ - self.assertIsNone(body) - - streaming = payload['streaming'] -- self.assertEquals(streaming, False) -+ self.assertEqual(streaming, False) - - url = payload['url'] -- self.assertEquals(url, 'https://s3.us-west-2.amazonaws.com/') -+ self.assertEqual(url, 'https://s3.us-west-2.amazonaws.com/') - - self.assertEqual(source, 'BOTOCORE') - -diff -Nru botocore-1.18.15.orig/tests/functional/test_lex.py botocore-1.18.15/tests/functional/test_lex.py ---- botocore-1.18.15.orig/tests/functional/test_lex.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_lex.py 2020-10-09 10:13:49.548472372 +0200 -@@ -10,7 +10,7 @@ - # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF - # ANY KIND, either express or implied. See the License for the specific - # language governing permissions and limitations under the License. --import mock -+from tests import mock - from datetime import datetime - - from tests import BaseSessionTest, ClientHTTPStubber -diff -Nru botocore-1.18.15.orig/tests/functional/test_machinelearning.py botocore-1.18.15/tests/functional/test_machinelearning.py ---- botocore-1.18.15.orig/tests/functional/test_machinelearning.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_machinelearning.py 2020-10-09 10:13:49.548472372 +0200 -@@ -10,7 +10,7 @@ - # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF - # ANY KIND, either express or implied. See the License for the specific - # language governing permissions and limitations under the License. --import mock -+from tests import mock - - from tests import BaseSessionTest, ClientHTTPStubber - -diff -Nru botocore-1.18.15.orig/tests/functional/test_model_backcompat.py botocore-1.18.15/tests/functional/test_model_backcompat.py ---- botocore-1.18.15.orig/tests/functional/test_model_backcompat.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_model_backcompat.py 2020-10-09 10:13:49.508471818 +0200 -@@ -12,7 +12,6 @@ - # language governing permissions and limitations under the License. - import os - --from nose.tools import assert_equal - from botocore.session import Session - from tests import ClientHTTPStubber - from tests.functional import TEST_MODELS_DIR -@@ -56,8 +55,7 @@ - 'Content-Type': 'application/x-amz-json-1.1'}, - body=b'{"CertificateSummaryList":[]}') - response = client.list_certificates() -- assert_equal( -- response, -+ assert response == \ - {'CertificateSummaryList': [], - 'ResponseMetadata': { - 'HTTPHeaders': { -@@ -69,8 +67,7 @@ - 'RequestId': 'abcd', - 'RetryAttempts': 0} - } -- ) - - # Also verify we can use the paginators as well. 
-- assert_equal(client.can_paginate('list_certificates'), True) -- assert_equal(client.waiter_names, ['certificate_validated']) -+ assert client.can_paginate('list_certificates') -+ assert client.waiter_names == ['certificate_validated'] -diff -Nru botocore-1.18.15.orig/tests/functional/test_model_completeness.py botocore-1.18.15/tests/functional/test_model_completeness.py ---- botocore-1.18.15.orig/tests/functional/test_model_completeness.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_model_completeness.py 2020-10-09 10:13:49.544472317 +0200 -@@ -38,5 +38,6 @@ - versions = Loader().list_api_versions(service_name, 'service-2') - if len(versions) > 1: - for type_name in ['paginators-1', 'waiters-2']: -- yield (_test_model_is_not_lost, service_name, -- type_name, versions[-2], versions[-1]) -+ _test_model_is_not_lost(service_name, -+ type_name, -+ versions[-2], versions[-1]) -diff -Nru botocore-1.18.15.orig/tests/functional/test_neptune.py botocore-1.18.15/tests/functional/test_neptune.py ---- botocore-1.18.15.orig/tests/functional/test_neptune.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_neptune.py 2020-10-09 10:13:49.548472372 +0200 -@@ -10,7 +10,7 @@ - # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF - # ANY KIND, either express or implied. See the License for the specific - # language governing permissions and limitations under the License. --import mock -+from tests import mock - from contextlib import contextmanager - - import botocore.session -diff -Nru botocore-1.18.15.orig/tests/functional/test_paginate.py botocore-1.18.15/tests/functional/test_paginate.py ---- botocore-1.18.15.orig/tests/functional/test_paginate.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_paginate.py 2020-10-09 10:13:49.508471818 +0200 -@@ -14,9 +14,7 @@ - from math import ceil - from datetime import datetime - --from nose.tools import assert_equal -- --from tests import random_chars -+from tests import random_chars, unittest - from tests import BaseSessionTest - from botocore.stub import Stubber, StubAssertionError - from botocore.paginate import TokenDecoder, TokenEncoder -@@ -79,7 +77,7 @@ - self.stubber.activate() - - def _setup_scaling_pagination(self, page_size=200, max_items=100, -- total_items=600): -+ total_items=600): - """ - Add to the stubber to test paginating describe_scaling_activities. 
- -@@ -217,22 +215,22 @@ - self.assertEqual(len(result['events']), 1) - - --def test_token_encoding(): -- cases = [ -- {'foo': 'bar'}, -- {'foo': b'bar'}, -- {'foo': {'bar': b'baz'}}, -- {'foo': ['bar', b'baz']}, -- {'foo': b'\xff'}, -- {'foo': {'bar': b'baz', 'bin': [b'bam']}}, -- ] -- -- for token_dict in cases: -- yield assert_token_encodes_and_decodes, token_dict -- -- --def assert_token_encodes_and_decodes(token_dict): -- encoded = TokenEncoder().encode(token_dict) -- assert isinstance(encoded, six.string_types) -- decoded = TokenDecoder().decode(encoded) -- assert_equal(decoded, token_dict) -+class TestTokenEncoding(unittest.TestCase): -+ def test_token_encoding(self): -+ cases = [ -+ {'foo': 'bar'}, -+ {'foo': b'bar'}, -+ {'foo': {'bar': b'baz'}}, -+ {'foo': ['bar', b'baz']}, -+ {'foo': b'\xff'}, -+ {'foo': {'bar': b'baz', 'bin': [b'bam']}}, -+ ] -+ -+ for token_dict in cases: -+ self.assert_token_encodes_and_decodes(token_dict) -+ -+ def assert_token_encodes_and_decodes(self, token_dict): -+ encoded = TokenEncoder().encode(token_dict) -+ assert isinstance(encoded, six.string_types) -+ decoded = TokenDecoder().decode(encoded) -+ self.assertEqual(decoded, token_dict) -diff -Nru botocore-1.18.15.orig/tests/functional/test_paginator_config.py botocore-1.18.15/tests/functional/test_paginator_config.py ---- botocore-1.18.15.orig/tests/functional/test_paginator_config.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_paginator_config.py 2020-10-09 10:13:49.544472317 +0200 -@@ -140,12 +140,7 @@ - 'paginators-1', - service_model.api_version) - for op_name, single_config in page_config['pagination'].items(): -- yield ( -- _lint_single_paginator, -- op_name, -- single_config, -- service_model -- ) -+ _lint_single_paginator(op_name, single_config, service_model) - - - def _lint_single_paginator(operation_name, page_config, -diff -Nru botocore-1.18.15.orig/tests/functional/test_public_apis.py botocore-1.18.15/tests/functional/test_public_apis.py ---- botocore-1.18.15.orig/tests/functional/test_public_apis.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_public_apis.py 2020-10-09 10:13:49.544472317 +0200 -@@ -12,7 +12,7 @@ - # language governing permissions and limitations under the License. - from collections import defaultdict - --import mock -+from tests import mock - - from tests import ClientHTTPStubber - from botocore.session import Session -@@ -73,4 +73,4 @@ - for operation_name in PUBLIC_API_TESTS[service_name]: - kwargs = PUBLIC_API_TESTS[service_name][operation_name] - method = getattr(client, xform_name(operation_name)) -- yield _test_public_apis_will_not_be_signed, client, method, kwargs -+ _test_public_apis_will_not_be_signed(client, method, kwargs) -diff -Nru botocore-1.18.15.orig/tests/functional/test_rds.py botocore-1.18.15/tests/functional/test_rds.py ---- botocore-1.18.15.orig/tests/functional/test_rds.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_rds.py 2020-10-09 10:13:49.552472426 +0200 -@@ -10,7 +10,7 @@ - # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF - # ANY KIND, either express or implied. See the License for the specific - # language governing permissions and limitations under the License. 
--import mock -+from tests import mock - from contextlib import contextmanager - - import botocore.session -diff -Nru botocore-1.18.15.orig/tests/functional/test_regions.py botocore-1.18.15/tests/functional/test_regions.py ---- botocore-1.18.15.orig/tests/functional/test_regions.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_regions.py 2020-10-09 10:13:49.508471818 +0200 -@@ -10,10 +10,9 @@ - # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF - # ANY KIND, either express or implied. See the License for the specific - # language governing permissions and limitations under the License. --from tests import create_session -+from tests import create_session, unittest - --import mock --from nose.tools import assert_equal, assert_raises -+from tests import mock - - from botocore.client import ClientEndpointBridge - from botocore.exceptions import NoRegionError -@@ -448,64 +447,62 @@ - return session - - --def test_known_endpoints(): -- # Verify the actual values from the partition files. While -- # TestEndpointHeuristics verified the generic functionality given any -- # endpoints file, this test actually verifies the partition data against a -- # fixed list of known endpoints. This list doesn't need to be kept 100% up -- # to date, but serves as a basis for regressions as the endpoint data -- # logic evolves. -- resolver = _get_patched_session()._get_internal_component( -- 'endpoint_resolver') -- for region_name, service_dict in KNOWN_REGIONS.items(): -- for service_name, endpoint in service_dict.items(): -- yield (_test_single_service_region, service_name, -- region_name, endpoint, resolver) -- -- --def _test_single_service_region(service_name, region_name, -- expected_endpoint, resolver): -- bridge = ClientEndpointBridge(resolver, None, None) -- result = bridge.resolve(service_name, region_name) -- expected = 'https://%s' % expected_endpoint -- assert_equal(result['endpoint_url'], expected) -- -- --# Ensure that all S3 regions use s3v4 instead of v4 --def test_all_s3_endpoints_have_s3v4(): -- session = _get_patched_session() -- partitions = session.get_available_partitions() -- resolver = session._get_internal_component('endpoint_resolver') -- for partition_name in partitions: -- for endpoint in session.get_available_regions('s3', partition_name): -- resolved = resolver.construct_endpoint('s3', endpoint) -- assert 's3v4' in resolved['signatureVersions'] -- assert 'v4' not in resolved['signatureVersions'] -- -- --def test_known_endpoints(): -- resolver = _get_patched_session()._get_internal_component( -- 'endpoint_resolver') -- for service_name, endpoint in KNOWN_AWS_PARTITION_WIDE.items(): -- yield (_test_single_service_partition_endpoint, service_name, -- endpoint, resolver) -- -- --def _test_single_service_partition_endpoint(service_name, expected_endpoint, -- resolver): -- bridge = ClientEndpointBridge(resolver) -- result = bridge.resolve(service_name) -- assert_equal(result['endpoint_url'], expected_endpoint) -- -- --def test_non_partition_endpoint_requires_region(): -- resolver = _get_patched_session()._get_internal_component( -- 'endpoint_resolver') -- assert_raises(NoRegionError, resolver.construct_endpoint, 'ec2') -+class TestRegions(unittest.TestCase): -+ def test_known_endpoints(self): -+ # Verify the actual values from the partition files. While -+ # TestEndpointHeuristics verified the generic functionality given -+ # any endpoints file, this test actually verifies the partition -+ # data against a fixed list of known endpoints. 
This list doesn't -+ # need to be kept 100% up to date, but serves as a basis for -+ # regressions as the endpoint data logic evolves. -+ resolver = _get_patched_session()._get_internal_component( -+ 'endpoint_resolver') -+ for region_name, service_dict in KNOWN_REGIONS.items(): -+ for service_name, endpoint in service_dict.items(): -+ self._test_single_service_region(service_name, -+ region_name, endpoint, -+ resolver) -+ -+ def _test_single_service_region(self, service_name, region_name, -+ expected_endpoint, resolver): -+ bridge = ClientEndpointBridge(resolver, None, None) -+ result = bridge.resolve(service_name, region_name) -+ expected = 'https://%s' % expected_endpoint -+ self.assertEqual(result['endpoint_url'], expected) -+ -+ # Ensure that all S3 regions use s3v4 instead of v4 -+ def test_all_s3_endpoints_have_s3v4(self): -+ session = _get_patched_session() -+ partitions = session.get_available_partitions() -+ resolver = session._get_internal_component('endpoint_resolver') -+ for partition_name in partitions: -+ for endpoint in session.get_available_regions('s3', partition_name): -+ resolved = resolver.construct_endpoint('s3', endpoint) -+ assert 's3v4' in resolved['signatureVersions'] -+ assert 'v4' not in resolved['signatureVersions'] -+ -+ def _test_single_service_partition_endpoint(self, service_name, -+ expected_endpoint, -+ resolver): -+ bridge = ClientEndpointBridge(resolver) -+ result = bridge.resolve(service_name) -+ assert result['endpoint_url'] == expected_endpoint -+ -+ def test_known_endpoints_other(self): -+ resolver = _get_patched_session()._get_internal_component( -+ 'endpoint_resolver') -+ for service_name, endpoint in KNOWN_AWS_PARTITION_WIDE.items(): -+ self._test_single_service_partition_endpoint(service_name, -+ endpoint, resolver) -+ -+ def test_non_partition_endpoint_requires_region(self): -+ resolver = _get_patched_session()._get_internal_component( -+ 'endpoint_resolver') -+ with self.assertRaises(NoRegionError): -+ resolver.construct_endpoint('ec2') - - - class TestEndpointResolution(BaseSessionTest): -- - def setUp(self): - super(TestEndpointResolution, self).setUp() - self.xml_response = ( -@@ -526,7 +523,7 @@ - client, stubber = self.create_stubbed_client('s3', 'us-east-2') - stubber.add_response() - client.list_buckets() -- self.assertEquals( -+ self.assertEqual( - stubber.requests[0].url, - 'https://s3.us-east-2.amazonaws.com/' - ) -@@ -537,7 +534,7 @@ - client.list_buckets() - # Validate we don't fall back to partition endpoint for - # regionalized services. -- self.assertEquals( -+ self.assertEqual( - stubber.requests[0].url, - 'https://s3.not-real.amazonaws.com/' - ) -diff -Nru botocore-1.18.15.orig/tests/functional/test_response_shadowing.py botocore-1.18.15/tests/functional/test_response_shadowing.py ---- botocore-1.18.15.orig/tests/functional/test_response_shadowing.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_response_shadowing.py 2020-10-09 10:13:49.508471818 +0200 -@@ -11,7 +11,6 @@ - # ANY KIND, either express or implied. See the License for the specific - # language governing permissions and limitations under the License. 
- from botocore.session import Session --from nose.tools import assert_false - - - def _all_services(): -@@ -33,17 +32,17 @@ - msg = ( - 'Found shape "%s" that shadows the botocore response key "%s"' - ) -- assert_false(key in shape.members, msg % (shape.name, key)) -+ assert key not in shape.members, msg % (shape.name, key) - - - def test_response_metadata_is_not_shadowed(): - for operation_model in _all_operations(): - shape = operation_model.output_shape -- yield _assert_not_shadowed, 'ResponseMetadata', shape -+ _assert_not_shadowed('ResponseMetadata', shape) - - - def test_exceptions_do_not_shadow(): - for service_model in _all_services(): - for shape in service_model.error_shapes: -- yield _assert_not_shadowed, 'ResponseMetadata', shape -- yield _assert_not_shadowed, 'Error', shape -+ _assert_not_shadowed('ResponseMetadata', shape) -+ _assert_not_shadowed('Error', shape) -diff -Nru botocore-1.18.15.orig/tests/functional/test_retry.py botocore-1.18.15/tests/functional/test_retry.py ---- botocore-1.18.15.orig/tests/functional/test_retry.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_retry.py 2020-10-09 10:13:49.528472096 +0200 -@@ -16,6 +16,7 @@ - - from botocore.exceptions import ClientError - from botocore.config import Config -+from botocore.compat import six - - - class BaseRetryTest(BaseSessionTest): -@@ -38,7 +39,7 @@ - with ClientHTTPStubber(client) as http_stubber: - for _ in range(num_responses): - http_stubber.add_response(status=status, body=body) -- with self.assertRaisesRegexp( -+ with six.assertRaisesRegex(self, - ClientError, 'reached max retries: %s' % num_retries): - yield - self.assertEqual(len(http_stubber.requests), num_responses) -diff -Nru botocore-1.18.15.orig/tests/functional/test_s3.py botocore-1.18.15/tests/functional/test_s3.py ---- botocore-1.18.15.orig/tests/functional/test_s3.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_s3.py 2020-10-09 11:13:26.998182410 +0200 -@@ -14,7 +14,6 @@ - - from tests import temporary_file - from tests import unittest, mock, BaseSessionTest, create_session, ClientHTTPStubber --from nose.tools import assert_equal - - import botocore.session - from botocore.config import Config -@@ -447,8 +446,8 @@ - ) - - # Validate we retried and got second body -- self.assertEquals(len(self.http_stubber.requests), 2) -- self.assertEquals(response['ResponseMetadata']['HTTPStatusCode'], 200) -+ self.assertEqual(len(self.http_stubber.requests), 2) -+ self.assertEqual(response['ResponseMetadata']['HTTPStatusCode'], 200) - self.assertTrue('CopyObjectResult' in response) - - def test_s3_copy_object_with_incomplete_response(self): -@@ -1284,48 +1283,49 @@ - 'get_object', {'Bucket': 'mybucket', 'Key': 'mykey'}) - self.assert_is_v2_presigned_url(url) - -+ - def test_checksums_included_in_expected_operations(): - """Validate expected calls include Content-MD5 header""" - - t = S3ChecksumCases(_verify_checksum_in_headers) -- yield t.case('put_bucket_tagging', -- {"Bucket": "foo", "Tagging":{"TagSet":[]}}) -- yield t.case('put_bucket_lifecycle', -- {"Bucket": "foo", "LifecycleConfiguration":{"Rules":[]}}) -- yield t.case('put_bucket_lifecycle_configuration', -- {"Bucket": "foo", "LifecycleConfiguration":{"Rules":[]}}) -- yield t.case('put_bucket_cors', -- {"Bucket": "foo", "CORSConfiguration":{"CORSRules": []}}) -- yield t.case('delete_objects', -- {"Bucket": "foo", "Delete": {"Objects": [{"Key": "bar"}]}}) -- yield t.case('put_bucket_replication', -- {"Bucket": "foo", -- 
"ReplicationConfiguration": {"Role":"", "Rules": []}}) -- yield t.case('put_bucket_acl', -- {"Bucket": "foo", "AccessControlPolicy":{}}) -- yield t.case('put_bucket_logging', -- {"Bucket": "foo", -- "BucketLoggingStatus":{}}) -- yield t.case('put_bucket_notification', -- {"Bucket": "foo", "NotificationConfiguration":{}}) -- yield t.case('put_bucket_policy', -- {"Bucket": "foo", "Policy": ""}) -- yield t.case('put_bucket_request_payment', -- {"Bucket": "foo", "RequestPaymentConfiguration":{"Payer": ""}}) -- yield t.case('put_bucket_versioning', -- {"Bucket": "foo", "VersioningConfiguration":{}}) -- yield t.case('put_bucket_website', -- {"Bucket": "foo", -- "WebsiteConfiguration":{}}) -- yield t.case('put_object_acl', -- {"Bucket": "foo", "Key": "bar", "AccessControlPolicy":{}}) -- yield t.case('put_object_legal_hold', -- {"Bucket": "foo", "Key": "bar", "LegalHold":{"Status": "ON"}}) -- yield t.case('put_object_retention', -- {"Bucket": "foo", "Key": "bar", -- "Retention":{"RetainUntilDate":"2020-11-05"}}) -- yield t.case('put_object_lock_configuration', -- {"Bucket": "foo", "ObjectLockConfiguration":{}}) -+ t.case('put_bucket_tagging', -+ {"Bucket": "foo", "Tagging": {"TagSet": []}}) -+ t.case('put_bucket_lifecycle', -+ {"Bucket": "foo", "LifecycleConfiguration": {"Rules": []}}) -+ t.case('put_bucket_lifecycle_configuration', -+ {"Bucket": "foo", "LifecycleConfiguration": {"Rules": []}}) -+ t.case('put_bucket_cors', -+ {"Bucket": "foo", "CORSConfiguration": {"CORSRules": []}}) -+ t.case('delete_objects', -+ {"Bucket": "foo", "Delete": {"Objects": [{"Key": "bar"}]}}) -+ t.case('put_bucket_replication', -+ {"Bucket": "foo", -+ "ReplicationConfiguration": {"Role": "", "Rules": []}}) -+ t.case('put_bucket_acl', -+ {"Bucket": "foo", "AccessControlPolicy": {}}) -+ t.case('put_bucket_logging', -+ {"Bucket": "foo", -+ "BucketLoggingStatus": {}}) -+ t.case('put_bucket_notification', -+ {"Bucket": "foo", "NotificationConfiguration": {}}) -+ t.case('put_bucket_policy', -+ {"Bucket": "foo", "Policy": ""}) -+ t.case('put_bucket_request_payment', -+ {"Bucket": "foo", "RequestPaymentConfiguration": {"Payer": ""}}) -+ t.case('put_bucket_versioning', -+ {"Bucket": "foo", "VersioningConfiguration": {}}) -+ t.case('put_bucket_website', -+ {"Bucket": "foo", -+ "WebsiteConfiguration": {}}) -+ t.case('put_object_acl', -+ {"Bucket": "foo", "Key": "bar", "AccessControlPolicy": {}}) -+ t.case('put_object_legal_hold', -+ {"Bucket": "foo", "Key": "bar", "LegalHold": {"Status": "ON"}}) -+ t.case('put_object_retention', -+ {"Bucket": "foo", "Key": "bar", -+ "Retention": {"RetainUntilDate": "2020-11-05"}}) -+ t.case('put_object_lock_configuration', -+ {"Bucket": "foo", "ObjectLockConfiguration": {}}) - - - def _verify_checksum_in_headers(operation, operation_kwargs): -@@ -1350,36 +1350,36 @@ - t = S3AddressingCases(_verify_expected_endpoint_url) - - # The default behavior for sigv2. 
DNS compatible buckets -- yield t.case(region='us-west-2', bucket='bucket', key='key', -- signature_version='s3', -- expected_url='https://bucket.s3.us-west-2.amazonaws.com/key') -- yield t.case(region='us-east-1', bucket='bucket', key='key', -- signature_version='s3', -- expected_url='https://bucket.s3.amazonaws.com/key') -- yield t.case(region='us-west-1', bucket='bucket', key='key', -- signature_version='s3', -- expected_url='https://bucket.s3.us-west-1.amazonaws.com/key') -- yield t.case(region='us-west-1', bucket='bucket', key='key', -- signature_version='s3', is_secure=False, -- expected_url='http://bucket.s3.us-west-1.amazonaws.com/key') -+ t.case(region='us-west-2', bucket='bucket', key='key', -+ signature_version='s3', -+ expected_url='https://bucket.s3.us-west-2.amazonaws.com/key') -+ t.case(region='us-east-1', bucket='bucket', key='key', -+ signature_version='s3', -+ expected_url='https://bucket.s3.amazonaws.com/key') -+ t.case(region='us-west-1', bucket='bucket', key='key', -+ signature_version='s3', -+ expected_url='https://bucket.s3.us-west-1.amazonaws.com/key') -+ t.case(region='us-west-1', bucket='bucket', key='key', -+ signature_version='s3', is_secure=False, -+ expected_url='http://bucket.s3.us-west-1.amazonaws.com/key') - - # Virtual host addressing is independent of signature version. -- yield t.case(region='us-west-2', bucket='bucket', key='key', -- signature_version='s3v4', -- expected_url=( -- 'https://bucket.s3.us-west-2.amazonaws.com/key')) -- yield t.case(region='us-east-1', bucket='bucket', key='key', -- signature_version='s3v4', -- expected_url='https://bucket.s3.amazonaws.com/key') -- yield t.case(region='us-west-1', bucket='bucket', key='key', -- signature_version='s3v4', -- expected_url=( -- 'https://bucket.s3.us-west-1.amazonaws.com/key')) -- yield t.case(region='us-west-1', bucket='bucket', key='key', -- signature_version='s3v4', is_secure=False, -- expected_url=( -- 'http://bucket.s3.us-west-1.amazonaws.com/key')) -- yield t.case( -+ t.case(region='us-west-2', bucket='bucket', key='key', -+ signature_version='s3v4', -+ expected_url=( -+ 'https://bucket.s3.us-west-2.amazonaws.com/key')) -+ t.case(region='us-east-1', bucket='bucket', key='key', -+ signature_version='s3v4', -+ expected_url='https://bucket.s3.amazonaws.com/key') -+ t.case(region='us-west-1', bucket='bucket', key='key', -+ signature_version='s3v4', -+ expected_url=( -+ 'https://bucket.s3.us-west-1.amazonaws.com/key')) -+ t.case(region='us-west-1', bucket='bucket', key='key', -+ signature_version='s3v4', is_secure=False, -+ expected_url=( -+ 'http://bucket.s3.us-west-1.amazonaws.com/key')) -+ t.case( - region='us-west-1', bucket='bucket-with-num-1', key='key', - signature_version='s3v4', is_secure=False, - expected_url='http://bucket-with-num-1.s3.us-west-1.amazonaws.com/key') -@@ -1387,121 +1387,121 @@ - # Regions outside of the 'aws' partition. - # These should still default to virtual hosted addressing - # unless explicitly configured otherwise. -- yield t.case(region='cn-north-1', bucket='bucket', key='key', -+ t.case(region='cn-north-1', bucket='bucket', key='key', - signature_version='s3v4', - expected_url=( - 'https://bucket.s3.cn-north-1.amazonaws.com.cn/key')) - # This isn't actually supported because cn-north-1 is sigv4 only, - # but we'll still double check that our internal logic is correct - # when building the expected url. 
-- yield t.case(region='cn-north-1', bucket='bucket', key='key', -+ t.case(region='cn-north-1', bucket='bucket', key='key', - signature_version='s3', - expected_url=( - 'https://bucket.s3.cn-north-1.amazonaws.com.cn/key')) - # If the request is unsigned, we should have the default - # fix_s3_host behavior which is to use virtual hosting where - # possible but fall back to path style when needed. -- yield t.case(region='cn-north-1', bucket='bucket', key='key', -+ t.case(region='cn-north-1', bucket='bucket', key='key', - signature_version=UNSIGNED, - expected_url=( - 'https://bucket.s3.cn-north-1.amazonaws.com.cn/key')) -- yield t.case(region='cn-north-1', bucket='bucket.dot', key='key', -+ t.case(region='cn-north-1', bucket='bucket.dot', key='key', - signature_version=UNSIGNED, - expected_url=( - 'https://s3.cn-north-1.amazonaws.com.cn/bucket.dot/key')) - - # And of course you can explicitly specify which style to use. - virtual_hosting = {'addressing_style': 'virtual'} -- yield t.case(region='cn-north-1', bucket='bucket', key='key', -+ t.case(region='cn-north-1', bucket='bucket', key='key', - signature_version=UNSIGNED, - s3_config=virtual_hosting, - expected_url=( - 'https://bucket.s3.cn-north-1.amazonaws.com.cn/key')) - path_style = {'addressing_style': 'path'} -- yield t.case(region='cn-north-1', bucket='bucket', key='key', -+ t.case(region='cn-north-1', bucket='bucket', key='key', - signature_version=UNSIGNED, - s3_config=path_style, - expected_url=( - 'https://s3.cn-north-1.amazonaws.com.cn/bucket/key')) - - # If you don't have a DNS compatible bucket, we use path style. -- yield t.case( -+ t.case( - region='us-west-2', bucket='bucket.dot', key='key', - expected_url='https://s3.us-west-2.amazonaws.com/bucket.dot/key') -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket.dot', key='key', - expected_url='https://s3.amazonaws.com/bucket.dot/key') -- yield t.case( -+ t.case( - region='us-east-1', bucket='BucketName', key='key', - expected_url='https://s3.amazonaws.com/BucketName/key') -- yield t.case( -+ t.case( - region='us-west-1', bucket='bucket_name', key='key', - expected_url='https://s3.us-west-1.amazonaws.com/bucket_name/key') -- yield t.case( -+ t.case( - region='us-west-1', bucket='-bucket-name', key='key', - expected_url='https://s3.us-west-1.amazonaws.com/-bucket-name/key') -- yield t.case( -+ t.case( - region='us-west-1', bucket='bucket-name-', key='key', - expected_url='https://s3.us-west-1.amazonaws.com/bucket-name-/key') -- yield t.case( -+ t.case( - region='us-west-1', bucket='aa', key='key', - expected_url='https://s3.us-west-1.amazonaws.com/aa/key') -- yield t.case( -+ t.case( - region='us-west-1', bucket='a'*64, key='key', - expected_url=('https://s3.us-west-1.amazonaws.com/%s/key' % ('a' * 64)) - ) - - # Custom endpoint url should always be used. -- yield t.case( -+ t.case( - customer_provided_endpoint='https://my-custom-s3/', - bucket='foo', key='bar', - expected_url='https://my-custom-s3/foo/bar') -- yield t.case( -+ t.case( - customer_provided_endpoint='https://my-custom-s3/', - bucket='bucket.dots', key='bar', - expected_url='https://my-custom-s3/bucket.dots/bar') - # Doesn't matter what region you specify, a custom endpoint url always - # wins. -- yield t.case( -+ t.case( - customer_provided_endpoint='https://my-custom-s3/', - region='us-west-2', bucket='foo', key='bar', - expected_url='https://my-custom-s3/foo/bar') - - # Explicitly configuring "virtual" addressing_style. 
- virtual_hosting = {'addressing_style': 'virtual'} -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config=virtual_hosting, - expected_url='https://bucket.s3.amazonaws.com/key') -- yield t.case( -+ t.case( - region='us-west-2', bucket='bucket', key='key', - s3_config=virtual_hosting, - expected_url='https://bucket.s3.us-west-2.amazonaws.com/key') -- yield t.case( -+ t.case( - region='eu-central-1', bucket='bucket', key='key', - s3_config=virtual_hosting, - expected_url='https://bucket.s3.eu-central-1.amazonaws.com/key') -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config=virtual_hosting, - customer_provided_endpoint='https://foo.amazonaws.com', - expected_url='https://bucket.foo.amazonaws.com/key') -- yield t.case( -+ t.case( - region='unknown', bucket='bucket', key='key', - s3_config=virtual_hosting, - expected_url='https://bucket.s3.unknown.amazonaws.com/key') - - # Test us-gov with virtual addressing. -- yield t.case( -+ t.case( - region='us-gov-west-1', bucket='bucket', key='key', - s3_config=virtual_hosting, - expected_url='https://bucket.s3.us-gov-west-1.amazonaws.com/key') - -- yield t.case( -+ t.case( - region='us-gov-west-1', bucket='bucket', key='key', - signature_version='s3', - expected_url='https://bucket.s3.us-gov-west-1.amazonaws.com/key') -- yield t.case( -+ t.case( - region='fips-us-gov-west-1', bucket='bucket', key='key', - signature_version='s3', - expected_url='https://bucket.s3-fips.us-gov-west-1.amazonaws.com/key') -@@ -1509,67 +1509,67 @@ - - # Test path style addressing. - path_style = {'addressing_style': 'path'} -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config=path_style, - expected_url='https://s3.amazonaws.com/bucket/key') -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config=path_style, - customer_provided_endpoint='https://foo.amazonaws.com/', - expected_url='https://foo.amazonaws.com/bucket/key') -- yield t.case( -+ t.case( - region='unknown', bucket='bucket', key='key', - s3_config=path_style, - expected_url='https://s3.unknown.amazonaws.com/bucket/key') - - # S3 accelerate - use_accelerate = {'use_accelerate_endpoint': True} -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config=use_accelerate, - expected_url='https://bucket.s3-accelerate.amazonaws.com/key') -- yield t.case( -+ t.case( - # region is ignored with S3 accelerate. - region='us-west-2', bucket='bucket', key='key', - s3_config=use_accelerate, - expected_url='https://bucket.s3-accelerate.amazonaws.com/key') - # Provided endpoints still get recognized as accelerate endpoints. -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - customer_provided_endpoint='https://s3-accelerate.amazonaws.com', - expected_url='https://bucket.s3-accelerate.amazonaws.com/key') -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - customer_provided_endpoint='http://s3-accelerate.amazonaws.com', - expected_url='http://bucket.s3-accelerate.amazonaws.com/key') -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config=use_accelerate, is_secure=False, - # Note we're using http:// because is_secure=False. - expected_url='http://bucket.s3-accelerate.amazonaws.com/key') -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - # s3-accelerate must be the first part of the url. 
- customer_provided_endpoint='https://foo.s3-accelerate.amazonaws.com', - expected_url='https://foo.s3-accelerate.amazonaws.com/bucket/key') -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - # The endpoint must be an Amazon endpoint. - customer_provided_endpoint='https://s3-accelerate.notamazon.com', - expected_url='https://s3-accelerate.notamazon.com/bucket/key') -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - # Extra components must be whitelisted. - customer_provided_endpoint='https://s3-accelerate.foo.amazonaws.com', - expected_url='https://s3-accelerate.foo.amazonaws.com/bucket/key') -- yield t.case( -+ t.case( - region='unknown', bucket='bucket', key='key', - s3_config=use_accelerate, - expected_url='https://bucket.s3-accelerate.amazonaws.com/key') - # Use virtual even if path is specified for s3 accelerate because - # path style will not work with S3 accelerate. -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config={'use_accelerate_endpoint': True, - 'addressing_style': 'path'}, -@@ -1577,17 +1577,17 @@ - - # S3 dual stack endpoints. - use_dualstack = {'use_dualstack_endpoint': True} -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config=use_dualstack, signature_version='s3', - # Still default to virtual hosted when possible on sigv2. - expected_url='https://bucket.s3.dualstack.us-east-1.amazonaws.com/key') -- yield t.case( -+ t.case( - region=None, bucket='bucket', key='key', - s3_config=use_dualstack, - # Uses us-east-1 for no region set. - expected_url='https://bucket.s3.dualstack.us-east-1.amazonaws.com/key') -- yield t.case( -+ t.case( - region='aws-global', bucket='bucket', key='key', - s3_config=use_dualstack, - # Pseudo-regions should not have any special resolving logic even when -@@ -1596,32 +1596,32 @@ - # region name. - expected_url=( - 'https://bucket.s3.dualstack.aws-global.amazonaws.com/key')) -- yield t.case( -+ t.case( - region='us-west-2', bucket='bucket', key='key', - s3_config=use_dualstack, signature_version='s3', - # Still default to virtual hosted when possible on sigv2. - expected_url='https://bucket.s3.dualstack.us-west-2.amazonaws.com/key') -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config=use_dualstack, signature_version='s3v4', - expected_url='https://bucket.s3.dualstack.us-east-1.amazonaws.com/key') -- yield t.case( -+ t.case( - region='us-west-2', bucket='bucket', key='key', - s3_config=use_dualstack, signature_version='s3v4', - expected_url='https://bucket.s3.dualstack.us-west-2.amazonaws.com/key') -- yield t.case( -+ t.case( - region='unknown', bucket='bucket', key='key', - s3_config=use_dualstack, signature_version='s3v4', - expected_url='https://bucket.s3.dualstack.unknown.amazonaws.com/key') - # Non DNS compatible buckets use path style for dual stack. -- yield t.case( -+ t.case( - region='us-west-2', bucket='bucket.dot', key='key', - s3_config=use_dualstack, - # Still default to virtual hosted when possible. - expected_url=( - 'https://s3.dualstack.us-west-2.amazonaws.com/bucket.dot/key')) - # Supports is_secure (use_ssl=False in create_client()). -- yield t.case( -+ t.case( - region='us-west-2', bucket='bucket.dot', key='key', is_secure=False, - s3_config=use_dualstack, - # Still default to virtual hosted when possible. 
-@@ -1634,7 +1634,7 @@ - 'use_dualstack_endpoint': True, - 'addressing_style': 'path', - } -- yield t.case( -+ t.case( - region='us-west-2', bucket='bucket', key='key', - s3_config=force_path_style, - # Still default to virtual hosted when possible. -@@ -1645,32 +1645,32 @@ - 'use_accelerate_endpoint': True, - 'use_dualstack_endpoint': True, - } -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config=use_accelerate_dualstack, - expected_url=( - 'https://bucket.s3-accelerate.dualstack.amazonaws.com/key')) -- yield t.case( -+ t.case( - # Region is ignored with S3 accelerate. - region='us-west-2', bucket='bucket', key='key', - s3_config=use_accelerate_dualstack, - expected_url=( - 'https://bucket.s3-accelerate.dualstack.amazonaws.com/key')) - # Only s3-accelerate overrides a customer endpoint. -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config=use_dualstack, - customer_provided_endpoint='https://s3-accelerate.amazonaws.com', - expected_url=( - 'https://bucket.s3-accelerate.amazonaws.com/key')) -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - # Dualstack is whitelisted. - customer_provided_endpoint=( - 'https://s3-accelerate.dualstack.amazonaws.com'), - expected_url=( - 'https://bucket.s3-accelerate.dualstack.amazonaws.com/key')) -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - # Even whitelisted parts cannot be duplicated. - customer_provided_endpoint=( -@@ -1678,7 +1678,7 @@ - expected_url=( - 'https://s3-accelerate.dualstack.dualstack' - '.amazonaws.com/bucket/key')) -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - # More than two extra parts is not allowed. - customer_provided_endpoint=( -@@ -1687,12 +1687,12 @@ - expected_url=( - 'https://s3-accelerate.dualstack.dualstack.dualstack.amazonaws.com' - '/bucket/key')) -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - # Extra components must be whitelisted. - customer_provided_endpoint='https://s3-accelerate.foo.amazonaws.com', - expected_url='https://s3-accelerate.foo.amazonaws.com/bucket/key') -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config=use_accelerate_dualstack, is_secure=False, - # Note we're using http:// because is_secure=False. -@@ -1701,7 +1701,7 @@ - # Use virtual even if path is specified for s3 accelerate because - # path style will not work with S3 accelerate. - use_accelerate_dualstack['addressing_style'] = 'path' -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config=use_accelerate_dualstack, - expected_url=( -@@ -1711,14 +1711,14 @@ - accesspoint_arn = ( - 'arn:aws:s3:us-west-2:123456789012:accesspoint:myendpoint' - ) -- yield t.case( -+ t.case( - region='us-west-2', bucket=accesspoint_arn, key='key', - expected_url=( - 'https://myendpoint-123456789012.s3-accesspoint.' - 'us-west-2.amazonaws.com/key' - ) - ) -- yield t.case( -+ t.case( - region='us-west-2', bucket=accesspoint_arn, key='key', - s3_config={'use_arn_region': True}, - expected_url=( -@@ -1726,21 +1726,21 @@ - 'us-west-2.amazonaws.com/key' - ) - ) -- yield t.case( -+ t.case( - region='us-west-2', bucket=accesspoint_arn, key='myendpoint/key', - expected_url=( - 'https://myendpoint-123456789012.s3-accesspoint.' 
- 'us-west-2.amazonaws.com/myendpoint/key' - ) - ) -- yield t.case( -+ t.case( - region='us-west-2', bucket=accesspoint_arn, key='foo/myendpoint/key', - expected_url=( - 'https://myendpoint-123456789012.s3-accesspoint.' - 'us-west-2.amazonaws.com/foo/myendpoint/key' - ) - ) -- yield t.case( -+ t.case( - # Note: The access-point arn has us-west-2 and the client's region is - # us-east-1, for the default case the access-point arn region is used. - region='us-east-1', bucket=accesspoint_arn, key='key', -@@ -1749,7 +1749,7 @@ - 'us-west-2.amazonaws.com/key' - ) - ) -- yield t.case( -+ t.case( - region='us-east-1', bucket=accesspoint_arn, key='key', - s3_config={'use_arn_region': False}, - expected_url=( -@@ -1757,14 +1757,14 @@ - 'us-east-1.amazonaws.com/key' - ) - ) -- yield t.case( -+ t.case( - region='s3-external-1', bucket=accesspoint_arn, key='key', - expected_url=( - 'https://myendpoint-123456789012.s3-accesspoint.' - 'us-west-2.amazonaws.com/key' - ) - ) -- yield t.case( -+ t.case( - region='s3-external-1', bucket=accesspoint_arn, key='key', - s3_config={'use_arn_region': False}, - expected_url=( -@@ -1772,14 +1772,14 @@ - 's3-external-1.amazonaws.com/key' - ) - ) -- yield t.case( -+ t.case( - region='aws-global', bucket=accesspoint_arn, key='key', - expected_url=( - 'https://myendpoint-123456789012.s3-accesspoint.' - 'us-west-2.amazonaws.com/key' - ) - ) -- yield t.case( -+ t.case( - region='aws-global', bucket=accesspoint_arn, key='key', - s3_config={'use_arn_region': False}, - expected_url=( -@@ -1787,7 +1787,7 @@ - 'aws-global.amazonaws.com/key' - ) - ) -- yield t.case( -+ t.case( - region='unknown', bucket=accesspoint_arn, key='key', - s3_config={'use_arn_region': False}, - expected_url=( -@@ -1795,7 +1795,7 @@ - 'unknown.amazonaws.com/key' - ) - ) -- yield t.case( -+ t.case( - region='unknown', bucket=accesspoint_arn, key='key', - s3_config={'use_arn_region': True}, - expected_url=( -@@ -1806,21 +1806,21 @@ - accesspoint_arn_cn = ( - 'arn:aws-cn:s3:cn-north-1:123456789012:accesspoint:myendpoint' - ) -- yield t.case( -+ t.case( - region='cn-north-1', bucket=accesspoint_arn_cn, key='key', - expected_url=( - 'https://myendpoint-123456789012.s3-accesspoint.' - 'cn-north-1.amazonaws.com.cn/key' - ) - ) -- yield t.case( -+ t.case( - region='cn-northwest-1', bucket=accesspoint_arn_cn, key='key', - expected_url=( - 'https://myendpoint-123456789012.s3-accesspoint.' - 'cn-north-1.amazonaws.com.cn/key' - ) - ) -- yield t.case( -+ t.case( - region='cn-northwest-1', bucket=accesspoint_arn_cn, key='key', - s3_config={'use_arn_region': False}, - expected_url=( -@@ -1831,21 +1831,21 @@ - accesspoint_arn_gov = ( - 'arn:aws-us-gov:s3:us-gov-east-1:123456789012:accesspoint:myendpoint' - ) -- yield t.case( -+ t.case( - region='us-gov-east-1', bucket=accesspoint_arn_gov, key='key', - expected_url=( - 'https://myendpoint-123456789012.s3-accesspoint.' - 'us-gov-east-1.amazonaws.com/key' - ) - ) -- yield t.case( -+ t.case( - region='fips-us-gov-west-1', bucket=accesspoint_arn_gov, key='key', - expected_url=( - 'https://myendpoint-123456789012.s3-accesspoint.' - 'us-gov-east-1.amazonaws.com/key' - ) - ) -- yield t.case( -+ t.case( - region='fips-us-gov-west-1', bucket=accesspoint_arn_gov, key='key', - s3_config={'use_arn_region': False}, - expected_url=( -@@ -1854,7 +1854,7 @@ - ) - ) - -- yield t.case( -+ t.case( - region='us-west-2', bucket=accesspoint_arn, key='key', is_secure=False, - expected_url=( - 'http://myendpoint-123456789012.s3-accesspoint.' 
-@@ -1862,7 +1862,7 @@ - ) - ) - # Dual-stack with access-point arn -- yield t.case( -+ t.case( - # Note: The access-point arn has us-west-2 and the client's region is - # us-east-1, for the default case the access-point arn region is used. - region='us-east-1', bucket=accesspoint_arn, key='key', -@@ -1874,7 +1874,7 @@ - 'us-west-2.amazonaws.com/key' - ) - ) -- yield t.case( -+ t.case( - region='us-east-1', bucket=accesspoint_arn, key='key', - s3_config={ - 'use_dualstack_endpoint': True, -@@ -1885,7 +1885,7 @@ - 'us-east-1.amazonaws.com/key' - ) - ) -- yield t.case( -+ t.case( - region='us-gov-east-1', bucket=accesspoint_arn_gov, key='key', - s3_config={ - 'use_dualstack_endpoint': True, -@@ -1898,7 +1898,7 @@ - - # None of the various s3 settings related to paths should affect what - # endpoint to use when an access-point is provided. -- yield t.case( -+ t.case( - region='us-west-2', bucket=accesspoint_arn, key='key', - s3_config={'adressing_style': 'auto'}, - expected_url=( -@@ -1906,7 +1906,7 @@ - 'us-west-2.amazonaws.com/key' - ) - ) -- yield t.case( -+ t.case( - region='us-west-2', bucket=accesspoint_arn, key='key', - s3_config={'adressing_style': 'virtual'}, - expected_url=( -@@ -1914,7 +1914,7 @@ - 'us-west-2.amazonaws.com/key' - ) - ) -- yield t.case( -+ t.case( - region='us-west-2', bucket=accesspoint_arn, key='key', - s3_config={'adressing_style': 'path'}, - expected_url=( -@@ -1927,27 +1927,27 @@ - us_east_1_regional_endpoint = { - 'us_east_1_regional_endpoint': 'regional' - } -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config=us_east_1_regional_endpoint, - expected_url=( - 'https://bucket.s3.us-east-1.amazonaws.com/key')) -- yield t.case( -+ t.case( - region='us-west-2', bucket='bucket', key='key', - s3_config=us_east_1_regional_endpoint, - expected_url=( - 'https://bucket.s3.us-west-2.amazonaws.com/key')) -- yield t.case( -+ t.case( - region=None, bucket='bucket', key='key', - s3_config=us_east_1_regional_endpoint, - expected_url=( - 'https://bucket.s3.amazonaws.com/key')) -- yield t.case( -+ t.case( - region='unknown', bucket='bucket', key='key', - s3_config=us_east_1_regional_endpoint, - expected_url=( - 'https://bucket.s3.unknown.amazonaws.com/key')) -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config={ - 'us_east_1_regional_endpoint': 'regional', -@@ -1955,7 +1955,7 @@ - }, - expected_url=( - 'https://bucket.s3.dualstack.us-east-1.amazonaws.com/key')) -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config={ - 'us_east_1_regional_endpoint': 'regional', -@@ -1963,7 +1963,7 @@ - }, - expected_url=( - 'https://bucket.s3-accelerate.amazonaws.com/key')) -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config={ - 'us_east_1_regional_endpoint': 'regional', -@@ -1977,19 +1977,19 @@ - us_east_1_regional_endpoint_legacy = { - 'us_east_1_regional_endpoint': 'legacy' - } -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config=us_east_1_regional_endpoint_legacy, - expected_url=( - 'https://bucket.s3.amazonaws.com/key')) - -- yield t.case( -+ t.case( - region=None, bucket='bucket', key='key', - s3_config=us_east_1_regional_endpoint_legacy, - expected_url=( - 'https://bucket.s3.amazonaws.com/key')) - -- yield t.case( -+ t.case( - region='unknown', bucket='bucket', key='key', - s3_config=us_east_1_regional_endpoint_legacy, - expected_url=( -@@ -2041,7 +2041,7 @@ - with ClientHTTPStubber(s3) as http_stubber: - 
http_stubber.add_response() - s3.put_object(Bucket=bucket, Key=key, Body=b'bar') -- assert_equal(http_stubber.requests[0].url, expected_url) -+ assert http_stubber.requests[0].url == expected_url - - - def _create_s3_client(region, is_secure, endpoint_url, s3_config, -@@ -2074,96 +2074,96 @@ - - # us-east-1, or the "global" endpoint. A signature version of - # None means the user doesn't have signature version configured. -- yield t.case(region='us-east-1', bucket='bucket', key='key', -- signature_version=None, -- expected_url='https://bucket.s3.amazonaws.com/key') -- yield t.case(region='us-east-1', bucket='bucket', key='key', -- signature_version='s3', -- expected_url='https://bucket.s3.amazonaws.com/key') -- yield t.case(region='us-east-1', bucket='bucket', key='key', -- signature_version='s3v4', -- expected_url='https://bucket.s3.amazonaws.com/key') -- yield t.case(region='us-east-1', bucket='bucket', key='key', -- signature_version='s3v4', -- s3_config={'addressing_style': 'path'}, -- expected_url='https://s3.amazonaws.com/bucket/key') -+ t.case(region='us-east-1', bucket='bucket', key='key', -+ signature_version=None, -+ expected_url='https://bucket.s3.amazonaws.com/key') -+ t.case(region='us-east-1', bucket='bucket', key='key', -+ signature_version='s3', -+ expected_url='https://bucket.s3.amazonaws.com/key') -+ t.case(region='us-east-1', bucket='bucket', key='key', -+ signature_version='s3v4', -+ expected_url='https://bucket.s3.amazonaws.com/key') -+ t.case(region='us-east-1', bucket='bucket', key='key', -+ signature_version='s3v4', -+ s3_config={'addressing_style': 'path'}, -+ expected_url='https://s3.amazonaws.com/bucket/key') - - # A region that supports both 's3' and 's3v4'. -- yield t.case(region='us-west-2', bucket='bucket', key='key', -- signature_version=None, -- expected_url='https://bucket.s3.amazonaws.com/key') -- yield t.case(region='us-west-2', bucket='bucket', key='key', -- signature_version='s3', -- expected_url='https://bucket.s3.amazonaws.com/key') -- yield t.case(region='us-west-2', bucket='bucket', key='key', -- signature_version='s3v4', -- expected_url='https://bucket.s3.amazonaws.com/key') -- yield t.case(region='us-west-2', bucket='bucket', key='key', -- signature_version='s3v4', -- s3_config={'addressing_style': 'path'}, -- expected_url='https://s3.us-west-2.amazonaws.com/bucket/key') -+ t.case(region='us-west-2', bucket='bucket', key='key', -+ signature_version=None, -+ expected_url='https://bucket.s3.amazonaws.com/key') -+ t.case(region='us-west-2', bucket='bucket', key='key', -+ signature_version='s3', -+ expected_url='https://bucket.s3.amazonaws.com/key') -+ t.case(region='us-west-2', bucket='bucket', key='key', -+ signature_version='s3v4', -+ expected_url='https://bucket.s3.amazonaws.com/key') -+ t.case(region='us-west-2', bucket='bucket', key='key', -+ signature_version='s3v4', -+ s3_config={'addressing_style': 'path'}, -+ expected_url='https://s3.us-west-2.amazonaws.com/bucket/key') - - # An 's3v4' only region. 
-- yield t.case(region='us-east-2', bucket='bucket', key='key', -- signature_version=None, -- expected_url='https://bucket.s3.amazonaws.com/key') -- yield t.case(region='us-east-2', bucket='bucket', key='key', -- signature_version='s3', -- expected_url='https://bucket.s3.amazonaws.com/key') -- yield t.case(region='us-east-2', bucket='bucket', key='key', -- signature_version='s3v4', -- expected_url='https://bucket.s3.amazonaws.com/key') -- yield t.case(region='us-east-2', bucket='bucket', key='key', -- signature_version='s3v4', -- s3_config={'addressing_style': 'path'}, -- expected_url='https://s3.us-east-2.amazonaws.com/bucket/key') -+ t.case(region='us-east-2', bucket='bucket', key='key', -+ signature_version=None, -+ expected_url='https://bucket.s3.amazonaws.com/key') -+ t.case(region='us-east-2', bucket='bucket', key='key', -+ signature_version='s3', -+ expected_url='https://bucket.s3.amazonaws.com/key') -+ t.case(region='us-east-2', bucket='bucket', key='key', -+ signature_version='s3v4', -+ expected_url='https://bucket.s3.amazonaws.com/key') -+ t.case(region='us-east-2', bucket='bucket', key='key', -+ signature_version='s3v4', -+ s3_config={'addressing_style': 'path'}, -+ expected_url='https://s3.us-east-2.amazonaws.com/bucket/key') - - # Dualstack endpoints -- yield t.case( -+ t.case( - region='us-west-2', bucket='bucket', key='key', - signature_version=None, - s3_config={'use_dualstack_endpoint': True}, - expected_url='https://bucket.s3.dualstack.us-west-2.amazonaws.com/key') -- yield t.case( -+ t.case( - region='us-west-2', bucket='bucket', key='key', - signature_version='s3', - s3_config={'use_dualstack_endpoint': True}, - expected_url='https://bucket.s3.dualstack.us-west-2.amazonaws.com/key') -- yield t.case( -+ t.case( - region='us-west-2', bucket='bucket', key='key', - signature_version='s3v4', - s3_config={'use_dualstack_endpoint': True}, - expected_url='https://bucket.s3.dualstack.us-west-2.amazonaws.com/key') - - # Accelerate -- yield t.case(region='us-west-2', bucket='bucket', key='key', -- signature_version=None, -- s3_config={'use_accelerate_endpoint': True}, -- expected_url='https://bucket.s3-accelerate.amazonaws.com/key') -+ t.case(region='us-west-2', bucket='bucket', key='key', -+ signature_version=None, -+ s3_config={'use_accelerate_endpoint': True}, -+ expected_url='https://bucket.s3-accelerate.amazonaws.com/key') - - # A region that we don't know about. -- yield t.case(region='us-west-50', bucket='bucket', key='key', -- signature_version=None, -- expected_url='https://bucket.s3.amazonaws.com/key') -+ t.case(region='us-west-50', bucket='bucket', key='key', -+ signature_version=None, -+ expected_url='https://bucket.s3.amazonaws.com/key') - - # Customer provided URL results in us leaving the host untouched. -- yield t.case(region='us-west-2', bucket='bucket', key='key', -- signature_version=None, -- customer_provided_endpoint='https://foo.com/', -- expected_url='https://foo.com/bucket/key') -+ t.case(region='us-west-2', bucket='bucket', key='key', -+ signature_version=None, -+ customer_provided_endpoint='https://foo.com/', -+ expected_url='https://foo.com/bucket/key') - - # Access-point - accesspoint_arn = ( - 'arn:aws:s3:us-west-2:123456789012:accesspoint:myendpoint' - ) -- yield t.case( -+ t.case( - region='us-west-2', bucket=accesspoint_arn, key='key', - expected_url=( - 'https://myendpoint-123456789012.s3-accesspoint.' 
- 'us-west-2.amazonaws.com/key' - ) - ) -- yield t.case( -+ t.case( - region='us-east-1', bucket=accesspoint_arn, key='key', - s3_config={'use_arn_region': False}, - expected_url=( -@@ -2176,12 +2176,12 @@ - us_east_1_regional_endpoint = { - 'us_east_1_regional_endpoint': 'regional' - } -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config=us_east_1_regional_endpoint, signature_version='s3', - expected_url=( - 'https://bucket.s3.us-east-1.amazonaws.com/key')) -- yield t.case( -+ t.case( - region='us-east-1', bucket='bucket', key='key', - s3_config=us_east_1_regional_endpoint, signature_version='s3v4', - expected_url=( -@@ -2203,4 +2203,4 @@ - # those are tested elsewhere. We just care about the hostname/path. - parts = urlsplit(url) - actual = '%s://%s%s' % parts[:3] -- assert_equal(actual, expected_url) -+ assert actual == expected_url -diff -Nru botocore-1.18.15.orig/tests/functional/test_service_alias.py botocore-1.18.15/tests/functional/test_service_alias.py ---- botocore-1.18.15.orig/tests/functional/test_service_alias.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_service_alias.py 2020-10-09 10:13:49.548472372 +0200 -@@ -17,7 +17,7 @@ - def test_can_use_service_alias(): - session = botocore.session.get_session() - for (alias, name) in SERVICE_NAME_ALIASES.items(): -- yield _instantiates_the_same_client, session, name, alias -+ _instantiates_the_same_client(session, name, alias) - - - def _instantiates_the_same_client(session, service_name, service_alias): -diff -Nru botocore-1.18.15.orig/tests/functional/test_service_names.py botocore-1.18.15/tests/functional/test_service_names.py ---- botocore-1.18.15.orig/tests/functional/test_service_names.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_service_names.py 2020-10-09 10:13:49.512471875 +0200 -@@ -12,7 +12,6 @@ - # language governing permissions and limitations under the License. 
- import re - --from nose.tools import assert_true - from botocore.session import get_session - - BLACKLIST = [ -@@ -41,18 +40,18 @@ - def _assert_name_length(service_name): - if service_name not in BLACKLIST: - service_name_length = len(service_name) -- assert_true(service_name_length >= MIN_SERVICE_NAME_LENGTH, -- 'Service name must be greater than or equal to 2 ' -- 'characters in length.') -- assert_true(service_name_length <= MAX_SERVICE_NAME_LENGTH, -- 'Service name must be less than or equal to 50 ' -- 'characters in length.') -+ assert service_name_length >= MIN_SERVICE_NAME_LENGTH, \ -+ ('Service name must be greater than or equal to {:d} ' + -+ 'characters in length.').format(MIN_SERVICE_NAME_LENGTH) -+ assert service_name_length <= MAX_SERVICE_NAME_LENGTH, \ -+ ('Service name must be less than or equal to {:d} ' + -+ 'characters in length.').format(MAX_SERVICE_NAME_LENGTH) - - - def _assert_name_pattern(service_name): - if service_name not in BLACKLIST: -- valid = VALID_NAME_REGEX.match(service_name) is not None -- assert_true(valid, VALID_NAME_EXPLANATION) -+ assert VALID_NAME_REGEX.match(service_name) is not None, \ -+ VALID_NAME_EXPLANATION - - - def test_service_names_are_valid(): -@@ -60,5 +59,5 @@ - loader = session.get_component('data_loader') - service_names = loader.list_available_services('service-2') - for service_name in service_names: -- yield _assert_name_length, service_name -- yield _assert_name_pattern, service_name -+ _assert_name_length(service_name) -+ _assert_name_pattern(service_name) -diff -Nru botocore-1.18.15.orig/tests/functional/test_session.py botocore-1.18.15/tests/functional/test_session.py ---- botocore-1.18.15.orig/tests/functional/test_session.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_session.py 2020-10-09 10:13:49.552472426 +0200 -@@ -12,7 +12,7 @@ - # language governing permissions and limitations under the License. 
- from tests import unittest, temporary_file - --import mock -+from tests import mock - - import botocore.session - from botocore.exceptions import ProfileNotFound -diff -Nru botocore-1.18.15.orig/tests/functional/test_six_imports.py botocore-1.18.15/tests/functional/test_six_imports.py ---- botocore-1.18.15.orig/tests/functional/test_six_imports.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_six_imports.py 2020-10-09 10:13:49.548472372 +0200 -@@ -15,7 +15,7 @@ - if not filename.endswith('.py'): - continue - fullname = os.path.join(rootdir, filename) -- yield _assert_no_bare_six_imports, fullname -+ _assert_no_bare_six_imports(fullname) - - - def _assert_no_bare_six_imports(filename): -diff -Nru botocore-1.18.15.orig/tests/functional/test_sts.py botocore-1.18.15/tests/functional/test_sts.py ---- botocore-1.18.15.orig/tests/functional/test_sts.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_sts.py 2020-10-09 10:13:49.552472426 +0200 -@@ -13,7 +13,7 @@ - from datetime import datetime - import re - --import mock -+from tests import mock - - from tests import BaseSessionTest - from tests import temporary_file -diff -Nru botocore-1.18.15.orig/tests/functional/test_stub.py botocore-1.18.15/tests/functional/test_stub.py ---- botocore-1.18.15.orig/tests/functional/test_stub.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_stub.py 2020-10-09 10:13:49.528472096 +0200 -@@ -16,6 +16,7 @@ - import botocore - import botocore.session - import botocore.stub as stub -+from botocore.compat import six - from botocore.stub import Stubber - from botocore.exceptions import StubResponseError, ClientError, \ - StubAssertionError, UnStubbedResponseError -@@ -54,8 +55,8 @@ - def test_activated_stubber_errors_with_no_registered_stubs(self): - self.stubber.activate() - # Params one per line for readability. -- with self.assertRaisesRegexp(UnStubbedResponseError, -- "Unexpected API Call"): -+ with six.assertRaisesRegex(self, UnStubbedResponseError, -+ "Unexpected API Call"): - self.client.list_objects( - Bucket='asdfasdfasdfasdf', - Delimiter='asdfasdfasdfasdf', -@@ -119,8 +120,8 @@ - 'list_objects', service_response, expected_params) - self.stubber.activate() - # This should call should raise an for mismatching expected params. 
-- with self.assertRaisesRegexp(StubResponseError, -- "{'Bucket': 'bar'},\n"): -+ with six.assertRaisesRegex(self, StubResponseError, -+ "{'Bucket': 'bar'},\n"): - self.client.list_objects(Bucket='foo') - - def test_expected_params_mixed_with_errors_responses(self): -@@ -143,7 +144,8 @@ - self.client.list_objects(Bucket='foo') - - # The second call should throw an error for unexpected parameters -- with self.assertRaisesRegexp(StubResponseError, 'Expected parameters'): -+ with six.assertRaisesRegex(self, StubResponseError, -+ 'Expected parameters'): - self.client.list_objects(Bucket='foo') - - def test_can_continue_to_call_after_expected_params_fail(self): -diff -Nru botocore-1.18.15.orig/tests/functional/test_waiter_config.py botocore-1.18.15/tests/functional/test_waiter_config.py ---- botocore-1.18.15.orig/tests/functional/test_waiter_config.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/functional/test_waiter_config.py 2020-10-09 10:13:49.548472372 +0200 -@@ -98,9 +98,9 @@ - except UnknownServiceError: - # The service doesn't have waiters - continue -- yield _validate_schema, validator, waiter_model -+ _validate_schema(validator, waiter_model) - for waiter_name in client.waiter_names: -- yield _lint_single_waiter, client, waiter_name, service_model -+ _lint_single_waiter(client, waiter_name, service_model) - - - def _lint_single_waiter(client, waiter_name, service_model): -diff -Nru botocore-1.18.15.orig/tests/__init__.py botocore-1.18.15/tests/__init__.py ---- botocore-1.18.15.orig/tests/__init__.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/__init__.py 2020-10-09 10:13:49.504471764 +0200 -@@ -13,7 +13,10 @@ - - import os - import sys --import mock -+try: -+ import mock -+except ImportError: -+ from unittest import mock - import time - import random - import shutil -@@ -29,8 +32,6 @@ - from dateutil.tz import tzlocal - import unittest - --from nose.tools import assert_equal -- - import botocore.loaders - import botocore.session - from botocore.awsrequest import AWSResponse -@@ -346,16 +347,16 @@ - - # Because the query string ordering isn't relevant, we have to parse - # every single part manually and then handle the query string. 
-- assert_equal(parts1.scheme, parts2.scheme) -- assert_equal(parts1.netloc, parts2.netloc) -- assert_equal(parts1.path, parts2.path) -- assert_equal(parts1.params, parts2.params) -- assert_equal(parts1.fragment, parts2.fragment) -- assert_equal(parts1.username, parts2.username) -- assert_equal(parts1.password, parts2.password) -- assert_equal(parts1.hostname, parts2.hostname) -- assert_equal(parts1.port, parts2.port) -- assert_equal(parse_qs(parts1.query), parse_qs(parts2.query)) -+ assert parts1.scheme == parts2.scheme -+ assert parts1.netloc == parts2.netloc -+ assert parts1.path == parts2.path -+ assert parts1.params == parts2.params -+ assert parts1.fragment == parts2.fragment -+ assert parts1.username == parts2.username -+ assert parts1.password == parts2.password -+ assert parts1.hostname == parts2.hostname -+ assert parts1.port == parts2.port -+ assert parse_qs(parts1.query) == parse_qs(parts2.query) - - - class HTTPStubberException(Exception): -diff -Nru botocore-1.18.15.orig/tests/integration/test_client.py botocore-1.18.15/tests/integration/test_client.py ---- botocore-1.18.15.orig/tests/integration/test_client.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/integration/test_client.py 2020-10-09 10:13:49.532472151 +0200 -@@ -79,8 +79,8 @@ - def test_region_mentioned_in_invalid_region(self): - client = self.session.create_client( - 'cloudformation', region_name='us-east-999') -- with self.assertRaisesRegexp(EndpointConnectionError, -- 'Could not connect to the endpoint URL'): -+ with six.assertRaisesRegex(self, EndpointConnectionError, -+ 'Could not connect to the endpoint URL'): - client.list_stacks() - - def test_client_modeled_exception(self): -diff -Nru botocore-1.18.15.orig/tests/integration/test_credentials.py botocore-1.18.15/tests/integration/test_credentials.py ---- botocore-1.18.15.orig/tests/integration/test_credentials.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/integration/test_credentials.py 2020-10-09 10:13:49.552472426 +0200 -@@ -11,7 +11,7 @@ - # ANY KIND, either express or implied. See the License for the specific - # language governing permissions and limitations under the License. - import os --import mock -+from tests import mock - import tempfile - import shutil - import json -diff -Nru botocore-1.18.15.orig/tests/integration/test_ec2.py botocore-1.18.15/tests/integration/test_ec2.py ---- botocore-1.18.15.orig/tests/integration/test_ec2.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/integration/test_ec2.py 2020-10-09 10:13:49.512471875 +0200 -@@ -13,8 +13,6 @@ - from tests import unittest - import itertools - --from nose.plugins.attrib import attr -- - import botocore.session - from botocore.exceptions import ClientError - -diff -Nru botocore-1.18.15.orig/tests/integration/test_emr.py botocore-1.18.15/tests/integration/test_emr.py ---- botocore-1.18.15.orig/tests/integration/test_emr.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/integration/test_emr.py 2020-10-09 10:13:49.516471930 +0200 -@@ -12,8 +12,6 @@ - # language governing permissions and limitations under the License. 
- from tests import unittest - --from nose.tools import assert_true -- - import botocore.session - from botocore.paginate import PageIterator - from botocore.exceptions import OperationNotPageableError -@@ -34,7 +32,7 @@ - def _test_can_list_clusters_in_region(session, region): - client = session.create_client('emr', region_name=region) - response = client.list_clusters() -- assert_true('Clusters' in response) -+ assert 'Clusters' in response - - - # I consider these integration tests because they're -diff -Nru botocore-1.18.15.orig/tests/integration/test_loaders.py botocore-1.18.15/tests/integration/test_loaders.py ---- botocore-1.18.15.orig/tests/integration/test_loaders.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/integration/test_loaders.py 2020-10-09 10:13:49.552472426 +0200 -@@ -13,7 +13,7 @@ - import os - from tests import unittest - --import mock -+from tests import mock - - import botocore.session - -diff -Nru botocore-1.18.15.orig/tests/integration/test_s3.py botocore-1.18.15/tests/integration/test_s3.py ---- botocore-1.18.15.orig/tests/integration/test_s3.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/integration/test_s3.py 2020-10-09 10:13:49.516471930 +0200 -@@ -22,11 +22,10 @@ - import shutil - import threading - import logging --import mock - from tarfile import TarFile - from contextlib import closing - --from nose.plugins.attrib import attr -+import pytest - import urllib3 - - from botocore.endpoint import Endpoint -@@ -324,7 +323,7 @@ - Bucket=self.bucket_name, Key=key_name) - self.assert_status_code(response, 204) - -- @attr('slow') -+ @pytest.mark.slow - def test_can_paginate(self): - for i in range(5): - key_name = 'key%s' % i -@@ -340,7 +339,7 @@ - for el in responses] - self.assertEqual(key_names, ['key0', 'key1', 'key2', 'key3', 'key4']) - -- @attr('slow') -+ @pytest.mark.slow - def test_can_paginate_with_page_size(self): - for i in range(5): - key_name = 'key%s' % i -@@ -357,7 +356,7 @@ - for el in data] - self.assertEqual(key_names, ['key0', 'key1', 'key2', 'key3', 'key4']) - -- @attr('slow') -+ @pytest.mark.slow - def test_result_key_iters(self): - for i in range(5): - key_name = 'key/%s/%s' % (i, i) -@@ -380,7 +379,7 @@ - self.assertIn('Contents', response) - self.assertIn('CommonPrefixes', response) - -- @attr('slow') -+ @pytest.mark.slow - def test_can_get_and_put_object(self): - self.create_object('foobarbaz', body='body contents') - time.sleep(3) -@@ -930,7 +929,7 @@ - Key='foo.txt', Body=body) - self.assert_status_code(response, 200) - -- @attr('slow') -+ @pytest.mark.slow - def test_paginate_list_objects_unicode(self): - key_names = [ - u'non-ascii-key-\xe4\xf6\xfc-01.txt', -@@ -953,7 +952,7 @@ - - self.assertEqual(key_names, key_refs) - -- @attr('slow') -+ @pytest.mark.slow - def test_paginate_list_objects_safe_chars(self): - key_names = [ - u'-._~safe-chars-key-01.txt', -@@ -1247,7 +1246,7 @@ - - eu_bucket = self.create_bucket(self.bucket_region) - msg = 'The authorization mechanism you have provided is not supported.' 
-- with self.assertRaisesRegexp(ClientError, msg): -+ with six.assertRaisesRegex(self, ClientError, msg): - sigv2_client.list_objects(Bucket=eu_bucket) - - def test_region_redirects_multiple_requests(self): -diff -Nru botocore-1.18.15.orig/tests/integration/test_smoke.py botocore-1.18.15/tests/integration/test_smoke.py ---- botocore-1.18.15.orig/tests/integration/test_smoke.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/integration/test_smoke.py 2020-10-09 10:13:49.516471930 +0200 -@@ -11,17 +11,14 @@ - - """ - import os --import mock - from pprint import pformat - import warnings - import logging --from nose.tools import assert_equal, assert_true - - from tests import ClientHTTPStubber - from botocore import xform_name - import botocore.session - from botocore.client import ClientError --from botocore.endpoint import Endpoint - from botocore.exceptions import ConnectionClosedError - - -@@ -262,10 +259,9 @@ - method = getattr(client, operation_name) - with warnings.catch_warnings(record=True) as caught_warnings: - response = method(**kwargs) -- assert_equal(len(caught_warnings), 0, -- "Warnings were emitted during smoke test: %s" -- % caught_warnings) -- assert_true('Errors' not in response) -+ assert len(caught_warnings) == 0, \ -+ "Warnings were emitted during smoke test: %s" % caught_warnings -+ assert 'Errors' not in response - - - def test_can_make_request_and_understand_errors_with_client(): -@@ -275,7 +271,7 @@ - for operation_name in ERROR_TESTS[service_name]: - kwargs = ERROR_TESTS[service_name][operation_name] - method_name = xform_name(operation_name) -- yield _make_error_client_call, client, method_name, kwargs -+ _make_error_client_call(client, method_name, kwargs) - - - def _make_error_client_call(client, operation_name, kwargs): -diff -Nru botocore-1.18.15.orig/tests/integration/test_sts.py botocore-1.18.15/tests/integration/test_sts.py ---- botocore-1.18.15.orig/tests/integration/test_sts.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/integration/test_sts.py 2020-10-09 10:13:49.532472151 +0200 -@@ -13,6 +13,8 @@ - from tests import unittest - - import botocore.session -+ -+from botocore.compat import six - from botocore.exceptions import ClientError - - class TestSTS(unittest.TestCase): -@@ -38,5 +40,5 @@ - self.assertEqual(sts.meta.endpoint_url, - 'https://sts.us-west-2.amazonaws.com') - # Signing error will be thrown with the incorrect region name included. -- with self.assertRaisesRegexp(ClientError, 'ap-southeast-1') as e: -+ with six.assertRaisesRegex(self, ClientError, 'ap-southeast-1'): - sts.get_session_token() -diff -Nru botocore-1.18.15.orig/tests/integration/test_waiters.py botocore-1.18.15/tests/integration/test_waiters.py ---- botocore-1.18.15.orig/tests/integration/test_waiters.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/integration/test_waiters.py 2020-10-09 10:13:49.516471930 +0200 -@@ -12,14 +12,14 @@ - # language governing permissions and limitations under the License. - from tests import unittest, random_chars - --from nose.plugins.attrib import attr -+import pytest - - import botocore.session - from botocore.exceptions import WaiterError - - - # This is the same test as above, except using the client interface. 
--@attr('slow') -+@pytest.mark.slow - class TestWaiterForDynamoDB(unittest.TestCase): - def setUp(self): - self.session = botocore.session.get_session() -diff -Nru botocore-1.18.15.orig/tests/unit/auth/test_signers.py botocore-1.18.15/tests/unit/auth/test_signers.py ---- botocore-1.18.15.orig/tests/unit/auth/test_signers.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/unit/auth/test_signers.py 2020-10-09 10:13:49.552472426 +0200 -@@ -18,7 +18,7 @@ - import base64 - import json - --import mock -+from tests import mock - - import botocore.auth - import botocore.credentials -diff -Nru botocore-1.18.15.orig/tests/unit/auth/test_sigv4.py botocore-1.18.15/tests/unit/auth/test_sigv4.py ---- botocore-1.18.15.orig/tests/unit/auth/test_sigv4.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/unit/auth/test_sigv4.py 2020-10-09 10:13:49.516471930 +0200 -@@ -18,8 +18,7 @@ - http://docs.aws.amazon.com/general/latest/gr/signature-v4-test-suite.html - - This module contains logic to run these tests. The test files were --placed in ./aws4_testsuite, and we're using nose's test generators to --dynamically generate testcases based on these files. -+placed in ./aws4_testsuite. - - """ - import os -@@ -28,7 +27,7 @@ - import datetime - from botocore.compat import six - --import mock -+from tests import mock - - import botocore.auth - from botocore.awsrequest import AWSRequest -@@ -106,7 +105,7 @@ - if test_case in TESTS_TO_IGNORE: - log.debug("Skipping test: %s", test_case) - continue -- yield (_test_signature_version_4, test_case) -+ _test_signature_version_4(test_case) - datetime_patcher.stop() - formatdate_patcher.stop() - -@@ -147,21 +146,22 @@ - auth = botocore.auth.SigV4Auth(test_case.credentials, 'host', 'us-east-1') - - actual_canonical_request = auth.canonical_request(request) -- assert_equal(actual_canonical_request, test_case.canonical_request, -- test_case.raw_request, 'canonical_request') -+ assert_requests_equal(actual_canonical_request, -+ test_case.canonical_request, -+ test_case.raw_request, 'canonical_request') - - actual_string_to_sign = auth.string_to_sign(request, - actual_canonical_request) -- assert_equal(actual_string_to_sign, test_case.string_to_sign, -- test_case.raw_request, 'string_to_sign') -+ assert_requests_equal(actual_string_to_sign, test_case.string_to_sign, -+ test_case.raw_request, 'string_to_sign') - - auth.add_auth(request) - actual_auth_header = request.headers['Authorization'] -- assert_equal(actual_auth_header, test_case.authorization_header, -- test_case.raw_request, 'authheader') -+ assert_requests_equal(actual_auth_header, test_case.authorization_header, -+ test_case.raw_request, 'authheader') - - --def assert_equal(actual, expected, raw_request, part): -+def assert_requests_equal(actual, expected, raw_request, part): - if actual != expected: - message = "The %s did not match" % part - message += "\nACTUAL:%r !=\nEXPECT:%r" % (actual, expected) -diff -Nru botocore-1.18.15.orig/tests/unit/docs/bcdoc/test_docstringparser.py botocore-1.18.15/tests/unit/docs/bcdoc/test_docstringparser.py ---- botocore-1.18.15.orig/tests/unit/docs/bcdoc/test_docstringparser.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/unit/docs/bcdoc/test_docstringparser.py 2020-10-09 10:13:49.556472483 +0200 -@@ -18,7 +18,7 @@ - # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - # IN THE SOFTWARE. 
--import mock -+from tests import mock - from tests import unittest - - import botocore.docs.bcdoc.docstringparser as parser -diff -Nru botocore-1.18.15.orig/tests/unit/docs/__init__.py botocore-1.18.15/tests/unit/docs/__init__.py ---- botocore-1.18.15.orig/tests/unit/docs/__init__.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/unit/docs/__init__.py 2020-10-09 10:13:49.552472426 +0200 -@@ -16,7 +16,7 @@ - import shutil - - from botocore.docs.bcdoc.restdoc import DocumentStructure --import mock -+from tests import mock - - from tests import unittest - from botocore.compat import OrderedDict -diff -Nru botocore-1.18.15.orig/tests/unit/docs/test_docs.py botocore-1.18.15/tests/unit/docs/test_docs.py ---- botocore-1.18.15.orig/tests/unit/docs/test_docs.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/unit/docs/test_docs.py 2020-10-09 10:13:49.556472483 +0200 -@@ -14,7 +14,7 @@ - import shutil - import tempfile - --import mock -+from tests import mock - - from tests.unit.docs import BaseDocsTest - from botocore.session import get_session -diff -Nru botocore-1.18.15.orig/tests/unit/docs/test_example.py botocore-1.18.15/tests/unit/docs/test_example.py ---- botocore-1.18.15.orig/tests/unit/docs/test_example.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/unit/docs/test_example.py 2020-10-09 10:13:49.556472483 +0200 -@@ -10,7 +10,7 @@ - # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF - # ANY KIND, either express or implied. See the License for the specific - # language governing permissions and limitations under the License. --import mock -+from tests import mock - - from tests.unit.docs import BaseDocsTest - from botocore.hooks import HierarchicalEmitter -diff -Nru botocore-1.18.15.orig/tests/unit/docs/test_params.py botocore-1.18.15/tests/unit/docs/test_params.py ---- botocore-1.18.15.orig/tests/unit/docs/test_params.py 2020-10-08 20:05:10.000000000 +0200 -+++ botocore-1.18.15/tests/unit/docs/test_params.py 2020-10-09 10:13:49.556472483 +0200 -@@ -10,7 +10,7 @@ - # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF - # ANY KIND, either express or implied. See the License for the specific - # language governing permissions and limitations under the License. --import mock -+from tests import mock - - from tests.unit.docs import BaseDocsTest - from botocore.hooks import HierarchicalEmitter -diff -Nru botocore-1.18.15.orig/tests/unit/docs/test_service.py botocore-1.18.15/tests/unit/docs/test_service.py ---- botocore-1.18.15.orig/tests/unit/docs/test_service.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/docs/test_service.py 2020-10-09 10:13:49.556472483 +0200 -@@ -12,7 +12,7 @@ - # language governing permissions and limitations under the License. 
- import os - --import mock -+from tests import mock - - from tests.unit.docs import BaseDocsTest - from botocore.session import get_session -diff -Nru botocore-1.18.15.orig/tests/unit/docs/test_utils.py botocore-1.18.15/tests/unit/docs/test_utils.py ---- botocore-1.18.15.orig/tests/unit/docs/test_utils.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/docs/test_utils.py 2020-10-09 10:13:49.532472151 +0200 -@@ -223,5 +223,5 @@ - class TestEscapeControls(unittest.TestCase): - def test_escapes_controls(self): - escaped = escape_controls('\na\rb\tc\fd\be') -- self.assertEquals(escaped, '\\na\\rb\\tc\\fd\\be') -+ self.assertEqual(escaped, '\\na\\rb\\tc\\fd\\be') - -diff -Nru botocore-1.18.15.orig/tests/unit/response_parsing/README.rst botocore-1.18.15/tests/unit/response_parsing/README.rst ---- botocore-1.18.15.orig/tests/unit/response_parsing/README.rst 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/response_parsing/README.rst 2020-10-09 10:13:49.528472096 +0200 -@@ -16,12 +16,12 @@ - file contains the expected Python data structure created from the XML - response. - --The main test is contained in ``test_response_parser.py`` and is --implemented as a nose generator. Each time through the loop an XML --file is read and passed to a ``botocore.response.XmlResponse`` --object. The corresponding JSON file is then parsed and compared to --the value created by the parser. If the are equal, the test passes. If --they are not equal, both the expected result and the actual result are -+The main test is contained in ``test_response_parser.py``. Each -+time through the loop an XML file is read and passed to -+a ``botocore.response.XmlResponse`` object. The corresponding -+JSON file is then parsed and compared to the value created by the -+parser. If the are equal, the test passes. If they are not -+equal, both the expected result and the actual result are - pretty-printed to stdout and the tests continue. 
- - ----------------- -diff -Nru botocore-1.18.15.orig/tests/unit/response_parsing/test_response_parsing.py botocore-1.18.15/tests/unit/response_parsing/test_response_parsing.py ---- botocore-1.18.15.orig/tests/unit/response_parsing/test_response_parsing.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/response_parsing/test_response_parsing.py 2020-10-09 10:13:49.532472151 +0200 -@@ -119,8 +119,8 @@ - expected = _get_expected_parsed_result(xmlfile) - operation_model = _get_operation_model(service_model, xmlfile) - raw_response_body = _get_raw_response_body(xmlfile) -- yield _test_parsed_response, xmlfile, raw_response_body, \ -- operation_model, expected -+ _test_parsed_response(xmlfile, raw_response_body, -+ operation_model, expected) - - - def _get_raw_response_body(xmlfile): -@@ -179,8 +179,8 @@ - operation_model = service_model.operation_model(op_name) - with open(raw_response_file, 'rb') as f: - raw_response_body = f.read() -- yield _test_parsed_response, raw_response_file, \ -- raw_response_body, operation_model, expected -+ _test_parsed_response(raw_response_file, -+ raw_response_body, operation_model, expected) - - - def _uhg_test_json_parsing(): -diff -Nru botocore-1.18.15.orig/tests/unit/retries/test_adaptive.py botocore-1.18.15/tests/unit/retries/test_adaptive.py ---- botocore-1.18.15.orig/tests/unit/retries/test_adaptive.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/retries/test_adaptive.py 2020-10-09 10:13:49.556472483 +0200 -@@ -1,6 +1,6 @@ - from tests import unittest - --import mock -+from tests import mock - - from botocore.retries import adaptive - from botocore.retries import standard -diff -Nru botocore-1.18.15.orig/tests/unit/retries/test_special.py botocore-1.18.15/tests/unit/retries/test_special.py ---- botocore-1.18.15.orig/tests/unit/retries/test_special.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/retries/test_special.py 2020-10-09 10:13:49.516471930 +0200 -@@ -1,9 +1,7 @@ - from tests import unittest - --import mock --from nose.tools import assert_equal, assert_is_instance -+from tests import mock - --from botocore.compat import six - from botocore.awsrequest import AWSResponse - from botocore.retries import standard, special - -diff -Nru botocore-1.18.15.orig/tests/unit/retries/test_standard.py botocore-1.18.15/tests/unit/retries/test_standard.py ---- botocore-1.18.15.orig/tests/unit/retries/test_standard.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/retries/test_standard.py 2020-10-09 10:13:49.520471985 +0200 -@@ -1,7 +1,6 @@ - from tests import unittest - --import mock --from nose.tools import assert_equal, assert_is_instance -+from tests import mock - - from botocore.retries import standard - from botocore.retries import quota -@@ -154,22 +153,20 @@ - def test_can_detect_retryable_transient_errors(): - transient_checker = standard.TransientRetryableChecker() - for case in RETRYABLE_TRANSIENT_ERRORS: -- yield (_verify_retryable, transient_checker, None) + case -+ _verify_retryable(transient_checker, None, *case) - - - def test_can_detect_retryable_throttled_errors(): - throttled_checker = standard.ThrottledRetryableChecker() - for case in RETRYABLE_THROTTLED_RESPONSES: -- yield (_verify_retryable, throttled_checker, None) + case -+ _verify_retryable(throttled_checker, None, *case) - - - def test_can_detect_modeled_retryable_errors(): - modeled_retry_checker = standard.ModeledRetryableChecker() -- test_params = (_verify_retryable, modeled_retry_checker, -- 
get_operation_model_with_retries()) - for case in RETRYABLE_MODELED_ERRORS: -- test_case = test_params + case -- yield test_case -+ _verify_retryable(modeled_retry_checker, -+ get_operation_model_with_retries(), *case) - - - def test_standard_retry_conditions(): -@@ -184,9 +181,8 @@ - # are retryable for a different checker. We need to filter out all - # the False cases. - all_cases = [c for c in all_cases if c[2]] -- test_params = (_verify_retryable, standard_checker, op_model) - for case in all_cases: -- yield test_params + case -+ _verify_retryable(standard_checker, op_model, *case) - - - def get_operation_model_with_retries(): -@@ -213,7 +209,7 @@ - http_response=http_response, - caught_exception=caught_exception, - ) -- assert_equal(checker.is_retryable(context), is_retryable) -+ assert checker.is_retryable(context) == is_retryable - - - def arbitrary_retry_context(): -@@ -233,36 +229,36 @@ - checker = standard.MaxAttemptsChecker(max_attempts=3) - context = arbitrary_retry_context() - context.attempt_number = 1 -- assert_equal(checker.is_retryable(context), True) -+ assert checker.is_retryable(context) - - context.attempt_number = 2 -- assert_equal(checker.is_retryable(context), True) -+ assert checker.is_retryable(context) - - context.attempt_number = 3 -- assert_equal(checker.is_retryable(context), False) -+ assert not checker.is_retryable(context) - - - def test_max_attempts_adds_metadata_key_when_reached(): - checker = standard.MaxAttemptsChecker(max_attempts=3) - context = arbitrary_retry_context() - context.attempt_number = 3 -- assert_equal(checker.is_retryable(context), False) -- assert_equal(context.get_retry_metadata(), {'MaxAttemptsReached': True}) -+ assert not checker.is_retryable(context) -+ assert context.get_retry_metadata() == {'MaxAttemptsReached': True} - - - def test_can_create_default_retry_handler(): - mock_client = mock.Mock() - mock_client.meta.service_model.service_id = model.ServiceId('my-service') -- assert_is_instance(standard.register_retry_handler(mock_client), -- standard.RetryHandler) -+ assert isinstance(standard.register_retry_handler(mock_client), -+ standard.RetryHandler) - call_args_list = mock_client.meta.events.register.call_args_list - # We should have registered the retry quota to after-calls - first_call = call_args_list[0][0] - second_call = call_args_list[1][0] - # Not sure if there's a way to verify the class associated with the - # bound method matches what we expect. 
-- assert_equal(first_call[0], 'after-call.my-service') -- assert_equal(second_call[0], 'needs-retry.my-service') -+ assert first_call[0] == 'after-call.my-service' -+ assert second_call[0] == 'needs-retry.my-service' - - - class TestRetryHandler(unittest.TestCase): -diff -Nru botocore-1.18.15.orig/tests/unit/test_args.py botocore-1.18.15/tests/unit/test_args.py ---- botocore-1.18.15.orig/tests/unit/test_args.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_args.py 2020-10-09 10:13:49.556472483 +0200 -@@ -15,7 +15,7 @@ - - import botocore.config - from tests import unittest --import mock -+from tests import mock - - from botocore import args - from botocore import exceptions -diff -Nru botocore-1.18.15.orig/tests/unit/test_awsrequest.py botocore-1.18.15/tests/unit/test_awsrequest.py ---- botocore-1.18.15.orig/tests/unit/test_awsrequest.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_awsrequest.py 2020-10-09 10:13:49.532472151 +0200 -@@ -18,13 +18,15 @@ - import shutil - import io - import socket --import sys - --from mock import Mock, patch -+try: -+ from mock import Mock, patch -+except ImportError: -+ from unittest.mock import Mock, patch - from urllib3.connectionpool import HTTPConnectionPool, HTTPSConnectionPool - - from botocore.exceptions import UnseekableStreamError --from botocore.awsrequest import AWSRequest, AWSPreparedRequest, AWSResponse -+from botocore.awsrequest import AWSRequest, AWSResponse - from botocore.awsrequest import AWSHTTPConnection, AWSHTTPSConnection, HeadersDict - from botocore.awsrequest import prepare_request_dict, create_request_object - from botocore.compat import file_type, six -@@ -271,11 +273,11 @@ - def test_text_property(self): - self.set_raw_stream([b'\xe3\x82\xb8\xe3\x83\xa7\xe3\x82\xb0']) - self.response.headers['content-type'] = 'text/plain; charset=utf-8' -- self.assertEquals(self.response.text, u'\u30b8\u30e7\u30b0') -+ self.assertEqual(self.response.text, u'\u30b8\u30e7\u30b0') - - def test_text_property_defaults_utf8(self): - self.set_raw_stream([b'\xe3\x82\xb8\xe3\x83\xa7\xe3\x82\xb0']) -- self.assertEquals(self.response.text, u'\u30b8\u30e7\u30b0') -+ self.assertEqual(self.response.text, u'\u30b8\u30e7\u30b0') - - - class TestAWSHTTPConnection(unittest.TestCase): -diff -Nru botocore-1.18.15.orig/tests/unit/test_client.py botocore-1.18.15/tests/unit/test_client.py ---- botocore-1.18.15.orig/tests/unit/test_client.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_client.py 2020-10-09 10:13:49.532472151 +0200 -@@ -12,7 +12,7 @@ - # language governing permissions and limitations under the License. 
- import botocore.config - from tests import unittest --import mock -+from tests import mock - - import botocore - from botocore import utils -@@ -554,8 +554,8 @@ - creator = self.create_client_creator() - service_client = creator.create_client( - 'myservice', 'us-west-2', credentials=self.credentials) -- with self.assertRaisesRegexp( -- TypeError, 'only accepts keyword arguments'): -+ with six.assertRaisesRegex(self, TypeError, -+ 'only accepts keyword arguments'): - service_client.test_operation('foo') - - @mock.patch('botocore.args.RequestSigner.sign') -@@ -1550,15 +1550,15 @@ - self.assertEqual(config.read_timeout, 50) - - def test_invalid_kwargs(self): -- with self.assertRaisesRegexp(TypeError, 'Got unexpected keyword'): -+ with six.assertRaisesRegex(self, TypeError, 'Got unexpected keyword'): - botocore.config.Config(foo='foo') - - def test_pass_invalid_length_of_args(self): -- with self.assertRaisesRegexp(TypeError, 'Takes at most'): -+ with six.assertRaisesRegex(self, TypeError, 'Takes at most'): - botocore.config.Config('foo', *botocore.config.Config.OPTION_DEFAULTS.values()) - - def test_create_with_multiple_kwargs(self): -- with self.assertRaisesRegexp(TypeError, 'Got multiple values'): -+ with six.assertRaisesRegex(self, TypeError, 'Got multiple values'): - botocore.config.Config('us-east-1', region_name='us-east-1') - - def test_merge_returns_new_config_object(self): -@@ -1610,10 +1610,10 @@ - self.assertEqual(config.retries['max_attempts'], 15) - - def test_validates_retry_config(self): -- with self.assertRaisesRegexp( -- InvalidRetryConfigurationError, -- 'Cannot provide retry configuration for "not-allowed"'): -- botocore.config.Config(retries={'not-allowed': True}) -+ with six.assertRaisesRegex( -+ self, InvalidRetryConfigurationError, -+ 'Cannot provide retry configuration for "not-allowed"'): -+ botocore.config.Config(retries={'not-allowed': True}) - - def test_validates_max_retry_attempts(self): - with self.assertRaises(InvalidMaxRetryAttemptsError): -diff -Nru botocore-1.18.15.orig/tests/unit/test_compat.py botocore-1.18.15/tests/unit/test_compat.py ---- botocore-1.18.15.orig/tests/unit/test_compat.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_compat.py 2020-10-09 10:13:49.520471985 +0200 -@@ -11,9 +11,7 @@ - # ANY KIND, either express or implied. See the License for the specific - # language governing permissions and limitations under the License. - import datetime --import mock -- --from nose.tools import assert_equal, assert_raises -+from tests import mock - - from botocore.exceptions import MD5UnavailableError - from botocore.compat import ( -@@ -98,80 +96,76 @@ - get_md5() - - --def test_compat_shell_split_windows(): -- windows_cases = { -- r'': [], -- r'spam \\': [r'spam', '\\\\'], -- r'spam ': [r'spam'], -- r' spam': [r'spam'], -- 'spam eggs': [r'spam', r'eggs'], -- 'spam\teggs': [r'spam', r'eggs'], -- 'spam\neggs': ['spam\neggs'], -- '""': [''], -- '" "': [' '], -- '"\t"': ['\t'], -- '\\\\': ['\\\\'], -- '\\\\ ': ['\\\\'], -- '\\\\\t': ['\\\\'], -- r'\"': ['"'], -- # The following four test cases are official test cases given in -- # Microsoft's documentation. 
-- r'"abc" d e': [r'abc', r'd', r'e'], -- r'a\\b d"e f"g h': [r'a\\b', r'de fg', r'h'], -- r'a\\\"b c d': [r'a\"b', r'c', r'd'], -- r'a\\\\"b c" d e': [r'a\\b c', r'd', r'e'] -- } -- runner = ShellSplitTestRunner() -- for input_string, expected_output in windows_cases.items(): -- yield runner.assert_equal, input_string, expected_output, "win32" -- -- yield runner.assert_raises, r'"', ValueError, "win32" -- -- --def test_compat_shell_split_unix(): -- unix_cases = { -- r'': [], -- r'spam \\': [r'spam', '\\'], -- r'spam ': [r'spam'], -- r' spam': [r'spam'], -- 'spam eggs': [r'spam', r'eggs'], -- 'spam\teggs': [r'spam', r'eggs'], -- 'spam\neggs': ['spam', 'eggs'], -- '""': [''], -- '" "': [' '], -- '"\t"': ['\t'], -- '\\\\': ['\\'], -- '\\\\ ': ['\\'], -- '\\\\\t': ['\\'], -- r'\"': ['"'], -- # The following four test cases are official test cases given in -- # Microsoft's documentation, but adapted to unix shell splitting. -- r'"abc" d e': [r'abc', r'd', r'e'], -- r'a\\b d"e f"g h': [r'a\b', r'de fg', r'h'], -- r'a\\\"b c d': [r'a\"b', r'c', r'd'], -- r'a\\\\"b c" d e': [r'a\\b c', r'd', r'e'] -- } -- runner = ShellSplitTestRunner() -- for input_string, expected_output in unix_cases.items(): -- yield runner.assert_equal, input_string, expected_output, "linux2" -- yield runner.assert_equal, input_string, expected_output, "darwin" -- -- yield runner.assert_raises, r'"', ValueError, "linux2" -- yield runner.assert_raises, r'"', ValueError, "darwin" -- -- --class ShellSplitTestRunner(object): -- def assert_equal(self, s, expected, platform): -- assert_equal(compat_shell_split(s, platform), expected) -+class TestCompatShellSplit(unittest.TestCase): -+ def test_compat_shell_split_windows(self): -+ windows_cases = { -+ r'': [], -+ r'spam \\': [r'spam', '\\\\'], -+ r'spam ': [r'spam'], -+ r' spam': [r'spam'], -+ 'spam eggs': [r'spam', r'eggs'], -+ 'spam\teggs': [r'spam', r'eggs'], -+ 'spam\neggs': ['spam\neggs'], -+ '""': [''], -+ '" "': [' '], -+ '"\t"': ['\t'], -+ '\\\\': ['\\\\'], -+ '\\\\ ': ['\\\\'], -+ '\\\\\t': ['\\\\'], -+ r'\"': ['"'], -+ # The following four test cases are official test cases given in -+ # Microsoft's documentation. -+ r'"abc" d e': [r'abc', r'd', r'e'], -+ r'a\\b d"e f"g h': [r'a\\b', r'de fg', r'h'], -+ r'a\\\"b c d': [r'a\"b', r'c', r'd'], -+ r'a\\\\"b c" d e': [r'a\\b c', r'd', r'e'] -+ } -+ for input_string, expected_output in windows_cases.items(): -+ self.assertEqual(compat_shell_split(input_string, "win32"), -+ expected_output) -+ -+ with self.assertRaises(ValueError): -+ compat_shell_split(r'"', "win32") -+ -+ def test_compat_shell_split_unix(self): -+ unix_cases = { -+ r'': [], -+ r'spam \\': [r'spam', '\\'], -+ r'spam ': [r'spam'], -+ r' spam': [r'spam'], -+ 'spam eggs': [r'spam', r'eggs'], -+ 'spam\teggs': [r'spam', r'eggs'], -+ 'spam\neggs': ['spam', 'eggs'], -+ '""': [''], -+ '" "': [' '], -+ '"\t"': ['\t'], -+ '\\\\': ['\\'], -+ '\\\\ ': ['\\'], -+ '\\\\\t': ['\\'], -+ r'\"': ['"'], -+ # The following four test cases are official test cases given in -+ # Microsoft's documentation, but adapted to unix shell splitting. 
-+ r'"abc" d e': [r'abc', r'd', r'e'], -+ r'a\\b d"e f"g h': [r'a\b', r'de fg', r'h'], -+ r'a\\\"b c d': [r'a\"b', r'c', r'd'], -+ r'a\\\\"b c" d e': [r'a\\b c', r'd', r'e'] -+ } -+ for input_string, expected_output in unix_cases.items(): -+ self.assertEqual(compat_shell_split(input_string, "linux2"), -+ expected_output) -+ self.assertEqual(compat_shell_split(input_string, "darwin"), -+ expected_output) - -- def assert_raises(self, s, exception_cls, platform): -- assert_raises(exception_cls, compat_shell_split, s, platform) -+ with self.assertRaises(ValueError): -+ compat_shell_split(r'"', "linux2") -+ with self.assertRaises(ValueError): -+ compat_shell_split(r'"', "darwin") - - - class TestTimezoneOperations(unittest.TestCase): - def test_get_tzinfo_options(self): - options = get_tzinfo_options() -- self.assertTrue(len(options) > 0) -+ self.assertGreater(len(options), 0) - - for tzinfo in options: - self.assertIsInstance(tzinfo(), datetime.tzinfo) -diff -Nru botocore-1.18.15.orig/tests/unit/test_configloader.py botocore-1.18.15/tests/unit/test_configloader.py ---- botocore-1.18.15.orig/tests/unit/test_configloader.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_configloader.py 2020-10-09 10:13:49.556472483 +0200 -@@ -14,7 +14,7 @@ - # language governing permissions and limitations under the License. - from tests import unittest, BaseEnvVar - import os --import mock -+from tests import mock - import tempfile - import shutil - -diff -Nru botocore-1.18.15.orig/tests/unit/test_config_provider.py botocore-1.18.15/tests/unit/test_config_provider.py ---- botocore-1.18.15.orig/tests/unit/test_config_provider.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_config_provider.py 2020-10-09 10:13:49.520471985 +0200 -@@ -11,8 +11,7 @@ - # ANY KIND, either express or implied. See the License for the specific - # language governing permissions and limitations under the License. - from tests import unittest --import mock --from nose.tools import assert_equal -+from tests import mock - - import botocore - import botocore.session as session -@@ -308,7 +307,7 @@ - provider = ConfigValueStore() - provider.set_config_variable('fake_variable', 'foo') - value = provider.get_config_variable('fake_variable') -- self.assertEquals(value, 'foo') -+ self.assertEqual(value, 'foo') - - def test_can_set_config_provider(self): - foo_value_provider = mock.Mock(spec=BaseProvider) -@@ -448,7 +447,7 @@ - providers=providers, - ) - value = provider.provide() -- assert_equal(value, expected_value) -+ assert value == expected_value - - - def test_chain_provider(): -@@ -468,9 +467,9 @@ - ('foo', ['foo', 'bar', 'baz']), - ] - for case in cases: -- yield assert_chain_does_provide, \ -- _make_providers_that_return(case[1]), \ -- case[0] -+ assert_chain_does_provide( -+ _make_providers_that_return(case[1]), -+ case[0]) - - - class TestChainProvider(unittest.TestCase): -diff -Nru botocore-1.18.15.orig/tests/unit/test_credentials.py botocore-1.18.15/tests/unit/test_credentials.py ---- botocore-1.18.15.orig/tests/unit/test_credentials.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_credentials.py 2020-10-09 10:13:49.536472205 +0200 -@@ -13,7 +13,7 @@ - # language governing permissions and limitations under the License. 
- from datetime import datetime, timedelta - import subprocess --import mock -+from tests import mock - import os - import tempfile - import shutil -@@ -1083,7 +1083,7 @@ - "Credentials were refreshed, but the refreshed credentials are " - "still expired." - ) -- with self.assertRaisesRegexp(RuntimeError, error_message): -+ with six.assertRaisesRegex(self, RuntimeError, error_message): - creds.get_frozen_credentials() - - def test_partial_creds_is_an_error(self): -@@ -1149,7 +1149,7 @@ - "Credentials were refreshed, but the refreshed credentials are " - "still expired." - ) -- with self.assertRaisesRegexp(RuntimeError, error_message): -+ with six.assertRaisesRegex(self, RuntimeError, error_message): - creds.get_frozen_credentials() - - # Now we update the environment with non-expired credentials, -@@ -2745,7 +2745,7 @@ - mandatory_refresh=7, - refresh_function=fail_refresh - ) -- with self.assertRaisesRegexp(Exception, 'refresh failed'): -+ with six.assertRaisesRegex(self, Exception, 'refresh failed'): - creds.get_frozen_credentials() - - def test_exception_propogated_on_expired_credentials(self): -@@ -2758,7 +2758,7 @@ - mandatory_refresh=7, - refresh_function=fail_refresh - ) -- with self.assertRaisesRegexp(Exception, 'refresh failed'): -+ with six.assertRaisesRegex(self, Exception, 'refresh failed'): - # Because credentials are actually expired, any - # failure to refresh should be propagated. - creds.get_frozen_credentials() -@@ -2779,7 +2779,7 @@ - creds_last_for=-2, - ) - err_msg = 'refreshed credentials are still expired' -- with self.assertRaisesRegexp(RuntimeError, err_msg): -+ with six.assertRaisesRegex(self, RuntimeError, err_msg): - # Because credentials are actually expired, any - # failure to refresh should be propagated. - creds.get_frozen_credentials() -@@ -3067,7 +3067,7 @@ - - provider = self.create_process_provider() - exception = botocore.exceptions.CredentialRetrievalError -- with self.assertRaisesRegexp(exception, 'Error Message'): -+ with six.assertRaisesRegex(self, exception, 'Error Message'): - provider.load() - - def test_unsupported_version_raises_mismatch(self): -@@ -3085,7 +3085,7 @@ - - provider = self.create_process_provider() - exception = botocore.exceptions.CredentialRetrievalError -- with self.assertRaisesRegexp(exception, 'Unsupported version'): -+ with six.assertRaisesRegex(self, exception, 'Unsupported version'): - provider.load() - - def test_missing_version_in_payload_returned_raises_exception(self): -@@ -3102,7 +3102,7 @@ - - provider = self.create_process_provider() - exception = botocore.exceptions.CredentialRetrievalError -- with self.assertRaisesRegexp(exception, 'Unsupported version'): -+ with six.assertRaisesRegex(self, exception, 'Unsupported version'): - provider.load() - - def test_missing_access_key_raises_exception(self): -@@ -3119,7 +3119,7 @@ - - provider = self.create_process_provider() - exception = botocore.exceptions.CredentialRetrievalError -- with self.assertRaisesRegexp(exception, 'Missing required key'): -+ with six.assertRaisesRegex(self, exception, 'Missing required key'): - provider.load() - - def test_missing_secret_key_raises_exception(self): -@@ -3136,7 +3136,7 @@ - - provider = self.create_process_provider() - exception = botocore.exceptions.CredentialRetrievalError -- with self.assertRaisesRegexp(exception, 'Missing required key'): -+ with six.assertRaisesRegex(self, exception, 'Missing required key'): - provider.load() - - def test_missing_session_token(self): -diff -Nru 
botocore-1.18.15.orig/tests/unit/test_discovery.py botocore-1.18.15/tests/unit/test_discovery.py ---- botocore-1.18.15.orig/tests/unit/test_discovery.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_discovery.py 2020-10-09 10:13:49.560472538 +0200 -@@ -1,5 +1,8 @@ - import time --from mock import Mock, call -+try: -+ from mock import Mock, call -+except ImportError: -+ from unittest.mock import Mock, call - from tests import unittest - - from botocore.awsrequest import AWSRequest -diff -Nru botocore-1.18.15.orig/tests/unit/test_endpoint.py botocore-1.18.15/tests/unit/test_endpoint.py ---- botocore-1.18.15.orig/tests/unit/test_endpoint.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_endpoint.py 2020-10-09 10:13:49.560472538 +0200 -@@ -13,7 +13,10 @@ - import socket - from tests import unittest - --from mock import Mock, patch, sentinel -+try: -+ from mock import Mock, patch, sentinel -+except ImportError: -+ from unittest.mock import Mock, patch, sentinel - - from botocore.compat import six - from botocore.awsrequest import AWSRequest -diff -Nru botocore-1.18.15.orig/tests/unit/test_errorfactory.py botocore-1.18.15/tests/unit/test_errorfactory.py ---- botocore-1.18.15.orig/tests/unit/test_errorfactory.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_errorfactory.py 2020-10-09 10:13:49.536472205 +0200 -@@ -12,6 +12,7 @@ - # language governing permissions and limitations under the License. - from tests import unittest - -+from botocore.compat import six - from botocore.exceptions import ClientError - from botocore.errorfactory import BaseClientExceptions - from botocore.errorfactory import ClientExceptionsFactory -@@ -39,7 +40,7 @@ - def test_gettattr_message(self): - exception_cls = type('MyException', (ClientError,), {}) - self.code_to_exception['MyExceptionCode'] = exception_cls -- with self.assertRaisesRegexp( -+ with six.assertRaisesRegex(self, - AttributeError, 'Valid exceptions are: MyException'): - self.exceptions.SomeUnmodeledError - -diff -Nru botocore-1.18.15.orig/tests/unit/test_eventstream.py botocore-1.18.15/tests/unit/test_eventstream.py ---- botocore-1.18.15.orig/tests/unit/test_eventstream.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_eventstream.py 2020-10-09 10:13:49.520471985 +0200 -@@ -12,8 +12,10 @@ - # language governing permissions and limitations under the License. - """Unit tests for the binary event stream decoder. """ - --from mock import Mock --from nose.tools import assert_equal, raises -+try: -+ from mock import Mock -+except ImportError: -+ from unittest.mock import Mock - - from botocore.parsers import EventStreamXMLParser - from botocore.eventstream import ( -@@ -240,18 +242,12 @@ - - def assert_message_equal(message_a, message_b): - """Asserts all fields for two messages are equal. 
""" -- assert_equal( -- message_a.prelude.total_length, -- message_b.prelude.total_length -- ) -- assert_equal( -- message_a.prelude.headers_length, -- message_b.prelude.headers_length -- ) -- assert_equal(message_a.prelude.crc, message_b.prelude.crc) -- assert_equal(message_a.headers, message_b.headers) -- assert_equal(message_a.payload, message_b.payload) -- assert_equal(message_a.crc, message_b.crc) -+ assert message_a.prelude.total_length == message_b.prelude.total_length -+ assert message_a.prelude.headers_length == message_b.prelude.headers_length -+ assert message_a.prelude.crc == message_b.prelude.crc -+ assert message_a.headers == message_b.headers -+ assert message_a.payload == message_b.payload -+ assert message_a.crc == message_b.crc - - - def test_partial_message(): -@@ -262,7 +258,7 @@ - mid_point = 15 - event_buffer.add_data(data[:mid_point]) - messages = list(event_buffer) -- assert_equal(messages, []) -+ assert messages == [] - event_buffer.add_data(data[mid_point:len(data)]) - for message in event_buffer: - assert_message_equal(message, EMPTY_MESSAGE[1]) -@@ -280,7 +276,7 @@ - def test_positive_cases(): - """Test that all positive cases decode how we expect. """ - for (encoded, decoded) in POSITIVE_CASES: -- yield check_message_decodes, encoded, decoded -+ check_message_decodes(encoded, decoded) - - - def test_all_positive_cases(): -@@ -301,8 +297,13 @@ - def test_negative_cases(): - """Test that all negative cases raise the expected exception. """ - for (encoded, exception) in NEGATIVE_CASES: -- test_function = raises(exception)(check_message_decodes) -- yield test_function, encoded, None -+ try: -+ check_message_decodes(encoded, None) -+ except exception: -+ pass -+ else: -+ raise AssertionError( -+ 'Expected exception {!s} has not been raised.'.format(exception)) - - - def test_header_parser(): -@@ -329,87 +330,87 @@ - - parser = EventStreamHeaderParser() - headers = parser.parse(headers_data) -- assert_equal(headers, expected_headers) -+ assert headers == expected_headers - - - def test_message_prelude_properties(): - """Test that calculated properties from the payload are correct. 
""" - # Total length: 40, Headers Length: 15, random crc - prelude = MessagePrelude(40, 15, 0x00000000) -- assert_equal(prelude.payload_length, 9) -- assert_equal(prelude.headers_end, 27) -- assert_equal(prelude.payload_end, 36) -+ assert prelude.payload_length == 9 -+ assert prelude.headers_end == 27 -+ assert prelude.payload_end == 36 - - - def test_message_to_response_dict(): - response_dict = PAYLOAD_ONE_STR_HEADER[1].to_response_dict() -- assert_equal(response_dict['status_code'], 200) -+ assert response_dict['status_code'] == 200 - expected_headers = {'content-type': 'application/json'} -- assert_equal(response_dict['headers'], expected_headers) -- assert_equal(response_dict['body'], b"{'foo':'bar'}") -+ assert response_dict['headers'] == expected_headers -+ assert response_dict['body'] == b"{'foo':'bar'}" - - - def test_message_to_response_dict_error(): - response_dict = ERROR_EVENT_MESSAGE[1].to_response_dict() -- assert_equal(response_dict['status_code'], 400) -+ assert response_dict['status_code'] == 400 - headers = { - ':message-type': 'error', - ':error-code': 'code', - ':error-message': 'message', - } -- assert_equal(response_dict['headers'], headers) -- assert_equal(response_dict['body'], b'') -+ assert response_dict['headers'] == headers -+ assert response_dict['body'] == b'' - - - def test_unpack_uint8(): - (value, bytes_consumed) = DecodeUtils.unpack_uint8(b'\xDE') -- assert_equal(bytes_consumed, 1) -- assert_equal(value, 0xDE) -+ assert bytes_consumed == 1 -+ assert value == 0xDE - - - def test_unpack_uint32(): - (value, bytes_consumed) = DecodeUtils.unpack_uint32(b'\xDE\xAD\xBE\xEF') -- assert_equal(bytes_consumed, 4) -- assert_equal(value, 0xDEADBEEF) -+ assert bytes_consumed == 4 -+ assert value == 0xDEADBEEF - - - def test_unpack_int8(): - (value, bytes_consumed) = DecodeUtils.unpack_int8(b'\xFE') -- assert_equal(bytes_consumed, 1) -- assert_equal(value, -2) -+ assert bytes_consumed == 1 -+ assert value == -2 - - - def test_unpack_int16(): - (value, bytes_consumed) = DecodeUtils.unpack_int16(b'\xFF\xFE') -- assert_equal(bytes_consumed, 2) -- assert_equal(value, -2) -+ assert bytes_consumed == 2 -+ assert value == -2 - - - def test_unpack_int32(): - (value, bytes_consumed) = DecodeUtils.unpack_int32(b'\xFF\xFF\xFF\xFE') -- assert_equal(bytes_consumed, 4) -- assert_equal(value, -2) -+ assert bytes_consumed == 4 -+ assert value == -2 - - - def test_unpack_int64(): - test_bytes = b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFE' - (value, bytes_consumed) = DecodeUtils.unpack_int64(test_bytes) -- assert_equal(bytes_consumed, 8) -- assert_equal(value, -2) -+ assert bytes_consumed == 8 -+ assert value == -2 - - - def test_unpack_array_short(): - test_bytes = b'\x00\x10application/json' - (value, bytes_consumed) = DecodeUtils.unpack_byte_array(test_bytes) -- assert_equal(bytes_consumed, 18) -- assert_equal(value, b'application/json') -+ assert bytes_consumed == 18 -+ assert value == b'application/json' - - - def test_unpack_byte_array_int(): - (value, array_bytes_consumed) = DecodeUtils.unpack_byte_array( - b'\x00\x00\x00\x10application/json', length_byte_size=4) -- assert_equal(array_bytes_consumed, 20) -- assert_equal(value, b'application/json') -+ assert array_bytes_consumed == 20 -+ assert value == b'application/json' - - - def test_unpack_utf8_string(): -@@ -417,18 +418,19 @@ - utf8_string = b'\xe6\x97\xa5\xe6\x9c\xac\xe8\xaa\x9e' - encoded = length + utf8_string - (value, bytes_consumed) = DecodeUtils.unpack_utf8_string(encoded) -- assert_equal(bytes_consumed, 11) -- 
assert_equal(value, utf8_string.decode('utf-8')) -+ assert bytes_consumed == 11 -+ assert value == utf8_string.decode('utf-8') - - - def test_unpack_prelude(): - data = b'\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03' - prelude = DecodeUtils.unpack_prelude(data) -- assert_equal(prelude, ((1, 2, 3), 12)) -+ assert prelude == ((1, 2, 3), 12) - - - def create_mock_raw_stream(*data): - raw_stream = Mock() -+ - def generator(): - for chunk in data: - yield chunk -@@ -445,7 +447,7 @@ - output_shape = Mock() - event_stream = EventStream(raw_stream, output_shape, parser, '') - events = list(event_stream) -- assert_equal(len(events), 1) -+ assert len(events) == 1 - - response_dict = { - 'headers': {'event-id': 0x0000a00c}, -@@ -455,14 +457,19 @@ - parser.parse.assert_called_with(response_dict, output_shape) - - --@raises(EventStreamError) - def test_eventstream_wrapper_iteration_error(): -- raw_stream = create_mock_raw_stream(ERROR_EVENT_MESSAGE[0]) -- parser = Mock(spec=EventStreamXMLParser) -- parser.parse.return_value = {} -- output_shape = Mock() -- event_stream = EventStream(raw_stream, output_shape, parser, '') -- list(event_stream) -+ try: -+ raw_stream = create_mock_raw_stream(ERROR_EVENT_MESSAGE[0]) -+ parser = Mock(spec=EventStreamXMLParser) -+ parser.parse.return_value = {} -+ output_shape = Mock() -+ event_stream = EventStream(raw_stream, output_shape, parser, '') -+ list(event_stream) -+ except EventStreamError: -+ pass -+ else: -+ raise AssertionError( -+ 'Expected exception EventStreamError has not been raised.') - - - def test_event_stream_wrapper_close(): -@@ -492,22 +499,32 @@ - assert event.payload == payload - - --@raises(NoInitialResponseError) - def test_event_stream_initial_response_wrong_type(): -- raw_stream = create_mock_raw_stream( -- b"\x00\x00\x00+\x00\x00\x00\x0e4\x8b\xec{\x08event-id\x04\x00", -- b"\x00\xa0\x0c{'foo':'bar'}\xd3\x89\x02\x85", -- ) -- parser = Mock(spec=EventStreamXMLParser) -- output_shape = Mock() -- event_stream = EventStream(raw_stream, output_shape, parser, '') -- event_stream.get_initial_response() -+ try: -+ raw_stream = create_mock_raw_stream( -+ b"\x00\x00\x00+\x00\x00\x00\x0e4\x8b\xec{\x08event-id\x04\x00", -+ b"\x00\xa0\x0c{'foo':'bar'}\xd3\x89\x02\x85", -+ ) -+ parser = Mock(spec=EventStreamXMLParser) -+ output_shape = Mock() -+ event_stream = EventStream(raw_stream, output_shape, parser, '') -+ event_stream.get_initial_response() -+ except NoInitialResponseError: -+ pass -+ else: -+ raise AssertionError( -+ 'Expected exception NoInitialResponseError has not been raised.') - - --@raises(NoInitialResponseError) - def test_event_stream_initial_response_no_event(): -- raw_stream = create_mock_raw_stream(b'') -- parser = Mock(spec=EventStreamXMLParser) -- output_shape = Mock() -- event_stream = EventStream(raw_stream, output_shape, parser, '') -- event_stream.get_initial_response() -+ try: -+ raw_stream = create_mock_raw_stream(b'') -+ parser = Mock(spec=EventStreamXMLParser) -+ output_shape = Mock() -+ event_stream = EventStream(raw_stream, output_shape, parser, '') -+ event_stream.get_initial_response() -+ except NoInitialResponseError: -+ pass -+ else: -+ raise AssertionError( -+ 'Expected exception NoInitialResponseError has not been raised.') -diff -Nru botocore-1.18.15.orig/tests/unit/test_exceptions.py botocore-1.18.15/tests/unit/test_exceptions.py ---- botocore-1.18.15.orig/tests/unit/test_exceptions.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_exceptions.py 2020-10-09 10:13:49.520471985 +0200 -@@ -14,8 
+14,6 @@ - import pickle - from tests import unittest - --from nose.tools import assert_equal -- - import botocore.awsrequest - import botocore.session - from botocore import exceptions -@@ -24,7 +22,7 @@ - def test_client_error_can_handle_missing_code_or_message(): - response = {'Error': {}} - expect = 'An error occurred (Unknown) when calling the blackhole operation: Unknown' -- assert_equal(str(exceptions.ClientError(response, 'blackhole')), expect) -+ assert str(exceptions.ClientError(response, 'blackhole')) == expect - - - def test_client_error_has_operation_name_set(): -@@ -36,7 +34,7 @@ - def test_client_error_set_correct_operation_name(): - response = {'Error': {}} - exception = exceptions.ClientError(response, 'blackhole') -- assert_equal(exception.operation_name, 'blackhole') -+ assert exception.operation_name == 'blackhole' - - - def test_retry_info_added_when_present(): -diff -Nru botocore-1.18.15.orig/tests/unit/test_handlers.py botocore-1.18.15/tests/unit/test_handlers.py ---- botocore-1.18.15.orig/tests/unit/test_handlers.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_handlers.py 2020-10-09 10:13:49.536472205 +0200 -@@ -14,7 +14,7 @@ - from tests import unittest, BaseSessionTest - - import base64 --import mock -+from tests import mock - import copy - import os - import json -@@ -126,7 +126,7 @@ - 'foo/keyname%2B?versionId=asdf+') - - def test_copy_source_has_validation_failure(self): -- with self.assertRaisesRegexp(ParamValidationError, 'Key'): -+ with six.assertRaisesRegex(self, ParamValidationError, 'Key'): - handlers.handle_copy_source_param( - {'CopySource': {'Bucket': 'foo'}}) - -diff -Nru botocore-1.18.15.orig/tests/unit/test_history.py botocore-1.18.15/tests/unit/test_history.py ---- botocore-1.18.15.orig/tests/unit/test_history.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_history.py 2020-10-09 10:13:49.556472483 +0200 -@@ -1,6 +1,6 @@ - from tests import unittest - --import mock -+from tests import mock - - from botocore.history import HistoryRecorder - from botocore.history import BaseHistoryHandler -diff -Nru botocore-1.18.15.orig/tests/unit/test_http_client_exception_mapping.py botocore-1.18.15/tests/unit/test_http_client_exception_mapping.py ---- botocore-1.18.15.orig/tests/unit/test_http_client_exception_mapping.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_http_client_exception_mapping.py 2020-10-09 10:13:49.524472039 +0200 -@@ -1,4 +1,4 @@ --from nose.tools import assert_raises -+import unittest - - from botocore import exceptions as botocore_exceptions - from botocore.vendored.requests import exceptions as requests_exceptions -@@ -13,15 +13,9 @@ - ] - - --def _raise_exception(exception): -- raise exception(endpoint_url=None, proxy_url=None, error=None) -- -- --def _test_exception_mapping(new_exception, old_exception): -- # assert that the new exception can still be caught by the old vendored one -- assert_raises(old_exception, _raise_exception, new_exception) -- -- --def test_http_client_exception_mapping(): -- for new_exception, old_exception in EXCEPTION_MAPPING: -- yield _test_exception_mapping, new_exception, old_exception -+class TestHttpClientExceptionMapping(unittest.TestCase): -+ def test_http_client_exception_mapping(self): -+ for new_exception, old_exception in EXCEPTION_MAPPING: -+ with self.assertRaises(old_exception): -+ raise new_exception(endpoint_url=None, proxy_url=None, -+ error=None) -diff -Nru botocore-1.18.15.orig/tests/unit/test_http_session.py 
botocore-1.18.15/tests/unit/test_http_session.py ---- botocore-1.18.15.orig/tests/unit/test_http_session.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_http_session.py 2020-10-09 10:13:49.524472039 +0200 -@@ -1,11 +1,12 @@ - import socket - --from mock import patch, Mock, ANY -+try: -+ from mock import patch, Mock, ANY -+except ImportError: -+ from unittest.mock import patch, Mock, ANY - from tests import unittest --from nose.tools import raises - from urllib3.exceptions import NewConnectionError, ProtocolError - --from botocore.vendored import six - from botocore.awsrequest import AWSRequest - from botocore.awsrequest import AWSHTTPConnectionPool, AWSHTTPSConnectionPool - from botocore.httpsession import get_cert_path -@@ -250,15 +251,15 @@ - session = URLLib3Session() - session.send(self.request.prepare()) - -- @raises(EndpointConnectionError) - def test_catches_new_connection_error(self): -- error = NewConnectionError(None, None) -- self.make_request_with_error(error) -+ with self.assertRaises(EndpointConnectionError): -+ error = NewConnectionError(None, None) -+ self.make_request_with_error(error) - -- @raises(ConnectionClosedError) - def test_catches_bad_status_line(self): -- error = ProtocolError(None) -- self.make_request_with_error(error) -+ with self.assertRaises(ConnectionClosedError): -+ error = ProtocolError(None) -+ self.make_request_with_error(error) - - def test_aws_connection_classes_are_used(self): - session = URLLib3Session() -diff -Nru botocore-1.18.15.orig/tests/unit/test_idempotency.py botocore-1.18.15/tests/unit/test_idempotency.py ---- botocore-1.18.15.orig/tests/unit/test_idempotency.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_idempotency.py 2020-10-09 10:13:49.556472483 +0200 -@@ -13,7 +13,7 @@ - - from tests import unittest - import re --import mock -+from tests import mock - from botocore.handlers import generate_idempotent_uuid - - -diff -Nru botocore-1.18.15.orig/tests/unit/test_loaders.py botocore-1.18.15/tests/unit/test_loaders.py ---- botocore-1.18.15.orig/tests/unit/test_loaders.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_loaders.py 2020-10-09 10:13:49.540472262 +0200 -@@ -22,12 +22,13 @@ - import os - import contextlib - import copy --import mock -+from tests import mock - - from botocore.exceptions import DataNotFoundError, UnknownServiceError - from botocore.loaders import JSONFileLoader - from botocore.loaders import Loader, create_loader - from botocore.loaders import ExtrasProcessor -+from botocore.compat import six - - from tests import BaseEnvVar - -@@ -156,8 +157,8 @@ - - # Should have a) the unknown service name and b) list of valid - # service names. -- with self.assertRaisesRegexp(UnknownServiceError, -- 'Unknown service.*BAZ.*baz'): -+ with six.assertRaisesRegex(self, UnknownServiceError, -+ 'Unknown service.*BAZ.*baz'): - loader.load_service_model('BAZ', type_name='service-2') - - def test_load_service_model_uses_provided_type_name(self): -@@ -169,8 +170,8 @@ - # Should have a) the unknown service name and b) list of valid - # service names. 
- provided_type_name = 'not-service-2' -- with self.assertRaisesRegexp(UnknownServiceError, -- 'Unknown service.*BAZ.*baz'): -+ with six.assertRaisesRegex(self, UnknownServiceError, -+ 'Unknown service.*BAZ.*baz'): - loader.load_service_model( - 'BAZ', type_name=provided_type_name) - -diff -Nru botocore-1.18.15.orig/tests/unit/test_model.py botocore-1.18.15/tests/unit/test_model.py ---- botocore-1.18.15.orig/tests/unit/test_model.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_model.py 2020-10-09 10:13:49.524472039 +0200 -@@ -2,11 +2,11 @@ - - from botocore import model - from botocore.compat import OrderedDict --from botocore.exceptions import MissingServiceIdError -+from botocore.compat import six - - - def test_missing_model_attribute_raises_exception(): -- # We're using a nose test generator here to cut down -+ # We're using a test generator here to cut down - # on the duplication. The property names below - # all have the same test logic. - service_model = model.ServiceModel({'metadata': {'endpointPrefix': 'foo'}}) -@@ -28,7 +28,7 @@ - "be raised, but no exception was raised for: %s" % attr_name) - - for name in property_names: -- yield _test_attribute_raise_exception, name -+ _test_attribute_raise_exception(name) - - - class TestServiceId(unittest.TestCase): -@@ -105,9 +105,9 @@ - } - service_name = 'myservice' - service_model = model.ServiceModel(service_model, service_name) -- with self.assertRaisesRegexp(model.UndefinedModelAttributeError, -- service_name): -- service_model.service_id -+ with six.assertRaisesRegex(self, model.UndefinedModelAttributeError, -+ service_name): -+ service_model.service_id() - - def test_operation_does_not_exist(self): - with self.assertRaises(model.OperationNotFoundError): -diff -Nru botocore-1.18.15.orig/tests/unit/test_paginate.py botocore-1.18.15/tests/unit/test_paginate.py ---- botocore-1.18.15.orig/tests/unit/test_paginate.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_paginate.py 2020-10-09 10:13:49.540472262 +0200 -@@ -20,7 +20,7 @@ - from botocore.exceptions import PaginationError - from botocore.compat import six - --import mock -+from tests import mock - - - def encode_token(token): -@@ -823,7 +823,7 @@ - {"Users": ["User3"]}, - ] - self.method.side_effect = responses -- with self.assertRaisesRegexp(ValueError, 'Bad starting token'): -+ with six.assertRaisesRegex(self, ValueError, 'Bad starting token'): - pagination_config = {'StartingToken': 'does___not___work'} - self.paginator.paginate( - PaginationConfig=pagination_config).build_full_result() -diff -Nru botocore-1.18.15.orig/tests/unit/test_parsers.py botocore-1.18.15/tests/unit/test_parsers.py ---- botocore-1.18.15.orig/tests/unit/test_parsers.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_parsers.py 2020-10-09 10:13:49.524472039 +0200 -@@ -14,11 +14,11 @@ - import datetime - - from dateutil.tz import tzutc --from nose.tools import assert_equal - - from botocore import parsers - from botocore import model - from botocore.compat import json, MutableMapping -+from botocore.compat import six - - - # HTTP responses will typically return a custom HTTP -@@ -597,8 +597,8 @@ - parser = parsers.QueryParser() - output_shape = None - # The XML body should be in the error message. 
-- with self.assertRaisesRegexp(parsers.ResponseParserError, -- '\x08\xa5.R\xd7\xda}\xab\xfa\xc4\x84\xef" -+ b"\xe3zS\x80\xee\x90\x88\xf7\xac\xe2\xef\xcd\xe9")) -+ -+ -+class TestTreeHash(unittest.TestCase): -+ # Note that for these tests I've independently verified -+ # what the expected tree hashes should be from other -+ # SDK implementations. -+ -+ def test_empty_tree_hash(self): -+ self.assertEqual( -+ calculate_tree_hash(six.BytesIO(b'')), -+ 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855') -+ -+ def test_tree_hash_less_than_one_mb(self): -+ one_k = six.BytesIO(b'a' * 1024) -+ self.assertEqual( -+ calculate_tree_hash(one_k), -+ '2edc986847e209b4016e141a6dc8716d3207350f416969382d431539bf292e4a') -+ -+ def test_tree_hash_exactly_one_mb(self): -+ one_meg_bytestring = b'a' * (1 * 1024 * 1024) -+ one_meg = six.BytesIO(one_meg_bytestring) -+ self.assertEqual( -+ calculate_tree_hash(one_meg), -+ '9bc1b2a288b26af7257a36277ae3816a7d4f16e89c1e7e77d0a5c48bad62b360') -+ -+ def test_tree_hash_multiple_of_one_mb(self): -+ four_mb = six.BytesIO(b'a' * (4 * 1024 * 1024)) -+ self.assertEqual( -+ calculate_tree_hash(four_mb), -+ '9491cb2ed1d4e7cd53215f4017c23ec4ad21d7050a1e6bb636c4f67e8cddb844') -+ -+ def test_tree_hash_offset_of_one_mb_multiple(self): -+ offset_four_mb = six.BytesIO(b'a' * (4 * 1024 * 1024) + b'a' * 20) -+ self.assertEqual( -+ calculate_tree_hash(offset_four_mb), -+ '12f3cbd6101b981cde074039f6f728071da8879d6f632de8afc7cdf00661b08f') -+ -+ -+class TestIsValidEndpointURL(unittest.TestCase): -+ def test_dns_name_is_valid(self): -+ self.assertTrue(is_valid_endpoint_url('https://s3.amazonaws.com/')) -+ -+ def test_ip_address_is_allowed(self): -+ self.assertTrue(is_valid_endpoint_url('https://10.10.10.10/')) -+ -+ def test_path_component_ignored(self): -+ self.assertTrue( -+ is_valid_endpoint_url('https://foo.bar.com/other/path/')) -+ -+ def test_can_have_port(self): -+ self.assertTrue(is_valid_endpoint_url('https://foo.bar.com:12345/')) -+ -+ def test_ip_can_have_port(self): -+ self.assertTrue(is_valid_endpoint_url('https://10.10.10.10:12345/')) -+ -+ def test_cannot_have_spaces(self): -+ self.assertFalse(is_valid_endpoint_url('https://my invalid name/')) -+ -+ def test_missing_scheme(self): -+ self.assertFalse(is_valid_endpoint_url('foo.bar.com')) -+ -+ def test_no_new_lines(self): -+ self.assertFalse(is_valid_endpoint_url('https://foo.bar.com\nbar/')) -+ -+ def test_long_hostname(self): -+ long_hostname = 'htps://%s.com' % ('a' * 256) -+ self.assertFalse(is_valid_endpoint_url(long_hostname)) -+ -+ def test_hostname_can_end_with_dot(self): -+ self.assertTrue(is_valid_endpoint_url('https://foo.bar.com./')) -+ -+ def test_hostname_no_dots(self): -+ self.assertTrue(is_valid_endpoint_url('https://foo/')) -+ -+ -+class TestFixS3Host(unittest.TestCase): -+ def test_fix_s3_host_initial(self): -+ request = AWSRequest( -+ method='PUT', headers={}, -+ url='https://s3-us-west-2.amazonaws.com/bucket/key.txt' -+ ) -+ region_name = 'us-west-2' -+ signature_version = 's3' -+ fix_s3_host( -+ request=request, signature_version=signature_version, -+ region_name=region_name) -+ self.assertEqual(request.url, -+ 'https://bucket.s3-us-west-2.amazonaws.com/key.txt') -+ self.assertEqual(request.auth_path, '/bucket/key.txt') -+ -+ def test_fix_s3_host_only_applied_once(self): -+ request = AWSRequest( -+ method='PUT', headers={}, -+ url='https://s3.us-west-2.amazonaws.com/bucket/key.txt' -+ ) -+ region_name = 'us-west-2' -+ signature_version = 's3' -+ fix_s3_host( -+ request=request, 
signature_version=signature_version, -+ region_name=region_name) -+ # Calling the handler again should not affect the end result: -+ fix_s3_host( -+ request=request, signature_version=signature_version, -+ region_name=region_name) -+ self.assertEqual(request.url, -+ 'https://bucket.s3.us-west-2.amazonaws.com/key.txt') -+ # This was a bug previously. We want to make sure that -+ # calling fix_s3_host() again does not alter the auth_path. -+ # Otherwise we'll get signature errors. -+ self.assertEqual(request.auth_path, '/bucket/key.txt') -+ -+ def test_dns_style_not_used_for_get_bucket_location(self): -+ original_url = 'https://s3-us-west-2.amazonaws.com/bucket?location' -+ request = AWSRequest( -+ method='GET', headers={}, -+ url=original_url, -+ ) -+ signature_version = 's3' -+ region_name = 'us-west-2' -+ fix_s3_host( -+ request=request, signature_version=signature_version, -+ region_name=region_name) -+ # The request url should not have been modified because this is -+ # a request for GetBucketLocation. -+ self.assertEqual(request.url, original_url) -+ -+ def test_can_provide_default_endpoint_url(self): -+ request = AWSRequest( -+ method='PUT', headers={}, -+ url='https://s3-us-west-2.amazonaws.com/bucket/key.txt' -+ ) -+ region_name = 'us-west-2' -+ signature_version = 's3' -+ fix_s3_host( -+ request=request, signature_version=signature_version, -+ region_name=region_name, -+ default_endpoint_url='foo.s3.amazonaws.com') -+ self.assertEqual(request.url, -+ 'https://bucket.foo.s3.amazonaws.com/key.txt') -+ -+ def test_no_endpoint_url_uses_request_url(self): -+ request = AWSRequest( -+ method='PUT', headers={}, -+ url='https://s3-us-west-2.amazonaws.com/bucket/key.txt' -+ ) -+ region_name = 'us-west-2' -+ signature_version = 's3' -+ fix_s3_host( -+ request=request, signature_version=signature_version, -+ region_name=region_name, -+ # A value of None means use the url in the current request. 
-+ default_endpoint_url=None, -+ ) -+ self.assertEqual(request.url, -+ 'https://bucket.s3-us-west-2.amazonaws.com/key.txt') -+ -+ -+class TestSwitchToVirtualHostStyle(unittest.TestCase): -+ def test_switch_to_virtual_host_style(self): -+ request = AWSRequest( -+ method='PUT', headers={}, -+ url='https://foo.amazonaws.com/bucket/key.txt' -+ ) -+ region_name = 'us-west-2' -+ signature_version = 's3' -+ switch_to_virtual_host_style( -+ request=request, signature_version=signature_version, -+ region_name=region_name) -+ self.assertEqual(request.url, -+ 'https://bucket.foo.amazonaws.com/key.txt') -+ self.assertEqual(request.auth_path, '/bucket/key.txt') -+ -+ def test_uses_default_endpoint(self): -+ request = AWSRequest( -+ method='PUT', headers={}, -+ url='https://foo.amazonaws.com/bucket/key.txt' -+ ) -+ region_name = 'us-west-2' -+ signature_version = 's3' -+ switch_to_virtual_host_style( -+ request=request, signature_version=signature_version, -+ region_name=region_name, default_endpoint_url='s3.amazonaws.com') -+ self.assertEqual(request.url, -+ 'https://bucket.s3.amazonaws.com/key.txt') -+ self.assertEqual(request.auth_path, '/bucket/key.txt') -+ -+ def test_throws_invalid_dns_name_error(self): -+ request = AWSRequest( -+ method='PUT', headers={}, -+ url='https://foo.amazonaws.com/mybucket.foo/key.txt' -+ ) -+ region_name = 'us-west-2' -+ signature_version = 's3' -+ with self.assertRaises(InvalidDNSNameError): -+ switch_to_virtual_host_style( -+ request=request, signature_version=signature_version, -+ region_name=region_name) -+ -+ def test_fix_s3_host_only_applied_once(self): -+ request = AWSRequest( -+ method='PUT', headers={}, -+ url='https://foo.amazonaws.com/bucket/key.txt' -+ ) -+ region_name = 'us-west-2' -+ signature_version = 's3' -+ switch_to_virtual_host_style( -+ request=request, signature_version=signature_version, -+ region_name=region_name) -+ # Calling the handler again should not affect the end result: -+ switch_to_virtual_host_style( -+ request=request, signature_version=signature_version, -+ region_name=region_name) -+ self.assertEqual(request.url, -+ 'https://bucket.foo.amazonaws.com/key.txt') -+ # This was a bug previously. We want to make sure that -+ # calling fix_s3_host() again does not alter the auth_path. -+ # Otherwise we'll get signature errors. -+ self.assertEqual(request.auth_path, '/bucket/key.txt') -+ -+ def test_virtual_host_style_for_make_bucket(self): -+ request = AWSRequest( -+ method='PUT', headers={}, -+ url='https://foo.amazonaws.com/bucket' -+ ) -+ region_name = 'us-west-2' -+ signature_version = 's3' -+ switch_to_virtual_host_style( -+ request=request, signature_version=signature_version, -+ region_name=region_name) -+ self.assertEqual(request.url, -+ 'https://bucket.foo.amazonaws.com/') -+ -+ def test_virtual_host_style_not_used_for_get_bucket_location(self): -+ original_url = 'https://foo.amazonaws.com/bucket?location' -+ request = AWSRequest( -+ method='GET', headers={}, -+ url=original_url, -+ ) -+ signature_version = 's3' -+ region_name = 'us-west-2' -+ switch_to_virtual_host_style( -+ request=request, signature_version=signature_version, -+ region_name=region_name) -+ # The request url should not have been modified because this is -+ # a request for GetBucketLocation. 
-+ self.assertEqual(request.url, original_url) -+ -+ def test_virtual_host_style_not_used_for_list_buckets(self): -+ original_url = 'https://foo.amazonaws.com/' -+ request = AWSRequest( -+ method='GET', headers={}, -+ url=original_url, -+ ) -+ signature_version = 's3' -+ region_name = 'us-west-2' -+ switch_to_virtual_host_style( -+ request=request, signature_version=signature_version, -+ region_name=region_name) -+ # The request url should not have been modified because this is -+ # a request for GetBucketLocation. -+ self.assertEqual(request.url, original_url) -+ -+ def test_is_unaffected_by_sigv4(self): -+ request = AWSRequest( -+ method='PUT', headers={}, -+ url='https://foo.amazonaws.com/bucket/key.txt' -+ ) -+ region_name = 'us-west-2' -+ signature_version = 's3v4' -+ switch_to_virtual_host_style( -+ request=request, signature_version=signature_version, -+ region_name=region_name, default_endpoint_url='s3.amazonaws.com') -+ self.assertEqual(request.url, -+ 'https://bucket.s3.amazonaws.com/key.txt') -+ -+ -+class TestInstanceCache(unittest.TestCase): -+ class DummyClass(object): -+ def __init__(self, cache): -+ self._instance_cache = cache -+ -+ @instance_cache -+ def add(self, x, y): -+ return x + y -+ -+ @instance_cache -+ def sub(self, x, y): -+ return x - y -+ -+ def setUp(self): -+ self.cache = {} -+ -+ def test_cache_single_method_call(self): -+ adder = self.DummyClass(self.cache) -+ self.assertEqual(adder.add(2, 1), 3) -+ # This should result in one entry in the cache. -+ self.assertEqual(len(self.cache), 1) -+ # When we call the method with the same args, -+ # we should reuse the same entry in the cache. -+ self.assertEqual(adder.add(2, 1), 3) -+ self.assertEqual(len(self.cache), 1) -+ -+ def test_can_cache_multiple_methods(self): -+ adder = self.DummyClass(self.cache) -+ adder.add(2, 1) -+ -+ # A different method results in a new cache entry, -+ # so now there should be two elements in the cache. -+ self.assertEqual(adder.sub(2, 1), 1) -+ self.assertEqual(len(self.cache), 2) -+ self.assertEqual(adder.sub(2, 1), 1) -+ -+ def test_can_cache_kwargs(self): -+ adder = self.DummyClass(self.cache) -+ adder.add(x=2, y=1) -+ self.assertEqual(adder.add(x=2, y=1), 3) -+ self.assertEqual(len(self.cache), 1) -+ -+ -+class TestMergeDicts(unittest.TestCase): -+ def test_merge_dicts_overrides(self): -+ first = { -+ 'foo': {'bar': {'baz': {'one': 'ORIGINAL', 'two': 'ORIGINAL'}}}} -+ second = {'foo': {'bar': {'baz': {'one': 'UPDATE'}}}} -+ -+ merge_dicts(first, second) -+ # The value from the second dict wins. -+ self.assertEqual(first['foo']['bar']['baz']['one'], 'UPDATE') -+ # And we still preserve the other attributes. 
-+ self.assertEqual(first['foo']['bar']['baz']['two'], 'ORIGINAL') -+ -+ def test_merge_dicts_new_keys(self): -+ first = { -+ 'foo': {'bar': {'baz': {'one': 'ORIGINAL', 'two': 'ORIGINAL'}}}} -+ second = {'foo': {'bar': {'baz': {'three': 'UPDATE'}}}} -+ -+ merge_dicts(first, second) -+ self.assertEqual(first['foo']['bar']['baz']['one'], 'ORIGINAL') -+ self.assertEqual(first['foo']['bar']['baz']['two'], 'ORIGINAL') -+ self.assertEqual(first['foo']['bar']['baz']['three'], 'UPDATE') -+ -+ def test_merge_empty_dict_does_nothing(self): -+ first = {'foo': {'bar': 'baz'}} -+ merge_dicts(first, {}) -+ self.assertEqual(first, {'foo': {'bar': 'baz'}}) -+ -+ def test_more_than_one_sub_dict(self): -+ first = {'one': {'inner': 'ORIGINAL', 'inner2': 'ORIGINAL'}, -+ 'two': {'inner': 'ORIGINAL', 'inner2': 'ORIGINAL'}} -+ second = {'one': {'inner': 'UPDATE'}, 'two': {'inner': 'UPDATE'}} -+ -+ merge_dicts(first, second) -+ self.assertEqual(first['one']['inner'], 'UPDATE') -+ self.assertEqual(first['one']['inner2'], 'ORIGINAL') -+ -+ self.assertEqual(first['two']['inner'], 'UPDATE') -+ self.assertEqual(first['two']['inner2'], 'ORIGINAL') -+ -+ def test_new_keys(self): -+ first = {'one': {'inner': 'ORIGINAL'}, 'two': {'inner': 'ORIGINAL'}} -+ second = {'three': {'foo': {'bar': 'baz'}}} -+ # In this case, second has no keys in common, but we'd still expect -+ # this to get merged. -+ merge_dicts(first, second) -+ self.assertEqual(first['three']['foo']['bar'], 'baz') -+ -+ def test_list_values_no_append(self): -+ dict1 = {'Foo': ['old_foo_value']} -+ dict2 = {'Foo': ['new_foo_value']} -+ merge_dicts(dict1, dict2) -+ self.assertEqual( -+ dict1, {'Foo': ['new_foo_value']}) -+ -+ def test_list_values_append(self): -+ dict1 = {'Foo': ['old_foo_value']} -+ dict2 = {'Foo': ['new_foo_value']} -+ merge_dicts(dict1, dict2, append_lists=True) -+ self.assertEqual( -+ dict1, {'Foo': ['old_foo_value', 'new_foo_value']}) -+ -+ def test_list_values_mismatching_types(self): -+ dict1 = {'Foo': 'old_foo_value'} -+ dict2 = {'Foo': ['new_foo_value']} -+ merge_dicts(dict1, dict2, append_lists=True) -+ self.assertEqual( -+ dict1, {'Foo': ['new_foo_value']}) -+ -+ def test_list_values_missing_key(self): -+ dict1 = {} -+ dict2 = {'Foo': ['foo_value']} -+ merge_dicts(dict1, dict2, append_lists=True) -+ self.assertEqual( -+ dict1, {'Foo': ['foo_value']}) -+ -+ -+class TestLowercaseDict(unittest.TestCase): -+ def test_lowercase_dict_empty(self): -+ original = {} -+ copy = lowercase_dict(original) -+ self.assertEqual(original, copy) -+ -+ def test_lowercase_dict_original_keys_lower(self): -+ original = { -+ 'lower_key1': 1, -+ 'lower_key2': 2, -+ } -+ copy = lowercase_dict(original) -+ self.assertEqual(original, copy) -+ -+ def test_lowercase_dict_original_keys_mixed(self): -+ original = { -+ 'SOME_KEY': 'value', -+ 'AnOTher_OnE': 'anothervalue', -+ } -+ copy = lowercase_dict(original) -+ expected = { -+ 'some_key': 'value', -+ 'another_one': 'anothervalue', -+ } -+ self.assertEqual(expected, copy) -+ -+ -+class TestGetServiceModuleName(unittest.TestCase): -+ def setUp(self): -+ self.service_description = { -+ 'metadata': { -+ 'serviceFullName': 'AWS MyService', -+ 'apiVersion': '2014-01-01', -+ 'endpointPrefix': 'myservice', -+ 'signatureVersion': 'v4', -+ 'protocol': 'query' -+ }, -+ 'operations': {}, -+ 'shapes': {}, -+ } -+ self.service_model = ServiceModel( -+ self.service_description, 'myservice') -+ -+ def test_default(self): -+ self.assertEqual( -+ get_service_module_name(self.service_model), -+ 'MyService' -+ ) -+ -+ def 
test_client_name_with_amazon(self): -+ self.service_description['metadata']['serviceFullName'] = ( -+ 'Amazon MyService') -+ self.assertEqual( -+ get_service_module_name(self.service_model), -+ 'MyService' -+ ) -+ -+ def test_client_name_using_abreviation(self): -+ self.service_description['metadata']['serviceAbbreviation'] = ( -+ 'Abbreviation') -+ self.assertEqual( -+ get_service_module_name(self.service_model), -+ 'Abbreviation' -+ ) -+ -+ def test_client_name_with_non_alphabet_characters(self): -+ self.service_description['metadata']['serviceFullName'] = ( -+ 'Amazon My-Service') -+ self.assertEqual( -+ get_service_module_name(self.service_model), -+ 'MyService' -+ ) -+ -+ def test_client_name_with_no_full_name_or_abbreviation(self): -+ del self.service_description['metadata']['serviceFullName'] -+ self.assertEqual( -+ get_service_module_name(self.service_model), -+ 'myservice' -+ ) -+ -+ -+class TestPercentEncodeSequence(unittest.TestCase): -+ def test_percent_encode_empty(self): -+ self.assertEqual(percent_encode_sequence({}), '') -+ -+ def test_percent_encode_special_chars(self): -+ self.assertEqual( -+ percent_encode_sequence({'k1': 'with spaces++/'}), -+ 'k1=with%20spaces%2B%2B%2F') -+ -+ def test_percent_encode_string_string_tuples(self): -+ self.assertEqual(percent_encode_sequence([('k1', 'v1'), ('k2', 'v2')]), -+ 'k1=v1&k2=v2') -+ -+ def test_percent_encode_dict_single_pair(self): -+ self.assertEqual(percent_encode_sequence({'k1': 'v1'}), 'k1=v1') -+ -+ def test_percent_encode_dict_string_string(self): -+ self.assertEqual( -+ percent_encode_sequence(OrderedDict([('k1', 'v1'), ('k2', 'v2')])), -+ 'k1=v1&k2=v2') -+ -+ def test_percent_encode_single_list_of_values(self): -+ self.assertEqual(percent_encode_sequence({'k1': ['a', 'b', 'c']}), -+ 'k1=a&k1=b&k1=c') -+ -+ def test_percent_encode_list_values_of_string(self): -+ self.assertEqual( -+ percent_encode_sequence( -+ OrderedDict([('k1', ['a', 'list']), -+ ('k2', ['another', 'list'])])), -+ 'k1=a&k1=list&k2=another&k2=list') -+ -+class TestPercentEncode(unittest.TestCase): -+ def test_percent_encode_obj(self): -+ self.assertEqual(percent_encode(1), '1') -+ -+ def test_percent_encode_text(self): -+ self.assertEqual(percent_encode(u''), '') -+ self.assertEqual(percent_encode(u'a'), 'a') -+ self.assertEqual(percent_encode(u'\u0000'), '%00') -+ # Codepoint > 0x7f -+ self.assertEqual(percent_encode(u'\u2603'), '%E2%98%83') -+ # Codepoint > 0xffff -+ self.assertEqual(percent_encode(u'\U0001f32e'), '%F0%9F%8C%AE') -+ -+ def test_percent_encode_bytes(self): -+ self.assertEqual(percent_encode(b''), '') -+ self.assertEqual(percent_encode(b'a'), u'a') -+ self.assertEqual(percent_encode(b'\x00'), u'%00') -+ # UTF-8 Snowman -+ self.assertEqual(percent_encode(b'\xe2\x98\x83'), '%E2%98%83') -+ # Arbitrary bytes (not valid UTF-8). 
-+ self.assertEqual(percent_encode(b'\x80\x00'), '%80%00') -+ -+class TestSwitchHostS3Accelerate(unittest.TestCase): -+ def setUp(self): -+ self.original_url = 'https://s3.amazonaws.com/foo/key.txt' -+ self.request = AWSRequest( -+ method='PUT', headers={}, -+ url=self.original_url -+ ) -+ self.client_config = Config() -+ self.request.context['client_config'] = self.client_config -+ -+ def test_switch_host(self): -+ switch_host_s3_accelerate(self.request, 'PutObject') -+ self.assertEqual( -+ self.request.url, -+ 'https://s3-accelerate.amazonaws.com/foo/key.txt') -+ -+ def test_do_not_switch_black_listed_operations(self): -+ # It should not get switched for ListBuckets, DeleteBucket, and -+ # CreateBucket -+ blacklist_ops = [ -+ 'ListBuckets', -+ 'DeleteBucket', -+ 'CreateBucket' -+ ] -+ for op_name in blacklist_ops: -+ switch_host_s3_accelerate(self.request, op_name) -+ self.assertEqual(self.request.url, self.original_url) -+ -+ def test_uses_original_endpoint_scheme(self): -+ self.request.url = 'http://s3.amazonaws.com/foo/key.txt' -+ switch_host_s3_accelerate(self.request, 'PutObject') -+ self.assertEqual( -+ self.request.url, -+ 'http://s3-accelerate.amazonaws.com/foo/key.txt') -+ -+ def test_uses_dualstack(self): -+ self.client_config.s3 = {'use_dualstack_endpoint': True} -+ self.original_url = 'https://s3.dualstack.amazonaws.com/foo/key.txt' -+ self.request = AWSRequest( -+ method='PUT', headers={}, -+ url=self.original_url -+ ) -+ self.request.context['client_config'] = self.client_config -+ switch_host_s3_accelerate(self.request, 'PutObject') -+ self.assertEqual( -+ self.request.url, -+ 'https://s3-accelerate.dualstack.amazonaws.com/foo/key.txt') -+ -+ -+class TestDeepMerge(unittest.TestCase): -+ def test_simple_merge(self): -+ a = {'key': 'value'} -+ b = {'otherkey': 'othervalue'} -+ deep_merge(a, b) -+ -+ expected = {'key': 'value', 'otherkey': 'othervalue'} -+ self.assertEqual(a, expected) -+ -+ def test_merge_list(self): -+ # Lists are treated as opaque data and so no effort should be made to -+ # combine them. 
-+ a = {'key': ['original']} -+ b = {'key': ['new']} -+ deep_merge(a, b) -+ self.assertEqual(a, {'key': ['new']}) -+ -+ def test_merge_number(self): -+ # The value from b is always taken -+ a = {'key': 10} -+ b = {'key': 45} -+ deep_merge(a, b) -+ self.assertEqual(a, {'key': 45}) -+ -+ a = {'key': 45} -+ b = {'key': 10} -+ deep_merge(a, b) -+ self.assertEqual(a, {'key': 10}) -+ -+ def test_merge_boolean(self): -+ # The value from b is always taken -+ a = {'key': False} -+ b = {'key': True} -+ deep_merge(a, b) -+ self.assertEqual(a, {'key': True}) -+ -+ a = {'key': True} -+ b = {'key': False} -+ deep_merge(a, b) -+ self.assertEqual(a, {'key': False}) -+ -+ def test_merge_string(self): -+ a = {'key': 'value'} -+ b = {'key': 'othervalue'} -+ deep_merge(a, b) -+ self.assertEqual(a, {'key': 'othervalue'}) -+ -+ def test_merge_overrides_value(self): -+ # The value from b is always taken, even when it's a different type -+ a = {'key': 'original'} -+ b = {'key': {'newkey': 'newvalue'}} -+ deep_merge(a, b) -+ self.assertEqual(a, {'key': {'newkey': 'newvalue'}}) -+ -+ a = {'key': {'anotherkey': 'value'}} -+ b = {'key': 'newvalue'} -+ deep_merge(a, b) -+ self.assertEqual(a, {'key': 'newvalue'}) -+ -+ def test_deep_merge(self): -+ a = { -+ 'first': { -+ 'second': { -+ 'key': 'value', -+ 'otherkey': 'othervalue' -+ }, -+ 'key': 'value' -+ } -+ } -+ b = { -+ 'first': { -+ 'second': { -+ 'otherkey': 'newvalue', -+ 'yetanotherkey': 'yetanothervalue' -+ } -+ } -+ } -+ deep_merge(a, b) -+ -+ expected = { -+ 'first': { -+ 'second': { -+ 'key': 'value', -+ 'otherkey': 'newvalue', -+ 'yetanotherkey': 'yetanothervalue' -+ }, -+ 'key': 'value' -+ } -+ } -+ self.assertEqual(a, expected) -+ -+ -+class TestS3RegionRedirector(unittest.TestCase): -+ def setUp(self): -+ self.endpoint_bridge = mock.Mock() -+ self.endpoint_bridge.resolve.return_value = { -+ 'endpoint_url': 'https://eu-central-1.amazonaws.com' -+ } -+ self.client = mock.Mock() -+ self.cache = {} -+ self.redirector = S3RegionRedirector(self.endpoint_bridge, self.client) -+ self.set_client_response_headers({}) -+ self.operation = mock.Mock() -+ self.operation.name = 'foo' -+ -+ def set_client_response_headers(self, headers): -+ error_response = ClientError({ -+ 'Error': { -+ 'Code': '', -+ 'Message': '' -+ }, -+ 'ResponseMetadata': { -+ 'HTTPHeaders': headers -+ } -+ }, 'HeadBucket') -+ success_response = { -+ 'ResponseMetadata': { -+ 'HTTPHeaders': headers -+ } -+ } -+ self.client.head_bucket.side_effect = [ -+ error_response, success_response] -+ -+ def test_set_request_url(self): -+ params = {'url': 'https://us-west-2.amazonaws.com/foo'} -+ context = {'signing': { -+ 'endpoint': 'https://eu-central-1.amazonaws.com' -+ }} -+ self.redirector.set_request_url(params, context) -+ self.assertEqual( -+ params['url'], 'https://eu-central-1.amazonaws.com/foo') -+ -+ def test_only_changes_request_url_if_endpoint_present(self): -+ params = {'url': 'https://us-west-2.amazonaws.com/foo'} -+ context = {} -+ self.redirector.set_request_url(params, context) -+ self.assertEqual( -+ params['url'], 'https://us-west-2.amazonaws.com/foo') -+ -+ def test_set_request_url_keeps_old_scheme(self): -+ params = {'url': 'http://us-west-2.amazonaws.com/foo'} -+ context = {'signing': { -+ 'endpoint': 'https://eu-central-1.amazonaws.com' -+ }} -+ self.redirector.set_request_url(params, context) -+ self.assertEqual( -+ params['url'], 'http://eu-central-1.amazonaws.com/foo') -+ -+ def test_sets_signing_context_from_cache(self): -+ signing_context = {'endpoint': 'bar'} -+ 
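The TestDeepMerge cases above fix the contract of botocore's deep_merge utility: nested dicts are merged recursively, while lists, scalars and type-mismatched values are overwritten wholesale by the second argument. A minimal sketch of those semantics under an assumed name (deep_merge_sketch), given only to make the expected behaviour explicit:

    def deep_merge_sketch(a, b):
        """Merge ``b`` into ``a`` in place; ``b`` wins on any conflict."""
        for key, value in b.items():
            if isinstance(value, dict) and isinstance(a.get(key), dict):
                # Only dict-into-dict is merged recursively.
                deep_merge_sketch(a[key], value)
            else:
                # Scalars, lists and mismatched types are replaced outright.
                a[key] = value


    a = {'first': {'second': {'key': 'value', 'otherkey': 'othervalue'},
                   'key': 'value'}}
    b = {'first': {'second': {'otherkey': 'newvalue',
                              'yetanotherkey': 'yetanothervalue'}}}
    deep_merge_sketch(a, b)
    assert a['first']['second'] == {'key': 'value', 'otherkey': 'newvalue',
                                    'yetanotherkey': 'yetanothervalue'}
    assert a['first']['key'] == 'value'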
self.cache['foo'] = signing_context -+ self.redirector = S3RegionRedirector( -+ self.endpoint_bridge, self.client, cache=self.cache) -+ params = {'Bucket': 'foo'} -+ context = {} -+ self.redirector.redirect_from_cache(params, context) -+ self.assertEqual(context.get('signing'), signing_context) -+ -+ def test_only_changes_context_if_bucket_in_cache(self): -+ signing_context = {'endpoint': 'bar'} -+ self.cache['bar'] = signing_context -+ self.redirector = S3RegionRedirector( -+ self.endpoint_bridge, self.client, cache=self.cache) -+ params = {'Bucket': 'foo'} -+ context = {} -+ self.redirector.redirect_from_cache(params, context) -+ self.assertNotEqual(context.get('signing'), signing_context) -+ -+ def test_redirect_from_error(self): -+ request_dict = { -+ 'context': {'signing': {'bucket': 'foo'}}, -+ 'url': 'https://us-west-2.amazonaws.com/foo' -+ } -+ response = (None, { -+ 'Error': { -+ 'Code': 'PermanentRedirect', -+ 'Endpoint': 'foo.eu-central-1.amazonaws.com', -+ 'Bucket': 'foo' -+ }, -+ 'ResponseMetadata': { -+ 'HTTPHeaders': {'x-amz-bucket-region': 'eu-central-1'} -+ } -+ }) -+ -+ redirect_response = self.redirector.redirect_from_error( -+ request_dict, response, self.operation) -+ -+ # The response needs to be 0 so that there is no retry delay -+ self.assertEqual(redirect_response, 0) -+ -+ self.assertEqual( -+ request_dict['url'], 'https://eu-central-1.amazonaws.com/foo') -+ -+ expected_signing_context = { -+ 'endpoint': 'https://eu-central-1.amazonaws.com', -+ 'bucket': 'foo', -+ 'region': 'eu-central-1' -+ } -+ signing_context = request_dict['context'].get('signing') -+ self.assertEqual(signing_context, expected_signing_context) -+ self.assertTrue(request_dict['context'].get('s3_redirected')) -+ -+ def test_does_not_redirect_if_previously_redirected(self): -+ request_dict = { -+ 'context': { -+ 'signing': {'bucket': 'foo', 'region': 'us-west-2'}, -+ 's3_redirected': True, -+ }, -+ 'url': 'https://us-west-2.amazonaws.com/foo' -+ } -+ response = (None, { -+ 'Error': { -+ 'Code': '400', -+ 'Message': 'Bad Request', -+ }, -+ 'ResponseMetadata': { -+ 'HTTPHeaders': {'x-amz-bucket-region': 'us-west-2'} -+ } -+ }) -+ redirect_response = self.redirector.redirect_from_error( -+ request_dict, response, self.operation) -+ self.assertIsNone(redirect_response) -+ -+ def test_does_not_redirect_unless_permanentredirect_recieved(self): -+ request_dict = {} -+ response = (None, {}) -+ redirect_response = self.redirector.redirect_from_error( -+ request_dict, response, self.operation) -+ self.assertIsNone(redirect_response) -+ self.assertEqual(request_dict, {}) -+ -+ def test_does_not_redirect_if_region_cannot_be_found(self): -+ request_dict = {'url': 'https://us-west-2.amazonaws.com/foo', -+ 'context': {'signing': {'bucket': 'foo'}}} -+ response = (None, { -+ 'Error': { -+ 'Code': 'PermanentRedirect', -+ 'Endpoint': 'foo.eu-central-1.amazonaws.com', -+ 'Bucket': 'foo' -+ }, -+ 'ResponseMetadata': { -+ 'HTTPHeaders': {} -+ } -+ }) -+ -+ redirect_response = self.redirector.redirect_from_error( -+ request_dict, response, self.operation) -+ -+ self.assertIsNone(redirect_response) -+ -+ def test_redirects_301(self): -+ request_dict = {'url': 'https://us-west-2.amazonaws.com/foo', -+ 'context': {'signing': {'bucket': 'foo'}}} -+ response = (None, { -+ 'Error': { -+ 'Code': '301', -+ 'Message': 'Moved Permanently' -+ }, -+ 'ResponseMetadata': { -+ 'HTTPHeaders': {'x-amz-bucket-region': 'eu-central-1'} -+ } -+ }) -+ -+ self.operation.name = 'HeadObject' -+ redirect_response = 
self.redirector.redirect_from_error( -+ request_dict, response, self.operation) -+ self.assertEqual(redirect_response, 0) -+ -+ self.operation.name = 'ListObjects' -+ redirect_response = self.redirector.redirect_from_error( -+ request_dict, response, self.operation) -+ self.assertIsNone(redirect_response) -+ -+ def test_redirects_400_head_bucket(self): -+ request_dict = {'url': 'https://us-west-2.amazonaws.com/foo', -+ 'context': {'signing': {'bucket': 'foo'}}} -+ response = (None, { -+ 'Error': {'Code': '400', 'Message': 'Bad Request'}, -+ 'ResponseMetadata': { -+ 'HTTPHeaders': {'x-amz-bucket-region': 'eu-central-1'} -+ } -+ }) -+ -+ self.operation.name = 'HeadObject' -+ redirect_response = self.redirector.redirect_from_error( -+ request_dict, response, self.operation) -+ self.assertEqual(redirect_response, 0) -+ -+ self.operation.name = 'ListObjects' -+ redirect_response = self.redirector.redirect_from_error( -+ request_dict, response, self.operation) -+ self.assertIsNone(redirect_response) -+ -+ def test_does_not_redirect_400_head_bucket_no_region_header(self): -+ # We should not redirect a 400 Head* if the region header is not -+ # present as this will lead to infinitely calling HeadBucket. -+ request_dict = {'url': 'https://us-west-2.amazonaws.com/foo', -+ 'context': {'signing': {'bucket': 'foo'}}} -+ response = (None, { -+ 'Error': {'Code': '400', 'Message': 'Bad Request'}, -+ 'ResponseMetadata': { -+ 'HTTPHeaders': {} -+ } -+ }) -+ -+ self.operation.name = 'HeadBucket' -+ redirect_response = self.redirector.redirect_from_error( -+ request_dict, response, self.operation) -+ head_bucket_calls = self.client.head_bucket.call_count -+ self.assertIsNone(redirect_response) -+ # We should not have made an additional head bucket call -+ self.assertEqual(head_bucket_calls, 0) -+ -+ def test_does_not_redirect_if_None_response(self): -+ request_dict = {'url': 'https://us-west-2.amazonaws.com/foo', -+ 'context': {'signing': {'bucket': 'foo'}}} -+ response = None -+ redirect_response = self.redirector.redirect_from_error( -+ request_dict, response, self.operation) -+ self.assertIsNone(redirect_response) -+ -+ def test_get_region_from_response(self): -+ response = (None, { -+ 'Error': { -+ 'Code': 'PermanentRedirect', -+ 'Endpoint': 'foo.eu-central-1.amazonaws.com', -+ 'Bucket': 'foo' -+ }, -+ 'ResponseMetadata': { -+ 'HTTPHeaders': {'x-amz-bucket-region': 'eu-central-1'} -+ } -+ }) -+ region = self.redirector.get_bucket_region('foo', response) -+ self.assertEqual(region, 'eu-central-1') -+ -+ def test_get_region_from_response_error_body(self): -+ response = (None, { -+ 'Error': { -+ 'Code': 'PermanentRedirect', -+ 'Endpoint': 'foo.eu-central-1.amazonaws.com', -+ 'Bucket': 'foo', -+ 'Region': 'eu-central-1' -+ }, -+ 'ResponseMetadata': { -+ 'HTTPHeaders': {} -+ } -+ }) -+ region = self.redirector.get_bucket_region('foo', response) -+ self.assertEqual(region, 'eu-central-1') -+ -+ def test_get_region_from_head_bucket_error(self): -+ self.set_client_response_headers( -+ {'x-amz-bucket-region': 'eu-central-1'}) -+ response = (None, { -+ 'Error': { -+ 'Code': 'PermanentRedirect', -+ 'Endpoint': 'foo.eu-central-1.amazonaws.com', -+ 'Bucket': 'foo', -+ }, -+ 'ResponseMetadata': { -+ 'HTTPHeaders': {} -+ } -+ }) -+ region = self.redirector.get_bucket_region('foo', response) -+ self.assertEqual(region, 'eu-central-1') -+ -+ def test_get_region_from_head_bucket_success(self): -+ success_response = { -+ 'ResponseMetadata': { -+ 'HTTPHeaders': {'x-amz-bucket-region': 'eu-central-1'} -+ } -+ } -+ 
self.client.head_bucket.side_effect = None -+ self.client.head_bucket.return_value = success_response -+ response = (None, { -+ 'Error': { -+ 'Code': 'PermanentRedirect', -+ 'Endpoint': 'foo.eu-central-1.amazonaws.com', -+ 'Bucket': 'foo', -+ }, -+ 'ResponseMetadata': { -+ 'HTTPHeaders': {} -+ } -+ }) -+ region = self.redirector.get_bucket_region('foo', response) -+ self.assertEqual(region, 'eu-central-1') -+ -+ def test_no_redirect_from_error_for_accesspoint(self): -+ request_dict = { -+ 'url': ( -+ 'https://myendpoint-123456789012.s3-accesspoint.' -+ 'us-west-2.amazonaws.com/key' -+ ), -+ 'context': { -+ 's3_accesspoint': {} -+ } -+ } -+ response = (None, { -+ 'Error': {'Code': '400', 'Message': 'Bad Request'}, -+ 'ResponseMetadata': { -+ 'HTTPHeaders': {'x-amz-bucket-region': 'eu-central-1'} -+ } -+ }) -+ -+ self.operation.name = 'HeadObject' -+ redirect_response = self.redirector.redirect_from_error( -+ request_dict, response, self.operation) -+ self.assertEqual(redirect_response, None) -+ -+ def test_no_redirect_from_cache_for_accesspoint(self): -+ self.cache['foo'] = {'endpoint': 'foo-endpoint'} -+ self.redirector = S3RegionRedirector( -+ self.endpoint_bridge, self.client, cache=self.cache) -+ params = {'Bucket': 'foo'} -+ context = {'s3_accesspoint': {}} -+ self.redirector.redirect_from_cache(params, context) -+ self.assertNotIn('signing', context) -+ -+ -+class TestArnParser(unittest.TestCase): -+ def setUp(self): -+ self.parser = ArnParser() -+ -+ def test_parse(self): -+ arn = 'arn:aws:s3:us-west-2:1023456789012:myresource' -+ self.assertEqual( -+ self.parser.parse_arn(arn), -+ { -+ 'partition': 'aws', -+ 'service': 's3', -+ 'region': 'us-west-2', -+ 'account': '1023456789012', -+ 'resource': 'myresource', -+ } -+ ) -+ -+ def test_parse_invalid_arn(self): -+ with self.assertRaises(InvalidArnException): -+ self.parser.parse_arn('arn:aws:s3') -+ -+ def test_parse_arn_with_resource_type(self): -+ arn = 'arn:aws:s3:us-west-2:1023456789012:bucket_name:mybucket' -+ self.assertEqual( -+ self.parser.parse_arn(arn), -+ { -+ 'partition': 'aws', -+ 'service': 's3', -+ 'region': 'us-west-2', -+ 'account': '1023456789012', -+ 'resource': 'bucket_name:mybucket', -+ } -+ ) -+ -+ def test_parse_arn_with_empty_elements(self): -+ arn = 'arn:aws:s3:::mybucket' -+ self.assertEqual( -+ self.parser.parse_arn(arn), -+ { -+ 'partition': 'aws', -+ 'service': 's3', -+ 'region': '', -+ 'account': '', -+ 'resource': 'mybucket', -+ } -+ ) -+ -+ -+class TestS3ArnParamHandler(unittest.TestCase): -+ def setUp(self): -+ self.arn_handler = S3ArnParamHandler() -+ self.model = mock.Mock(OperationModel) -+ self.model.name = 'GetObject' -+ -+ def test_register(self): -+ event_emitter = mock.Mock() -+ self.arn_handler.register(event_emitter) -+ event_emitter.register.assert_called_with( -+ 'before-parameter-build.s3', self.arn_handler.handle_arn) -+ -+ def test_accesspoint_arn(self): -+ params = { -+ 'Bucket': 'arn:aws:s3:us-west-2:123456789012:accesspoint/endpoint' -+ } -+ context = {} -+ self.arn_handler.handle_arn(params, self.model, context) -+ self.assertEqual(params, {'Bucket': 'endpoint'}) -+ self.assertEqual( -+ context, -+ { -+ 's3_accesspoint': { -+ 'name': 'endpoint', -+ 'account': '123456789012', -+ 'region': 'us-west-2', -+ 'partition': 'aws', -+ 'service': 's3', -+ } -+ } -+ ) -+ -+ def test_accesspoint_arn_with_colon(self): -+ params = { -+ 'Bucket': 'arn:aws:s3:us-west-2:123456789012:accesspoint:endpoint' -+ } -+ context = {} -+ self.arn_handler.handle_arn(params, self.model, context) -+ 
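The TestArnParser cases above describe the splitting rules: an ARN has six colon-separated fields, the region and account may be empty, anything after the fifth colon stays in the resource untouched, and too few fields raises InvalidArnException. A sketch of that behaviour as a plain function with a hypothetical name (parse_arn_sketch), standing in a ValueError for botocore's exception:

    def parse_arn_sketch(arn):
        parts = arn.split(':', 5)
        if len(parts) < 6:
            # botocore raises InvalidArnException; ValueError stands in here.
            raise ValueError('Invalid ARN: %r' % arn)
        _, partition, service, region, account, resource = parts
        return {
            'partition': partition,
            'service': service,
            'region': region,      # may be empty, e.g. arn:aws:s3:::mybucket
            'account': account,    # may be empty as well
            'resource': resource,  # keeps any further colons intact
        }


    assert parse_arn_sketch(
        'arn:aws:s3:us-west-2:1023456789012:bucket_name:mybucket') == {
        'partition': 'aws', 'service': 's3', 'region': 'us-west-2',
        'account': '1023456789012', 'resource': 'bucket_name:mybucket',
    }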
self.assertEqual(params, {'Bucket': 'endpoint'}) -+ self.assertEqual( -+ context, -+ { -+ 's3_accesspoint': { -+ 'name': 'endpoint', -+ 'account': '123456789012', -+ 'region': 'us-west-2', -+ 'partition': 'aws', -+ 'service': 's3', -+ } -+ } -+ ) -+ -+ def test_errors_for_non_accesspoint_arn(self): -+ params = { -+ 'Bucket': 'arn:aws:s3:us-west-2:123456789012:unsupported:resource' -+ } -+ context = {} -+ with self.assertRaises(UnsupportedS3ArnError): -+ self.arn_handler.handle_arn(params, self.model, context) -+ -+ def test_outpost_arn_with_colon(self): -+ params = { -+ 'Bucket': ( -+ 'arn:aws:s3-outposts:us-west-2:123456789012:outpost:' -+ 'op-01234567890123456:accesspoint:myaccesspoint' -+ ) -+ } -+ context = {} -+ self.arn_handler.handle_arn(params, self.model, context) -+ self.assertEqual(params, {'Bucket': 'myaccesspoint'}) -+ self.assertEqual( -+ context, -+ { -+ 's3_accesspoint': { -+ 'name': 'myaccesspoint', -+ 'outpost_name': 'op-01234567890123456', -+ 'account': '123456789012', -+ 'region': 'us-west-2', -+ 'partition': 'aws', -+ 'service': 's3-outposts', -+ } -+ } -+ ) -+ -+ def test_outpost_arn_with_slash(self): -+ params = { -+ 'Bucket': ( -+ 'arn:aws:s3-outposts:us-west-2:123456789012:outpost/' -+ 'op-01234567890123456/accesspoint/myaccesspoint' -+ ) -+ } -+ context = {} -+ self.arn_handler.handle_arn(params, self.model, context) -+ self.assertEqual(params, {'Bucket': 'myaccesspoint'}) -+ self.assertEqual( -+ context, -+ { -+ 's3_accesspoint': { -+ 'name': 'myaccesspoint', -+ 'outpost_name': 'op-01234567890123456', -+ 'account': '123456789012', -+ 'region': 'us-west-2', -+ 'partition': 'aws', -+ 'service': 's3-outposts', -+ } -+ } -+ ) -+ -+ def test_outpost_arn_errors_for_missing_fields(self): -+ params = { -+ 'Bucket': 'arn:aws:s3-outposts:us-west-2:123456789012:outpost/' -+ 'op-01234567890123456/accesspoint' -+ } -+ with self.assertRaises(UnsupportedOutpostResourceError): -+ self.arn_handler.handle_arn(params, self.model, {}) -+ -+ def test_outpost_arn_errors_for_empty_fields(self): -+ params = { -+ 'Bucket': 'arn:aws:s3-outposts:us-west-2:123456789012:outpost/' -+ '/accesspoint/myaccesspoint' -+ } -+ with self.assertRaises(UnsupportedOutpostResourceError): -+ self.arn_handler.handle_arn(params, self.model, {}) -+ -+ def test_ignores_bucket_names(self): -+ params = {'Bucket': 'mybucket'} -+ context = {} -+ self.arn_handler.handle_arn(params, self.model, context) -+ self.assertEqual(params, {'Bucket': 'mybucket'}) -+ self.assertEqual(context, {}) -+ -+ def test_ignores_create_bucket(self): -+ arn = 'arn:aws:s3:us-west-2:123456789012:accesspoint/endpoint' -+ params = {'Bucket': arn} -+ context = {} -+ self.model.name = 'CreateBucket' -+ self.arn_handler.handle_arn(params, self.model, context) -+ self.assertEqual(params, {'Bucket': arn}) -+ self.assertEqual(context, {}) -+ -+ -+class TestS3EndpointSetter(unittest.TestCase): -+ def setUp(self): -+ self.operation_name = 'GetObject' -+ self.signature_version = 's3v4' -+ self.region_name = 'us-west-2' -+ self.service = 's3' -+ self.account = '123456789012' -+ self.bucket = 'mybucket' -+ self.key = 'key.txt' -+ self.accesspoint_name = 'myaccesspoint' -+ self.outpost_name = 'op-123456789012' -+ self.partition = 'aws' -+ self.endpoint_resolver = mock.Mock() -+ self.dns_suffix = 'amazonaws.com' -+ self.endpoint_resolver.construct_endpoint.return_value = { -+ 'dnsSuffix': self.dns_suffix -+ } -+ self.endpoint_setter = self.get_endpoint_setter() -+ -+ def get_endpoint_setter(self, **kwargs): -+ setter_kwargs = { -+ 'endpoint_resolver': 
self.endpoint_resolver, -+ 'region': self.region_name, -+ } -+ setter_kwargs.update(kwargs) -+ return S3EndpointSetter(**setter_kwargs) -+ -+ def get_s3_request(self, bucket=None, key=None, scheme='https://', -+ querystring=None): -+ url = scheme + 's3.us-west-2.amazonaws.com/' -+ if bucket: -+ url += bucket -+ if key: -+ url += '/%s' % key -+ if querystring: -+ url += '?%s' % querystring -+ return AWSRequest(method='GET', headers={}, url=url) -+ -+ def get_s3_outpost_request(self, **s3_request_kwargs): -+ request = self.get_s3_request( -+ self.accesspoint_name, **s3_request_kwargs) -+ accesspoint_context = self.get_s3_accesspoint_context( -+ name=self.accesspoint_name, outpost_name=self.outpost_name) -+ request.context['s3_accesspoint'] = accesspoint_context -+ return request -+ -+ def get_s3_accesspoint_request(self, accesspoint_name=None, -+ accesspoint_context=None, -+ **s3_request_kwargs): -+ if not accesspoint_name: -+ accesspoint_name = self.accesspoint_name -+ request = self.get_s3_request(accesspoint_name, **s3_request_kwargs) -+ if accesspoint_context is None: -+ accesspoint_context = self.get_s3_accesspoint_context( -+ name=accesspoint_name) -+ request.context['s3_accesspoint'] = accesspoint_context -+ return request -+ -+ def get_s3_accesspoint_context(self, **overrides): -+ accesspoint_context = { -+ 'name': self.accesspoint_name, -+ 'account': self.account, -+ 'region': self.region_name, -+ 'partition': self.partition, -+ 'service': self.service, -+ } -+ accesspoint_context.update(overrides) -+ return accesspoint_context -+ -+ def call_set_endpoint(self, endpoint_setter, request, **kwargs): -+ set_endpoint_kwargs = { -+ 'request': request, -+ 'operation_name': self.operation_name, -+ 'signature_version': self.signature_version, -+ 'region_name': self.region_name, -+ } -+ set_endpoint_kwargs.update(kwargs) -+ endpoint_setter.set_endpoint(**set_endpoint_kwargs) -+ -+ def test_register(self): -+ event_emitter = mock.Mock() -+ self.endpoint_setter.register(event_emitter) -+ event_emitter.register.assert_called_with( -+ 'before-sign.s3', self.endpoint_setter.set_endpoint) -+ -+ def test_outpost_endpoint(self): -+ request = self.get_s3_outpost_request() -+ self.call_set_endpoint(self.endpoint_setter, request=request) -+ expected_url = 'https://%s-%s.%s.s3-outposts.%s.amazonaws.com/' % ( -+ self.accesspoint_name, self.account, self.outpost_name, -+ self.region_name, -+ ) -+ self.assertEqual(request.url, expected_url) -+ -+ def test_outpost_endpoint_preserves_key_in_path(self): -+ request = self.get_s3_outpost_request(key=self.key) -+ self.call_set_endpoint(self.endpoint_setter, request=request) -+ expected_url = 'https://%s-%s.%s.s3-outposts.%s.amazonaws.com/%s' % ( -+ self.accesspoint_name, self.account, self.outpost_name, -+ self.region_name, self.key -+ ) -+ self.assertEqual(request.url, expected_url) -+ -+ def test_accesspoint_endpoint(self): -+ request = self.get_s3_accesspoint_request() -+ self.call_set_endpoint(self.endpoint_setter, request=request) -+ expected_url = 'https://%s-%s.s3-accesspoint.%s.amazonaws.com/' % ( -+ self.accesspoint_name, self.account, self.region_name -+ ) -+ self.assertEqual(request.url, expected_url) -+ -+ def test_accesspoint_preserves_key_in_path(self): -+ request = self.get_s3_accesspoint_request(key=self.key) -+ self.call_set_endpoint(self.endpoint_setter, request=request) -+ expected_url = 'https://%s-%s.s3-accesspoint.%s.amazonaws.com/%s' % ( -+ self.accesspoint_name, self.account, self.region_name, -+ self.key -+ ) -+ 
self.assertEqual(request.url, expected_url) -+ -+ def test_accesspoint_preserves_scheme(self): -+ request = self.get_s3_accesspoint_request(scheme='http://') -+ self.call_set_endpoint(self.endpoint_setter, request=request) -+ expected_url = 'http://%s-%s.s3-accesspoint.%s.amazonaws.com/' % ( -+ self.accesspoint_name, self.account, self.region_name, -+ ) -+ self.assertEqual(request.url, expected_url) -+ -+ def test_accesspoint_preserves_query_string(self): -+ request = self.get_s3_accesspoint_request(querystring='acl') -+ self.call_set_endpoint(self.endpoint_setter, request=request) -+ expected_url = 'https://%s-%s.s3-accesspoint.%s.amazonaws.com/?acl' % ( -+ self.accesspoint_name, self.account, self.region_name, -+ ) -+ self.assertEqual(request.url, expected_url) -+ -+ def test_uses_resolved_dns_suffix(self): -+ self.endpoint_resolver.construct_endpoint.return_value = { -+ 'dnsSuffix': 'mysuffix.com' -+ } -+ request = self.get_s3_accesspoint_request() -+ self.call_set_endpoint(self.endpoint_setter, request=request) -+ expected_url = 'https://%s-%s.s3-accesspoint.%s.mysuffix.com/' % ( -+ self.accesspoint_name, self.account, self.region_name, -+ ) -+ self.assertEqual(request.url, expected_url) -+ -+ def test_uses_region_of_client_if_use_arn_disabled(self): -+ client_region = 'client-region' -+ self.endpoint_setter = self.get_endpoint_setter( -+ region=client_region, s3_config={'use_arn_region': False}) -+ request = self.get_s3_accesspoint_request() -+ self.call_set_endpoint(self.endpoint_setter, request=request) -+ expected_url = 'https://%s-%s.s3-accesspoint.%s.amazonaws.com/' % ( -+ self.accesspoint_name, self.account, client_region, -+ ) -+ self.assertEqual(request.url, expected_url) -+ -+ def test_accesspoint_errors_for_custom_endpoint(self): -+ endpoint_setter = self.get_endpoint_setter( -+ endpoint_url='https://custom.com') -+ request = self.get_s3_accesspoint_request() -+ with self.assertRaises(UnsupportedS3AccesspointConfigurationError): -+ self.call_set_endpoint(endpoint_setter, request=request) -+ -+ def test_errors_for_mismatching_partition(self): -+ endpoint_setter = self.get_endpoint_setter(partition='aws-cn') -+ accesspoint_context = self.get_s3_accesspoint_context(partition='aws') -+ request = self.get_s3_accesspoint_request( -+ accesspoint_context=accesspoint_context) -+ with self.assertRaises(UnsupportedS3AccesspointConfigurationError): -+ self.call_set_endpoint(endpoint_setter, request=request) -+ -+ def test_errors_for_mismatching_partition_when_using_client_region(self): -+ endpoint_setter = self.get_endpoint_setter( -+ s3_config={'use_arn_region': False}, partition='aws-cn' -+ ) -+ accesspoint_context = self.get_s3_accesspoint_context(partition='aws') -+ request = self.get_s3_accesspoint_request( -+ accesspoint_context=accesspoint_context) -+ with self.assertRaises(UnsupportedS3AccesspointConfigurationError): -+ self.call_set_endpoint(endpoint_setter, request=request) -+ -+ def test_set_endpoint_for_auto(self): -+ endpoint_setter = self.get_endpoint_setter( -+ s3_config={'addressing_style': 'auto'}) -+ request = self.get_s3_request(self.bucket, self.key) -+ self.call_set_endpoint(endpoint_setter, request) -+ expected_url = 'https://%s.s3.us-west-2.amazonaws.com/%s' % ( -+ self.bucket, self.key -+ ) -+ self.assertEqual(request.url, expected_url) -+ -+ def test_set_endpoint_for_virtual(self): -+ endpoint_setter = self.get_endpoint_setter( -+ s3_config={'addressing_style': 'virtual'}) -+ request = self.get_s3_request(self.bucket, self.key) -+ 
self.call_set_endpoint(endpoint_setter, request) -+ expected_url = 'https://%s.s3.us-west-2.amazonaws.com/%s' % ( -+ self.bucket, self.key -+ ) -+ self.assertEqual(request.url, expected_url) -+ -+ def test_set_endpoint_for_path(self): -+ endpoint_setter = self.get_endpoint_setter( -+ s3_config={'addressing_style': 'path'}) -+ request = self.get_s3_request(self.bucket, self.key) -+ self.call_set_endpoint(endpoint_setter, request) -+ expected_url = 'https://s3.us-west-2.amazonaws.com/%s/%s' % ( -+ self.bucket, self.key -+ ) -+ self.assertEqual(request.url, expected_url) -+ -+ def test_set_endpoint_for_accelerate(self): -+ endpoint_setter = self.get_endpoint_setter( -+ s3_config={'use_accelerate_endpoint': True}) -+ request = self.get_s3_request(self.bucket, self.key) -+ self.call_set_endpoint(endpoint_setter, request) -+ expected_url = 'https://%s.s3-accelerate.amazonaws.com/%s' % ( -+ self.bucket, self.key -+ ) -+ self.assertEqual(request.url, expected_url) -+ -+ -+class TestContainerMetadataFetcher(unittest.TestCase): -+ def setUp(self): -+ self.responses = [] -+ self.http = mock.Mock() -+ self.sleep = mock.Mock() -+ -+ def create_fetcher(self): -+ return ContainerMetadataFetcher(self.http, sleep=self.sleep) -+ -+ def fake_response(self, status_code, body): -+ response = mock.Mock() -+ response.status_code = status_code -+ response.content = body -+ return response -+ -+ def set_http_responses_to(self, *responses): -+ http_responses = [] -+ for response in responses: -+ if isinstance(response, Exception): -+ # Simulating an error condition. -+ http_response = response -+ elif hasattr(response, 'status_code'): -+ # It's a precreated fake_response. -+ http_response = response -+ else: -+ http_response = self.fake_response( -+ status_code=200, body=json.dumps(response).encode('utf-8')) -+ http_responses.append(http_response) -+ self.http.send.side_effect = http_responses -+ -+ def assert_request(self, method, url, headers): -+ request = self.http.send.call_args[0][0] -+ self.assertEqual(request.method, method) -+ self.assertEqual(request.url, url) -+ self.assertEqual(request.headers, headers) -+ -+ def assert_can_retrieve_metadata_from(self, full_uri): -+ response_body = {'foo': 'bar'} -+ self.set_http_responses_to(response_body) -+ fetcher = self.create_fetcher() -+ response = fetcher.retrieve_full_uri(full_uri) -+ self.assertEqual(response, response_body) -+ self.assert_request('GET', full_uri, {'Accept': 'application/json'}) -+ -+ def assert_host_is_not_allowed(self, full_uri): -+ response_body = {'foo': 'bar'} -+ self.set_http_responses_to(response_body) -+ fetcher = self.create_fetcher() -+ with self.assertRaisesRegexp(ValueError, 'Unsupported host'): -+ fetcher.retrieve_full_uri(full_uri) -+ self.assertFalse(self.http.send.called) -+ -+ def test_can_specify_extra_headers_are_merged(self): -+ headers = { -+ # The 'Accept' header will override the -+ # default Accept header of application/json. 
-+ 'Accept': 'application/not-json', -+ 'X-Other-Header': 'foo', -+ } -+ self.set_http_responses_to({'foo': 'bar'}) -+ fetcher = self.create_fetcher() -+ response = fetcher.retrieve_full_uri( -+ 'http://localhost', headers) -+ self.assert_request('GET', 'http://localhost', headers) -+ -+ def test_can_retrieve_uri(self): -+ json_body = { -+ "AccessKeyId" : "a", -+ "SecretAccessKey" : "b", -+ "Token" : "c", -+ "Expiration" : "d" -+ } -+ self.set_http_responses_to(json_body) -+ -+ fetcher = self.create_fetcher() -+ response = fetcher.retrieve_uri('/foo?id=1') -+ -+ self.assertEqual(response, json_body) -+ # Ensure we made calls to the right endpoint. -+ headers = {'Accept': 'application/json'} -+ self.assert_request('GET', 'http://169.254.170.2/foo?id=1', headers) -+ -+ def test_can_retry_requests(self): -+ success_response = { -+ "AccessKeyId" : "a", -+ "SecretAccessKey" : "b", -+ "Token" : "c", -+ "Expiration" : "d" -+ } -+ self.set_http_responses_to( -+ # First response is a connection error, should -+ # be retried. -+ ConnectionClosedError(endpoint_url=''), -+ # Second response is the successful JSON response -+ # with credentials. -+ success_response, -+ ) -+ fetcher = self.create_fetcher() -+ response = fetcher.retrieve_uri('/foo?id=1') -+ self.assertEqual(response, success_response) -+ -+ def test_propagates_credential_error_on_http_errors(self): -+ self.set_http_responses_to( -+ # In this scenario, we never get a successful response. -+ ConnectionClosedError(endpoint_url=''), -+ ConnectionClosedError(endpoint_url=''), -+ ConnectionClosedError(endpoint_url=''), -+ ConnectionClosedError(endpoint_url=''), -+ ConnectionClosedError(endpoint_url=''), -+ ) -+ # As a result, we expect an appropriate error to be raised. -+ fetcher = self.create_fetcher() -+ with self.assertRaises(MetadataRetrievalError): -+ fetcher.retrieve_uri('/foo?id=1') -+ self.assertEqual(self.http.send.call_count, fetcher.RETRY_ATTEMPTS) -+ -+ def test_error_raised_on_non_200_response(self): -+ self.set_http_responses_to( -+ self.fake_response(status_code=404, body=b'Error not found'), -+ self.fake_response(status_code=404, body=b'Error not found'), -+ self.fake_response(status_code=404, body=b'Error not found'), -+ ) -+ fetcher = self.create_fetcher() -+ with self.assertRaises(MetadataRetrievalError): -+ fetcher.retrieve_uri('/foo?id=1') -+ # Should have tried up to RETRY_ATTEMPTS. -+ self.assertEqual(self.http.send.call_count, fetcher.RETRY_ATTEMPTS) -+ -+ def test_error_raised_on_no_json_response(self): -+ # If the service returns a sucess response but with a body that -+ # does not contain JSON, we should still retry up to RETRY_ATTEMPTS, -+ # but after exhausting retries we propagate the exception. -+ self.set_http_responses_to( -+ self.fake_response(status_code=200, body=b'Not JSON'), -+ self.fake_response(status_code=200, body=b'Not JSON'), -+ self.fake_response(status_code=200, body=b'Not JSON'), -+ ) -+ fetcher = self.create_fetcher() -+ with self.assertRaises(MetadataRetrievalError) as e: -+ fetcher.retrieve_uri('/foo?id=1') -+ self.assertNotIn('Not JSON', str(e.exception)) -+ # Should have tried up to RETRY_ATTEMPTS. 
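The ContainerMetadataFetcher cases above all exercise the same retry pattern: connection errors, non-200 responses and 200 responses with non-JSON bodies are retried up to RETRY_ATTEMPTS, after which a single MetadataRetrievalError is surfaced. A generic sketch of that loop with hypothetical names (the real fetcher lives in botocore.utils and differs in detail):

    import json


    def fetch_json_with_retries(send, request, attempts=3, sleep=lambda s: None):
        for attempt in range(attempts):
            try:
                response = send(request)
                if response.status_code == 200:
                    # A non-JSON body raises ValueError and is retried too.
                    return json.loads(response.content.decode('utf-8'))
            except ConnectionError:
                pass  # connection problems are retried like any other failure
            except ValueError:
                pass
            sleep(1)
        # botocore raises MetadataRetrievalError after exhausting retries.
        raise RuntimeError('metadata service returned no usable response '
                           'after %d attempts' % attempts)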
-+ self.assertEqual(self.http.send.call_count, fetcher.RETRY_ATTEMPTS) -+ -+ def test_can_retrieve_full_uri_with_fixed_ip(self): -+ self.assert_can_retrieve_metadata_from( -+ 'http://%s/foo?id=1' % ContainerMetadataFetcher.IP_ADDRESS) -+ -+ def test_localhost_http_is_allowed(self): -+ self.assert_can_retrieve_metadata_from('http://localhost/foo') -+ -+ def test_localhost_with_port_http_is_allowed(self): -+ self.assert_can_retrieve_metadata_from('http://localhost:8000/foo') -+ -+ def test_localhost_https_is_allowed(self): -+ self.assert_can_retrieve_metadata_from('https://localhost/foo') -+ -+ def test_can_use_127_ip_addr(self): -+ self.assert_can_retrieve_metadata_from('https://127.0.0.1/foo') -+ -+ def test_can_use_127_ip_addr_with_port(self): -+ self.assert_can_retrieve_metadata_from('https://127.0.0.1:8080/foo') -+ -+ def test_link_local_http_is_not_allowed(self): -+ self.assert_host_is_not_allowed('http://169.254.0.1/foo') -+ -+ def test_link_local_https_is_not_allowed(self): -+ self.assert_host_is_not_allowed('https://169.254.0.1/foo') -+ -+ def test_non_link_local_nonallowed_url(self): -+ self.assert_host_is_not_allowed('http://169.1.2.3/foo') -+ -+ def test_error_raised_on_nonallowed_url(self): -+ self.assert_host_is_not_allowed('http://somewhere.com/foo') -+ -+ def test_external_host_not_allowed_if_https(self): -+ self.assert_host_is_not_allowed('https://somewhere.com/foo') -+ -+ -+class TestUnsigned(unittest.TestCase): -+ def test_copy_returns_same_object(self): -+ self.assertIs(botocore.UNSIGNED, copy.copy(botocore.UNSIGNED)) -+ -+ def test_deepcopy_returns_same_object(self): -+ self.assertIs(botocore.UNSIGNED, copy.deepcopy(botocore.UNSIGNED)) -+ -+ -+class TestInstanceMetadataFetcher(unittest.TestCase): -+ def setUp(self): -+ urllib3_session_send = 'botocore.httpsession.URLLib3Session.send' -+ self._urllib3_patch = mock.patch(urllib3_session_send) -+ self._send = self._urllib3_patch.start() -+ self._imds_responses = [] -+ self._send.side_effect = self.get_imds_response -+ self._role_name = 'role-name' -+ self._creds = { -+ 'AccessKeyId': 'spam', -+ 'SecretAccessKey': 'eggs', -+ 'Token': 'spam-token', -+ 'Expiration': 'something', -+ } -+ self._expected_creds = { -+ 'access_key': self._creds['AccessKeyId'], -+ 'secret_key': self._creds['SecretAccessKey'], -+ 'token': self._creds['Token'], -+ 'expiry_time': self._creds['Expiration'], -+ 'role_name': self._role_name -+ } -+ -+ def tearDown(self): -+ self._urllib3_patch.stop() -+ -+ def add_imds_response(self, body, status_code=200): -+ response = botocore.awsrequest.AWSResponse( -+ url='http://169.254.169.254/', -+ status_code=status_code, -+ headers={}, -+ raw=RawResponse(body) -+ ) -+ self._imds_responses.append(response) -+ -+ def add_get_role_name_imds_response(self, role_name=None): -+ if role_name is None: -+ role_name = self._role_name -+ self.add_imds_response(body=role_name.encode('utf-8')) -+ -+ def add_get_credentials_imds_response(self, creds=None): -+ if creds is None: -+ creds = self._creds -+ self.add_imds_response(body=json.dumps(creds).encode('utf-8')) -+ -+ def add_get_token_imds_response(self, token, status_code=200): -+ self.add_imds_response(body=token.encode('utf-8'), -+ status_code=status_code) -+ -+ def add_metadata_token_not_supported_response(self): -+ self.add_imds_response(b'', status_code=404) -+ -+ def add_imds_connection_error(self, exception): -+ self._imds_responses.append(exception) -+ -+ def get_imds_response(self, request): -+ response = self._imds_responses.pop(0) -+ if isinstance(response, 
Exception): -+ raise response -+ return response -+ -+ def test_disabled_by_environment(self): -+ env = {'AWS_EC2_METADATA_DISABLED': 'true'} -+ fetcher = InstanceMetadataFetcher(env=env) -+ result = fetcher.retrieve_iam_role_credentials() -+ self.assertEqual(result, {}) -+ self._send.assert_not_called() -+ -+ def test_disabled_by_environment_mixed_case(self): -+ env = {'AWS_EC2_METADATA_DISABLED': 'tRuE'} -+ fetcher = InstanceMetadataFetcher(env=env) -+ result = fetcher.retrieve_iam_role_credentials() -+ self.assertEqual(result, {}) -+ self._send.assert_not_called() -+ -+ def test_disabling_env_var_not_true(self): -+ url = 'https://example.com/' -+ env = {'AWS_EC2_METADATA_DISABLED': 'false'} -+ -+ self.add_get_token_imds_response(token='token') -+ self.add_get_role_name_imds_response() -+ self.add_get_credentials_imds_response() -+ -+ fetcher = InstanceMetadataFetcher(base_url=url, env=env) -+ result = fetcher.retrieve_iam_role_credentials() -+ -+ self.assertEqual(result, self._expected_creds) -+ -+ def test_includes_user_agent_header(self): -+ user_agent = 'my-user-agent' -+ self.add_get_token_imds_response(token='token') -+ self.add_get_role_name_imds_response() -+ self.add_get_credentials_imds_response() -+ -+ InstanceMetadataFetcher( -+ user_agent=user_agent).retrieve_iam_role_credentials() -+ -+ self.assertEqual(self._send.call_count, 3) -+ for call in self._send.calls: -+ self.assertTrue(call[0][0].headers['User-Agent'], user_agent) -+ -+ def test_non_200_response_for_role_name_is_retried(self): -+ # Response for role name that have a non 200 status code should -+ # be retried. -+ self.add_get_token_imds_response(token='token') -+ self.add_imds_response( -+ status_code=429, body=b'{"message": "Slow down"}') -+ self.add_get_role_name_imds_response() -+ self.add_get_credentials_imds_response() -+ result = InstanceMetadataFetcher( -+ num_attempts=2).retrieve_iam_role_credentials() -+ self.assertEqual(result, self._expected_creds) -+ -+ def test_http_connection_error_for_role_name_is_retried(self): -+ # Connection related errors should be retried -+ self.add_get_token_imds_response(token='token') -+ self.add_imds_connection_error(ConnectionClosedError(endpoint_url='')) -+ self.add_get_role_name_imds_response() -+ self.add_get_credentials_imds_response() -+ result = InstanceMetadataFetcher( -+ num_attempts=2).retrieve_iam_role_credentials() -+ self.assertEqual(result, self._expected_creds) -+ -+ def test_empty_response_for_role_name_is_retried(self): -+ # Response for role name that have a non 200 status code should -+ # be retried. -+ self.add_get_token_imds_response(token='token') -+ self.add_imds_response(body=b'') -+ self.add_get_role_name_imds_response() -+ self.add_get_credentials_imds_response() -+ result = InstanceMetadataFetcher( -+ num_attempts=2).retrieve_iam_role_credentials() -+ self.assertEqual(result, self._expected_creds) -+ -+ def test_non_200_response_is_retried(self): -+ self.add_get_token_imds_response(token='token') -+ self.add_get_role_name_imds_response() -+ # Response for creds that has a 200 status code but has an empty -+ # body should be retried. 
-+ self.add_imds_response( -+ status_code=429, body=b'{"message": "Slow down"}') -+ self.add_get_credentials_imds_response() -+ result = InstanceMetadataFetcher( -+ num_attempts=2).retrieve_iam_role_credentials() -+ self.assertEqual(result, self._expected_creds) -+ -+ def test_http_connection_errors_is_retried(self): -+ self.add_get_token_imds_response(token='token') -+ self.add_get_role_name_imds_response() -+ # Connection related errors should be retried -+ self.add_imds_connection_error(ConnectionClosedError(endpoint_url='')) -+ self.add_get_credentials_imds_response() -+ result = InstanceMetadataFetcher( -+ num_attempts=2).retrieve_iam_role_credentials() -+ self.assertEqual(result, self._expected_creds) -+ -+ def test_empty_response_is_retried(self): -+ self.add_get_token_imds_response(token='token') -+ self.add_get_role_name_imds_response() -+ # Response for creds that has a 200 status code but is empty. -+ # This should be retried. -+ self.add_imds_response(body=b'') -+ self.add_get_credentials_imds_response() -+ result = InstanceMetadataFetcher( -+ num_attempts=2).retrieve_iam_role_credentials() -+ self.assertEqual(result, self._expected_creds) -+ -+ def test_invalid_json_is_retried(self): -+ self.add_get_token_imds_response(token='token') -+ self.add_get_role_name_imds_response() -+ # Response for creds that has a 200 status code but is invalid JSON. -+ # This should be retried. -+ self.add_imds_response(body=b'{"AccessKey":') -+ self.add_get_credentials_imds_response() -+ result = InstanceMetadataFetcher( -+ num_attempts=2).retrieve_iam_role_credentials() -+ self.assertEqual(result, self._expected_creds) -+ -+ def test_exhaust_retries_on_role_name_request(self): -+ self.add_get_token_imds_response(token='token') -+ self.add_imds_response(status_code=400, body=b'') -+ result = InstanceMetadataFetcher( -+ num_attempts=1).retrieve_iam_role_credentials() -+ self.assertEqual(result, {}) -+ -+ def test_exhaust_retries_on_credentials_request(self): -+ self.add_get_token_imds_response(token='token') -+ self.add_get_role_name_imds_response() -+ self.add_imds_response(status_code=400, body=b'') -+ result = InstanceMetadataFetcher( -+ num_attempts=1).retrieve_iam_role_credentials() -+ self.assertEqual(result, {}) -+ -+ def test_missing_fields_in_credentials_response(self): -+ self.add_get_token_imds_response(token='token') -+ self.add_get_role_name_imds_response() -+ # Response for creds that has a 200 status code and a JSON body -+ # representing an error. We do not necessarily want to retry this. -+ self.add_imds_response( -+ body=b'{"Code":"AssumeRoleUnauthorizedAccess","Message":"error"}') -+ result = InstanceMetadataFetcher().retrieve_iam_role_credentials() -+ self.assertEqual(result, {}) -+ -+ def test_token_is_included(self): -+ user_agent = 'my-user-agent' -+ self.add_get_token_imds_response(token='token') -+ self.add_get_role_name_imds_response() -+ self.add_get_credentials_imds_response() -+ -+ result = InstanceMetadataFetcher( -+ user_agent=user_agent).retrieve_iam_role_credentials() -+ -+ # Check that subsequent calls after getting the token include the token. 
-+ self.assertEqual(self._send.call_count, 3) -+ for call in self._send.call_args_list[1:]: -+ self.assertEqual(call[0][0].headers['x-aws-ec2-metadata-token'], 'token') -+ self.assertEqual(result, self._expected_creds) -+ -+ def test_metadata_token_not_supported_404(self): -+ user_agent = 'my-user-agent' -+ self.add_imds_response(b'', status_code=404) -+ self.add_get_role_name_imds_response() -+ self.add_get_credentials_imds_response() -+ -+ result = InstanceMetadataFetcher( -+ user_agent=user_agent).retrieve_iam_role_credentials() -+ -+ for call in self._send.call_args_list[1:]: -+ self.assertNotIn('x-aws-ec2-metadata-token', call[0][0].headers) -+ self.assertEqual(result, self._expected_creds) -+ -+ def test_metadata_token_not_supported_403(self): -+ user_agent = 'my-user-agent' -+ self.add_imds_response(b'', status_code=403) -+ self.add_get_role_name_imds_response() -+ self.add_get_credentials_imds_response() -+ -+ result = InstanceMetadataFetcher( -+ user_agent=user_agent).retrieve_iam_role_credentials() -+ -+ for call in self._send.call_args_list[1:]: -+ self.assertNotIn('x-aws-ec2-metadata-token', call[0][0].headers) -+ self.assertEqual(result, self._expected_creds) -+ -+ def test_metadata_token_not_supported_405(self): -+ user_agent = 'my-user-agent' -+ self.add_imds_response(b'', status_code=405) -+ self.add_get_role_name_imds_response() -+ self.add_get_credentials_imds_response() -+ -+ result = InstanceMetadataFetcher( -+ user_agent=user_agent).retrieve_iam_role_credentials() -+ -+ for call in self._send.call_args_list[1:]: -+ self.assertNotIn('x-aws-ec2-metadata-token', call[0][0].headers) -+ self.assertEqual(result, self._expected_creds) -+ -+ def test_metadata_token_not_supported_timeout(self): -+ user_agent = 'my-user-agent' -+ self.add_imds_connection_error(ReadTimeoutError(endpoint_url='url')) -+ self.add_get_role_name_imds_response() -+ self.add_get_credentials_imds_response() -+ -+ result = InstanceMetadataFetcher( -+ user_agent=user_agent).retrieve_iam_role_credentials() -+ -+ for call in self._send.call_args_list[1:]: -+ self.assertNotIn('x-aws-ec2-metadata-token', call[0][0].headers) -+ self.assertEqual(result, self._expected_creds) -+ -+ def test_token_not_supported_exhaust_retries(self): -+ user_agent = 'my-user-agent' -+ self.add_imds_connection_error(ConnectTimeoutError(endpoint_url='url')) -+ self.add_get_role_name_imds_response() -+ self.add_get_credentials_imds_response() -+ -+ result = InstanceMetadataFetcher( -+ user_agent=user_agent).retrieve_iam_role_credentials() -+ -+ for call in self._send.call_args_list[1:]: -+ self.assertNotIn('x-aws-ec2-metadata-token', call[0][0].headers) -+ self.assertEqual(result, self._expected_creds) -+ -+ def test_metadata_token_bad_request_yields_no_credentials(self): -+ user_agent = 'my-user-agent' -+ self.add_imds_response(b'', status_code=400) -+ result = InstanceMetadataFetcher( -+ user_agent=user_agent).retrieve_iam_role_credentials() -+ self.assertEqual(result, {}) -+ -+ -+class TestSSOTokenLoader(unittest.TestCase): -+ def setUp(self): -+ super(TestSSOTokenLoader, self).setUp() -+ self.start_url = 'https://d-abc123.awsapps.com/start' -+ self.cache_key = '40a89917e3175433e361b710a9d43528d7f1890a' -+ self.access_token = 'totally.a.token' -+ self.cached_token = { -+ 'accessToken': self.access_token, -+ 'expiresAt': '2002-10-18T03:52:38UTC' -+ } -+ self.cache = {} -+ self.loader = SSOTokenLoader(cache=self.cache) -+ -+ def test_can_load_token_exists(self): -+ self.cache[self.cache_key] = self.cached_token -+ access_token = 
self.loader(self.start_url) -+ self.assertEqual(self.access_token, access_token) -+ -+ def test_can_handle_does_not_exist(self): -+ with self.assertRaises(SSOTokenLoadError): -+ access_token = self.loader(self.start_url) -+ -+ def test_can_handle_invalid_cache(self): -+ self.cache[self.cache_key] = {} -+ with self.assertRaises(SSOTokenLoadError): -+ access_token = self.loader(self.start_url) -diff -Nru botocore-1.18.15.orig/tests/unit/test_waiters.py botocore-1.18.15/tests/unit/test_waiters.py ---- botocore-1.18.15.orig/tests/unit/test_waiters.py 2020-10-08 20:05:12.000000000 +0200 -+++ botocore-1.18.15/tests/unit/test_waiters.py 2020-10-09 10:13:49.544472317 +0200 -@@ -13,7 +13,7 @@ - import os - from tests import unittest, BaseEnvVar - --import mock -+from tests import mock - - import botocore - from botocore.compat import six -@@ -389,7 +389,7 @@ - ) - waiter = Waiter('MyWaiter', config, operation_method) - -- with self.assertRaisesRegexp(WaiterError, error_message): -+ with six.assertRaisesRegex(self, WaiterError, error_message): - waiter.wait() - - def test_waiter_transitions_to_failure_state(self):
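The InstanceMetadataFetcher tests above also document the IMDSv2 token handling: a 404, 403 or 405 on the token request, or a timeout, means the fetcher falls back to IMDSv1 and later requests simply omit the x-aws-ec2-metadata-token header, while a 400 aborts credential retrieval entirely. A small sketch of that decision, using a hypothetical helper and response object rather than botocore's InstanceMetadataFetcher:

    def fetch_token_or_fall_back(put_token_request):
        try:
            response = put_token_request()
        except TimeoutError:
            return None  # token endpoint unreachable: proceed without a token
        if response.status_code in (404, 403, 405):
            return None  # IMDSv2 not supported: proceed without a token
        if response.status_code == 400:
            # Mirrors test_metadata_token_bad_request_yields_no_credentials.
            raise RuntimeError('bad token request; yield no credentials')
        return response.text  # sent later as x-aws-ec2-metadata-token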