diff --git a/.coveragerc b/.coveragerc
deleted file mode 100644
index 717d2f4..0000000
--- a/.coveragerc
+++ /dev/null
@@ -1,4 +0,0 @@
-[run]
-omit =
-    venv*
-    tests/TestUtils.py
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index 78c43e7..0000000
--- a/.gitignore
+++ /dev/null
@@ -1,24 +0,0 @@
-# IDE
-.idea/
-.vscode/
-
-# Virtualenv
-venv*/
-
-# Cache
-__pycache__
-*.pyc
-.cache/
-
-# Testing
-.pytest_cache
-test/
-.coverage
-htmlcov/
-temp-*/
-buckets.txt
-
-# Pip build
-build/
-dist/
-S3Scanner.egg-info/
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 1d3c68e..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,28 +0,0 @@
-language: python
-jobs:
-  include:
-  - python: '3.6'
-  - python: '3.7'
-  - python: '3.8'
-  - python: '3.9'
-    env:
-      - secure: "JShcKAHn4y57mTHDIV5+8dTRjE2cREJSswXAxhFf8jha+r58zF/uBfgXapzNh9u+dpvbVjF/N0/KxREubMTd4fduYTsxMOXyqENHnq7kVmRK6HXAAnM75JZzl1sonlHsIHHXxv45SuwYWX/fk6aMeBmkGukuvM8DGi4BEBzv0CnzEUmHlb5ZPKmQteemjhbn2d3yKPKagcieeDbSRhevGKPPmfnt0TqzpF/xrbtIL05yC+038Tesa0mZqV/HBrfZgSEtcMydIhbszhDjBwC3nzhhiC8AQJ8JGRPqR3nfTZRrHA0QMT3hr8XGpLouphvpDDwiotmOTRsGiBfONX+b2JDTx989eswIXmBsdua3pxjUNuLVTiRjl63+6zJSvT3mrJ1cZMRJPvbqYTY+mvckSMeDQv4oFZeD8QCD+z8zLa39GYfKBnapo0s+rvvxYyiVNZ9HQ1MExJyVleJWRMlmKtuNUHzHaCq+B8omcGZxhEfX4dVQ/RHwNRwkKbdUKOZy4muardhYorhVO+eLt4+bAipk8BEAXvIBwaAqbIN3+01a8TbTGKkxJUTllkf2Y7wFeF6IPtxvfpJ6Bgj4BNSpDrR/eoyIodG42J6Qdl4aK6/RQbI9vzUQ8yoSxQxzHHFZeclU2Qe5KZem3ztbexkiYB+Mv7oV/rr1LGixvbBsLzI="
-      - secure: "SLjBJJsmtbHZGwmZHHJTYk0qmlS6kcbur1SMM70+n/UEp55hAFo6Ae/n75G4RR0bVIVkgrJp3ZE9V/wZKVbOOaUaepyjZXfgRBjL/zBYjFgxgQhrLis3Bg+lR6qBoWifm/mfM+mUqHLDqelSbvpgE/oZLeM9vuYBYvI9LIZSeM3C6m+4ytKoayUgggq87lQRDr9d/YPpAZEYnT13mAqkd3zbovjLAEtALx6BOg5xZv7bHCx5WS5gz79CA+jFRjWU9q4ng5zyCERWFOeTcCYAHjxKXYOJYew8N/NYA2PFd+BiedQRHuIJAHg/auofchBewmtfHG6rgMZSuE+jzl1aB344zwpVJocR09kBXi6tk9KiASTrZMSHf31LEJFwciFnSsCTVQG7kVL0NdjZBEGO7zE1u5c4dxYEctDVvCz/kmH7hl70zajot9cYihh8VLvLwpGBYepTf2a+vhRwdxeZ81KuI3SeOBqNJwyT6wZMw9AfEOmK9LqyS9vBqusujwua+W/DDeqYFo99HkS2uMdX4/IfAB5DDhVakMrff8rrUuf1K1H6rtV7qckOHDET+wdjqymZkPD/mjjW+ibAattls4cZU3I7NRVsnNiZmXAT410A92y6JEiZPuG1djz/57yrqQ3S4KzVhgBq1t5WJRc84dUKCbYnwY4fDL4BH8lkync="
-  - name: "Python: 3.6"
-    os: windows
-    language: shell
-    before_install:
-      - choco install python --version=3.6.8
-      - python -m pip install -U pip setuptools
-    env: PATH=/c/Python36:/c/Python36/Scripts:$PATH
-    cache:
-      directories:
-        - $LOCALAPPDATA/pip/Cache
-
-cache: pip
-install:
-- pip install -r requirements.txt
-script:
-- pytest -s
-notifications:
-  email: false
diff --git a/Dockerfile b/Dockerfile
deleted file mode 100644
index aee9262..0000000
--- a/Dockerfile
+++ /dev/null
@@ -1,6 +0,0 @@
-FROM python:3.8-alpine
-COPY . /app
-WORKDIR /app
-RUN pip install boto3
-RUN pip install .
-ENTRYPOINT ["python", "-m", "S3Scanner"]
\ No newline at end of file
diff --git a/README.md b/PKG-INFO
similarity index 77%
rename from README.md
rename to PKG-INFO
index 15f4e9c..1fbc212 100644
--- a/README.md
+++ b/PKG-INFO
@@ -1,3 +1,24 @@
+Metadata-Version: 2.1
+Name: S3Scanner
+Version: 2.0.2
+Summary: Scan for open S3 buckets and dump the contents
+Home-page: https://github.com/sa7mon/S3Scanner
+Author: Dan Salmon
+Author-email: dan@salmon.cat
+License: UNKNOWN
+Project-URL: Bug Tracker, https://github.com/sa7mon/S3Scanner
+Platform: UNKNOWN
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Topic :: Security
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Requires-Python: >=3.6
+Description-Content-Type: text/markdown
+License-File: LICENSE
+
 # S3Scanner
 [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) [![Build Status](https://travis-ci.org/sa7mon/S3Scanner.svg?branch=master)](https://travis-ci.org/sa7mon/S3Scanner)
 
@@ -28,12 +49,13 @@ mode:
     dump                Dump the contents of buckets
 </pre>
 
-## Support
-🚀 If you've found this tool useful, please consider donating to support its development
+## 🚀 Support
+If you've found this tool useful, please consider donating to support its development. You can find sponsor options on the side of this repo page or in [FUNDING.yml](.github/FUNDING.yml).
 
-[![paypal](https://www.paypalobjects.com/en_US/i/btn/btn_donateCC_LG.gif)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=XG5BGLQZPJ9H8)
+<div align="center"><a href="https://www.tines.com/?utm_source=oss&utm_medium=sponsorship&utm_campaign=s3scanner"><img src="https://user-images.githubusercontent.com/3712226/146481766-a331b010-29c4-4537-ac30-9a4b4aad06b3.png" height=50 width=140></a></div>
 
-[![ko-fi](https://ko-fi.com/img/githubbutton_sm.svg)](https://ko-fi.com/B0B54D93O)
+<p align="center">Huge thank you to <a href="https://www.tines.com/?utm_source=oss&utm_medium=sponsorship&utm_campaign=s3scanner">tines</a> for being an ongoing sponsor of this project.</p>
+
 
 ## Installation
 
@@ -133,4 +155,5 @@ Any or all of these permissions can be set for the 2 main user groups:
 
 ## License
 
-MIT
\ No newline at end of file
+MIT
+
diff --git a/S3Scanner.egg-info/PKG-INFO b/S3Scanner.egg-info/PKG-INFO
new file mode 100644
index 0000000..1fbc212
--- /dev/null
+++ b/S3Scanner.egg-info/PKG-INFO
@@ -0,0 +1,159 @@
+Metadata-Version: 2.1
+Name: S3Scanner
+Version: 2.0.2
+Summary: Scan for open S3 buckets and dump the contents
+Home-page: https://github.com/sa7mon/S3Scanner
+Author: Dan Salmon
+Author-email: dan@salmon.cat
+License: UNKNOWN
+Project-URL: Bug Tracker, https://github.com/sa7mon/S3Scanner
+Platform: UNKNOWN
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Topic :: Security
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Requires-Python: >=3.6
+Description-Content-Type: text/markdown
+License-File: LICENSE
+
+# S3Scanner
+[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) [![Build Status](https://travis-ci.org/sa7mon/S3Scanner.svg?branch=master)](https://travis-ci.org/sa7mon/S3Scanner)
+
+A tool to find open S3 buckets and dump their contents 💧
+
+<img src="https://user-images.githubusercontent.com/3712226/115632654-d4f8c280-a2cd-11eb-87ee-c70bbd4f1edb.png" width="85%"/>
+
+## Usage
+<pre>
+usage: s3scanner [-h] [--version] [--threads n] [--endpoint-url ENDPOINT_URL] [--endpoint-address-style {path,vhost}] [--insecure] {scan,dump} ...
+
+s3scanner: Audit unsecured S3 buckets
+           by Dan Salmon - github.com/sa7mon, @bltjetpack
+
+optional arguments:
+  -h, --help            show this help message and exit
+  --version             Display the current version of this tool
+  --threads n, -t n     Number of threads to use. Default: 4
+  --endpoint-url ENDPOINT_URL, -u ENDPOINT_URL
+                        URL of S3-compliant API. Default: https://s3.amazonaws.com
+  --endpoint-address-style {path,vhost}, -s {path,vhost}
+                        Address style to use for the endpoint. Default: path
+  --insecure, -i        Do not verify SSL
+
+mode:
+  {scan,dump}           (Must choose one)
+    scan                Scan bucket permissions
+    dump                Dump the contents of buckets
+</pre>
+
+## 🚀 Support
+If you've found this tool useful, please consider donating to support its development. You can find sponsor options on the side of this repo page or in [FUNDING.yml](.github/FUNDING.yml).
+
+<div align="center"><a href="https://www.tines.com/?utm_source=oss&utm_medium=sponsorship&utm_campaign=s3scanner"><img src="https://user-images.githubusercontent.com/3712226/146481766-a331b010-29c4-4537-ac30-9a4b4aad06b3.png" height=50 width=140></a></div>
+
+<p align="center">Huge thank you to <a href="https://www.tines.com/?utm_source=oss&utm_medium=sponsorship&utm_campaign=s3scanner">tines</a> for being an ongoing sponsor of this project.</p>
+
+
+## Installation
+
+```shell
+pip3 install s3scanner
+```
+
+or via Docker:
+
+```shell
+docker build . -t s3scanner:latest
+docker run --rm s3scanner:latest scan --bucket my-bucket
+```
+
+or from source:
+
+```shell
+git clone git@github.com:sa7mon/S3Scanner.git
+cd S3Scanner
+pip3 install -r requirements.txt
+python3 -m S3Scanner
+```
+
+## Features
+
+* ⚡️ Multi-threaded scanning
+* 🔭 Supports tons of S3-compatible APIs
+* 🕵️‍♀️ Scans all bucket permissions to find misconfigurations
+* 💾 Dump bucket contents to a local folder
+* 🐳 Docker support
+
+## Examples
+
+* Scan AWS buckets listed in a file with 8 threads
+  ```shell
+  $ s3scanner --threads 8 scan --buckets-file ./bucket-names.txt
+  ```
+* Scan a bucket in DigitalOcean Spaces
+  ```shell
+  $ s3scanner --endpoint-url https://sfo2.digitaloceanspaces.com scan --bucket my-bucket
+  ```
+* Dump a single AWS bucket
+  ```shell
+  $ s3scanner dump --bucket my-bucket-to-dump
+  ```
+* Scan a single Dreamhost Objects bucket which uses the vhost address style and an invalid SSL cert
+  ```shell
+  $ s3scanner --endpoint-url https://objects.dreamhost.com --endpoint-address-style vhost --insecure scan --bucket my-bucket
+  ```
+
+## S3-compatible APIs
+
+`S3Scanner` can scan and dump buckets in S3-compatible services other than AWS by using the
+`--endpoint-url` argument. Depending on the service, you may also need the `--endpoint-address-style`
+or `--insecure` arguments.
+
+Some services have different endpoints corresponding to different regions; see the table and example below.
+
+**Note:** `S3Scanner` currently only supports scanning for anonymous user permissions on non-AWS services.
+
+| Service | Example Endpoint | Address Style | Insecure ? |
+|---------|------------------|:-------------:|:----------:|
+| DigitalOcean Spaces (SFO2 region) | https://sfo2.digitaloceanspaces.com | path | No |  
+| Dreamhost | https://objects.dreamhost.com | vhost | Yes |
+| Linode Object Storage (eu-central-1 region) | https://eu-central-1.linodeobjects.com | vhost | No |
+| Scaleway Object Storage (nl-ams region) | https://s3.nl-ams.scw.cloud | path | No |
+| Wasabi Cloud Storage | http://s3.wasabisys.com/ | path | Yes |
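+
+For instance, the Wasabi row above maps to a command like the following (the bucket name is hypothetical):
+
+```shell
+$ s3scanner --endpoint-url http://s3.wasabisys.com --insecure scan --bucket my-bucket
+```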
+
+📚 Current status of non-AWS APIs can be found [in the project wiki](https://github.com/sa7mon/S3Scanner/wiki/S3-Compatible-APIs)
+
+## Interpreting Results
+
+This tool will attempt to get all available information about a bucket, but it's up to you to interpret the results.
+
+[Possible permissions](https://docs.aws.amazon.com/AmazonS3/latest/user-guide/set-bucket-permissions.html) for buckets:
+
+* Read - List and view all files
+* Write - Write files to bucket
+* Read ACP - Read all Access Control Policies attached to bucket
+* Write ACP - Write Access Control Policies to bucket
+* Full Control - All above permissions
+
+Any or all of these permissions can be set for the 2 main user groups:
+* Authenticated Users
+* Public Users (those without AWS credentials set)
+* Individual users/groups (out of scope of this tool)
+
+**What this means:** Just because a bucket doesn't allow reading/writing ACLs doesn't mean you can't read/write files in the bucket. Conversely, you may be able to list ACLs but not read/write to the bucket.
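+
+For example, a scan result of `flaws.cloud | bucket_exists | AuthUsers: [], AllUsers: [Read]` means the bucket exists, anonymous (public) users can list its contents, and no other permissions were detected for either group.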
+
+## Contributors
+* [Ohelig](https://github.com/Ohelig)
+* [vysecurity](https://github.com/vysecurity)
+* [janmasarik](https://github.com/janmasarik)
+* [alanyee](https://github.com/alanyee)
+* [klau5dev](https://github.com/klau5dev)
+* [hipotermia](https://github.com/hipotermia)
+
+## License
+
+MIT
+
diff --git a/S3Scanner.egg-info/SOURCES.txt b/S3Scanner.egg-info/SOURCES.txt
new file mode 100644
index 0000000..ba59823
--- /dev/null
+++ b/S3Scanner.egg-info/SOURCES.txt
@@ -0,0 +1,15 @@
+LICENSE
+README.md
+pyproject.toml
+setup.cfg
+S3Scanner/S3Bucket.py
+S3Scanner/S3Service.py
+S3Scanner/__init__.py
+S3Scanner/__main__.py
+S3Scanner/exceptions.py
+S3Scanner.egg-info/PKG-INFO
+S3Scanner.egg-info/SOURCES.txt
+S3Scanner.egg-info/dependency_links.txt
+S3Scanner.egg-info/entry_points.txt
+S3Scanner.egg-info/requires.txt
+S3Scanner.egg-info/top_level.txt
\ No newline at end of file
diff --git a/S3Scanner.egg-info/dependency_links.txt b/S3Scanner.egg-info/dependency_links.txt
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/S3Scanner.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/S3Scanner.egg-info/entry_points.txt b/S3Scanner.egg-info/entry_points.txt
new file mode 100644
index 0000000..edd2bca
--- /dev/null
+++ b/S3Scanner.egg-info/entry_points.txt
@@ -0,0 +1,2 @@
+[console_scripts]
+s3scanner = S3Scanner.__main__:main
diff --git a/S3Scanner.egg-info/requires.txt b/S3Scanner.egg-info/requires.txt
new file mode 100644
index 0000000..1e1f9b5
--- /dev/null
+++ b/S3Scanner.egg-info/requires.txt
@@ -0,0 +1 @@
+boto3>=1.20
diff --git a/S3Scanner.egg-info/top_level.txt b/S3Scanner.egg-info/top_level.txt
new file mode 100644
index 0000000..2b8f2ab
--- /dev/null
+++ b/S3Scanner.egg-info/top_level.txt
@@ -0,0 +1 @@
+S3Scanner
diff --git a/S3Scanner/S3Service.py b/S3Scanner/S3Service.py
index 64788ec..2105703 100644
--- a/S3Scanner/S3Service.py
+++ b/S3Scanner/S3Service.py
@@ -2,7 +2,7 @@
     This will be a service that the client program will instantiate to then call methods
     passing buckets
 """
-from boto3 import client  # TODO: Limit import to just boto3.client, probably
+from boto3 import client, session as boto_session  # TODO: limit imports to just what's needed
 from S3Scanner.S3Bucket import S3Bucket, BucketExists, Permission, S3BucketObject
 from botocore.exceptions import ClientError
 import botocore.session
@@ -10,11 +10,12 @@ from botocore import UNSIGNED
 from botocore.client import Config
 import datetime
 from S3Scanner.exceptions import AccessDeniedException, InvalidEndpointException, BucketMightNotExistException
-from os.path import normpath
 import pathlib
 from concurrent.futures import ThreadPoolExecutor, as_completed
 from functools import partial
 from urllib3 import disable_warnings
+import os
+
 
 ALL_USERS_URI = 'uri=http://acs.amazonaws.com/groups/global/AllUsers'
 AUTH_USERS_URI = 'uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers'
@@ -22,7 +23,7 @@ AUTH_USERS_URI = 'uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers'
 
 class S3Service:
     def __init__(self, forceNoCreds=False, endpoint_url='https://s3.amazonaws.com', verify_ssl=True,
-                 endpoint_address_style='path'):
+                 endpoint_address_style='path', profile='default'):
         """
         Service constructor
 
@@ -48,7 +49,13 @@ class S3Service:
                 raise InvalidEndpointException(message=f"Endpoint '{self.endpoint_url}' does not appear to be S3-compliant")
 
         # Check for AWS credentials
-        session = botocore.session.get_session()
+        session = boto_session.Session()
+        if profile in session.available_profiles:  # use the provided profile, if it is available
+            session = boto_session.Session(profile_name=profile)
+        elif profile != 'default':  # only fail hard when a non-default profile was explicitly requested
+            print(f"Error: profile \"{profile}\" not found in ~/.aws/credentials")
+            exit(1)
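+        # Illustrative usage (hypothetical profile name): S3Service(profile='pentest')
+        # signs requests using the [pentest] section of ~/.aws/credentials.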
+
         if forceNoCreds or session.get_credentials() is None or session.get_credentials().access_key is None:
             self.aws_creds_configured = False
             self.s3_client = client('s3',
@@ -57,7 +64,7 @@ class S3Service:
                                           endpoint_url=self.endpoint_url, use_ssl=use_ssl, verify=verify_ssl)
         else:
             self.aws_creds_configured = True
-            self.s3_client = client('s3', config=Config(s3={'addressing_style': self.endpoint_address_style}, connect_timeout=3,
+            self.s3_client = session.client('s3', config=Config(s3={'addressing_style': self.endpoint_address_style}, connect_timeout=3,
                                          retries={'max_attempts': 2}),
                                           endpoint_url=self.endpoint_url, use_ssl=use_ssl, verify=verify_ssl)
 
@@ -284,7 +291,7 @@ class S3Service:
 
             for future in as_completed(futures):
                 if future.exception():
-                    print(f"{bucket.name} | Download failed: {futures[future]}")
+                    print(f"{bucket.name} | Download failed: {futures[future]} | {future.exception()}")
 
         print(f"{bucket.name} | Dumping completed")
 
@@ -292,13 +299,17 @@ class S3Service:
         """
         Download `obj` from `bucket` into `dest_directory`
 
-        :param str dest_directory: Directory to store the object into
+        :param str dest_directory: Directory to store the object into. Joined with the object key via os.path.join, so a trailing slash is optional
         :param S3Bucket bucket: Bucket to download the object from
         :param bool verbose: Output verbose messages to the user
         :param S3BucketObject obj: Object to download
         :return: None
         """
-        dest_file_path = pathlib.Path(normpath(dest_directory + obj.key))
+        dest_file_path = pathlib.Path(os.path.normpath(os.path.join(dest_directory, obj.key)))
+
+        if not self.is_safe_file_to_download(obj.key, dest_directory):
+            print(f"{bucket.name} | Skipping file {obj.key}. File references a parent directory.")
+            return
         if dest_file_path.exists():
             if dest_file_path.stat().st_size == obj.size:
                 if verbose:
@@ -342,6 +353,20 @@ class S3Service:
                 raise AccessDeniedException("AccessDenied while enumerating bucket objects")
         bucket.objects_enumerated = True
 
+    def is_safe_file_to_download(self, file_to_check, dest_directory):
+        """
+        Check if bucket object would be saved outside of `dest_directory` if downloaded.
+        AWS allows object keys to include relative path characters like '../' which can lead to a
+        path traversal-like issue where objects get saved outside of the intended directory.
+
+        :param str file_to_check: Bucket object key
+        :param str dest_directory: Path to directory to save file in
+        :return: bool
+        """
+        file_to_check = os.path.abspath(os.path.join(dest_directory, file_to_check))
+        safe_dir = os.path.abspath(dest_directory)
+        return os.path.commonpath([safe_dir]) == os.path.commonpath([safe_dir, file_to_check])
+
     def parse_found_acl(self, bucket):
         """
         Translate ACL grants into permission properties. If we were able to read the ACLs, we should be able to skip
diff --git a/S3Scanner/__main__.py b/S3Scanner/__main__.py
index aeeee36..c044880 100644
--- a/S3Scanner/__main__.py
+++ b/S3Scanner/__main__.py
@@ -16,7 +16,7 @@ from .S3Service import S3Service
 from concurrent.futures import ThreadPoolExecutor, as_completed
 from .exceptions import InvalidEndpointException
 
-CURRENT_VERSION = '2.0.1'
+CURRENT_VERSION = '2.0.2'
 AWS_ENDPOINT = 'https://s3.amazonaws.com'
 
 
@@ -127,6 +127,7 @@ def main():
     parser.add_argument('--endpoint-address-style', '-s', dest='endpoint_address_style', choices=['path', 'vhost'],
                         default='path', help='Address style to use for the endpoint. Default: path')
     parser.add_argument('--insecure', '-i', dest='verify_ssl', action='store_false', help='Do not verify SSL')
+    parser.add_argument('--profile', '-p', dest='aws_profile', default='default', help='AWS profile to use (defaults to `default`)')
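+    # e.g. `s3scanner --profile pentest scan --bucket my-bucket` uses the [pentest]
+    # section of ~/.aws/credentials (profile and bucket names here are hypothetical)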
     subparsers = parser.add_subparsers(title='mode', dest='mode', help='(Must choose one)')
 
     # Scan mode
@@ -160,7 +161,7 @@ def main():
     s3service = None
     anons3service = None
     try:
-        s3service = S3Service(endpoint_url=args.endpoint_url, verify_ssl=args.verify_ssl, endpoint_address_style=args.endpoint_address_style)
+        s3service = S3Service(endpoint_url=args.endpoint_url, verify_ssl=args.verify_ssl, endpoint_address_style=args.endpoint_address_style, profile=args.aws_profile)
         anons3service = S3Service(forceNoCreds=True, endpoint_url=args.endpoint_url, verify_ssl=args.verify_ssl, endpoint_address_style=args.endpoint_address_style)
     except InvalidEndpointException as e:
         print(f"Error: {e.message}")
diff --git a/conftest.py b/conftest.py
deleted file mode 100644
index 33b565c..0000000
--- a/conftest.py
+++ /dev/null
@@ -1,15 +0,0 @@
-####
-# Pytest Configuration
-####
-
-
-def pytest_addoption(parser):
-    parser.addoption("--do-dangerous", action="store_true",
-                     help="Run all tests, including ones where buckets are created.")
-
-
-def pytest_generate_tests(metafunc):
-    if "do_dangerous_test" in metafunc.fixturenames:
-        do_dangerous_test = True if metafunc.config.getoption("do_dangerous") else False
-        print("do_dangerous_test: " + str(do_dangerous_test))
-        metafunc.parametrize("do_dangerous_test", [do_dangerous_test])
\ No newline at end of file
diff --git a/debian/changelog b/debian/changelog
index 2029e9f..d77f066 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+s3scanner (2.0.2+git20220131.1.6a67603-0kali1) UNRELEASED; urgency=low
+
+  * New upstream snapshot.
+
+ -- Kali Janitor <janitor@kali.org>  Fri, 18 Feb 2022 10:45:29 -0000
+
 s3scanner (2.0.1-0kali1) kali-dev; urgency=medium
 
   * Initial release (see 4611)
diff --git a/debian/patches/Add-a-setup.py.patch b/debian/patches/Add-a-setup.py.patch
index 4f79940..9ace489 100644
--- a/debian/patches/Add-a-setup.py.patch
+++ b/debian/patches/Add-a-setup.py.patch
@@ -12,11 +12,10 @@ Last-Update: 2021-08-20
  1 file changed, 34 insertions(+)
  create mode 100644 setup.py
 
-diff --git a/setup.py b/setup.py
-new file mode 100644
-index 0000000..b094c7f
+Index: s3scanner/setup.py
+===================================================================
 --- /dev/null
-+++ b/setup.py
++++ s3scanner/setup.py
 @@ -0,0 +1,34 @@
 +from setuptools import setup, find_packages
 +
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index ffce0ec..0000000
--- a/requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-pytest-xdist
-boto3
\ No newline at end of file
diff --git a/setup.cfg b/setup.cfg
index 818f41f..5ce9027 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,33 +1,38 @@
 [metadata]
 name = S3Scanner
-version = 2.0.1
+version = 2.0.2
 author = Dan Salmon
 author_email = dan@salmon.cat
 description = Scan for open S3 buckets and dump the contents
 long_description = file: README.md
 long_description_content_type = text/markdown
 url = https://github.com/sa7mon/S3Scanner
-project_urls =
-    Bug Tracker = https://github.com/sa7mon/S3Scanner
-classifiers =
-    Programming Language :: Python :: 3.6
-    Programming Language :: Python :: 3.7
-    Programming Language :: Python :: 3.8
-    Programming Language :: Python :: 3.9
-    Topic :: Security
-    License :: OSI Approved :: MIT License
-    Operating System :: OS Independent
+project_urls = 
+	Bug Tracker = https://github.com/sa7mon/S3Scanner
+classifiers = 
+	Programming Language :: Python :: 3.6
+	Programming Language :: Python :: 3.7
+	Programming Language :: Python :: 3.8
+	Programming Language :: Python :: 3.9
+	Topic :: Security
+	License :: OSI Approved :: MIT License
+	Operating System :: OS Independent
 
 [options]
 packages = S3Scanner
-install_requires =
-    boto3
+install_requires = 
+	boto3>=1.20
 python_requires = >=3.6
 
 [options.entry_points]
-console_scripts =
-    s3scanner = S3Scanner.__main__:main
+console_scripts = 
+	s3scanner = S3Scanner.__main__:main
 
 [tool:pytest]
-python_files=test_*.py
-filterwarnings = ignore::pytest.PytestCollectionWarning
\ No newline at end of file
+python_files = test_*.py
+filterwarnings = ignore::pytest.PytestCollectionWarning
+
+[egg_info]
+tag_build = 
+tag_date = 0
+
diff --git a/tests/TestUtils.py b/tests/TestUtils.py
deleted file mode 100644
index efa541a..0000000
--- a/tests/TestUtils.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import random
-import string
-import boto3
-
-
-class TestBucketService:
-    def __init__(self):
-        self.session = boto3.Session(profile_name='privileged')
-        self.s3_client = self.session.client('s3')
-
-    @staticmethod
-    def generate_random_bucket_name(length=40):
-        candidates = string.ascii_lowercase + string.digits
-        return 's3scanner-' + ''.join(random.choice(candidates) for i in range(length))
-
-    def delete_bucket(self, bucket_name):
-        self.s3_client.delete_bucket(Bucket=bucket_name)
-
-    def create_bucket(self, danger_bucket):
-        bucket_name = self.generate_random_bucket_name()
-
-        # For type descriptions, refer to: https://github.com/sa7mon/S3Scanner/wiki/Test-Buckets
-        if danger_bucket == 1:
-            self.s3_client.create_bucket(Bucket=bucket_name,
-                                         GrantWrite='uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers')
-            self.s3_client.put_bucket_acl(Bucket=bucket_name,
-                                          GrantWrite='uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers',
-                                          GrantWriteACP='uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers')
-        elif danger_bucket == 2:
-            self.s3_client.create_bucket(Bucket=bucket_name,
-                                         GrantWrite='uri=http://acs.amazonaws.com/groups/global/AllUsers',
-                                         GrantWriteACP='uri=http://acs.amazonaws.com/groups/global/AllUsers')
-        elif danger_bucket == 3:
-            self.s3_client.create_bucket(Bucket=bucket_name,
-                                         GrantRead='uri=http://acs.amazonaws.com/groups/global/AllUsers',
-                                         GrantWrite='uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers',
-                                         GrantWriteACP='uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers')
-        elif danger_bucket == 4:
-            self.s3_client.create_bucket(Bucket=bucket_name,
-                                         GrantWrite='uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers,'
-                                                    'uri=http://acs.amazonaws.com/groups/global/AllUsers')
-        elif danger_bucket == 5:
-            self.s3_client.create_bucket(Bucket=bucket_name,
-                                         GrantWriteACP='uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers,'
-                                                       'uri=http://acs.amazonaws.com/groups/global/AllUsers')
-        else:
-            raise Exception("Unknown danger bucket type")
-
-        return bucket_name
diff --git a/tests/test_bucket.py b/tests/test_bucket.py
deleted file mode 100644
index c975f92..0000000
--- a/tests/test_bucket.py
+++ /dev/null
@@ -1,48 +0,0 @@
-from S3Scanner.S3Bucket import S3Bucket, S3BucketObject, Permission
-
-"""
-Tests for S3Bucket class go here
-"""
-
-
-def test_invalid_bucket_name():
-    try:
-        S3Bucket(name="asdf,;0()")
-    except ValueError as ve:
-        if str(ve) != "Invalid bucket name":
-            raise ve
-
-
-def test_s3_bucket_object():
-    o1 = S3BucketObject(key='index.html', size=8096, last_modified='2018-03-02T08:10:25.000Z')
-    o2 = S3BucketObject(key='home.html', size=2, last_modified='2018-03-02T08:10:25.000Z')
-
-    assert o1 != o2
-    assert o2 < o1  # test __lt__ method which compares keys
-    assert str(o1) == "Key: index.html, Size: 8096, LastModified: 2018-03-02T08:10:25.000Z"
-    assert o1.get_human_readable_size() == "7.9KB"
-
-
-def test_check_bucket_name():
-    S3Bucket(name="asdfasdf.s3.amazonaws.com")
-    S3Bucket(name="asdf:us-west-1")
-
-
-def test_get_human_readable_permissions():
-    b = S3Bucket(name='asdf')
-    b.AllUsersRead = Permission.ALLOWED
-    b.AllUsersWrite = Permission.ALLOWED
-    b.AllUsersReadACP = Permission.ALLOWED
-    b.AllUsersWriteACP = Permission.ALLOWED
-    b.AuthUsersRead = Permission.ALLOWED
-    b.AuthUsersWrite = Permission.ALLOWED
-    b.AuthUsersReadACP = Permission.ALLOWED
-    b.AuthUsersWriteACP = Permission.ALLOWED
-
-    b.get_human_readable_permissions()
-
-    b.AllUsersFullControl = Permission.ALLOWED
-    b.AuthUsersFullControl = Permission.ALLOWED
-
-    b.get_human_readable_permissions()
-
diff --git a/tests/test_scanner.py b/tests/test_scanner.py
deleted file mode 100644
index a892a2c..0000000
--- a/tests/test_scanner.py
+++ /dev/null
@@ -1,139 +0,0 @@
-import sys
-import subprocess
-import os
-import time
-import shutil
-
-from S3Scanner.S3Service import S3Service
-
-
-def test_arguments():
-    s = S3Service()
-
-    a = subprocess.run([sys.executable, '-m', 'S3Scanner', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    assert a.stdout.decode('utf-8').strip() == '2.0.1'
-
-    b = subprocess.run([sys.executable, '-m', 'S3Scanner', 'scan', '--bucket', 'flaws.cloud'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    assert_scanner_output(s, 'flaws.cloud | bucket_exists | AuthUsers: [], AllUsers: [Read]', b.stdout.decode('utf-8').strip())
-
-    c = subprocess.run([sys.executable, '-m', 'S3Scanner', 'scan', '--bucket', 'asdfasdf---,'], stdout=subprocess.PIPE,
-                       stderr=subprocess.PIPE)
-    assert_scanner_output(s, 'asdfasdf---, | bucket_invalid_name', c.stdout.decode('utf-8').strip())
-
-    d = subprocess.run([sys.executable, '-m', 'S3Scanner', 'scan', '--bucket', 'isurehopethisbucketdoesntexistasdfasdf'],
-                       stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    assert_scanner_output(s, 'isurehopethisbucketdoesntexistasdfasdf | bucket_not_exist', d.stdout.decode('utf-8').strip())
-
-    e = subprocess.run([sys.executable, '-m', 'S3Scanner', 'scan', '--bucket', 'flaws.cloud', '--dangerous'],
-                       stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    assert_scanner_output(s, f"INFO: Including dangerous checks. WARNING: This may change bucket ACL destructively{os.linesep}flaws.cloud | bucket_exists | AuthUsers: [], AllUsers: [Read]", e.stdout.decode('utf-8').strip())
-
-    f = subprocess.run([sys.executable, '-m', 'S3Scanner', 'dump', '--bucket', 'flaws.cloud', '--dump-dir', './asfasdf'],
-                       stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    assert_scanner_output(s, "Error: Given --dump-dir does not exist or is not a directory", f.stdout.decode('utf-8').strip())
-
-    # Create temp folder to dump into
-    test_folder = os.path.join(os.getcwd(), 'testing_' + str(time.time())[0:10], '')
-    os.mkdir(test_folder)
-
-    try:
-        f = subprocess.run([sys.executable, '-m', 'S3Scanner', 'dump', '--bucket', 'flaws.cloud', '--dump-dir', test_folder],
-                           stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        assert_scanner_output(s, f"flaws.cloud | Enumerating bucket objects...{os.linesep}flaws.cloud | Total Objects: 7, Total Size: 25.0KB{os.linesep}flaws.cloud | Dumping contents using 4 threads...{os.linesep}flaws.cloud | Dumping completed", f.stdout.decode('utf-8').strip())
-
-        g = subprocess.run([sys.executable, '-m', 'S3Scanner', 'dump', '--bucket', 'asdfasdf,asdfasd,', '--dump-dir', test_folder],
-                           stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        assert_scanner_output(s, "asdfasdf,asdfasd, | bucket_name_invalid", g.stdout.decode('utf-8').strip())
-
-        h = subprocess.run([sys.executable, '-m', 'S3Scanner', 'dump', '--bucket', 'isurehopethisbucketdoesntexistasdfasdf', '--dump-dir', test_folder],
-                           stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        assert_scanner_output(s, 'isurehopethisbucketdoesntexistasdfasdf | bucket_not_exist', h.stdout.decode('utf-8').strip())
-    finally:
-        shutil.rmtree(test_folder)  # Cleanup the testing folder
-
-
-def test_endpoints():
-    """
-    Test the handling of non-AWS endpoints
-    :return:
-    """
-    s = S3Service()
-    b = subprocess.run([sys.executable, '-m', 'S3Scanner', '--endpoint-url', 'https://sfo2.digitaloceanspaces.com',
-                        'scan', '--bucket', 's3scanner'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    assert_scanner_output(s, 's3scanner | bucket_not_exist',
-                          b.stdout.decode('utf-8').strip())
-
-    c = subprocess.run([sys.executable, '-m', 'S3Scanner', '--endpoint-url', 'http://example.com', 'scan', '--bucket',
-                        's3scanner'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    assert c.stdout.decode('utf-8').strip() == "Error: Endpoint 'http://example.com' does not appear to be S3-compliant"
-
-
-def assert_scanner_output(service, expected_output, found_output):
-    """
-    If the tests are run without AWS creds configured, all the output from scanner.py will have a warning banner.
-    This is a convenience method to simplify comparing the expected output to the found output
-
-    :param service: s3service
-    :param expected_output: string
-    :param found_output: string
-    :return: boolean
-    """
-    creds_warning = "Warning: AWS credentials not configured - functionality will be limited. Run: `aws configure` to fix this."
-
-    if service.aws_creds_configured:
-        assert expected_output == found_output
-    else:
-        assert f"{creds_warning}{os.linesep}{os.linesep}{expected_output}" == found_output
-
-
-def test_check_aws_creds():
-    """
-    Scenario checkAwsCreds.1 - Output of checkAwsCreds() matches a more intense check for creds
-    """
-    print("test_checkAwsCreds temporarily disabled.")
-
-    # test_setup()
-    #
-    # # Check more thoroughly for creds being set.
-    # vars = os.environ
-    #
-    # keyid = vars.get("AWS_ACCESS_KEY_ID")
-    # key = vars.get("AWS_SECRET_ACCESS_KEY")
-    # credsFile = os.path.expanduser("~") + "/.aws/credentials"
-    #
-    # if keyid is not None and len(keyid) == 20:
-    #     if key is not None and len(key) == 40:
-    #         credsActuallyConfigured = True
-    #     else:
-    #         credsActuallyConfigured = False
-    # else:
-    #     credsActuallyConfigured = False
-    #
-    # if os.path.exists(credsFile):
-    #     print("credsFile path exists")
-    #     if not credsActuallyConfigured:
-    #         keyIdSet = None
-    #         keySet = None
-    #
-    #         # Check the ~/.aws/credentials file
-    #         with open(credsFile, "r") as f:
-    #             for line in f:
-    #                 line = line.strip()
-    #                 if line[0:17].lower() == 'aws_access_key_id':
-    #                     if len(line) >= 38:  # key + value = length of at least 38 if no spaces around equals
-    #                         keyIdSet = True
-    #                     else:
-    #                         keyIdSet = False
-    #
-    #                 if line[0:21].lower() == 'aws_secret_access_key':
-    #                     if len(line) >= 62:
-    #                         keySet = True
-    #                     else:
-    #                         keySet = False
-    #
-    #         if keyIdSet and keySet:
-    #             credsActuallyConfigured = True
-    #
-    # # checkAwsCreds.1
-    # assert s3.checkAwsCreds() == credsActuallyConfigured
-
diff --git a/tests/test_service.py b/tests/test_service.py
deleted file mode 100644
index 3502743..0000000
--- a/tests/test_service.py
+++ /dev/null
@@ -1,587 +0,0 @@
-import os
-
-import pytest
-
-from S3Scanner.S3Service import S3Service
-from S3Scanner.S3Bucket import BucketExists, Permission, S3BucketObject, S3Bucket
-from TestUtils import TestBucketService
-from S3Scanner.exceptions import AccessDeniedException, BucketMightNotExistException
-from pathlib import Path
-from urllib3 import disable_warnings
-
-testingFolder = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'test/')
-setupRan = False
-
-
-"""
-S3Service.py methods to test:
-
-- init()
-- init()
-    - ✔️ Test service.aws_creds_configured is false when forceNoCreds = True
-- check_bucket_exists()
-    - ✔️ Test against a bucket that exists
-    - ✔️ Test against one that doesn't
-- check_perm_read_acl()
-    - ✔️ Test against bucket with AllUsers allowed
-    - ✔️ Test against bucket with AuthUsers allowed
-    - ✔️ Test against bucket with all denied
-- check_perm_read()
-    - ✔️ Test against bucket with AuthUsers read permission
-    - ✔️ Test against bucket with AllUsers read permission
-    - ✔️ Test against bucket with no read permission
-- check_perm_write()
-    - ✔️ Test against bucket with no write permissions
-    - ✔️ Test against bucket with AuthUsers write permission
-    - ✔️ Test against bucket with AllUsers write permission
-    - ✔️ Test against bucket with AllUsers and AuthUsers write permission
-- check_perm_write_acl()
-    - ✔️ Test against bucket with AllUsers allowed
-    - ✔️ Test against bucket with AuthUsers allowed
-    - ✔️ Test against bucket with both AllUsers and AuthUsers allowed
-    - ✔️ Test against bucket with no groups allowed
-- enumerate_bucket_objects()
-    - ✔️ Test against empty bucket
-    - ✔️ Test against non-empty bucket with read permission
-    - ✔️ Test against bucket without read permission
-- parse_found_acl()
-    - ✔️ Test against JSON with FULL_CONTROL for AllUsers
-    - ✔️ Test against JSON with FULL_CONTROL for AuthUsers
-    - ✔️ Test against empty JSON
-    - ✔️ Test against JSON with ReadACP for AuthUsers and Write for AllUsers
-"""
-
-
-def test_setup_new():
-    global setupRan
-    if setupRan:    # We only need to run this once per test-run
-        return
-
-    # Create testingFolder if it doesn't exist
-    if not os.path.exists(testingFolder) or not os.path.isdir(testingFolder):
-        os.makedirs(testingFolder)
-    setupRan = True
-
-
-def test_init():
-    test_setup_new()
-
-    s = S3Service(forceNoCreds=True)
-    assert s.aws_creds_configured is False
-
-
-def test_bucket_exists():
-    test_setup_new()
-
-    s = S3Service()
-
-    # Bucket that does exist
-    b1 = S3Bucket('s3scanner-private')
-    s.check_bucket_exists(b1)
-    assert b1.exists is BucketExists.YES
-
-    # Bucket that doesn't exist (hopefully)
-    b2 = S3Bucket('asfasfasdfasdfasdf')
-    s.check_bucket_exists(b2)
-    assert b2.exists is BucketExists.NO
-
-    # Pass a thing that's not a bucket
-    with pytest.raises(ValueError):
-        s.check_bucket_exists("asdfasdf")
-
-
-def test_check_perm_read():
-    test_setup_new()
-
-    s = S3Service()
-
-    # Bucket that no one can list
-    b1 = S3Bucket('s3scanner-private')
-    b1.exists = BucketExists.YES
-    s.check_perm_read(b1)
-    if s.aws_creds_configured:
-        assert b1.AuthUsersRead == Permission.DENIED
-    else:
-        assert b1.AllUsersRead == Permission.DENIED
-
-    # Bucket that only AuthenticatedUsers can list
-    b2 = S3Bucket('s3scanner-auth-read')
-    b2.exists = BucketExists.YES
-    s.check_perm_read(b2)
-    if s.aws_creds_configured:
-        assert b2.AuthUsersRead == Permission.ALLOWED
-    else:
-        assert b2.AllUsersRead == Permission.DENIED
-
-    # Bucket that Everyone can list
-    b3 = S3Bucket('s3scanner-long')
-    b3.exists = BucketExists.YES
-    s.check_perm_read(b3)
-    if s.aws_creds_configured:
-        assert b3.AuthUsersRead == Permission.ALLOWED
-    else:
-        assert b3.AllUsersRead == Permission.ALLOWED
-
-
-def test_enumerate_bucket_objects():
-    test_setup_new()
-
-    s = S3Service()
-
-    # Empty bucket
-    b1 = S3Bucket('s3scanner-empty')
-    b1.exists = BucketExists.YES
-    s.check_perm_read(b1)
-    if s.aws_creds_configured:
-        assert b1.AuthUsersRead == Permission.ALLOWED
-    else:
-        assert b1.AllUsersRead == Permission.ALLOWED
-    s.enumerate_bucket_objects(b1)
-    assert b1.objects_enumerated is True
-    assert b1.bucketSize == 0
-
-    # Bucket with > 1000 items
-    if s.aws_creds_configured:
-        b2 = S3Bucket('s3scanner-auth-read')
-        b2.exists = BucketExists.YES
-        s.check_perm_read(b2)
-        assert b2.AuthUsersRead == Permission.ALLOWED
-        s.enumerate_bucket_objects(b2)
-        assert b2.objects_enumerated is True
-        assert b2.bucketSize == 4143
-        assert b2.get_human_readable_size() == "4.0KB"
-    else:
-        print("[test_enumerate_bucket_objects] Skipping test due to no AWS creds")
-
-    # Bucket without read permission
-    b3 = S3Bucket('s3scanner-private')
-    b3.exists = BucketExists.YES
-    s.check_perm_read(b3)
-    if s.aws_creds_configured:
-        assert b3.AuthUsersRead == Permission.DENIED
-    else:
-        assert b3.AllUsersRead == Permission.DENIED
-    try:
-        s.enumerate_bucket_objects(b3)
-    except AccessDeniedException:
-        pass
-
-    # Try to enumerate before checking if bucket exists
-    b4 = S3Bucket('s3scanner-enumerate-bucket')
-    with pytest.raises(Exception):
-        s.enumerate_bucket_objects(b4)
-
-
-def test_check_perm_read_acl():
-    test_setup_new()
-    s = S3Service()
-
-    # Bucket with no read ACL perms
-    b1 = S3Bucket('s3scanner-private')
-    b1.exists = BucketExists.YES
-    s.check_perm_read_acl(b1)
-    if s.aws_creds_configured:
-        assert b1.AuthUsersReadACP == Permission.DENIED
-    else:
-        assert b1.AllUsersReadACP == Permission.DENIED
-
-    # Bucket that allows AuthenticatedUsers to read ACL
-    if s.aws_creds_configured:
-        b2 = S3Bucket('s3scanner-auth-read-acl')
-        b2.exists = BucketExists.YES
-        s.check_perm_read_acl(b2)
-        if s.aws_creds_configured:
-            assert b2.AuthUsersReadACP == Permission.ALLOWED
-        else:
-            assert b2.AllUsersReadACP == Permission.DENIED
-
-    # Bucket that allows AllUsers to read ACL
-    b3 = S3Bucket('s3scanner-all-readacp')
-    b3.exists = BucketExists.YES
-    s.check_perm_read_acl(b3)
-    assert b3.AllUsersReadACP == Permission.ALLOWED
-    assert b3.AllUsersWrite == Permission.DENIED
-    assert b3.AllUsersWriteACP == Permission.DENIED
-    assert b3.AuthUsersReadACP == Permission.DENIED
-    assert b3.AuthUsersWriteACP == Permission.DENIED
-    assert b3.AuthUsersWrite == Permission.DENIED
-
-
-def test_check_perm_write(do_dangerous_test):
-    test_setup_new()
-    s = S3Service()
-    sAnon = S3Service(forceNoCreds=True)
-
-    # Bucket with no write perms
-    b1 = S3Bucket('flaws.cloud')
-    b1.exists = BucketExists.YES
-    s.check_perm_write(b1)
-
-    if s.aws_creds_configured:
-        assert b1.AuthUsersWrite == Permission.DENIED
-    else:
-        assert b1.AllUsersWrite == Permission.DENIED
-
-    if do_dangerous_test:
-        print("[test_check_perm_write] Doing dangerous test")
-        ts = TestBucketService()
-
-        danger_bucket_1 = ts.create_bucket(1)  # Bucket with AuthUser Write, WriteACP permissions
-        try:
-            b2 = S3Bucket(danger_bucket_1)
-            b2.exists = BucketExists.YES
-            sAnon.check_perm_write(b2)
-            s.check_perm_write(b2)
-            assert b2.AuthUsersWrite == Permission.ALLOWED
-            assert b2.AllUsersWrite == Permission.DENIED
-        finally:
-            ts.delete_bucket(danger_bucket_1)
-
-        danger_bucket_2 = ts.create_bucket(2)  # Bucket with AllUser Write, WriteACP permissions
-        try:
-            b3 = S3Bucket(danger_bucket_2)
-            b3.exists = BucketExists.YES
-            sAnon.check_perm_write(b3)
-            s.check_perm_write(b3)
-            assert b3.AllUsersWrite == Permission.ALLOWED
-            assert b3.AuthUsersWrite == Permission.UNKNOWN
-        finally:
-            ts.delete_bucket(danger_bucket_2)
-
-        # Bucket with AllUsers and AuthUser Write permissions
-        danger_bucket_4 = ts.create_bucket(4)
-        try:
-            b4 = S3Bucket(danger_bucket_4)
-            b4.exists = BucketExists.YES
-            sAnon.check_perm_write(b4)
-            s.check_perm_write(b4)
-            assert b4.AllUsersWrite == Permission.ALLOWED
-            assert b4.AuthUsersWrite == Permission.UNKNOWN
-        finally:
-            ts.delete_bucket(danger_bucket_4)
-    else:
-        print("[test_check_perm_write] Skipping dangerous test")
-
-
-def test_check_perm_write_acl(do_dangerous_test):
-    test_setup_new()
-    s = S3Service()
-    sNoCreds = S3Service(forceNoCreds=True)
-
-    # Bucket with no permissions
-    b1 = S3Bucket('s3scanner-private')
-    b1.exists = BucketExists.YES
-    s.check_perm_write_acl(b1)
-    if s.aws_creds_configured:
-        assert b1.AuthUsersWriteACP == Permission.DENIED
-        assert b1.AllUsersWriteACP == Permission.UNKNOWN
-    else:
-        assert b1.AllUsersWriteACP == Permission.DENIED
-        assert b1.AuthUsersWriteACP == Permission.UNKNOWN
-    
-    if do_dangerous_test:
-        print("[test_check_perm_write_acl] Doing dangerous tests...")
-        ts = TestBucketService()
-
-        # Bucket with WRITE_ACP enabled for AuthUsers
-        danger_bucket_3 = ts.create_bucket(3)
-        try:
-            b2 = S3Bucket(danger_bucket_3)
-            b2.exists = BucketExists.YES
-
-            # Check for read/write permissions so when we check for write_acl we
-            # send the same perms that it had originally
-            sNoCreds.check_perm_read(b2)
-            s.check_perm_read(b2)
-            sNoCreds.check_perm_write(b2)
-            s.check_perm_write(b2)
-
-            # Check for WriteACP
-            sNoCreds.check_perm_write_acl(b2)
-            s.check_perm_write_acl(b2)
-
-            # Grab permissions after our check so we can compare to original
-            sNoCreds.check_perm_write(b2)
-            s.check_perm_write(b2)
-            sNoCreds.check_perm_read(b2)
-            s.check_perm_read(b2)
-            if s.aws_creds_configured:
-                assert b2.AuthUsersWriteACP == Permission.ALLOWED
-
-                # Make sure we didn't change the original permissions
-                assert b2.AuthUsersWrite == Permission.ALLOWED
-                assert b2.AllUsersWrite == Permission.DENIED
-                assert b2.AllUsersRead == Permission.ALLOWED
-                assert b2.AuthUsersRead == Permission.UNKNOWN
-            else:
-                assert b2.AllUsersRead == Permission.ALLOWED
-                assert b2.AuthUsersWriteACP == Permission.UNKNOWN
-        except Exception as e:
-            raise e
-        finally:
-            ts.delete_bucket(danger_bucket_3)
-
-        # Bucket with WRITE_ACP enabled for AllUsers
-        danger_bucket_2 = ts.create_bucket(2)
-        try:
-            b3 = S3Bucket(danger_bucket_2)
-            b3.exists = BucketExists.YES
-            sNoCreds.check_perm_read(b3)
-            s.check_perm_read(b3)
-            sNoCreds.check_perm_write(b3)
-            s.check_perm_write(b3)
-            sNoCreds.check_perm_write_acl(b3)
-            s.check_perm_write_acl(b3)
-            sNoCreds.check_perm_write(b3)
-            s.check_perm_write(b3)
-            sNoCreds.check_perm_read(b3)
-            s.check_perm_read(b3)
-            if s.aws_creds_configured:
-                assert b3.AllUsersWriteACP == Permission.ALLOWED
-                assert b3.AuthUsersWriteACP == Permission.UNKNOWN
-                assert b3.AllUsersWrite == Permission.ALLOWED
-            else:
-                assert b3.AllUsersRead == Permission.ALLOWED
-                assert b3.AuthUsersWriteACP == Permission.UNKNOWN
-        except Exception as e:
-            raise e
-        finally:
-            ts.delete_bucket(danger_bucket_2)
-
-        # Bucket with WRITE_ACP enabled for both AllUsers and AuthUsers
-        danger_bucket_5 = ts.create_bucket(5)
-        try:
-            b5 = S3Bucket(danger_bucket_5)
-            b5.exists = BucketExists.YES
-            sNoCreds.check_perm_read(b5)
-            s.check_perm_read(b5)
-            sNoCreds.check_perm_write(b5)
-            s.check_perm_write(b5)
-            sNoCreds.check_perm_write_acl(b5)
-            s.check_perm_write_acl(b5)
-            sNoCreds.check_perm_write(b5)
-            s.check_perm_write(b5)
-            sNoCreds.check_perm_read(b5)
-            s.check_perm_read(b5)
-            assert b5.AllUsersWriteACP == Permission.ALLOWED
-            assert b5.AuthUsersWriteACP == Permission.UNKNOWN
-            assert b5.AllUsersWrite == Permission.DENIED
-            assert b5.AuthUsersWrite == Permission.DENIED
-        except Exception as e:
-            raise e
-        finally:
-            ts.delete_bucket(danger_bucket_5)
-    else:
-        print("[test_check_perm_write_acl] Skipping dangerous test...")
-
-
-def test_parse_found_acl():
-    test_setup_new()
-    sAnon = S3Service(forceNoCreds=True)
-
-    b1 = S3Bucket('s3scanner-all-read-readacl')
-    b1.exists = BucketExists.YES
-    sAnon.check_perm_read_acl(b1)
-
-    assert b1.foundACL is not None
-    assert b1.AllUsersRead == Permission.ALLOWED
-    assert b1.AllUsersReadACP == Permission.ALLOWED
-    assert b1.AllUsersWrite == Permission.DENIED
-    assert b1.AllUsersWriteACP == Permission.DENIED
-    assert b1.AllUsersFullControl == Permission.DENIED
-
-    assert b1.AuthUsersReadACP == Permission.DENIED
-    assert b1.AuthUsersRead == Permission.DENIED
-    assert b1.AuthUsersWrite == Permission.DENIED
-    assert b1.AuthUsersWriteACP == Permission.DENIED
-    assert b1.AuthUsersFullControl == Permission.DENIED
-
-    test_acls_1 = {
-        'Grants': [
-            {
-                'Grantee': {
-                    'Type': 'Group',
-                    'URI': 'http://acs.amazonaws.com/groups/global/AllUsers'
-                },
-                'Permission': 'FULL_CONTROL'
-            }
-        ]
-    }
-
-    b2 = S3Bucket('test-acl-doesnt-exist')
-    b2.exists = BucketExists.YES
-    b2.foundACL = test_acls_1
-    sAnon.parse_found_acl(b2)
-    assert b2.AllUsersRead == Permission.ALLOWED
-    assert b2.AllUsersReadACP == Permission.ALLOWED
-    assert b2.AllUsersWrite == Permission.ALLOWED
-    assert b2.AllUsersWriteACP == Permission.ALLOWED
-    assert b2.AllUsersFullControl == Permission.ALLOWED
-    assert b2.AuthUsersRead == Permission.DENIED
-    assert b2.AuthUsersReadACP == Permission.DENIED
-    assert b2.AuthUsersWrite == Permission.DENIED
-    assert b2.AuthUsersWriteACP == Permission.DENIED
-    assert b2.AuthUsersFullControl == Permission.DENIED
-
-    test_acls_2 = {
-        'Grants': [
-            {
-                'Grantee': {
-                    'Type': 'Group',
-                    'URI': 'http://acs.amazonaws.com/groups/global/AuthenticatedUsers'
-                },
-                'Permission': 'FULL_CONTROL'
-            }
-        ]
-    }
-
-    b3 = S3Bucket('test-acl2-doesnt-exist')
-    b3.exists = BucketExists.YES
-    b3.foundACL = test_acls_2
-    sAnon.parse_found_acl(b3)
-    assert b3.AllUsersRead == Permission.DENIED
-    assert b3.AllUsersReadACP == Permission.DENIED
-    assert b3.AllUsersWrite == Permission.DENIED
-    assert b3.AllUsersWriteACP == Permission.DENIED
-    assert b3.AllUsersFullControl == Permission.DENIED
-    assert b3.AuthUsersRead == Permission.ALLOWED
-    assert b3.AuthUsersReadACP == Permission.ALLOWED
-    assert b3.AuthUsersWrite == Permission.ALLOWED
-    assert b3.AuthUsersWriteACP == Permission.ALLOWED
-    assert b3.AuthUsersFullControl == Permission.ALLOWED
-
-    test_acls_3 = {
-        'Grants': [
-            {
-                'Grantee': {
-                    'Type': 'Group',
-                    'URI': 'asdfasdf'
-                },
-                'Permission': 'READ'
-            }
-        ]
-    }
-
-    b4 = S3Bucket('test-acl3-doesnt-exist')
-    b4.exists = BucketExists.YES
-    b4.foundACL = test_acls_3
-    sAnon.parse_found_acl(b4)
-
-    all_permissions = [b4.AllUsersRead, b4.AllUsersReadACP, b4.AllUsersWrite, b4.AllUsersWriteACP,
-                       b4.AllUsersFullControl, b4.AuthUsersRead, b4.AuthUsersReadACP, b4.AuthUsersWrite,
-                       b4.AuthUsersWriteACP, b4.AuthUsersFullControl]
-
-    for p in all_permissions:
-        assert p == Permission.DENIED
-
-    test_acls_4 = {
-        'Grants': [
-            {
-                'Grantee': {
-                    'Type': 'Group',
-                    'URI': 'http://acs.amazonaws.com/groups/global/AuthenticatedUsers'
-                },
-                'Permission': 'READ_ACP'
-            },
-            {
-                'Grantee': {
-                    'Type': 'Group',
-                    'URI': 'http://acs.amazonaws.com/groups/global/AllUsers'
-                },
-                'Permission': 'READ_ACP'
-            }
-        ]
-    }
-
-    b5 = S3Bucket('test-acl4-doesnt-exist')
-    b5.exists = BucketExists.YES
-    b5.foundACL = test_acls_4
-    sAnon.parse_found_acl(b5)
-    assert b5.AllUsersRead == Permission.DENIED
-    assert b5.AllUsersReadACP == Permission.ALLOWED
-    assert b5.AllUsersWrite == Permission.DENIED
-    assert b5.AllUsersWriteACP == Permission.DENIED
-    assert b5.AllUsersFullControl == Permission.DENIED
-    assert b5.AuthUsersRead == Permission.DENIED
-    assert b5.AuthUsersReadACP == Permission.ALLOWED
-    assert b5.AuthUsersWrite == Permission.DENIED
-    assert b5.AuthUsersWriteACP == Permission.DENIED
-    assert b5.AuthUsersFullControl == Permission.DENIED
-
-
-def test_check_perms_without_checking_bucket_exists():
-    test_setup_new()
-    sAnon = S3Service(forceNoCreds=True)
-
-    b1 = S3Bucket('blahblah')
-    with pytest.raises(BucketMightNotExistException):
-        sAnon.check_perm_read_acl(b1)
-
-    with pytest.raises(BucketMightNotExistException):
-        sAnon.check_perm_read(b1)
-
-    with pytest.raises(BucketMightNotExistException):
-        sAnon.check_perm_write(b1)
-
-    with pytest.raises(BucketMightNotExistException):
-        sAnon.check_perm_write_acl(b1)
-
-
-def test_no_ssl():
-    test_setup_new()
-    S3Service(verify_ssl=False)
-
-
-def test_download_file():
-    test_setup_new()
-    s = S3Service()
-
-    # Try to download a file that already exists
-    dest_folder = os.path.realpath(testingFolder)
-    Path(os.path.join(dest_folder, 'test_download_file.txt')).touch()
-    size = Path(os.path.join(dest_folder, 'test_download_file.txt')).stat().st_size
-
-    o = S3BucketObject(size=size, last_modified="2020-12-31_03-02-11z", key="test_download_file.txt")
-
-    b = S3Bucket("bucket-no-existo")
-    s.download_file(os.path.join(dest_folder, ''), b, True, o)
-
-
-def test_validate_endpoint_url_nonaws():
-    disable_warnings()
-    s = S3Service()
-
-    # Test CenturyLink_Lumen
-    s.endpoint_url = 'https://useast.os.ctl.io'
-    assert s.validate_endpoint_url(use_ssl=True, verify_ssl=True, endpoint_address_style='path') is True
-
-    # Test DigitalOcean
-    s.endpoint_url = 'https://sfo2.digitaloceanspaces.com'
-    assert s.validate_endpoint_url(use_ssl=True, verify_ssl=True, endpoint_address_style='path') is True
-
-    # Test Dreamhost
-    s.endpoint_url = 'https://objects.dreamhost.com'
-    assert s.validate_endpoint_url(use_ssl=False, verify_ssl=False, endpoint_address_style='vhost') is True
-
-    # Test GCP
-    s.endpoint_url = 'https://storage.googleapis.com'
-    assert s.validate_endpoint_url(use_ssl=True, verify_ssl=True, endpoint_address_style='path') is True
-
-    # Test IBM
-    s.endpoint_url = 'https://s3.us-east.cloud-object-storage.appdomain.cloud'
-    assert s.validate_endpoint_url(use_ssl=True, verify_ssl=True, endpoint_address_style='path') is True
-
-    # Test Linode
-    s.endpoint_url = 'https://eu-central-1.linodeobjects.com'
-    assert s.validate_endpoint_url(use_ssl=True, verify_ssl=True, endpoint_address_style='path') is True
-
-    # Test Scaleway
-    s.endpoint_url = 'https://s3.nl-ams.scw.cloud'
-    assert s.validate_endpoint_url(use_ssl=True, verify_ssl=True, endpoint_address_style='path') is True
-
-    # Test Vultr
-    s.endpoint_url = 'https://ewr1.vultrobjects.com'
-    assert s.validate_endpoint_url(use_ssl=True, verify_ssl=True, endpoint_address_style='path') is True
-
-    # Test Wasabi
-    s.endpoint_url = 'https://s3.wasabisys.com'
-    assert s.validate_endpoint_url(use_ssl=True, verify_ssl=True, endpoint_address_style='path') is True