Run of fresh-releases for s3scanner
Try this locally (using the lintian-brush package):
debcheckout s3scanner
cd s3scanner
DEB_UPDATE_CHANGELOG=auto deb-new-upstream --debian-revision=0kali1 --require-uscan --refresh-patches
Summary
DEB_UPDATE_CHANGELOG=auto deb-new-upstream --debian-revision=0kali1 --require-uscan --refresh-patches
Diff
Branch: main
diff --git a/S3Scanner/S3Service.py b/S3Scanner/S3Service.py
index 64788ec..2ac38d1 100644
--- a/S3Scanner/S3Service.py
+++ b/S3Scanner/S3Service.py
@@ -10,11 +10,12 @@ from botocore import UNSIGNED
from botocore.client import Config
import datetime
from S3Scanner.exceptions import AccessDeniedException, InvalidEndpointException, BucketMightNotExistException
-from os.path import normpath
import pathlib
from concurrent.futures import ThreadPoolExecutor, as_completed
from functools import partial
from urllib3 import disable_warnings
+import os
+
ALL_USERS_URI = 'uri=http://acs.amazonaws.com/groups/global/AllUsers'
AUTH_USERS_URI = 'uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers'
@@ -284,7 +285,7 @@ class S3Service:
for future in as_completed(futures):
if future.exception():
- print(f"{bucket.name} | Download failed: {futures[future]}")
+ print(f"{bucket.name} | Download failed: {futures[future]} | {future.exception()}")
print(f"{bucket.name} | Dumping completed")
@@ -292,13 +293,17 @@ class S3Service:
"""
Download `obj` from `bucket` into `dest_directory`
- :param str dest_directory: Directory to store the object into
+ :param str dest_directory: Directory to store the object into. _Must_ end in a slash
:param S3Bucket bucket: Bucket to download the object from
:param bool verbose: Output verbose messages to the user
:param S3BucketObject obj: Object to download
:return: None
"""
- dest_file_path = pathlib.Path(normpath(dest_directory + obj.key))
+ dest_file_path = pathlib.Path(os.path.normpath(os.path.join(dest_directory, obj.key)))
+
+ if not self.is_safe_file_to_download(obj.key, dest_directory):
+ print(f"{bucket.name} | Skipping file {obj.key}. File references a parent directory.")
+ return
if dest_file_path.exists():
if dest_file_path.stat().st_size == obj.size:
if verbose:
@@ -342,6 +347,20 @@ class S3Service:
raise AccessDeniedException("AccessDenied while enumerating bucket objects")
bucket.objects_enumerated = True
+ def is_safe_file_to_download(self, file_to_check, dest_directory):
+ """
+ Check if bucket object would be saved outside of `dest_directory` if downloaded.
+ AWS allows object keys to include relative path characters like '../' which can lead to a
+ path traversal-like issue where objects get saved outside of the intended directory.
+
+ :param string file_to_check: Bucket object key
+ :param string dest_directory: Path to directory to save file in
+ :return: bool
+ """
+ file_to_check = os.path.abspath(os.path.join(dest_directory, file_to_check))
+ safe_dir = os.path.abspath(dest_directory)
+ return os.path.commonpath([safe_dir]) == os.path.commonpath([safe_dir, file_to_check])
+
def parse_found_acl(self, bucket):
"""
Translate ACL grants into permission properties. If we were able to read the ACLs, we should be able to skip
diff --git a/S3Scanner/__main__.py b/S3Scanner/__main__.py
index aeeee36..ad9c069 100644
--- a/S3Scanner/__main__.py
+++ b/S3Scanner/__main__.py
@@ -16,7 +16,7 @@ from .S3Service import S3Service
from concurrent.futures import ThreadPoolExecutor, as_completed
from .exceptions import InvalidEndpointException
-CURRENT_VERSION = '2.0.1'
+CURRENT_VERSION = '2.0.2'
AWS_ENDPOINT = 'https://s3.amazonaws.com'
diff --git a/debian/changelog b/debian/changelog
index 2029e9f..5264532 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+s3scanner (2.0.2-0kali1) UNRELEASED; urgency=low
+
+ * New upstream release.
+
+ -- Kali Janitor <janitor@kali.org> Fri, 18 Feb 2022 01:27:00 -0000
+
s3scanner (2.0.1-0kali1) kali-dev; urgency=medium
* Initial release (see 4611)
diff --git a/debian/patches/Add-a-setup.py.patch b/debian/patches/Add-a-setup.py.patch
index 4f79940..9ace489 100644
--- a/debian/patches/Add-a-setup.py.patch
+++ b/debian/patches/Add-a-setup.py.patch
@@ -12,11 +12,10 @@ Last-Update: 2021-08-20
1 file changed, 34 insertions(+)
create mode 100644 setup.py
-diff --git a/setup.py b/setup.py
-new file mode 100644
-index 0000000..b094c7f
+Index: s3scanner/setup.py
+===================================================================
--- /dev/null
-+++ b/setup.py
++++ s3scanner/setup.py
@@ -0,0 +1,34 @@
+from setuptools import setup, find_packages
+
diff --git a/setup.cfg b/setup.cfg
index 818f41f..79fa863 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
[metadata]
name = S3Scanner
-version = 2.0.1
+version = 2.0.2
author = Dan Salmon
author_email = dan@salmon.cat
description = Scan for open S3 buckets and dump the contents
diff --git a/tests/test_scanner.py b/tests/test_scanner.py
index a892a2c..664fc99 100644
--- a/tests/test_scanner.py
+++ b/tests/test_scanner.py
@@ -11,7 +11,7 @@ def test_arguments():
s = S3Service()
a = subprocess.run([sys.executable, '-m', 'S3Scanner', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- assert a.stdout.decode('utf-8').strip() == '2.0.1'
+ assert a.stdout.decode('utf-8').strip() == '2.0.2'
b = subprocess.run([sys.executable, '-m', 'S3Scanner', 'scan', '--bucket', 'flaws.cloud'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
assert_scanner_output(s, 'flaws.cloud | bucket_exists | AuthUsers: [], AllUsers: [Read]', b.stdout.decode('utf-8').strip())
diff --git a/tests/test_service.py b/tests/test_service.py
index 3502743..e51d094 100644
--- a/tests/test_service.py
+++ b/tests/test_service.py
@@ -545,6 +545,19 @@ def test_download_file():
b = S3Bucket("bucket-no-existo")
s.download_file(os.path.join(dest_folder, ''), b, True, o)
+def test_is_safe_file_to_download():
+ test_setup_new()
+ s = S3Service()
+
+ # Check a good file name
+ assert s.is_safe_file_to_download("file.txt", "./bucket_dir/") == True
+ assert s.is_safe_file_to_download("file.txt", "./bucket_dir") == True
+
+ # Check file with relative name
+ assert s.is_safe_file_to_download("../file.txt", "./buckets/") == False
+ assert s.is_safe_file_to_download("../", "./buckets/") == False
+ assert s.is_safe_file_to_download("/file.txt", "./buckets/") == False
+
def test_validate_endpoint_url_nonaws():
disable_warnings()
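The heart of the upstream 2.0.2 change is the commonpath check added in S3Service.py above. A minimal standalone sketch of the same idea (the function name here is illustrative, not the upstream API):

import os

def is_within_directory(dest_directory, key):
    # Resolve the would-be destination path, collapsing any '../' segments in the key
    candidate = os.path.abspath(os.path.join(dest_directory, key))
    safe_dir = os.path.abspath(dest_directory)
    # Safe only if the resolved path still lives under the destination directory
    return os.path.commonpath([safe_dir]) == os.path.commonpath([safe_dir, candidate])

assert is_within_directory("./bucket_dir", "file.txt")
assert not is_within_directory("./buckets", "../file.txt")
assert not is_within_directory("./buckets", "/file.txt")

Note that os.path.join discards dest_directory entirely when the key is absolute, which is why the '/file.txt' case is also rejected by the commonpath comparison.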
Branch: pristine-tar
diff --git a/s3scanner_2.0.2.orig.tar.gz.delta b/s3scanner_2.0.2.orig.tar.gz.delta
new file mode 100644
index 0000000..5f5c181
Binary files /dev/null and b/s3scanner_2.0.2.orig.tar.gz.delta differ
diff --git a/s3scanner_2.0.2.orig.tar.gz.id b/s3scanner_2.0.2.orig.tar.gz.id
new file mode 100644
index 0000000..68a3243
--- /dev/null
+++ b/s3scanner_2.0.2.orig.tar.gz.id
@@ -0,0 +1 @@
+4b1f6e78a1a9325aa58e418b471224ac0262fef4
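The pristine-tar branch stores a binary delta against the upstream tree together with the tarball id, from which the pristine-tar tool can regenerate s3scanner_2.0.2.orig.tar.gz bit-for-bit. A minimal sketch of driving that from Python, assuming pristine-tar is installed and the command is run inside the packaging checkout:

import subprocess

# Recreate the original upstream tarball from the delta stored on the pristine-tar branch
subprocess.run(["pristine-tar", "checkout", "s3scanner_2.0.2.orig.tar.gz"], check=True)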
Branch: upstream
Tag: upstream/2.0.2
diff --git a/S3Scanner/S3Service.py b/S3Scanner/S3Service.py
index 64788ec..2ac38d1 100644
--- a/S3Scanner/S3Service.py
+++ b/S3Scanner/S3Service.py
@@ -10,11 +10,12 @@ from botocore import UNSIGNED
from botocore.client import Config
import datetime
from S3Scanner.exceptions import AccessDeniedException, InvalidEndpointException, BucketMightNotExistException
-from os.path import normpath
import pathlib
from concurrent.futures import ThreadPoolExecutor, as_completed
from functools import partial
from urllib3 import disable_warnings
+import os
+
ALL_USERS_URI = 'uri=http://acs.amazonaws.com/groups/global/AllUsers'
AUTH_USERS_URI = 'uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers'
@@ -284,7 +285,7 @@ class S3Service:
for future in as_completed(futures):
if future.exception():
- print(f"{bucket.name} | Download failed: {futures[future]}")
+ print(f"{bucket.name} | Download failed: {futures[future]} | {future.exception()}")
print(f"{bucket.name} | Dumping completed")
@@ -292,13 +293,17 @@ class S3Service:
"""
Download `obj` from `bucket` into `dest_directory`
- :param str dest_directory: Directory to store the object into
+ :param str dest_directory: Directory to store the object into. _Must_ end in a slash
:param S3Bucket bucket: Bucket to download the object from
:param bool verbose: Output verbose messages to the user
:param S3BucketObject obj: Object to download
:return: None
"""
- dest_file_path = pathlib.Path(normpath(dest_directory + obj.key))
+ dest_file_path = pathlib.Path(os.path.normpath(os.path.join(dest_directory, obj.key)))
+
+ if not self.is_safe_file_to_download(obj.key, dest_directory):
+ print(f"{bucket.name} | Skipping file {obj.key}. File references a parent directory.")
+ return
if dest_file_path.exists():
if dest_file_path.stat().st_size == obj.size:
if verbose:
@@ -342,6 +347,20 @@ class S3Service:
raise AccessDeniedException("AccessDenied while enumerating bucket objects")
bucket.objects_enumerated = True
+ def is_safe_file_to_download(self, file_to_check, dest_directory):
+ """
+ Check if bucket object would be saved outside of `dest_directory` if downloaded.
+ AWS allows object keys to include relative path characters like '../' which can lead to a
+ path traversal-like issue where objects get saved outside of the intended directory.
+
+ :param string file_to_check: Bucket object key
+ :param string dest_directory: Path to directory to save file in
+ :return: bool
+ """
+ file_to_check = os.path.abspath(os.path.join(dest_directory, file_to_check))
+ safe_dir = os.path.abspath(dest_directory)
+ return os.path.commonpath([safe_dir]) == os.path.commonpath([safe_dir, file_to_check])
+
def parse_found_acl(self, bucket):
"""
Translate ACL grants into permission properties. If we were able to read the ACLs, we should be able to skip
diff --git a/S3Scanner/__main__.py b/S3Scanner/__main__.py
index aeeee36..ad9c069 100644
--- a/S3Scanner/__main__.py
+++ b/S3Scanner/__main__.py
@@ -16,7 +16,7 @@ from .S3Service import S3Service
from concurrent.futures import ThreadPoolExecutor, as_completed
from .exceptions import InvalidEndpointException
-CURRENT_VERSION = '2.0.1'
+CURRENT_VERSION = '2.0.2'
AWS_ENDPOINT = 'https://s3.amazonaws.com'
diff --git a/setup.cfg b/setup.cfg
index 818f41f..79fa863 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
[metadata]
name = S3Scanner
-version = 2.0.1
+version = 2.0.2
author = Dan Salmon
author_email = dan@salmon.cat
description = Scan for open S3 buckets and dump the contents
diff --git a/tests/test_scanner.py b/tests/test_scanner.py
index a892a2c..664fc99 100644
--- a/tests/test_scanner.py
+++ b/tests/test_scanner.py
@@ -11,7 +11,7 @@ def test_arguments():
s = S3Service()
a = subprocess.run([sys.executable, '-m', 'S3Scanner', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- assert a.stdout.decode('utf-8').strip() == '2.0.1'
+ assert a.stdout.decode('utf-8').strip() == '2.0.2'
b = subprocess.run([sys.executable, '-m', 'S3Scanner', 'scan', '--bucket', 'flaws.cloud'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
assert_scanner_output(s, 'flaws.cloud | bucket_exists | AuthUsers: [], AllUsers: [Read]', b.stdout.decode('utf-8').strip())
diff --git a/tests/test_service.py b/tests/test_service.py
index 3502743..e51d094 100644
--- a/tests/test_service.py
+++ b/tests/test_service.py
@@ -545,6 +545,19 @@ def test_download_file():
b = S3Bucket("bucket-no-existo")
s.download_file(os.path.join(dest_folder, ''), b, True, o)
+def test_is_safe_file_to_download():
+ test_setup_new()
+ s = S3Service()
+
+ # Check a good file name
+ assert s.is_safe_file_to_download("file.txt", "./bucket_dir/") == True
+ assert s.is_safe_file_to_download("file.txt", "./bucket_dir") == True
+
+ # Check file with relative name
+ assert s.is_safe_file_to_download("../file.txt", "./buckets/") == False
+ assert s.is_safe_file_to_download("../", "./buckets/") == False
+ assert s.is_safe_file_to_download("/file.txt", "./buckets/") == False
+
def test_validate_endpoint_url_nonaws():
disable_warnings()
Resulting package
The resulting binary package can be installed (if you have the apt repository enabled) by running:
apt install -t kali-experimental s3scanner