python-simplekv / upstream: Import upstream version 0.14.1 (Kali Janitor, 3 years ago)
30 changed files with 892 additions and 452 deletions.
9696
9797 # PyCharm
9898 .idea/
99
100 azure_credentials.ini
99 export MINIO_SECRET_KEY=miniostorage
1010
1111 mkdir -p ~/s3
12 ~/minio version
12 ~/minio --version
1313 ~/minio server ~/s3 &
55 - python: 2.7
66 env:
77 - TOXENV=py27
8 - python: 3.4
9 env:
10 - TOXENV=py34
118 - python: 3.5
129 env:
1310 - TOXENV=py35
1411 - python: 3.6
1512 env:
1613 - TOXENV=py36
14 - python: 3.7
15 env:
16 - TOXENV=py37
17 - python: 3.8
18 env:
19 - TOXENV=py38
20 - python: 3.6
21 env:
22 - TOXENV=py36-old-azure-storage-blob
1723
1824 services:
25 - docker
1926 - redis-server
2027 - mongodb
28 - postgresql
29 - mysql
2130
2231 install: pip install tox coveralls
2332
2433 before_script:
2534 - bash .travis/start_minio.sh
35 - docker run -p 10000:10000 mcr.microsoft.com/azure-storage/azurite azurite-blob --blobHost 0.0.0.0 &
2636 - psql -c 'create database simplekv_test;' -U postgres
2737 - psql -c 'ALTER ROLE travis CONNECTION LIMIT -1;' -U postgres
2838 - mysql -e 'create database simplekv_test;'
2939 - mysql -e 'set global max_connections = 200;'
30 - '[ -z "$encrypted_dea9dfb12f4a_key" ] || openssl aes-256-cbc -K $encrypted_dea9dfb12f4a_key -iv $encrypted_dea9dfb12f4a_iv -in azure_credentials.ini.enc -out azure_credentials.ini -d'
3140
3241 script: tox
3342
0 # Specify the azure account to use in the tests.
1 # The default assumes you are running azurite (https://github.com/Azure/Azurite).
2 # To skip the azure store tests, remove the ``account_name``.
3 #
4 # Note that only the first config section is parsed.
5
6 [azure-tests]
7 account_name=devstoreaccount1
8 account_key=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==
9 # these are not needed for real azure blob store accounts; only set them for azurite:
10 protocol=http
11 endpoint=http://127.0.0.1:10000/devstoreaccount1
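# For reference: the test helpers assemble these values into a connection
# string roughly like (AccountKey abbreviated):
#   DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=...;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1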
azure_credentials.ini.enc deleted (encrypted binary file, +0/-2 lines)
33 Simplekv supports storing data in `Microsoft Azure Block Blob Storage <https://azure.microsoft.com/en-us/services/storage/blobs/>`_.
44
55 The backend uses the `azure-storage-blob <https://github.com/Azure/azure-storage-python/tree/master/azure-storage-blob>`_
6 python distribution to access the azure blob storage. Note that `azure-storage-blob` is not
7 a dependency for simplekv. You need to install it manually, otherwise you will see an :exc:`~exceptions.ImportError`.
6 python distribution to access the azure blob storage and currently supports versions 2.x and 12.x.
7
8 Note that ``azure-storage-blob`` is not a dependency of simplekv. You need to install it
9 manually; otherwise you will see an :exc:`~exceptions.ImportError`.
810
911 Here is a short example:
1012
2426 print store.get(u'some-key')
2527
2628
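For a complete, runnable variant of the example above (a minimal sketch; the
connection string and container name below are placeholders)::

    from simplekv.net.azurestore import AzureBlockBlobStore

    conn_string = 'DefaultEndpointsProtocol=https;AccountName=...;AccountKey=...'
    store = AzureBlockBlobStore(conn_string=conn_string, container='mycontainer')
    store.put(u'some-key', b'data')
    print store.get(u'some-key')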
27 Unit testing
28 ============
29 Testing
30 =======
2931
30 The unit-tests for the azure backend are either run by travis on the main repo, or if you provide
31 your own credentials via a `azure_credentials.ini`` file in the root folder of the project.
32 An example looks like this:
32 The tests for the azure backend either
3333
34 ::
34 * use a real azure blob store account or
35 * use the `Azurite <https://github.com/Azure/Azurite>`_ blob storage emulator
3536
36 [my-azure-test-account]
37 account_name=my_account_name
38 account_key=AZURE_TEST_KEY
37 The travis tests use the second method.
38
39 To test with a real blob store account, edit the file ``azure_credentials.ini``
40 such that the first config section contains the actual ``account_name`` and
41 ``account_key`` of your test account.
42
43 To test against a locally running azurite instance, make sure to start azurite::
44
45 docker run -p 10000:10000 mcr.microsoft.com/azure-storage/azurite azurite-blob --blobHost 0.0.0.0 &
46
47 before running the tests.
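The azure tests can then be run directly, e.g.::

    pytest tests/test_azure_store.py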
48
49 To skip the tests of the azure backend, comment out the ``account_name`` in the ``azure_credentials.ini`` file.
3950
4051 .. autoclass:: simplekv.net.azurestore.AzureBlockBlobStore
00 Changelog
11 *********
2
3 0.14.1
4 ======
5
6 * Fix support for ``key in store`` for azure with ``azure-storage-blob`` 12
7
8 0.14.0
9 ======
10
11 * Add support for ``azure-storage-blob`` version 12. (``azure-storage-blob`` version 2 is still supported.)
12
13 0.13.1
14 ======
15
16 * Add the optional Azure API parameters ``max_block_size`` and ``max_single_put_size`` to the ``AzureBlockBlobStore``.
217
318 0.13.0
419 ======
5858 # built documents.
5959 #
6060 # The short X.Y version.
61 version = '0.13.0'
61 version = '0.14.1'
6262 # The full version, including alpha/beta/rc tags.
63 release = '0.13.0'
63 release = '0.14.1'
6464
6565 # The language for content autogenerated by Sphinx. Refer to documentation
6666 # for a list of supported languages.
1010
1111
1212 setup(name='simplekv',
13 version='0.13.0',
13 version='0.14.1',
1414 description=('A key-value storage for binary data, support many '
1515 'backends.'),
1616 long_description=read('README.rst'),
44 from io import BytesIO
55 from ._compat import key_type
66
7 __version__ = '0.13.0'
7 __version__ = '0.14.1'
88
99 VALID_NON_NUM = r"""\`\!"#$%&'()+,-.<=>?@[]^_{}~"""
1010 VALID_KEY_REGEXP = "^[%s0-9a-zA-Z]+$" % re.escape(VALID_NON_NUM)
111111
112112 def copy(self, source, dest):
113113 """Implementation of :meth:`~simplekv.CopyMixin.copy`.
114
114
115115 Copies the data in the backing store and removes the destination key from the cache,
116116 in case it was already populated.
117117 Does not work when the backing store does not implement copy.
66
77 VALID_NON_NUM_EXTENDED = VALID_NON_NUM + r"/ "
88 VALID_KEY_REGEXP_EXTENDED = "^[%s0-9a-zA-Z]+$" % re.escape(VALID_NON_NUM_EXTENDED)
9 """This regular expression tests if a key is valid when the extended keyspace mixin is used. Allowed are all
10 alphanumeric characters, as well as ``!"`#$%&'()+,-.<=>?@[]^_{}~/``. and spaces"""
9 """This regular expression tests if a key is valid when the extended keyspace mixin is used.
10 Allowed are all alphanumeric characters, as well as ``!"`#$%&'()+,-.<=>?@[]^_{}~/``. and spaces"""
1111 VALID_KEY_RE_EXTENDED = re.compile(VALID_KEY_REGEXP_EXTENDED)
1212 """A compiled version of :data:`~simplekv.VALID_KEY_REGEXP_EXTENDED`."""
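# For example, u'some dir/file-1.txt' matches VALID_KEY_RE_EXTENDED,
# while a key containing e.g. '*' does not.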
1313
2020 '(too small)')
2121
2222 def read(self, n=None):
23 if '' == self.buffer or 0 == n:
24 return ''
25
26 new_read = self.source.read(n) if None != n else self.source.read()
27 finished = (None == n or len(new_read) != n)
23 if b'' == self.buffer or 0 == n:
24 return b''
25
26 new_read = self.source.read(n) if n is not None else self.source.read()
27 finished = (n is None or len(new_read) != n)
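# self.buffer and new_read hold bytes, so the empty-read sentinel above is b''
# rather than ''; comparing n with 'is None' handles an omitted read size.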
2828 self.buffer += new_read
2929
30 if None != n:
30 if n is not None:
3131 offset = min(n, len(self.buffer) - self.hm.digest_size)
3232 else:
3333 offset = len(self.buffer) - self.hm.digest_size
9393 self.__hashfunc = hashfunc
9494 self.__secret_key = bytes(secret_key)
9595
96 @property
97 def hmac_digestsize(self):
98 # returns, in bytes, the size of the digest
99 return self.hmac_mixin_hashfunc().digestsize
100
10196 def __new_hmac(self, key, msg=None):
10297 if not msg:
10398 msg = b''
129124 if isinstance(file, str):
130125 try:
131126 f = open(file, 'wb')
132 except OSError as e:
127 except (OSError, IOError) as e:
133128 raise IOError('Error opening %s for writing: %r' % (
134129 file, e
135130 ))
1212 def __init__(self, bind, metadata, tablename):
1313 self.bind = bind
1414
15 self.table = Table(tablename, metadata,
15 self.table = Table(
16 tablename, metadata,
1617 # 250 characters is the maximum key length that we guarantee can be
1718 # handled by any kind of backend
1819 Column('key', String(250), primary_key=True),
3132
3233 def _get(self, key):
3334 rv = self.bind.execute(
34 select([self.table.c.value], self.table.c.key == key).limit(1)
35 ).scalar()
35 select([self.table.c.value], self.table.c.key == key).limit(1)
36 ).scalar()
3637
3738 if not rv:
3839 raise KeyError(key)
2424 return obj.v
2525
2626 def _has_key(self, key):
27 return None != self.obj_class.get_by_id(id=key)
27 return self.obj_class.get_by_id(id=key) is not None
2828
2929 def iter_keys(self, prefix=u""):
3030 qry_iter = self.obj_class.query().iter(keys_only=True)
31 return filter(lambda k: k.string_id().startswith(prefix), (k.string_id() for k in qry_iter))
31 return filter(lambda k: k.string_id().startswith(prefix),
32 (k.string_id() for k in qry_iter)
33 )
3234
3335 def _open(self, key):
3436 return StringIO(self._get(key))
8080 def _key_components(self, key):
8181 return [c.encode('ascii') for c in key.split('/')]
8282
83
8483 @property
8584 def _refname(self):
8685 return b'refs/heads/' + self.branch
44
55 from .. import KeyValueStore, TimeToLiveMixin, NOT_SET, FOREVER
66 import re
7
78
89 class RedisStore(TimeToLiveMixin, KeyValueStore):
910 """Uses a redis-database as the backend.
0 """
1 Internal utilities for azurestore_old and azurestore_new
2 """
3
4 import hashlib
5 import base64
6
7 LAZY_PROPERTY_ATTR_PREFIX = "_lazy_"
8
9
10 def lazy_property(fn):
11 """Decorator that makes a property lazy-evaluated.
12
13 On first access, lazy properties are computed and saved
14 as an instance attribute with the name `'_lazy_' + method_name`.
15 Any subsequent property access then returns the cached value."""
16 attr_name = LAZY_PROPERTY_ATTR_PREFIX + fn.__name__
17
18 @property
19 def _lazy_property(self):
20 if not hasattr(self, attr_name):
21 setattr(self, attr_name, fn(self))
22 return getattr(self, attr_name)
23
24 return _lazy_property
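# Usage sketch (hypothetical; make_client is a placeholder):
#
#   class Example(object):
#       @lazy_property
#       def client(self):
#           return make_client()  # runs once; result cached as self._lazy_client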
25
26
27 def _file_md5(file_, b64encode=True):
28 """
29 Compute the md5 digest of a file in base64 encoding.
30
31 If ``b64encode`` is true, returns the base64 encoded string; otherwise, returns the
32 raw digest bytes.
33 """
34 md5 = hashlib.md5()
35 chunk_size = 128 * md5.block_size
36 for chunk in iter(lambda: file_.read(chunk_size), b""):
37 md5.update(chunk)
38 file_.seek(0)
39 byte_digest = md5.digest()
40 if b64encode:
41 return base64.b64encode(byte_digest).decode()
42 else:
43 return byte_digest
44
45
46 def _filename_md5(filename, b64encode=True):
47 """
48 Compute the md5 digest of a file in base64 encoding.
49 """
50 with open(filename, "rb") as f:
51 return _file_md5(f, b64encode=b64encode)
52
53
54 def _byte_buffer_md5(buffer_, b64encode=True):
55 """
56 Computes the md5 digest of a byte buffer in base64 encoding.
57 """
58 md5 = hashlib.md5(buffer_)
59 byte_digest = md5.digest()
60 if b64encode:
61 return base64.b64encode(byte_digest).decode()
62 else:
63 return byte_digest
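# Example (standard md5 test vector):
#   _byte_buffer_md5(b'hello') == 'XUFAKrxLKna5cZ2REBfFkg=='
#   _byte_buffer_md5(b'hello', b64encode=False) returns the same digest as raw bytes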
0 """
1 This implements the AzureBlockBlobStore for `azure-storage-blob~=12`
2 """
3 import io
4 from contextlib import contextmanager
5
6 from .._compat import PY2
7 from .. import KeyValueStore
8
9 from ._azurestore_common import (
10 _byte_buffer_md5,
11 _file_md5,
12 lazy_property,
13 LAZY_PROPERTY_ATTR_PREFIX,
14 )
15
16
17 if PY2:
18
19 def _blobname_to_texttype(name):
20 """
21 Convert the str `name` to unicode
22 """
23 return name.decode('utf-8')
24 else:
25
26 def _blobname_to_texttype(name):
27 return name
28
29
30 @contextmanager
31 def map_azure_exceptions(key=None, error_codes_pass=()):
32 """Map Azure-specific exceptions to the simplekv-API."""
33 from azure.core.exceptions import AzureError
34
35 try:
36 yield
37 except AzureError as ex:
38 error_code = getattr(ex, "error_code", None)
39 if error_code is not None and error_code in error_codes_pass:
40 return
41 if error_code == "BlobNotFound":
42 raise KeyError(key)
43 raise IOError(str(ex))
44
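# Typical use, mirroring the store methods below:
#
#   with map_azure_exceptions(key, error_codes_pass=("BlobNotFound",)):
#       container_client.delete_blob(key)  # a missing blob is silently ignored;
#                                          # other AzureErrors become KeyError/IOError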
45
46 class AzureBlockBlobStore(KeyValueStore):
47 def __init__(
48 self,
49 conn_string=None,
50 container=None,
51 public=False,
52 create_if_missing=True,
53 max_connections=2,
54 max_block_size=None,
55 max_single_put_size=None,
56 checksum=False,
57 socket_timeout=None,
58 ):
59 """
60 Note that socket_timeout is unused;
61 it only exists for backward compatibility.
62 """
63 self.conn_string = conn_string
64 self.container = container
65 self.public = public
66 self.create_if_missing = create_if_missing
67 self.max_connections = max_connections
68 self.max_block_size = max_block_size
69 self.max_single_put_size = max_single_put_size
70 self.checksum = checksum
71
72 # Using @lazy_property will (re-)create the blob_container_client instance
73 # when needed. Together with the __getstate__ implementation below, this
74 # allows AzureBlockBlobStore to be pickled, even if the underlying
75 # azure.storage.blob client objects do not support pickling.
76 @lazy_property
77 def blob_container_client(self):
78 from azure.storage.blob import BlobServiceClient
79
80 kwargs = {}
81 if self.max_single_put_size:
82 kwargs["max_single_put_size"] = self.max_single_put_size
83
84 if self.max_block_size:
85 kwargs["max_block_size"] = self.max_block_size
86
87 service_client = BlobServiceClient.from_connection_string(
88 self.conn_string, **kwargs
89 )
90 container_client = service_client.get_container_client(self.container)
91 if self.create_if_missing:
92 with map_azure_exceptions(error_codes_pass=("ContainerAlreadyExists",)):
93 container_client.create_container(
94 public_access="container" if self.public else None
95 )
96 return container_client
97
98 def _delete(self, key):
99 with map_azure_exceptions(key, error_codes_pass=("BlobNotFound",)):
100 self.blob_container_client.delete_blob(key)
101
102 def _get(self, key):
103 with map_azure_exceptions(key):
104 blob_client = self.blob_container_client.get_blob_client(key)
105 downloader = blob_client.download_blob(max_concurrency=self.max_connections)
106 return downloader.readall()
107
108 def _has_key(self, key):
109 blob_client = self.blob_container_client.get_blob_client(key)
110 with map_azure_exceptions(key, ("BlobNotFound",)):
111 blob_client.get_blob_properties()
112 return True
113 return False
114
115 def iter_keys(self, prefix=None):
116 with map_azure_exceptions():
117 blobs = self.blob_container_client.list_blobs(name_starts_with=prefix)
118
119 def gen_names():
120 with map_azure_exceptions():
121 for blob in blobs:
122 yield _blobname_to_texttype(blob.name)
123 return gen_names()
124
125 def iter_prefixes(self, delimiter, prefix=u""):
126 return (
127 _blobname_to_texttype(blob_prefix.name)
128 for blob_prefix in self.blob_container_client.walk_blobs(
129 name_starts_with=prefix, delimiter=delimiter
130 )
131 )
132
133 def _open(self, key):
134 with map_azure_exceptions(key):
135 blob_client = self.blob_container_client.get_blob_client(key)
136 return IOInterface(blob_client, self.max_connections)
137
138 def _put(self, key, data):
139 from azure.storage.blob import ContentSettings
140
141 if self.checksum:
142 content_settings = ContentSettings(
143 content_md5=_byte_buffer_md5(data, b64encode=False)
144 )
145 else:
146 content_settings = ContentSettings()
147
148 with map_azure_exceptions(key):
149 blob_client = self.blob_container_client.get_blob_client(key)
150
151 blob_client.upload_blob(
152 data,
153 overwrite=True,
154 content_settings=content_settings,
155 max_concurrency=self.max_connections,
156 )
157 return key
158
159 def _put_file(self, key, file):
160 from azure.storage.blob import ContentSettings
161
162 if self.checksum:
163 content_settings = ContentSettings(content_md5=_file_md5(file, b64encode=False))
164 else:
165 content_settings = ContentSettings()
166
167 with map_azure_exceptions(key):
168 blob_client = self.blob_container_client.get_blob_client(key)
169
170 blob_client.upload_blob(
171 file,
172 overwrite=True,
173 content_settings=content_settings,
174 max_concurrency=self.max_connections,
175 )
176 return key
177
178 def _get_file(self, key, file):
179 with map_azure_exceptions(key):
180 blob_client = self.blob_container_client.get_blob_client(key)
181 downloader = blob_client.download_blob(max_concurrency=self.max_connections)
182 downloader.readinto(file)
183
184 def __getstate__(self):
185 # keep all of __dict__, except lazy properties:
186 return {
187 key: value
188 for key, value in self.__dict__.items()
189 if not key.startswith(LAZY_PROPERTY_ATTR_PREFIX)
190 }
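# Dropping the lazy attributes makes the store picklable, e.g. (as exercised
# by test_azure_setgetstate in the test suite):
#
#   buf = pickle.dumps(store)
#   store2 = pickle.loads(buf)  # lazily re-creates its container client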
191
192
193 class IOInterface(io.BufferedIOBase):
194 """
195 Class which provides a file-like interface to selectively read from a blob in the blob store.
196 """
197
198 def __init__(self, blob_client, max_connections):
199 super(IOInterface, self).__init__()
200 self.blob_client = blob_client
201 self.max_connections = max_connections
202
203 blob_props = self.blob_client.get_blob_properties()
204 self.size = blob_props.size
205 self.pos = 0
206
207 def tell(self):
208 """Returns he current offset as int. Always >= 0."""
209 if self.closed:
210 raise ValueError("I/O operation on closed file")
211 return self.pos
212
213 def read(self, size=-1):
214 """Returns 'size' amount of bytes or less if there is no more data.
215 If no size is given all data is returned. size can be >= 0."""
216 if self.closed:
217 raise ValueError("I/O operation on closed file")
218 max_size = max(0, self.size - self.pos)
219 if size < 0 or size > max_size:
220 size = max_size
221 if size == 0:
222 return b""
223 downloader = self.blob_client.download_blob(
224 self.pos, size, max_concurrency=self.max_connections
225 )
226 b = downloader.readall()
227 self.pos += len(b)
228 return b
229
230 def seek(self, offset, whence=0):
231 """Move to a new offset either relative or absolute. whence=0 is
232 absolute, whence=1 is relative, whence=2 is relative to the end.
233
234 Any relative or absolute seek operation which would result in a
235 negative position is undefined and that case can be ignored
236 in the implementation.
237
238 Any seek operation which moves the position past the end of the stream
239 should succeed. tell() should report that position and read()
240 should return an empty bytes object."""
241 if self.closed:
242 raise ValueError("I/O operation on closed file")
243 if whence == 0:
244 if offset < 0:
245 raise IOError("seek would move position outside the file")
246 self.pos = offset
247 elif whence == 1:
248 if self.pos + offset < 0:
249 raise IOError("seek would move position outside the file")
250 self.pos += offset
251 elif whence == 2:
252 if self.size + offset < 0:
253 raise IOError("seek would move position outside the file")
254 self.pos = self.size + offset
255 return self.pos
256
257 def seekable(self):
258 return True
259
260 def readable(self):
261 return True
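# Usage sketch: IOInterface is what store.open(key) returns, so a stored blob
# can be read selectively without downloading it whole:
#
#   f = store.open(u'some-key')
#   f.seek(1024)        # jump to an offset
#   chunk = f.read(16)  # download only the requested range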
0 """
1 This implements the AzureBlockBlobStore for `azure-storage-blob<12`
2 """
3 import io
4 from contextlib import contextmanager
5
6 from ._azurestore_common import _byte_buffer_md5, _file_md5, _filename_md5,\
7 lazy_property, LAZY_PROPERTY_ATTR_PREFIX
8
9 from .._compat import binary_type
10 from .. import KeyValueStore
11
12
13 @contextmanager
14 def map_azure_exceptions(key=None, exc_pass=()):
15 """Map Azure-specific exceptions to the simplekv-API."""
16 from azure.common import AzureMissingResourceHttpError, AzureHttpError,\
17 AzureException
18 try:
19 yield
20 except AzureMissingResourceHttpError as ex:
21 if ex.__class__.__name__ not in exc_pass:
22 s = str(ex)
23 if s.startswith(u"The specified container does not exist."):
24 raise IOError(s)
25 raise KeyError(key)
26 except AzureHttpError as ex:
27 if ex.__class__.__name__ not in exc_pass:
28 raise IOError(str(ex))
29 except AzureException as ex:
30 if ex.__class__.__name__ not in exc_pass:
31 raise IOError(str(ex))
32
33
34 class AzureBlockBlobStore(KeyValueStore):
35 def __init__(self, conn_string=None, container=None, public=False,
36 create_if_missing=True, max_connections=2,
37 max_block_size=None, max_single_put_size=None,
38 checksum=False,
39 socket_timeout=None):
40 self.conn_string = conn_string
41 self.container = container
42 self.public = public
43 self.create_if_missing = create_if_missing
44 self.max_connections = max_connections
45 self.max_single_put_size = max_single_put_size
46 self.max_block_size = max_block_size
47 self.checksum = checksum
48 self.socket_timeout = socket_timeout
49
50 # Using @lazy_property will (re-)create block_blob_service instance needed.
51 # Together with the __getstate__ implementation below, this allows
52 # AzureBlockBlobStore to be pickled, even if
53 # azure.storage.blob.BlockBlobService does not support pickling.
54 @lazy_property
55 def block_blob_service(self):
56 from azure.storage.blob import BlockBlobService, PublicAccess
57 block_blob_service = BlockBlobService(
58 connection_string=self.conn_string,
59 socket_timeout=self.socket_timeout,
60 )
61 if self.max_block_size is not None:
62 block_blob_service.MAX_BLOCK_SIZE = self.max_block_size
63 if self.max_single_put_size is not None:
64 block_blob_service.MAX_SINGLE_PUT_SIZE = self.max_single_put_size
65
66 if self.create_if_missing:
67 block_blob_service.create_container(
68 self.container,
69 public_access=PublicAccess.Container if self.public else None
70 )
71 return block_blob_service
72
73 def _delete(self, key):
74 with map_azure_exceptions(key=key,
75 exc_pass=['AzureMissingResourceHttpError']):
76 self.block_blob_service.delete_blob(self.container, key)
77
78 def _get(self, key):
79 with map_azure_exceptions(key=key):
80 return self.block_blob_service.get_blob_to_bytes(
81 container_name=self.container,
82 blob_name=key,
83 max_connections=self.max_connections,
84 ).content
85
86 def _has_key(self, key):
87 with map_azure_exceptions(key=key):
88 return self.block_blob_service.exists(self.container, key)
89
90 def iter_keys(self, prefix=u""):
91 if prefix == "":
92 prefix = None
93 with map_azure_exceptions():
94 blobs = self.block_blob_service.list_blob_names(self.container, prefix=prefix)
95 return (blob.decode('utf-8') if isinstance(blob, binary_type)
96 else blob for blob in blobs)
97
98 def iter_prefixes(self, delimiter, prefix=u""):
99 if prefix == "":
100 prefix = None
101 with map_azure_exceptions():
102 blobs = self.block_blob_service.list_blob_names(
103 self.container, prefix=prefix, delimiter=delimiter
104 )
105 return (blob.decode('utf-8') if isinstance(blob, binary_type)
106 else blob for blob in blobs)
107
108 def _open(self, key):
109 with map_azure_exceptions(key=key):
110 return IOInterface(self.block_blob_service, self.container, key, self.max_connections)
111
112 def _put(self, key, data):
113 from azure.storage.blob.models import ContentSettings
114
115 if self.checksum:
116 content_settings = ContentSettings(content_md5=_byte_buffer_md5(data))
117 else:
118 content_settings = ContentSettings()
119
120 with map_azure_exceptions(key=key):
121 self.block_blob_service.create_blob_from_bytes(
122 container_name=self.container,
123 blob_name=key,
124 blob=data,
125 max_connections=self.max_connections,
126 content_settings=content_settings,
127 )
128 return key
129
130 def _put_file(self, key, file):
131 from azure.storage.blob.models import ContentSettings
132
133 if self.checksum:
134 content_settings = ContentSettings(content_md5=_file_md5(file))
135 else:
136 content_settings = ContentSettings()
137
138 with map_azure_exceptions(key=key):
139 self.block_blob_service.create_blob_from_stream(
140 container_name=self.container,
141 blob_name=key,
142 stream=file,
143 max_connections=self.max_connections,
144 content_settings=content_settings,
145 )
146 return key
147
148 def _get_file(self, key, file):
149 with map_azure_exceptions(key=key):
150 self.block_blob_service.get_blob_to_stream(
151 container_name=self.container,
152 blob_name=key,
153 stream=file,
154 max_connections=self.max_connections,
155 )
156
157 def _get_filename(self, key, filename):
158 with map_azure_exceptions(key=key):
159 self.block_blob_service.get_blob_to_path(
160 container_name=self.container,
161 blob_name=key,
162 file_path=filename,
163 max_connections=self.max_connections,
164 )
165
166 def _put_filename(self, key, filename):
167 from azure.storage.blob.models import ContentSettings
168
169 if self.checksum:
170 content_settings = ContentSettings(content_md5=_filename_md5(filename))
171 else:
172 content_settings = ContentSettings()
173
174 with map_azure_exceptions(key=key):
175 self.block_blob_service.create_blob_from_path(
176 container_name=self.container,
177 blob_name=key,
178 file_path=filename,
179 max_connections=self.max_connections,
180 content_settings=content_settings,
181 )
182 return key
183
184 def __getstate__(self):
185 # keep all of __dict__, except lazy properties:
186 return {
187 key: value
188 for key, value in self.__dict__.items()
189 if not key.startswith(LAZY_PROPERTY_ATTR_PREFIX)
190 }
191
192
193 class IOInterface(io.BufferedIOBase):
194 """
195 Class which provides a file-like interface to selectively read from a blob in the blob store.
196 """
197
198 def __init__(self, block_blob_service, container_name, key, max_connections):
199 super(IOInterface, self).__init__()
200 self.block_blob_service = block_blob_service
201 self.container_name = container_name
202 self.key = key
203 self.max_connections = max_connections
204
205 blob = self.block_blob_service.get_blob_properties(container_name, key)
206 self.size = blob.properties.content_length
207 self.pos = 0
208
209 def tell(self):
210 """Returns he current offset as int. Always >= 0."""
211 if self.closed:
212 raise ValueError("I/O operation on closed file")
213 return self.pos
214
215 def read(self, size=-1):
216 """Returns 'size' amount of bytes or less if there is no more data.
217 If no size is given all data is returned. size can be >= 0."""
218 if self.closed:
219 raise ValueError("I/O operation on closed file")
220 with map_azure_exceptions(key=self.key):
221 if size < 0:
222 size = self.size - self.pos
223
224 end = min(self.pos + size - 1, self.size - 1)
225 if self.pos > end:
226 return b''
227 b = self.block_blob_service.get_blob_to_bytes(
228 container_name=self.container_name,
229 blob_name=self.key,
230 start_range=self.pos,
231 end_range=end, # end_range is inclusive
232 max_connections=self.max_connections,
233 )
234 self.pos += len(b.content)
235 return b.content
236
237 def seek(self, offset, whence=0):
238 """Move to a new offset either relative or absolute. whence=0 is
239 absolute, whence=1 is relative, whence=2 is relative to the end.
240
241 Any relative or absolute seek operation which would result in a
242 negative position is undefined and that case can be ignored
243 in the implementation.
244
245 Any seek operation which moves the position past the end of the stream
246 should succeed. tell() should report that position and read()
247 should return an empty bytes object."""
248 if self.closed:
249 raise ValueError("I/O operation on closed file")
250 if whence == 0:
251 if offset < 0:
252 raise IOError('seek would move position outside the file')
253 self.pos = offset
254 elif whence == 1:
255 if self.pos + offset < 0:
256 raise IOError('seek would move position outside the file')
257 self.pos += offset
258 elif whence == 2:
259 if self.size + offset < 0:
260 raise IOError('seek would move position outside the file')
261 self.pos = self.size + offset
262 return self.pos
263
264 def seekable(self):
265 return True
266
267 def readable(self):
268 return True
0 #!/usr/bin/env python
10 # coding=utf8
21
3 import base64
4 import hashlib
5 import io
6 from contextlib import contextmanager
2 try:
3 from azure.storage.blob import BlockBlobService # noqa: F401
4 from ._azurestore_old import AzureBlockBlobStore
5 except ImportError:
6 from ._azurestore_new import AzureBlockBlobStore
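# Dispatch on the installed azure-storage-blob major version: versions < 12
# expose BlockBlobService (handled by _azurestore_old); if that import fails,
# azure-storage-blob >= 12 is assumed and _azurestore_new is used.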
77
8 from .._compat import binary_type
9 from .. import KeyValueStore
10
11 LAZY_PROPERTY_ATTR_PREFIX = '_lazy_'
12
13
14 def lazy_property(fn):
15 """Decorator that makes a property lazy-evaluated.
16
17 On first access, lazy properties are computed and saved
18 as instance attribute with the name `'_lazy_' + method_name`
19 Any subsequent property access then returns the cached value."""
20 attr_name = LAZY_PROPERTY_ATTR_PREFIX + fn.__name__
21
22 @property
23 def _lazy_property(self):
24 if not hasattr(self, attr_name):
25 setattr(self, attr_name, fn(self))
26 return getattr(self, attr_name)
27 return _lazy_property
28
29
30 def _file_md5(file_):
31 """
32 Compute the md5 digest of a file in base64 encoding.
33 """
34 md5 = hashlib.md5()
35 chunk_size = 128 * md5.block_size
36 for chunk in iter(lambda: file_.read(chunk_size), b''):
37 md5.update(chunk)
38 file_.seek(0)
39 byte_digest = md5.digest()
40 return base64.b64encode(byte_digest).decode()
41
42
43 def _filename_md5(filename):
44 """
45 Compute the md5 digest of a file in base64 encoding.
46 """
47 with open(filename, 'rb') as f:
48 return _file_md5(f)
49
50
51 def _byte_buffer_md5(buffer_):
52 """
53 Computes the md5 digest of a byte buffer in base64 encoding.
54 """
55 md5 = hashlib.md5(buffer_)
56 byte_digest = md5.digest()
57 return base64.b64encode(byte_digest).decode()
58
59
60 @contextmanager
61 def map_azure_exceptions(key=None, exc_pass=()):
62 """Map Azure-specific exceptions to the simplekv-API."""
63 from azure.common import AzureMissingResourceHttpError, AzureHttpError,\
64 AzureException
65 try:
66 yield
67 except AzureMissingResourceHttpError as ex:
68 if ex.__class__.__name__ not in exc_pass:
69 s = str(ex)
70 if s.startswith(u"The specified container does not exist."):
71 raise IOError(s)
72 raise KeyError(key)
73 except AzureHttpError as ex:
74 if ex.__class__.__name__ not in exc_pass:
75 raise IOError(str(ex))
76 except AzureException as ex:
77 if ex.__class__.__name__ not in exc_pass:
78 raise IOError(str(ex))
79
80
81 class AzureBlockBlobStore(KeyValueStore):
82 def __init__(self, conn_string=None, container=None, public=False,
83 create_if_missing=True, max_connections=2, checksum=False,
84 socket_timeout=None):
85 self.conn_string = conn_string
86 self.container = container
87 self.public = public
88 self.create_if_missing = create_if_missing
89 self.max_connections = max_connections
90 self.checksum = checksum
91 self.socket_timeout = socket_timeout
92
93 # Using @lazy_property will (re-)create block_blob_service instance needed.
94 # Together with the __getstate__ implementation below, this allows
95 # AzureBlockBlobStore to be pickled, even if
96 # azure.storage.blob.BlockBlobService does not support pickling.
97 @lazy_property
98 def block_blob_service(self):
99 from azure.storage.blob import BlockBlobService, PublicAccess
100 block_blob_service = BlockBlobService(
101 connection_string=self.conn_string,
102 socket_timeout=self.socket_timeout,
103 )
104 if self.create_if_missing:
105 block_blob_service.create_container(
106 self.container,
107 public_access=PublicAccess.Container if self.public else None
108 )
109 return block_blob_service
110
111 def _delete(self, key):
112 with map_azure_exceptions(key=key,
113 exc_pass=['AzureMissingResourceHttpError']):
114 self.block_blob_service.delete_blob(self.container, key)
115
116 def _get(self, key):
117 with map_azure_exceptions(key=key):
118 return self.block_blob_service.get_blob_to_bytes(
119 container_name=self.container,
120 blob_name=key,
121 max_connections=self.max_connections,
122 ).content
123
124 def _has_key(self, key):
125 with map_azure_exceptions(key=key):
126 return self.block_blob_service.exists(self.container, key)
127
128 def iter_keys(self, prefix=u""):
129 if prefix == "":
130 prefix = None
131 with map_azure_exceptions():
132 blobs = self.block_blob_service.list_blob_names(self.container, prefix=prefix)
133 return (blob.decode('utf-8') if isinstance(blob, binary_type)
134 else blob for blob in blobs)
135
136 def iter_prefixes(self, delimiter, prefix=u""):
137 if prefix == "":
138 prefix = None
139 with map_azure_exceptions():
140 blobs = self.block_blob_service.list_blob_names(self.container, prefix=prefix, delimiter=delimiter)
141 return (blob.decode('utf-8') if isinstance(blob, binary_type)
142 else blob for blob in blobs)
143
144 def _open(self, key):
145 with map_azure_exceptions(key=key):
146 return IOInterface(self.block_blob_service, self.container, key, self.max_connections)
147
148 def _put(self, key, data):
149 from azure.storage.blob.models import ContentSettings
150
151 if self.checksum:
152 content_settings = ContentSettings(content_md5=_byte_buffer_md5(data))
153 else:
154 content_settings = ContentSettings()
155
156 with map_azure_exceptions(key=key):
157 self.block_blob_service.create_blob_from_bytes(
158 container_name=self.container,
159 blob_name=key,
160 blob=data,
161 max_connections=self.max_connections,
162 content_settings=content_settings,
163 )
164 return key
165
166 def _put_file(self, key, file):
167 from azure.storage.blob.models import ContentSettings
168
169 if self.checksum:
170 content_settings = ContentSettings(content_md5=_file_md5(file))
171 else:
172 content_settings = ContentSettings()
173
174 with map_azure_exceptions(key=key):
175 self.block_blob_service.create_blob_from_stream(
176 container_name=self.container,
177 blob_name=key,
178 stream=file,
179 max_connections=self.max_connections,
180 content_settings=content_settings,
181 )
182 return key
183
184 def _get_file(self, key, file):
185 with map_azure_exceptions(key=key):
186 self.block_blob_service.get_blob_to_stream(
187 container_name=self.container,
188 blob_name=key,
189 stream=file,
190 max_connections=self.max_connections,
191 )
192
193 def _get_filename(self, key, filename):
194 with map_azure_exceptions(key=key):
195 self.block_blob_service.get_blob_to_path(
196 container_name=self.container,
197 blob_name=key,
198 file_path=filename,
199 max_connections=self.max_connections,
200 )
201
202 def _put_filename(self, key, filename):
203 from azure.storage.blob.models import ContentSettings
204
205 if self.checksum:
206 content_settings = ContentSettings(content_md5=_filename_md5(filename))
207 else:
208 content_settings = ContentSettings()
209
210 with map_azure_exceptions(key=key):
211 self.block_blob_service.create_blob_from_path(
212 container_name=self.container,
213 blob_name=key,
214 file_path=filename,
215 max_connections=self.max_connections,
216 content_settings=content_settings,
217 )
218 return key
219
220 def __getstate__(self):
221 # keep all of __dict__, except lazy properties:
222 return {
223 key: value
224 for key, value in self.__dict__.items()
225 if not key.startswith(LAZY_PROPERTY_ATTR_PREFIX)
226 }
227
228
229 class IOInterface(io.BufferedIOBase):
230 """
231 Class which provides a file-like interface to selectively read from a blob in the blob store.
232 """
233 def __init__(self, block_blob_service, container_name, key, max_connections):
234 super(IOInterface, self).__init__()
235 self.block_blob_service = block_blob_service
236 self.container_name = container_name
237 self.key = key
238 self.max_connections = max_connections
239
240 blob = self.block_blob_service.get_blob_properties(container_name, key)
241 self.size = blob.properties.content_length
242 self.pos = 0
243
244 def tell(self):
245 """Returns he current offset as int. Always >= 0."""
246 if self.closed:
247 raise ValueError("I/O operation on closed file")
248 return self.pos
249
250 def read(self, size=-1):
251 """Returns 'size' amount of bytes or less if there is no more data.
252 If no size is given all data is returned. size can be >= 0."""
253 if self.closed:
254 raise ValueError("I/O operation on closed file")
255 with map_azure_exceptions(key=self.key):
256 if size < 0:
257 size = self.size - self.pos
258
259 end = min(self.pos + size - 1, self.size - 1)
260 if self.pos > end:
261 return b''
262 b = self.block_blob_service.get_blob_to_bytes(
263 container_name=self.container_name,
264 blob_name=self.key,
265 start_range=self.pos,
266 end_range=end, # end_range is inclusive
267 max_connections=self.max_connections,
268 )
269 self.pos += len(b.content)
270 return b.content
271
272 def seek(self, offset, whence=0):
273 """Move to a new offset either relative or absolute. whence=0 is
274 absolute, whence=1 is relative, whence=2 is relative to the end.
275
276 Any relative or absolute seek operation which would result in a
277 negative position is undefined and that case can be ignored
278 in the implementation.
279
280 Any seek operation which moves the position after the stream
281 should succeed. tell() should report that position and read()
282 should return an empty bytes object."""
283 if self.closed:
284 raise ValueError("I/O operation on closed file")
285 if whence == 0:
286 if offset < 0:
287 raise IOError('seek would move position outside the file')
288 self.pos = offset
289 elif whence == 1:
290 if self.pos + offset < 0:
291 raise IOError('seek would move position outside the file')
292 self.pos += offset
293 elif whence == 2:
294 if self.size + offset < 0:
295 raise IOError('seek would move position outside the file')
296 self.pos = self.size + offset
297 return self.pos
298
299 def seekable(self):
300 return True
301
302 def readable(self):
303 return True
8 __all__ = ["AzureBlockBlobStore"]
111111 if not self._has_key(source):
112112 raise KeyError(source)
113113 with map_boto_exceptions(key=source):
114 self.bucket.copy_key(self.prefix + dest, self.bucket.name, self.prefix + source)
114 self.bucket.copy_key(self.prefix + dest, self.bucket.name, self.prefix + source)
115115
116116 def _put(self, key, data):
117117 k = self.__new_key(key)
22 import os
33 import time
44 import tempfile
5 from tempdir import TempDir
65
76 import pytest
87 from simplekv._compat import BytesIO, xrange, text_type
132131
133132 assert store.get(key) == value
134133
135 def test_get_into_file(self, store, key, value):
136 with TempDir() as tmpdir:
137 store.put(key, value)
138 out_filename = os.path.join(tmpdir, 'output')
139
140 store.get_file(key, out_filename)
141
142 assert open(out_filename, 'rb').read() == value
134 def test_get_into_file(self, store, key, value, tmp_path):
135 store.put(key, value)
136 out_filename = os.path.join(str(tmp_path), 'output')
137
138 store.get_file(key, out_filename)
139
140 assert open(out_filename, 'rb').read() == value
143141
144142 def test_get_into_stream(self, store, key, value):
145143 store.put(key, value)
4747 cfg_fn = 'boto_credentials.ini'
4848
4949 parser = ConfigParser({'host': 's3.amazonaws.com',
50 'is_secure': True,
51 'ordinary_calling_format': False,
52 'port': None})
50 'is_secure': 'true',
51 'ordinary_calling_format': 'false',
52 })
5353 if not parser.read(cfg_fn):
5454 pytest.skip('file {} not found'.format(cfg_fn))
5555
44 from basic_store import BasicStore, OpenSeekTellStore
55 from conftest import ExtendedKeyspaceTests
66 import pytest
7
8 pytest.importorskip('azure.storage.blob')
9
10
11 def load_azure_credentials():
12 # loaded from the same place as tox.ini. here's a sample
13 #
14 # [my-azure-storage-account]
15 # account_name=foo
16 # account_key=bar
7 from base64 import b64encode
8
9 asb = pytest.importorskip('azure.storage.blob')
10
11
12 def get_azure_conn_string():
1713 cfg_fn = 'azure_credentials.ini'
18
19 parser = ConfigParser()
14 parser = ConfigParser({
15 'protocol': 'https',
16 'endpoint': '',
17 'account_name': '',
18 })
2019 result = parser.read(cfg_fn)
2120 if not result:
2221 pytest.skip('file {} not found'.format(cfg_fn))
2322
2423 for section in parser.sections():
25 return {
26 'account_name': parser.get(section, 'account_name'),
27 'account_key': parser.get(section, 'account_key'),
28 }
29
30
31 def create_azure_conn_string(credentials):
32 account_name = credentials['account_name']
33 account_key = credentials['account_key']
34 fmt_str = 'DefaultEndpointsProtocol=https;AccountName={};AccountKey={}'
35 return fmt_str.format(account_name, account_key)
24 account_name = parser.get(section, 'account_name')
25 if not account_name:
26 pytest.skip("no 'account_name' found in file {}".format(cfg_fn))
27
28 account_key = parser.get(section, 'account_key')
29 protocol = parser.get(section, 'protocol')
30 endpoint = parser.get(section, 'endpoint')
31 conn_string = 'DefaultEndpointsProtocol={};AccountName={};AccountKey={}'.format(
32 protocol, account_name, account_key
33 )
34 if endpoint:
35 conn_string += ';BlobEndpoint={}'.format(endpoint)
36 return conn_string
37
38
39 def _delete_container(conn_string, container):
40 try:
41 # for azure-storage-blob>=12:
42 from azure.storage.blob import BlobServiceClient
43 from azure.core.exceptions import AzureError
44
45 s = BlobServiceClient.from_connection_string(conn_string)
46 try:
47 s.delete_container(container)
48 except AzureError as ex:
49 # ignore the ContainerNotFound error:
50 if ex.error_code != 'ContainerNotFound':
51 raise
52 except ImportError:
53 # for azure-storage-blob<12
54 from azure.storage.blob import BlockBlobService
55 s = BlockBlobService(connection_string=conn_string)
56 s.delete_container(container)
3657
3758
3859 class TestAzureStorage(BasicStore, OpenSeekTellStore):
3960 @pytest.fixture
4061 def store(self):
41 from azure.storage.blob import BlockBlobService
42
43 container = uuid()
44 conn_string = create_azure_conn_string(load_azure_credentials())
45 s = BlockBlobService(connection_string=conn_string)
46
62 container = str(uuid())
63 conn_string = get_azure_conn_string()
4764 yield AzureBlockBlobStore(conn_string=conn_string, container=container,
4865 public=False)
49 s.delete_container(container)
66 _delete_container(conn_string, container)
5067
5168
5269 class TestExtendedKeysAzureStorage(TestAzureStorage, ExtendedKeyspaceTests):
5370 @pytest.fixture
5471 def store(self):
72 azure_storage_blob_major_version = int(asb.__version__.split('.', 1)[0])
73 conn_string = get_azure_conn_string()
74 use_azurite = 'http://127.0.0.1:10000/devstoreaccount1' in conn_string
75 if use_azurite and azure_storage_blob_major_version < 12:
76 pytest.skip("Compatibility issues with azurite and azure-storage-blob<12")
77 container = str(uuid())
78
5579 class ExtendedKeysStore(ExtendedKeyspaceMixin, AzureBlockBlobStore):
5680 pass
57 from azure.storage.blob import BlockBlobService
58
59 container = uuid()
60 conn_string = create_azure_conn_string(load_azure_credentials())
61 s = BlockBlobService(connection_string=conn_string)
62
6381 yield ExtendedKeysStore(conn_string=conn_string,
6482 container=container, public=False)
65 s.delete_container(container)
83 _delete_container(conn_string, container)
6684
6785
6886 def test_azure_setgetstate():
69 from azure.storage.blob import BlockBlobService
70 container = uuid()
71 conn_string = create_azure_conn_string(load_azure_credentials())
72 s = BlockBlobService(connection_string=conn_string)
87 container = str(uuid())
88 conn_string = get_azure_conn_string()
7389 store = AzureBlockBlobStore(conn_string=conn_string, container=container)
7490 store.put(u'key1', b'value1')
7591
7793 store = pickle.loads(buf)
7894
7995 assert store.get(u'key1') == b'value1'
80 s.delete_container(container)
96 _delete_container(conn_string, container)
8197
8298
8399 def test_azure_store_attributes():
99115 assert abbs2.checksum is True
100116
101117
118 def test_azure_special_args():
119 # For azure-storage-blob 12,
120 # test that the special arguments `max_block_size` and
121 # `max_single_put_size` propagate to the constructed ContainerClient
122 conn_string = get_azure_conn_string()
123 MBS = 983645
124 MSP = 756235
125 abbs = AzureBlockBlobStore(
126 conn_string=conn_string,
127 container='container-unused',
128 max_block_size=MBS,
129 max_single_put_size=MSP,
130 create_if_missing=False
131 )
132 if hasattr(abbs, "blob_container_client"):
133 cfg = abbs.blob_container_client._config
134 assert cfg.max_single_put_size == MSP
135 assert cfg.max_block_size == MBS
136
137
102138 class TestAzureExceptionHandling(object):
103139 def test_missing_container(self):
104 container = uuid()
105 conn_string = create_azure_conn_string(load_azure_credentials())
140 container = str(uuid())
141 conn_string = get_azure_conn_string()
106142 store = AzureBlockBlobStore(conn_string=conn_string,
107143 container=container,
108144 create_if_missing=False)
109145 with pytest.raises(IOError) as exc:
110 store.iter_keys()
146 store.keys()
111147 assert u"The specified container does not exist." in str(exc.value)
112148
113149 def test_wrong_endpoint(self):
114 from azure.storage.common.retry import ExponentialRetry
115 container = uuid()
116 conn_string = create_azure_conn_string(load_azure_credentials())
117 conn_string += \
118 ";BlobEndpoint=https://hopenostorethere.blob.core.windows.net;"
150 container = str(uuid())
151 conn_string = get_azure_conn_string()
152 conn_settings = dict([s.split("=", 1) for s in conn_string.split(";") if s])
153 conn_settings['BlobEndpoint'] = 'https://host-does-not-exist/'
154 conn_string = ';'.join('{}={}'.format(key, value) for key, value in conn_settings.items())
119155 store = AzureBlockBlobStore(conn_string=conn_string,
120156 container=container,
121157 create_if_missing=False)
122 store.block_blob_service.retry = ExponentialRetry(max_attempts=0).retry
158 if hasattr(store, 'block_blob_service'):
159 from azure.storage.common.retry import ExponentialRetry
160 store.block_blob_service.retry = ExponentialRetry(
161 max_attempts=0
162 ).retry
163 else:
164 store.blob_container_client._config.retry_policy.total_retries = 0
123165
124166 with pytest.raises(IOError) as exc:
125167 store.put(u"key", b"data")
126 assert u"Failed to establish a new connection" in str(exc.value)
168 assert u"connect" in str(exc.value)
127169
128170 def test_wrong_credentials(self):
129 from azure.storage.common.retry import ExponentialRetry
130 container = uuid()
171 container = str(uuid())
131172 conn_string = \
132173 'DefaultEndpointsProtocol=https;AccountName={};AccountKey={}'.\
133174 format("testaccount", "wrongsecret")
134175 store = AzureBlockBlobStore(conn_string=conn_string,
135176 container=container,
136177 create_if_missing=False)
137 store.block_blob_service.retry = ExponentialRetry(max_attempts=0).retry
178
179 if hasattr(store, 'block_blob_service'):
180 from azure.storage.common.retry import ExponentialRetry
181 store.block_blob_service.retry = ExponentialRetry(
182 max_attempts=0
183 ).retry
184 else:
185 store.blob_container_client._config.retry_policy.total_retries = 0
138186
139187 with pytest.raises(IOError) as exc:
140188 store.put(u"key", b"data")
148196
149197 @pytest.fixture
150198 def store(self):
151 from azure.storage.blob import BlockBlobService
152
153 container = uuid()
154 conn_string = create_azure_conn_string(load_azure_credentials())
155 s = BlockBlobService(connection_string=conn_string)
199 container = str(uuid())
200 conn_string = get_azure_conn_string()
156201
157202 yield AzureBlockBlobStore(
158203 conn_string=conn_string,
160205 public=False,
161206 checksum=True,
162207 )
163 s.delete_container(container)
208 _delete_container(conn_string, container)
164209
165210 def _checksum(self, store):
166 return store.block_blob_service.get_blob_properties(
167 store.container,
168 self.KEY,
169 ).properties.content_settings.content_md5
211 # request the md5 checksum from azure and return the b64 encoded value
212 if hasattr(store, 'block_blob_service'):
213 return store.block_blob_service.get_blob_properties(
214 store.container,
215 self.KEY,
216 ).properties.content_settings.content_md5
217 else:
218 checksum_bytes = store.blob_container_client.get_blob_client(
219 self.KEY
220 ).get_blob_properties().content_settings.content_md5
221 return b64encode(checksum_bytes).decode()
170222
171223 def test_checksum_put(self, store):
172224 store.put(self.KEY, self.CONTENT)
00 #!/usr/bin/env python
11
22 import os
3 from tempdir import TempDir
43
54 import pytest
65
4746 # Disable max key length test as it leads to problems with minio
4847 test_max_key_length = None
4948
50 def test_get_filename_nonexistant(self, store, key):
49 def test_get_filename_nonexistant(self, store, key, tmp_path):
5150 # NOTE: boto misbehaves here and tries to erase the target file
5251 # the parent tests use /dev/null, which you really should not try
5352 # to os.remove!
54 with TempDir() as tmpdir:
55 with pytest.raises(KeyError):
56 store.get_file(key, os.path.join(tmpdir, 'a'))
53 with pytest.raises(KeyError):
54 store.get_file(key, os.path.join(str(tmp_path), 'a'))
5755
58 def test_key_error_on_nonexistant_get_filename(self, store, key):
59 with TempDir() as tmpdir:
60 with pytest.raises(KeyError):
61 store.get_file(key, os.path.join(tmpdir, 'a'))
56 def test_key_error_on_nonexistant_get_filename(self, store, key, tmp_path):
57 with pytest.raises(KeyError):
58 store.get_file(key, os.path.join(str(tmp_path), 'a'))
6259
6360 def test_storage_class_put(
6461 self, store, prefix, key, value, storage_class, bucket
66 from simplekv._compat import urlparse
77
88 from simplekv.fs import FilesystemStore, WebFilesystemStore
9 from tempdir import TempDir
109
1110 from basic_store import BasicStore
1211 from url_store import UrlStore
2120
2221 class TestBaseFilesystemStore(BasicStore, UrlStore, UUIDGen, HashGen):
2322 @pytest.yield_fixture
24 def tmpdir(self):
25 with TempDir() as tmpdir:
26 yield tmpdir
23 def tmpdir(self, tmp_path):
24 yield str(tmp_path)
2725
2826 @pytest.fixture
2927 def store(self, tmpdir):
00 from basic_store import BasicStore
11 from dulwich.repo import Repo
22 from idgens import UUIDGen, HashGen
3 from tempdir import TempDir
43 import pytest
54
65 from simplekv.git import GitCommitStore
2019 return request.param
2120
2221 @pytest.yield_fixture
23 def repo_path(self):
24 with TempDir() as tmpdir:
25 Repo.init_bare(tmpdir)
26 yield tmpdir
22 def repo_path(self, tmp_path):
23 Repo.init_bare(str(tmp_path))
24 yield str(tmp_path)
2725
2826 @pytest.fixture
2927 def store(self, repo_path, branch, subdir_name):
3838 def chunk_sizes(self, value):
3939 return [10 ** n for n in xrange(2, 8)]
4040
41 def test_close(self, create_reader):
42 reader = create_reader()
43 assert not reader.source.closed
44 reader.close()
45 assert reader.source.closed
46
47 def test_close_via_context(self, create_reader):
48 reader = create_reader()
49 assert not reader.source.closed
50 with reader as r:
51 assert r is reader
52 assert reader.source.closed
53
4154 def test_reading_limit_0(self, create_reader):
4255 reader = create_reader()
43 assert reader.read(0) == ''
44 assert reader.read(0) == ''
56 data = reader.read(0)
57 assert isinstance(data, bytes)
58 assert len(data) == 0
59 data = reader.read(0)
60 assert isinstance(data, bytes)
61 assert len(data) == 0
4562
4663 def test_reading_with_limit(self, secret_key, hashfunc, value,
4764 create_reader, chunk_sizes):
99116 # this only works with dicts, as we access the internal structures to
100117 # manipulate values
101118 class HMACDec(object):
119
102120 @pytest.fixture
103121 def hmacstore(self, secret_key, store):
104122 return HMACDecorator(secret_key, store)
109127
110128 with pytest.raises(VerificationException):
111129 hmacstore.get(key)
130
131 def test_copy_raises_not_implemented(self, store):
132 with pytest.raises(NotImplementedError):
133 HMACDecorator(b'secret', store).copy(u'src', u'dest')
134
135 def test_put_file_obj(self, key, value, hmacstore):
136 hmacstore.put_file(key, BytesIO(value))
137 assert hmacstore.get(key) == value
138
139 def test_put_file_str(self, key, value, hmacstore):
140 with tempfile.NamedTemporaryFile(mode='wb', delete=False) as f:
141 f.write(value)
142 hmacstore.put_file(key, f.name)
143 assert hmacstore.get(key) == value
144
145 def test_get_file_obj(self, key, value, hmacstore):
146 hmacstore.put(key, value)
147 b = BytesIO()
148 hmacstore.get_file(key, b)
149 assert b.getvalue() == value
150
151 def test_get_file_non_writable_target(self, key, value, hmacstore):
152 hmacstore.put(key, value)
153 path = '/tmp/this/file/does/not/exist'
154 with pytest.raises(IOError, match='Error opening {} for writing'.format(path)):
155 hmacstore.get_file(key, path)
112156
113157 def test_get_file_fails_on_manipulation(self, hmacstore, key, value):
114158 hmacstore.put(key, value)
00 [tox]
1 envlist = py{27,34,35,36}
2
3 [pytest]
4 pep8ignore = test_* E402
1 envlist = py{27,35,36,37,38},py36-old-azure-storage-blob
52
63 [testenv]
74 # See https://github.com/boto/boto/issues/3717
96 BOTO_CONFIG=/dev/null
107 BOTO_PATH=/dev/null
118 deps=
12 pytest
13 pytest-pep8
9 # as a workaround for https://github.com/pytest-dev/pytest/issues/6925
10 # use a somewhat older version of pytest:
11 pytest <= 5.3.5
12 pycodestyle
1413 pytest-cov
1514 pytest-mock
1615 pytest-xdist
1716 mock
18 tempdir
1917 redis
2018 psycopg2
2119 sqlalchemy
2624 azure-storage-blob
2725 futures
2826 # ideally we would not need futures here but it doesn't work otherwise
29 commands=py.test -n 4 --dist=loadfile --cov=simplekv -rs --pep8 --doctest-modules simplekv/idgen.py simplekv/fs.py tests
27 commands=
28 pycodestyle --ignore=E402,E741 --max-line-length 98 simplekv tests
29 pytest -n 4 --dist=loadfile --cov=simplekv -rs --doctest-modules simplekv/idgen.py simplekv/fs.py tests
30
31 [testenv:py36-old-azure-storage-blob]
32 # Test on one python version with the "old" code for azure-storage-blob<12
33 deps=
34 pytest
35 pytest-cov
36 pytest-mock
37 mock
38 azure-storage-blob<12
39 # For some reason, switching on usedevelop makes collecting coverage data much more reliable
40 usedevelop=True
41 commands=pytest --cov=simplekv tests/test_azure_store.py
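# A single environment can be run locally with, for example:
#   tox -e py36-old-azure-storage-blob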