diff --git a/.travis.yml b/.travis.yml
index 2cf97ce..c0c1d19 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,12 +1,12 @@
 sudo: false
 language: python
 python:
-  - "2.6"
   - "2.7"
-  - "3.3"
   - "3.4"
   - "3.5"
   - "3.6"
+  - "3.7-dev"
+  - nightly
 services:
   - mongodb
 
diff --git a/README.rst b/README.rst
index a0dbe76..29ce898 100644
--- a/README.rst
+++ b/README.rst
@@ -16,6 +16,17 @@ DEPOT - File Storage Made Easy
 DEPOT is a framework for easily storing and serving files in
 web applications on Python2.6+ and Python3.2+.
 
+DEPOT supports storing files in multiple backends, like:
+
+    * Local Disk
+    * In Memory (for tests)
+    * On GridFS
+    * On Amazon S3 (or compatible services)
+
+and integrates with databases by providing files
+attached to your **SQLAlchemy** or **Ming/MongoDB** models
+with respect to transaction behaviours (files are rolled back too).
+
 Installing
 ----------
 
@@ -30,6 +41,9 @@ To start using Depot refer to `Documentation <https://depot.readthedocs.io/en/la
 
 DEPOT was `presented at PyConUK and PyConFR <http://www.slideshare.net/__amol__/pyconfr-2014-depot-story-of-a-filewrite-gone-wrong>`_ in 2014
 
+standalone
+~~~~~~~~~~
+
 Here is a simple example of using depot standalone to store files on MongoDB::
 
     from depot.manager import DepotManager
@@ -50,20 +64,74 @@ Here is a simple example of using depot standalone to store files on MongoDB::
     print stored_file.filename
     print stored_file.content_type
 
+models
+~~~~~~
+
+Or you can use depot with SQLAlchemy to store attachments::
+
+    from depot.fields.sqlalchemy import UploadedFileField
+    from depot.fields.specialized.image import UploadedImageWithThumb
+
+
+    class Document(Base):
+        __tablename__ = 'document'
+
+        uid = Column(Integer, autoincrement=True, primary_key=True)
+        name = Column(Unicode(16), unique=True)
+        content = Column('content_col', UploadedFileField)  # plain attached file
+
+        # photo field will automatically generate thumbnail
+        photo = Column(UploadedFileField(upload_type=UploadedImageWithThumb))
+
+
+    # Store documents with attached files, the source can be a file or bytes
+    doc = Document(name=u'Foo',
+                content=b'TEXT CONTENT STORED AS FILE',
+                photo=open('/tmp/file.png', 'rb'))
+    DBSession.add(doc)
+    DBSession.flush()
+
+    # DEPOT is session aware, commit/rollback to keep or delete the stored files.
+    DBSession.commit()
+
 ChangeLog
 ---------
 
+0.8.0
+~~~~~
+
+- Replaced ``unidecode`` dependency with ``anyascii`` to better comply with the MIT License.
+
+0.7.1
+~~~~~
+
+- Fix a bug in AWS-S3 support for unicode filenames.
+
+0.7.0
+~~~~~
+
+- Support for ``storage_class`` option in ``depot.io.boto3.S3Storage`` backend. Defaults to ``STANDARD``
+
+0.6.0
+~~~~~
+
+- Officially support Python 3.7
+- Fix DEPOT wrongly serving requests for any url that starts with the mountpoint. (IE: ``/depotsomething`` was wrongly served for ``/depot`` mountpoint)
+- In SQLAlchemy properly handle deletion of objects deleted through ``Relationship.remove`` (IE: ``parent.children.remove(X)``)
+- In SQLAlchemy properly handle entities deleted through ``cascade='delete-orphan'``
+
 0.5.2
 ~~~~~
 
 - Fixed an *start_response called a second time without providing exc_info* error with storages supporting plublic urls
 
+
 0.5.1
 ~~~~~
 
 - URLs generated by ``DepotMiddleware`` are now guaranteed to be plain ascii
-- Bucket existance with S3 storages should now be more reliable when the 
-  bucket didn't already exist
+- [Breaking change]: Bucket existence with S3 storages should now be more reliable when the
+  bucket didn't already exist, but it requires an additional AWS policy: `s3:ListAllMyBuckets` that wasn't required on 0.5.0
 
 0.5.0
 ~~~~~
diff --git a/debian/changelog b/debian/changelog
index b9ec055..e3f4d53 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+python-filedepot (0.8.0-0kali1) UNRELEASED; urgency=low
+
+  * New upstream release.
+
+ -- Kali Janitor <janitor@kali.org>  Fri, 30 Apr 2021 22:53:08 -0000
+
 python-filedepot (0.5.2-0kali5) kali-dev; urgency=medium
 
   * Remove Python 2 module
diff --git a/debian/patches/fix-privacy-breach.patch b/debian/patches/fix-privacy-breach.patch
index 46648f8..a3a0443 100644
--- a/debian/patches/fix-privacy-breach.patch
+++ b/debian/patches/fix-privacy-breach.patch
@@ -3,8 +3,10 @@ Author: Sophie Brun <sophie@freexian.com>
 Last-Update: 2018-08-23
 ---
 This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
---- a/docs/_themes/artichoke/layout.html
-+++ b/docs/_themes/artichoke/layout.html
+Index: python-filedepot/docs/_themes/artichoke/layout.html
+===================================================================
+--- python-filedepot.orig/docs/_themes/artichoke/layout.html
++++ python-filedepot/docs/_themes/artichoke/layout.html
 @@ -11,9 +11,6 @@
  
  {% block header %}
@@ -15,8 +17,10 @@ This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
  
        <div class="header">
          {%- if logo %}
---- a/docs/index.rst
-+++ b/docs/index.rst
+Index: python-filedepot/docs/index.rst
+===================================================================
+--- python-filedepot.orig/docs/index.rst
++++ python-filedepot/docs/index.rst
 @@ -37,7 +37,7 @@ you can have a look at the PyConFR slide
  
  .. raw:: html
diff --git a/depot/_compat.py b/depot/_compat.py
index 0cc9450..12cbb92 100644
--- a/depot/_compat.py
+++ b/depot/_compat.py
@@ -27,6 +27,9 @@ if not PY2:  # pragma: no cover
     def percent_encode(string, safe, encoding):
         return quote(string, safe, encoding, errors='strict')
 
+    def percent_decode(string):
+        return unquote(string)
+
 else:  # pragma: no cover
     from urllib import quote, unquote
 
@@ -45,7 +48,10 @@ else:  # pragma: no cover
         encoding = kwargs.pop('encoding')
         return quote(string.encode(encoding), **kwargs)
 
+    def percent_decode(string):
+        return unquote(string)
+
 
 def with_metaclass(meta, base=object):
     """Create a base class with a metaclass."""
-    return meta("NewBase", (base,), {})
\ No newline at end of file
+    return meta("NewBase", (base,), {})
diff --git a/depot/fields/sqlalchemy.py b/depot/fields/sqlalchemy.py
index 09ede89..4a3c06b 100644
--- a/depot/fields/sqlalchemy.py
+++ b/depot/fields/sqlalchemy.py
@@ -153,6 +153,24 @@ class _SQLAMutationTracker(object):
                 session._depot_old = getattr(session, '_depot_old', set())
                 session._depot_old.update(deleted_files)
 
+    @classmethod
+    def _session_after_flush(cls, session, flush_context):
+        # Tracking deleted object _after_ flush
+        # is the way we can track for objects deleted through
+        # a relationship.remove, because those only get
+        # deleted _after_ the session was flushed. Not before.
+        for state in flush_context.states.keys():
+            if not state.deleted:
+                continue
+            obj = state.obj()
+            class_ = obj.__class__
+            tracked_columns = cls.mapped_entities.get(class_, tuple())
+            for col in tracked_columns:
+                value = getattr(obj, col)
+                if value is not None:
+                    session._depot_old = getattr(session, '_depot_old', set())
+                    session._depot_old.update(value.files)
+
     @classmethod
     def _session_attached(cls, session, instance):
         session._depot_new = getattr(session, '_depot_new', set())
@@ -165,6 +183,7 @@ class _SQLAMutationTracker(object):
         event.listen(Session, 'after_commit', cls._session_committed)
         event.listen(Session, 'before_attach', cls._session_attached)
         event.listen(Session, 'before_flush', cls._session_flush)
+        event.listen(Session, 'after_flush_postexec', cls._session_after_flush)
 
 _SQLAMutationTracker.setup()
 
diff --git a/depot/io/awss3.py b/depot/io/awss3.py
index 7adc241..278a283 100644
--- a/depot/io/awss3.py
+++ b/depot/io/awss3.py
@@ -9,7 +9,7 @@ from __future__ import absolute_import
 from datetime import datetime
 import uuid
 from boto.s3.connection import S3Connection
-from depot._compat import unicode_text
+from depot._compat import unicode_text, percent_encode, percent_decode
 from depot.utils import make_content_disposition
 
 from .interfaces import FileStorage, StoredFile
@@ -23,8 +23,10 @@ class S3StoredFile(StoredFile):
     def __init__(self, file_id, key):
         _check_file_id(file_id)
         self._key = key
-
-        metadata_info = {'filename': key.get_metadata('x-depot-filename'),
+        filename = key.metadata.get('x-depot-filename')
+        if filename:
+            filename = percent_decode(filename)
+        metadata_info = {'filename': filename,
                          'content_type': key.content_type,
                          'content_length': key.size,
                          'last_modified': None}
@@ -130,8 +132,13 @@ class S3Storage(FileStorage):
         return S3StoredFile(fileid, key)
 
     def __save_file(self, key, content, filename, content_type=None):
+        if filename:
+            filename = percent_encode(filename, safe='!#$&+-.^_`|~', encoding='utf-8')
         key.set_metadata('content-type', content_type)
-        key.set_metadata('x-depot-filename', filename)
+        key.set_metadata(
+            'x-depot-filename',
+            filename
+        )
         key.set_metadata('x-depot-modified', utils.timestamp())
         key.set_metadata(
             'Content-Disposition',
diff --git a/depot/io/boto3.py b/depot/io/boto3.py
index b94b5ed..b558e8f 100644
--- a/depot/io/boto3.py
+++ b/depot/io/boto3.py
@@ -10,7 +10,7 @@ from datetime import datetime
 import uuid
 import boto3
 from botocore.exceptions import ClientError
-from depot._compat import unicode_text
+from depot._compat import unicode_text, percent_encode, percent_decode
 from depot.utils import make_content_disposition
 
 from .interfaces import FileStorage, StoredFile
@@ -26,8 +26,11 @@ class S3StoredFile(StoredFile):
         self._closed = False
         self._key = key
         self._body = None
+        filename = key.metadata.get('x-depot-filename')
+        if filename:
+            filename = percent_decode(filename)
 
-        metadata_info = {'filename': key.metadata.get('x-depot-filename'),
+        metadata_info = {'filename': filename,
                          'content_type': key.content_type,
                          'content_length': key.content_length,
                          'last_modified': None}
@@ -115,17 +118,19 @@ class S3Storage(FileStorage):
           AWS S3 Storage
         * ``policy`` which can be used to specify a canned ACL policy of either
           ``private`` or ``public-read``.
+        * ``storage_class`` which can be used to specify a class of storage.
         * ``prefix`` parameter can be used to store all files under 
           specified prefix. Use a prefix like **dirname/** (*see trailing slash*)
           to store in a subdirectory.
     """
 
     def __init__(self, access_key_id, secret_access_key, bucket=None, region_name=None,
-                 policy=None, endpoint_url=None, prefix=''):
+                 policy=None, storage_class=None, endpoint_url=None, prefix=''):
         policy = policy or CANNED_ACL_PUBLIC_READ
         assert policy in [CANNED_ACL_PUBLIC_READ, CANNED_ACL_PRIVATE], (
             "Key policy must be %s or %s" % (CANNED_ACL_PUBLIC_READ, CANNED_ACL_PRIVATE))
         self._policy = policy or CANNED_ACL_PUBLIC_READ
+        self._storage_class = storage_class or 'STANDARD'
 
         if bucket is None:
             bucket = 'filedepot-%s' % (access_key_id.lower(),)
@@ -159,8 +164,11 @@ class S3Storage(FileStorage):
         return S3StoredFile(fileid, key)
 
     def __save_file(self, key, content, filename, content_type=None):
+        if filename:
+            filename = percent_encode(filename, safe='!#$&+-.^_`|~', encoding='utf-8')
         attrs = {
             'ACL': self._policy,
+            'StorageClass': self._storage_class,
             'Metadata': {
                 'x-depot-filename': filename,
                 'x-depot-modified': utils.timestamp()
diff --git a/depot/middleware.py b/depot/middleware.py
index 0c56db0..7975e94 100644
--- a/depot/middleware.py
+++ b/depot/middleware.py
@@ -137,6 +137,9 @@ class DepotMiddleware(object):
     """
     def __init__(self, app, mountpoint='/depot', cache_max_age=3600*24*7,
                  replace_wsgi_filewrapper=False):
+        if not mountpoint.startswith('/'):
+            raise ValueError('DepotMiddleware mountpoint must be an absolute path')
+
         self.app = app
         self.mountpoint = mountpoint
         self.cache_max_age = cache_max_age
@@ -177,7 +180,9 @@ class DepotMiddleware(object):
         req_method = environ['REQUEST_METHOD']
         full_path = environ['PATH_INFO']
 
-        if req_method not in ('GET', 'HEAD') or not full_path.startswith(self.mountpoint):
+        mtpointlen = len(self.mountpoint)
+        if not (req_method in ('GET', 'HEAD') and full_path.startswith(self.mountpoint) and 
+                full_path[mtpointlen:mtpointlen+1] in ('', '/')):
             return self.app(environ, start_response)
 
         path = full_path.split('/')
diff --git a/depot/utils.py b/depot/utils.py
index 0b2d452..d64847e 100644
--- a/depot/utils.py
+++ b/depot/utils.py
@@ -1,8 +1,17 @@
-from unidecode import unidecode
-from ._compat import percent_encode
+from ._compat import percent_encode, unicode_text
+
+try:
+    from anyascii import anyascii
+except ImportError:
+    # Python2 doesn't support anyascii
+    import unicodedata
+    def anyascii(text):
+        if not isinstance(text, unicode_text):
+            text = text.decode("utf-8")
+        return unicodedata.normalize("NFKD", text).encode("ascii", "ignore") or "unknown"
 
 
 def make_content_disposition(disposition, fname):
     rfc6266_part = "filename*=utf-8''%s" % (percent_encode(fname, safe='!#$&+-.^_`|~', encoding='utf-8'), )
-    ascii_part = 'filename="%s"' % (unidecode(fname), )
+    ascii_part = 'filename="%s"' % (anyascii(fname), )
     return ';'.join((disposition, ascii_part, rfc6266_part))
diff --git a/docs/conf.py b/docs/conf.py
index 157eefc..5ab9e34 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -44,16 +44,16 @@ master_doc = 'index'
 
 # General information about the project.
 project = u'File Depot'
-copyright = u'2015, Alessandro Molina'
+copyright = u'2019, Alessandro Molina'
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = '0.3.0'
+version = '0.7.1'
 # The full version, including alpha/beta/rc tags.
-release = '0.3.0'
+release = '0.7.1'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
diff --git a/setup.py b/setup.py
index 56e69fb..99a8cff 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,7 @@
 from setuptools import setup, find_packages
 import os, sys
 
-version = '0.5.2'
+version = '0.8.0'
 
 here = os.path.abspath(os.path.dirname(__file__))
 try:
@@ -22,8 +22,10 @@ if py_version != (3, 2):
 else:
     TEST_DEPENDENCIES += ['coverage < 4.0']
 
+INSTALL_DEPENDENCIES = []
+if py_version >= (3, 0):
+    INSTALL_DEPENDENCIES += ["anyascii"]
 
-INSTALL_DEPENDENCIES = ['unidecode']
 if py_version == (2, 6):
     INSTALL_DEPENDENCIES += ['importlib']
     TEST_DEPENDENCIES += ['ordereddict', 'pillow < 4.0.0', 'WebTest < 2.0.24', 'sqlalchemy < 1.2']
@@ -31,6 +33,7 @@ else:
     TEST_DEPENDENCIES += ['pillow', 'WebTest', 'sqlalchemy']
 
 
+
 setup(name='filedepot',
       version=version,
       description="Toolkit for storing files and attachments in web applications",
@@ -44,7 +47,7 @@ setup(name='filedepot',
         ],
       keywords='storage files s3 gridfs mongodb aws sqlalchemy wsgi',
       author='Alessandro Molina',
-      author_email='alessandro.molina@axant.it',
+      author_email='amol@turbogears.org',
       url='https://github.com/amol-/depot',
       license='MIT',
       packages=find_packages(exclude=['ez_setup', 'tests']),
diff --git a/tests/test_awss3_storage.py b/tests/test_awss3_storage.py
index a2feaf6..1e58de8 100644
--- a/tests/test_awss3_storage.py
+++ b/tests/test_awss3_storage.py
@@ -17,13 +17,12 @@ FILE_CONTENT = b'HELLO WORLD'
 @flaky
 class TestS3FileStorage(object):
     @classmethod
-    def setupClass(self):
+    def setupClass(cls):
         # Travis runs multiple tests concurrently on fake machines that might
         # collide on pid and hostid, so use an uuid1 which should be fairly random
         # thanks to clock_seq
-        self.run_id = '%s-%s' % (uuid.uuid1().hex, os.getpid())
+        cls.run_id = '%s-%s' % (uuid.uuid1().hex, os.getpid())
 
-    def setup(self):
         try:
             global S3Storage
             from depot.io.awss3 import S3Storage
@@ -36,15 +35,32 @@ class TestS3FileStorage(object):
         if access_key_id is None or secret_access_key is None:
             raise SkipTest('Amazon S3 credentials not available')
 
-        self.default_bucket_name = 'filedepot-%s' % (access_key_id.lower(), )
-        self.cred = (access_key_id, secret_access_key)
+        cls.default_bucket_name = 'filedepot-%s' % (access_key_id.lower(), )
+        cls.cred = (access_key_id, secret_access_key)
 
-        bucket_name = 'filedepot-testfs-%s' % self.run_id
-        self.fs = S3Storage(access_key_id, secret_access_key, bucket_name)
-        while not self.fs._conn.lookup(bucket_name):
+        bucket_name = 'filedepot-testfs-%s' % cls.run_id
+        cls.fs = S3Storage(access_key_id, secret_access_key, bucket_name)
+        while not cls.fs._conn.lookup(bucket_name):
             # Wait for bucket to exist, to avoid flaky tests...
             time.sleep(0.5)
 
+    @classmethod
+    def teardownClass(cls):
+        if not cls.fs._conn.lookup(cls.fs._bucket_driver.bucket.name):
+            return
+        
+        keys = [key.name for key in cls.fs._bucket_driver.bucket]
+        if keys:
+            cls.fs._bucket_driver.bucket.delete_keys(keys)
+
+        try:
+            cls.fs._conn.delete_bucket(cls.fs._bucket_driver.bucket.name)
+            while cls.fs._conn.lookup(cls.fs._bucket_driver.bucket.name):
+                # Wait for bucket to be deleted, to avoid flaky tests...
+                time.sleep(0.5)
+        except:
+            pass
+
     def test_fileoutside_depot(self):
         fid = str(uuid.uuid1())
         key = self.fs._bucket_driver.new_key(fid)
@@ -88,19 +104,3 @@ class TestS3FileStorage(object):
         test_file = self.fs.get(file_id)
         response = requests.get(test_file.public_url)
         assert response.headers['Content-Disposition'] == "inline;filename=\"test.txt\";filename*=utf-8''test.txt"
-
-    def teardown(self):
-        if not self.fs._conn.lookup(self.fs._bucket_driver.bucket.name):
-            return
-        
-        keys = [key.name for key in self.fs._bucket_driver.bucket]
-        if keys:
-            self.fs._bucket_driver.bucket.delete_keys(keys)
-
-        try:
-            self.fs._conn.delete_bucket(self.fs._bucket_driver.bucket.name)
-            while self.fs._conn.lookup(self.fs._bucket_driver.bucket.name):
-                # Wait for bucket to be deleted, to avoid flaky tests...
-                time.sleep(0.5)
-        except:
-            pass
diff --git a/tests/test_boto3_storage.py b/tests/test_boto3_storage.py
index b5ff2bd..adb83f4 100644
--- a/tests/test_boto3_storage.py
+++ b/tests/test_boto3_storage.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 import os
 import uuid
 import mock
@@ -21,7 +22,6 @@ class TestS3FileStorage(object):
         # thanks to clock_seq
         self.run_id = '%s-%s' % (uuid.uuid1().hex, os.getpid())
 
-    def setup(self):
         try:
             global S3Storage
             from depot.io.boto3 import S3Storage
@@ -33,12 +33,31 @@ class TestS3FileStorage(object):
         access_key_id = env.get('AWS_ACCESS_KEY_ID')
         secret_access_key = env.get('AWS_SECRET_ACCESS_KEY')
         if access_key_id is None or secret_access_key is None:
-            raise SkipTest('Amazon S3 credentials not available')
+            raise SkipTest('Amazon S3 credentials not available')
 
         self.default_bucket_name = 'filedepot-%s' % (access_key_id.lower(), )
         self.cred = (access_key_id, secret_access_key)
-        self.fs = S3Storage(access_key_id, secret_access_key,
-                            'filedepot-testfs-%s' % self.run_id)
+        self.bucket = 'filedepot-testfs-%s' % self.run_id
+        self.fs = S3Storage(*self.cred, bucket=self.bucket)
+
+    @classmethod
+    def teardownClass(self):
+        buckets = set(
+            b['Name'] for b in self.fs._bucket_driver.s3.meta.client.list_buckets()['Buckets']
+        )
+        if self.fs._bucket_driver.bucket.name not in buckets:
+            # Bucket wasn't created, probably due to monkey patching, just skip.
+            return
+
+        for obj in self.fs._bucket_driver.bucket.objects.all():
+            obj.delete()
+
+        try:
+            self.fs._bucket_driver.bucket.delete()
+        except:
+            pass
+        else:
+            self.fs._bucket_driver.bucket.wait_until_not_exists()
 
     def test_fileoutside_depot(self):
         fid = str(uuid.uuid1())
@@ -132,20 +151,20 @@ class TestS3FileStorage(object):
         response = requests.get(test_file.public_url)
         assert response.headers['Content-Disposition'] == "inline;filename=\"test.txt\";filename*=utf-8''test.txt"
 
-    def teardown(self):
-        buckets = set(
-            b['Name'] for b in self.fs._bucket_driver.s3.meta.client.list_buckets()['Buckets']
-        )
-        if self.fs._bucket_driver.bucket.name not in buckets:
-            # Bucket wasn't created, probably due to monkey patching, just skip.
-            return
+    def test_storage_class(self):
+        fs_ia = S3Storage(*self.cred, bucket=self.bucket, storage_class='STANDARD_IA')
+        fid = fs_ia.create(FILE_CONTENT)
 
-        for obj in self.fs._bucket_driver.bucket.objects.all():
-            obj.delete()
+        key = self.fs._bucket_driver.get_key(fid)
+        assert key.storage_class == 'STANDARD_IA'
 
-        try:
-            self.fs._bucket_driver.bucket.delete()
-        except:
-            pass
-        else:
-            self.fs._bucket_driver.bucket.wait_until_not_exists()
\ No newline at end of file
+    def test_storage_non_ascii_filenames(self):
+        filename = u'些公.pdf'
+        storage = S3Storage(*self.cred, bucket=self.bucket, storage_class='STANDARD_IA')
+        new_file_id = storage.create(
+            FILE_CONTENT,
+            filename=filename,
+            content_type='application/pdf'
+        )
+
+        assert new_file_id is not None
diff --git a/tests/test_fields_sqlalchemy.py b/tests/test_fields_sqlalchemy.py
index 634dd8c..e9a0169 100644
--- a/tests/test_fields_sqlalchemy.py
+++ b/tests/test_fields_sqlalchemy.py
@@ -5,7 +5,8 @@ import tempfile, os, cgi, base64
 from PIL import Image
 from nose.tools import raises
 from sqlalchemy.exc import StatementError
-from sqlalchemy.schema import Column
+from sqlalchemy.orm import relationship
+from sqlalchemy.schema import Column, ForeignKey
 from sqlalchemy.types import Unicode, Integer, Text
 from .base_sqla import setup_database, clear_database, DeclarativeBase, DBSession
 from depot.fields.sqlalchemy import UploadedFileField
@@ -43,12 +44,22 @@ class Document(DeclarativeBase):
     second_photo = Column(UploadedFileField(filters=(WithThumbnailFilter((12, 12), 'PNG'),)))
     targeted_content = Column(UploadedFileField(upload_storage='another_alias'))
     type = Column(Text, nullable=True)
+    directory_id = Column(Integer, ForeignKey('dir.uid'))
 
     __mapper_args__ = {
         'polymorphic_on': 'type'
     }
 
 
+class Directory(DeclarativeBase):
+    __tablename__ = 'dir'
+
+    uid = Column(Integer, autoincrement=True, primary_key=True)
+    name = Column(Unicode(16), unique=True)
+
+    documents = relationship(Document, cascade="all, delete-orphan")
+
+
 class Confidential(Document):
     __mapper_args__ = {'polymorphic_identity': 'confidential'}
 
@@ -220,6 +231,79 @@ class TestSQLAAttachments(SQLATestCase):
         assert d.targeted_content.file.filename == os.path.basename(self.fake_file.name)
         assert d.targeted_content.depot_name == 'another'
 
+    def test_relationship(self):
+        directory = Directory(name='Parent')
+        DBSession.add(directory)
+        directory.documents.append(Document(name=u_('Foo'), content=open(self.fake_file.name, 'rb')))
+        self._session_flush()
+        DBSession.commit()
+        
+        d = DBSession.query(Directory).filter_by(name=u_('Parent')).first()
+        doc = d.documents[0]
+        old_file = doc.content.path
+        assert self.file_exists(old_file)
+
+        d.documents.remove(doc)
+        self._session_flush()
+        DBSession.commit()
+
+        assert not self.file_exists(old_file)
+
+    def test_relationship_rollback(self):
+        directory = Directory(name='Parent')
+        DBSession.add(directory)
+        directory.documents.append(Document(name=u_('Foo'), content=open(self.fake_file.name, 'rb')))
+        self._session_flush()
+        DBSession.commit()
+        
+        d = DBSession.query(Directory).filter_by(name=u_('Parent')).first()
+        doc = d.documents[0]
+        old_file = doc.content.path
+        assert self.file_exists(old_file)
+
+        d.documents.remove(doc)
+        self._session_flush()
+        DBSession.rollback()
+
+        assert self.file_exists(old_file)
+
+    def test_relationship_cascade_delete(self):
+        directory = Directory(name='Parent')
+        DBSession.add(directory)
+        directory.documents.append(Document(name=u_('Foo'), content=open(self.fake_file.name, 'rb')))
+        self._session_flush()
+        DBSession.commit()
+        
+        d = DBSession.query(Directory).filter_by(name=u_('Parent')).first()
+        doc = d.documents[0]
+        old_file = doc.content.path
+        assert self.file_exists(old_file)
+
+        DBSession.delete(d)
+        self._session_flush()
+        DBSession.commit()
+
+        assert not self.file_exists(old_file)
+
+    def test_relationship_cascade_delete_rollback(self):
+        directory = Directory(name='Parent')
+        DBSession.add(directory)
+        directory.documents.append(Document(name=u_('Foo'), content=open(self.fake_file.name, 'rb')))
+        self._session_flush()
+        DBSession.commit()
+        
+        d = DBSession.query(Directory).filter_by(name=u_('Parent')).first()
+        doc = d.documents[0]
+        old_file = doc.content.path
+        assert self.file_exists(old_file)
+
+        DBSession.delete(d)
+        self._session_flush()
+        DBSession.rollback()
+
+        assert self.file_exists(old_file)
+
+
 
 class TestSQLAAttachmentsNoFlush(TestSQLAAttachments):
     FLUSH_SESSION = False
diff --git a/tests/test_storage_interface.py b/tests/test_storage_interface.py
index 9bfbcc9..3a2ac61 100644
--- a/tests/test_storage_interface.py
+++ b/tests/test_storage_interface.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 import uuid
 
 from flaky import flaky
@@ -94,6 +95,22 @@ class BaseStorageTestFixture(object):
         assert temp.name.endswith(f.filename)
         assert f.read() == FILE_CONTENT
 
+    def test_filewithnonasciiname(self):
+        filename = u'些公.pdf'
+        temp = NamedTemporaryFile()
+        temp.write(FILE_CONTENT)
+        temp.seek(0)
+
+        file_id = self.fs.create(
+            temp,
+            filename=filename,
+            content_type='application/pdf'
+        )
+
+        f = self.fs.get(file_id)
+        assert f.content_type == 'application/pdf'
+        assert f.read() == FILE_CONTENT
+
     def test_another_storage(self):
         file_id = self.fs.create(FILE_CONTENT, filename='file.txt', content_type='text/plain')
         f = self.fs.get(file_id)
diff --git a/tests/test_wsgi_middleware.py b/tests/test_wsgi_middleware.py
index 57c84ce..15cc016 100644
--- a/tests/test_wsgi_middleware.py
+++ b/tests/test_wsgi_middleware.py
@@ -7,7 +7,7 @@ import uuid
 from depot.manager import DepotManager
 from tg import expose, TGController, AppConfig
 from webtest import TestApp
-from depot._compat import u_, unquote
+from depot._compat import u_, unquote, PY2
 
 
 FILE_CONTENT = b'HELLO WORLD'
@@ -25,6 +25,15 @@ class RootController(TGController):
         self.UPLOADED_FILES = []
         return dict(files=self.UPLOADED_FILES)
 
+    @expose('json')
+    def depotskipped(self):
+        return dict(ok=True)
+
+    @expose('json')
+    def depot(self):
+        # this should never be called
+        return dict(ok=False)
+
     @expose('json')
     def create_file(self, lang='en'):
         fname = {'en': 'hello.txt',
@@ -38,12 +47,19 @@ class RootController(TGController):
                     last=self.UPLOADED_FILES[-1])
 
 
-class TestWSGIMiddleware(object):
+class BaseWSGITests(object):
     @classmethod
     def setup_class(cls):
         config = AppConfig(minimal=True, root_controller=RootController())
         cls.wsgi_app = config.make_wsgi_app()
 
+    def make_app(self, **options):
+        wsgi_app = DepotManager.make_middleware(self.wsgi_app, **options)
+        return TestApp(wsgi_app)
+
+
+
+class TestWSGIMiddleware(BaseWSGITests):
     def setup(self):
         DepotManager._clear()
         DepotManager.configure('default', {'depot.storage_path': './lfs'})
@@ -51,9 +67,13 @@ class TestWSGIMiddleware(object):
     def teardown(cls):
         shutil.rmtree('./lfs', ignore_errors=True)
 
-    def make_app(self, **options):
-        wsgi_app = DepotManager.make_middleware(self.wsgi_app, **options)
-        return TestApp(wsgi_app)
+    def test_invalid_mountpoint(self):
+        try:
+            DepotManager.make_middleware(self.wsgi_app, mountpoint='hello')
+        except ValueError as err:
+            assert 'mountpoint must be an absolute path' in str(err)
+        else:
+            assert False, 'Should have raised ValueError'
 
     def test_serving_files(self):
         app = self.make_app()
@@ -111,6 +131,18 @@ class TestWSGIMiddleware(object):
         files = app.get('/').json
         assert new_file['last'] in files['files'], (new_file, files)
 
+    def test_forwards_to_app_begins_with_endpoint(self):
+        app = self.make_app()
+
+        resp = app.get('/depotskipped').json
+        assert resp['ok'] == True
+
+    def test_404_on_nofile(self):
+        app = self.make_app()
+
+        missing = app.get('/depot', status=404)
+        assert 'Not Found' in missing.status
+
     def test_404_on_missing_file(self):
         app = self.make_app()
         missing = app.get('/depot/default/hello', status=404)
@@ -131,7 +163,39 @@ class TestWSGIMiddleware(object):
                                   status=400)
         assert 'Bad Request' in unmodified_file.status, unmodified_file
 
-    def test_public_url_gets_redirect(self):
+    def test_serving_files_with_wsgifilewrapper(self):
+        app = self.make_app(replace_wsgi_filewrapper=True)
+        new_file = app.post('/create_file').json
+
+        uploaded_file = app.get(DepotManager.url_for('%(uploaded_to)s/%(last)s' % new_file))
+        assert uploaded_file.body == FILE_CONTENT
+        assert uploaded_file.request.environ['wsgi.file_wrapper'] is _FileIter
+
+    def test_serving_files_content_disposition(self):
+        app = self.make_app()
+        new_file = app.post('/create_file', params={'lang': 'ru'}).json
+
+        uploaded_file = app.get(DepotManager.url_for('%(uploaded_to)s/%(last)s' % new_file))
+        content_disposition = uploaded_file.headers['Content-Disposition']
+
+        if PY2:
+            assert content_disposition == "inline;filename=\"unknown\";filename*=utf-8''%D0%9A%D1%80%D1%83%D0%BF%D0%BD%D1%8B%D0%B9", content_disposition
+        else:
+            assert content_disposition == "inline;filename=\"Krupnyy\";filename*=utf-8''%D0%9A%D1%80%D1%83%D0%BF%D0%BD%D1%8B%D0%B9", content_disposition
+
+
+        new_file = app.post('/create_file', params={'lang': 'it'}).json
+        uploaded_file = app.get(DepotManager.url_for('%(uploaded_to)s/%(last)s' % new_file))
+        content_disposition = uploaded_file.headers['Content-Disposition']
+        _, asciiname, uniname = content_disposition.split(';')
+        assert asciiname == 'filename="aeiou"', asciiname
+        assert u_(unquote(uniname[17:])) == u_('àèìòù'), unquote(uniname[17:])
+
+
+class TestS3TestWSGIMiddleware(BaseWSGITests):
+    def setup(self):
+        DepotManager._clear()
+
         try:
             global S3Storage
             from depot.io.awss3 import S3Storage
@@ -156,32 +220,29 @@ class TestWSGIMiddleware(object):
                                          'depot.bucket': bucket_name})
         DepotManager.set_default('awss3')
 
+    def teardown(self):
+        store = DepotManager.get('awss3')
+        if not store._conn.lookup(store._bucket_driver.bucket.name):
+            return
+        
+        keys = [key.name for key in store._bucket_driver.bucket]
+        if keys:
+            store._bucket_driver.bucket.delete_keys(keys)
+
+        try:
+            store._conn.delete_bucket(store._bucket_driver.bucket.name)
+            while store._conn.lookup(store._bucket_driver.bucket.name):
+                # Wait for bucket to be deleted, to avoid flaky tests...
+                time.sleep(0.5)
+        except:
+            pass
+    
+    def test_public_url_gets_redirect(self):
         app = self.make_app()
         new_file = app.post('/create_file').json
 
         file_path = DepotManager.url_for('%(uploaded_to)s/%(last)s' % new_file)
-        uploaded_file = app.get(file_path)
-        assert uploaded_file.body == FILE_CONTENT, uploaded_file
-
-    def test_serving_files_with_wsgifilewrapper(self):
-        app = self.make_app(replace_wsgi_filewrapper=True)
-        new_file = app.post('/create_file').json
-
-        uploaded_file = app.get(DepotManager.url_for('%(uploaded_to)s/%(last)s' % new_file))
-        assert uploaded_file.body == FILE_CONTENT
-        assert uploaded_file.request.environ['wsgi.file_wrapper'] is _FileIter
-
-    def test_serving_files_content_disposition(self):
-        app = self.make_app()
-        new_file = app.post('/create_file', params={'lang': 'ru'}).json
-
-        uploaded_file = app.get(DepotManager.url_for('%(uploaded_to)s/%(last)s' % new_file))
-        content_disposition = uploaded_file.headers['Content-Disposition']
-        assert content_disposition == "inline;filename=\"Krupnyi\";filename*=utf-8''%D0%9A%D1%80%D1%83%D0%BF%D0%BD%D1%8B%D0%B9", content_disposition
-
-        new_file = app.post('/create_file', params={'lang': 'it'}).json
-        uploaded_file = app.get(DepotManager.url_for('%(uploaded_to)s/%(last)s' % new_file))
-        content_disposition = uploaded_file.headers['Content-Disposition']
-        _, asciiname, uniname = content_disposition.split(';')
-        assert asciiname == 'filename="aeiou"', asciiname
-        assert u_(unquote(uniname[17:])) == u_('àèìòù'), unquote(uniname[17:])
+        uploaded_file = app.get(file_path, status=301)
+        location = uploaded_file.headers['Location']
+        assert 'https://filedepot-testfs-' in location
+        assert 's3.amazonaws.com' in location
\ No newline at end of file
diff --git a/tox.ini b/tox.ini
index d3453a2..f6e69ef 100644
--- a/tox.ini
+++ b/tox.ini
@@ -12,6 +12,7 @@ envlist =
     py34
     py35
     py36
+    py37
 
 [testenv]
 deps = 
@@ -23,7 +24,7 @@ passenv =
     AWS_SECRET_ACCESS_KEY
 commands =
     pip install -e .[testing]
-    nosetests --with-coverage --cover-package=depot --cover-erase --with-flaky
+    nosetests --with-coverage --cover-package=depot --cover-erase --with-flaky --force-flaky
 
 [testenv:docs]
 changedir = {toxinidir}/docs