"""

  fs.tests.test_s3fs:  testcases for the S3FS module

These tests are skipped by default, since they're very slow, require a
valid AWS account, and cost money.  You'll have to set the '__test__'
attribute to True on the TestS3FS class to get them running.

"""

import unittest

from fs.tests import FSTestCases, ThreadingTestCases
from fs.path import *

try:
    from fs import s3fs
except ImportError:
    raise unittest.SkipTest("s3fs wasn't importable")


class TestS3FS(unittest.TestCase, FSTestCases, ThreadingTestCases):

    #  Disable the tests by default
    __test__ = False

    bucket = "test-s3fs.rfk.id.au"

    def setUp(self):
        self.fs = s3fs.S3FS(self.bucket)
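        #  Empty the bucket so each test starts from a clean slate.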
        for k in self.fs._s3bukt.list():
            self.fs._s3bukt.delete_key(k)

    def tearDown(self):
        self.fs.close()

    def test_concurrent_copydir(self):
        #  makedir() on S3FS is currently not atomic
        pass

    def test_makedir_winner(self):
        #  makedir() on S3FS is currently not atomic
        pass

    def test_multiple_overwrite(self):
        # S3's eventual-consistency seems to be breaking this test
        pass


class TestS3FS_prefix(TestS3FS):

    def setUp(self):
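        #  Mount at a prefix inside the bucket; keys the tests create will
        #  live under "/unittest/files" rather than at the bucket root.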
        self.fs = s3fs.S3FS(self.bucket,"/unittest/files")
        for k in self.fs._s3bukt.list():
            self.fs._s3bukt.delete_key(k)

    def tearDown(self):
        self.fs.close()