author    Jeff Forcier <jeff@bitprophet.org>    2017-10-23 12:33:45 -0700
committer Jeff Forcier <jeff@bitprophet.org>    2017-10-23 12:33:45 -0700
commit    67329edd7945a603a571c60d4fadc9e861b9cec9 (patch)
tree      968c4d41de5c911181248570f93c9421dce3872a /tests/test_sftp_big.py
parent    79bfe3e94ff1e0ae517ce3e912527ff8b51f4eab (diff)
download  paramiko-67329edd7945a603a571c60d4fadc9e861b9cec9.tar.gz
Get sftp-big tests apparently passing.
Very slowly, though; we will almost certainly want to retain module-level fixtures for that stuff.
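A module-scoped fixture along those lines is not part of this commit; as a rough sketch (assuming pytest is in use, and that make_loopback_sftp() returns the client plus a second handle such as the underlying transport), it might look like:

    import pytest

    from tests.test_sftp import make_loopback_sftp


    @pytest.fixture(scope='module')
    def sftp():
        # Hypothetical: build the loopback SFTP client once per module
        # instead of once per test, to avoid the slowdown noted above.
        client, _ = make_loopback_sftp()  # second value assumed to be the transport
        yield client
        client.close()
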
Diffstat (limited to 'tests/test_sftp_big.py')
-rw-r--r--    tests/test_sftp_big.py    109
1 file changed, 46 insertions, 63 deletions
diff --git a/tests/test_sftp_big.py b/tests/test_sftp_big.py
index 580ba64e..ef12b05c 100644
--- a/tests/test_sftp_big.py
+++ b/tests/test_sftp_big.py
@@ -32,7 +32,7 @@ import unittest
from paramiko.common import o660
-from .test_sftp import get_sftp
+from .test_sftp import make_loopback_sftp
FOLDER = os.environ.get('TEST_FOLDER', 'temp-testing000')
@@ -42,43 +42,42 @@ class BigSFTPTest (unittest.TestCase):
def setUp(self):
global FOLDER
- sftp = get_sftp()
+ self.sftp, _ = make_loopback_sftp()
+ # TODO: same TODOs as in test_sftp.py re: not doing this awful crap
for i in range(1000):
FOLDER = FOLDER[:-3] + '%03d' % i
try:
- sftp.mkdir(FOLDER)
+ self.sftp.mkdir(FOLDER)
break
except (IOError, OSError):
pass
def tearDown(self):
- sftp = get_sftp()
- sftp.rmdir(FOLDER)
+ self.sftp.rmdir(FOLDER)
def test_1_lots_of_files(self):
"""
create a bunch of files over the same session.
"""
- sftp = get_sftp()
numfiles = 100
try:
for i in range(numfiles):
- with sftp.open('%s/file%d.txt' % (FOLDER, i), 'w', 1) as f:
+ with self.sftp.open('%s/file%d.txt' % (FOLDER, i), 'w', 1) as f:
f.write('this is file #%d.\n' % i)
- sftp.chmod('%s/file%d.txt' % (FOLDER, i), o660)
+ self.sftp.chmod('%s/file%d.txt' % (FOLDER, i), o660)
# now make sure every file is there, by creating a list of filenames
# and reading them in random order.
numlist = list(range(numfiles))
while len(numlist) > 0:
r = numlist[random.randint(0, len(numlist) - 1)]
- with sftp.open('%s/file%d.txt' % (FOLDER, r)) as f:
+ with self.sftp.open('%s/file%d.txt' % (FOLDER, r)) as f:
self.assertEqual(f.readline(), 'this is file #%d.\n' % r)
numlist.remove(r)
finally:
for i in range(numfiles):
try:
- sftp.remove('%s/file%d.txt' % (FOLDER, i))
+ self.sftp.remove('%s/file%d.txt' % (FOLDER, i))
except:
pass
@@ -86,23 +85,22 @@ class BigSFTPTest (unittest.TestCase):
"""
write a 1MB file with no buffering.
"""
- sftp = get_sftp()
kblob = (1024 * b'x')
start = time.time()
try:
- with sftp.open('%s/hongry.txt' % FOLDER, 'w') as f:
+ with self.sftp.open('%s/hongry.txt' % FOLDER, 'w') as f:
for n in range(1024):
f.write(kblob)
if n % 128 == 0:
sys.stderr.write('.')
sys.stderr.write(' ')
- self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
+ self.assertEqual(self.sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
end = time.time()
sys.stderr.write('%ds ' % round(end - start))
start = time.time()
- with sftp.open('%s/hongry.txt' % FOLDER, 'r') as f:
+ with self.sftp.open('%s/hongry.txt' % FOLDER, 'r') as f:
for n in range(1024):
data = f.read(1024)
self.assertEqual(data, kblob)
@@ -110,17 +108,16 @@ class BigSFTPTest (unittest.TestCase):
end = time.time()
sys.stderr.write('%ds ' % round(end - start))
finally:
- sftp.remove('%s/hongry.txt' % FOLDER)
+ self.sftp.remove('%s/hongry.txt' % FOLDER)
def test_3_big_file_pipelined(self):
"""
write a 1MB file, with no linefeeds, using pipelining.
"""
- sftp = get_sftp()
kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
start = time.time()
try:
- with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
+ with self.sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
@@ -128,12 +125,12 @@ class BigSFTPTest (unittest.TestCase):
sys.stderr.write('.')
sys.stderr.write(' ')
- self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
+ self.assertEqual(self.sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
end = time.time()
sys.stderr.write('%ds ' % round(end - start))
start = time.time()
- with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
+ with self.sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
file_size = f.stat().st_size
f.prefetch(file_size)
@@ -153,13 +150,12 @@ class BigSFTPTest (unittest.TestCase):
end = time.time()
sys.stderr.write('%ds ' % round(end - start))
finally:
- sftp.remove('%s/hongry.txt' % FOLDER)
+ self.sftp.remove('%s/hongry.txt' % FOLDER)
def test_4_prefetch_seek(self):
- sftp = get_sftp()
kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
try:
- with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
+ with self.sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
@@ -167,13 +163,13 @@ class BigSFTPTest (unittest.TestCase):
sys.stderr.write('.')
sys.stderr.write(' ')
- self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
+ self.assertEqual(self.sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
start = time.time()
k2blob = kblob + kblob
chunk = 793
for i in range(10):
- with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
+ with self.sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
file_size = f.stat().st_size
f.prefetch(file_size)
base_offset = (512 * 1024) + 17 * random.randint(1000, 2000)
@@ -190,13 +186,12 @@ class BigSFTPTest (unittest.TestCase):
end = time.time()
sys.stderr.write('%ds ' % round(end - start))
finally:
- sftp.remove('%s/hongry.txt' % FOLDER)
+ self.sftp.remove('%s/hongry.txt' % FOLDER)
def test_5_readv_seek(self):
- sftp = get_sftp()
kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
try:
- with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
+ with self.sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
@@ -204,13 +199,13 @@ class BigSFTPTest (unittest.TestCase):
sys.stderr.write('.')
sys.stderr.write(' ')
- self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
+ self.assertEqual(self.sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
start = time.time()
k2blob = kblob + kblob
chunk = 793
for i in range(10):
- with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
+ with self.sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
base_offset = (512 * 1024) + 17 * random.randint(1000, 2000)
# make a bunch of offsets and put them in random order
offsets = [base_offset + j * chunk for j in range(100)]
@@ -227,17 +222,16 @@ class BigSFTPTest (unittest.TestCase):
end = time.time()
sys.stderr.write('%ds ' % round(end - start))
finally:
- sftp.remove('%s/hongry.txt' % FOLDER)
+ self.sftp.remove('%s/hongry.txt' % FOLDER)
def test_6_lots_of_prefetching(self):
"""
prefetch a 1MB file a bunch of times, discarding the file object
without using it, to verify that paramiko doesn't get confused.
"""
- sftp = get_sftp()
kblob = (1024 * b'x')
try:
- with sftp.open('%s/hongry.txt' % FOLDER, 'w') as f:
+ with self.sftp.open('%s/hongry.txt' % FOLDER, 'w') as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
@@ -245,13 +239,13 @@ class BigSFTPTest (unittest.TestCase):
sys.stderr.write('.')
sys.stderr.write(' ')
- self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
+ self.assertEqual(self.sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
for i in range(10):
- with sftp.open('%s/hongry.txt' % FOLDER, 'r') as f:
+ with self.sftp.open('%s/hongry.txt' % FOLDER, 'r') as f:
file_size = f.stat().st_size
f.prefetch(file_size)
- with sftp.open('%s/hongry.txt' % FOLDER, 'r') as f:
+ with self.sftp.open('%s/hongry.txt' % FOLDER, 'r') as f:
file_size = f.stat().st_size
f.prefetch(file_size)
for n in range(1024):
@@ -261,16 +255,15 @@ class BigSFTPTest (unittest.TestCase):
sys.stderr.write('.')
sys.stderr.write(' ')
finally:
- sftp.remove('%s/hongry.txt' % FOLDER)
+ self.sftp.remove('%s/hongry.txt' % FOLDER)
def test_7_prefetch_readv(self):
"""
verify that prefetch and readv don't conflict with each other.
"""
- sftp = get_sftp()
kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
try:
- with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
+ with self.sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
@@ -278,9 +271,9 @@ class BigSFTPTest (unittest.TestCase):
sys.stderr.write('.')
sys.stderr.write(' ')
- self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
+ self.assertEqual(self.sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
- with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
+ with self.sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
file_size = f.stat().st_size
f.prefetch(file_size)
data = f.read(1024)
@@ -298,17 +291,16 @@ class BigSFTPTest (unittest.TestCase):
sys.stderr.write(' ')
finally:
- sftp.remove('%s/hongry.txt' % FOLDER)
+ self.sftp.remove('%s/hongry.txt' % FOLDER)
def test_8_large_readv(self):
"""
verify that a very large readv is broken up correctly and still
returned as a single blob.
"""
- sftp = get_sftp()
kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
try:
- with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
+ with self.sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
@@ -316,9 +308,9 @@ class BigSFTPTest (unittest.TestCase):
sys.stderr.write('.')
sys.stderr.write(' ')
- self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
+ self.assertEqual(self.sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
- with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
+ with self.sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
data = list(f.readv([(23 * 1024, 128 * 1024)]))
self.assertEqual(1, len(data))
data = data[0]
@@ -326,52 +318,43 @@ class BigSFTPTest (unittest.TestCase):
sys.stderr.write(' ')
finally:
- sftp.remove('%s/hongry.txt' % FOLDER)
+ self.sftp.remove('%s/hongry.txt' % FOLDER)
def test_9_big_file_big_buffer(self):
"""
write a 1MB file, with no linefeeds, and a big buffer.
"""
- sftp = get_sftp()
mblob = (1024 * 1024 * 'x')
try:
- with sftp.open('%s/hongry.txt' % FOLDER, 'w', 128 * 1024) as f:
+ with self.sftp.open('%s/hongry.txt' % FOLDER, 'w', 128 * 1024) as f:
f.write(mblob)
- self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
+ self.assertEqual(self.sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
finally:
- sftp.remove('%s/hongry.txt' % FOLDER)
+ self.sftp.remove('%s/hongry.txt' % FOLDER)
def test_A_big_file_renegotiate(self):
"""
write a 1MB file, forcing key renegotiation in the middle.
"""
- sftp = get_sftp()
- t = sftp.sock.get_transport()
+ t = self.sftp.sock.get_transport()
t.packetizer.REKEY_BYTES = 512 * 1024
k32blob = (32 * 1024 * 'x')
try:
- with sftp.open('%s/hongry.txt' % FOLDER, 'w', 128 * 1024) as f:
+ with self.sftp.open('%s/hongry.txt' % FOLDER, 'w', 128 * 1024) as f:
for i in range(32):
f.write(k32blob)
- self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
+ self.assertEqual(self.sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
self.assertNotEqual(t.H, t.session_id)
# try to read it too.
- with sftp.open('%s/hongry.txt' % FOLDER, 'r', 128 * 1024) as f:
+ with self.sftp.open('%s/hongry.txt' % FOLDER, 'r', 128 * 1024) as f:
file_size = f.stat().st_size
f.prefetch(file_size)
total = 0
while total < 1024 * 1024:
total += len(f.read(32 * 1024))
finally:
- sftp.remove('%s/hongry.txt' % FOLDER)
+ self.sftp.remove('%s/hongry.txt' % FOLDER)
t.packetizer.REKEY_BYTES = pow(2, 30)
-
-
-if __name__ == '__main__':
- from tests.test_sftp import SFTPTest
- SFTPTest.init_loopback()
- from unittest import main
- main()
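With the __main__ hook removed, the module is presumably meant to be driven by an external test runner. If direct execution were still wanted, a minimal stand-in (hypothetical, not part of this commit) would simply defer to unittest, since setUp now builds its own client via make_loopback_sftp():

    if __name__ == '__main__':
        import unittest
        unittest.main()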