From 2a33b9e1af6600f32f9eaf1536e6a3fa2feb9ee3 Mon Sep 17 00:00:00 2001 From: "willmcgugan@gmail.com" Date: Sat, 1 Dec 2012 16:13:08 +0000 Subject: Implemented generic validatepath method and optimized normpath git-svn-id: http://pyfilesystem.googlecode.com/svn/trunk@837 67cdc799-7952-0410-af00-57a81ceafa0f --- fs/base.py | 33 ++++++++++++++++++++ fs/errors.py | 9 +++++- fs/osfs/__init__.py | 12 ++------ fs/path.py | 84 +++++++++++++++++++++++++-------------------------- fs/tempfs.py | 24 +++++++++------ fs/tests/__init__.py | 53 ++++++++++++++++++-------------- fs/tests/test_fs.py | 9 ++++++ fs/tests/test_path.py | 9 ++++++ fs/wrapfs/__init__.py | 44 +++++++++++++++------------ 9 files changed, 173 insertions(+), 104 deletions(-) diff --git a/fs/base.py b/fs/base.py index 2091e41..3e07e01 100644 --- a/fs/base.py +++ b/fs/base.py @@ -252,6 +252,7 @@ class FS(object): * *free_space* The free space (in bytes) available on the file system * *total_space* The total space (in bytes) available on the file system * *virtual* True if the filesystem defers to other filesystems + * *invalid_path_chars* A string containing characters that may not be used in paths FS implementations may expose non-generic meta data through a self-named namespace. e.g. ``"somefs.some_meta"`` @@ -282,6 +283,38 @@ class FS(object): return False return True + def validatepath(self, path): + """Validate an fs path, throws an :class:`~fs.errors.InvalidPathError` exception if validation fails. + + A path is invalid if it fails to map to a path on the underlaying filesystem. The default + implementation checks for the presence of any of the characters in the meta value 'invalid_path_chars', + but implementations may have other requirements for paths. + + :param path: an fs path to validatepath + :raises `fs.errors.InvalidPathError`: if `path` does not map on to a valid path on this filesystem + + """ + invalid_chars = self.getmeta('invalid_path_chars', default=None) + if invalid_chars: + re_invalid_chars = getattr(self, '_re_invalid_chars', None) + if re_invalid_chars is None: + self._re_invalid_chars = re_invalid_chars = re.compile('|'.join(re.escape(c) for c in invalid_chars), re.UNICODE) + if re_invalid_chars.search(path): + raise InvalidCharsInPathError(path) + + def isvalidpath(self, path): + """Check if a path is valid on this filesystem + + :param path: an fs path + + """ + try: + self.validatepath(path) + except InvalidPathError: + return False + else: + return True + def getsyspath(self, path, allow_none=False): """Returns the system path (a path recognized by the OS) if one is present. diff --git a/fs/errors.py b/fs/errors.py index eea21d0..076de4b 100644 --- a/fs/errors.py +++ b/fs/errors.py @@ -11,6 +11,7 @@ catch-all exception. 
__all__ = ['FSError', 'CreateFailedError', 'PathError', + 'InvalidPathError', 'InvalidCharsInPathError', 'OperationFailedError', 'UnsupportedError', @@ -83,7 +84,13 @@ class PathError(FSError): super(PathError,self).__init__(**kwds) -class InvalidCharsInPathError(PathError): +class InvalidPathError(PathError): + """Base exception for fs paths that can't be mapped on to the underlaying filesystem.""" + default_message = "Path is invalid on this filesystem %(path)s" + + +class InvalidCharsInPathError(InvalidPathError): + """The path contains characters that are invalid on this filesystem""" default_message = "Path contains invalid characters: %(path)s" diff --git a/fs/osfs/__init__.py b/fs/osfs/__init__.py index 13511eb..b652299 100644 --- a/fs/osfs/__init__.py +++ b/fs/osfs/__init__.py @@ -88,10 +88,9 @@ class OSFS(OSFSXAttrMixin, OSFSWatchMixin, FS): } if platform.system() == 'Windows': - _invalid_path_chars = ''.join(chr(n) for n in xrange(31)) + '\\:*?"<>|' + _meta["invalid_path_chars"] = ''.join(chr(n) for n in xrange(31)) + '\\:*?"<>|' else: - _invalid_path_chars = '\0' - _re_invalid_path_chars = re.compile('|'.join(re.escape(c) for c in _invalid_path_chars), re.UNICODE) + _meta["invalid_path_chars"] = '\0' def __init__(self, root_path, thread_synchronize=_thread_synchronize_default, encoding=None, create=False, dir_mode=0700, use_long_paths=True): """ @@ -153,13 +152,8 @@ class OSFS(OSFSXAttrMixin, OSFSWatchMixin, FS): return p return p.decode(self.encoding, 'replace') - def _validate_path(self, path): - """Raise an error if there are any invalid characters in the path""" - if self._re_invalid_path_chars.search(path): - raise InvalidCharsInPathError(path) - def getsyspath(self, path, allow_none=False): - self._validate_path(path) + self.validatepath(path) path = relpath(normpath(path)).replace(u"/", os.sep) path = os.path.join(self.root_path, path) if not path.startswith(self.root_path): diff --git a/fs/path.py b/fs/path.py index 4c0814f..f56b119 100644 --- a/fs/path.py +++ b/fs/path.py @@ -14,7 +14,9 @@ import re import os -_requires_normalization = re.compile(r'/\.\.|\./|\.|//').search +#_requires_normalization = re.compile(r'/\.\.|\./|\.|//').search +# New improved re that avoids normalizing paths that don't need it - WM +_requires_normalization = re.compile(r'/\.\.|\./|^\.$|\.$|//').search def normpath(path): @@ -75,11 +77,6 @@ else: return path -def normospath(path): - """Normalizes a path with os separators""" - return normpath(ospath(path)) - - def iteratepath(path, numsplits=None): """Iterate over the individual components of a path. 
@@ -374,7 +371,7 @@ def isprefix(path1, path2): bits1.pop() if len(bits1) > len(bits2): return False - for (bit1,bit2) in zip(bits1,bits2): + for (bit1, bit2) in zip(bits1, bits2): if bit1 != bit2: return False return True @@ -434,7 +431,7 @@ class PathMap(object): def __init__(self): self._map = {} - def __getitem__(self,path): + def __getitem__(self, path): """Get the value stored under the given path.""" m = self._map for name in iteratepath(path): @@ -447,7 +444,7 @@ class PathMap(object): except KeyError: raise KeyError(path) - def __contains__(self,path): + def __contains__(self, path): """Check whether the given path has a value stored in the map.""" try: self[path] @@ -456,22 +453,22 @@ class PathMap(object): else: return True - def __setitem__(self,path,value): + def __setitem__(self, path, value): """Set the value stored under the given path.""" m = self._map for name in iteratepath(path): try: m = m[name] except KeyError: - m = m.setdefault(name,{}) + m = m.setdefault(name, {}) m[""] = value - def __delitem__(self,path): + def __delitem__(self, path): """Delete the value stored under the given path.""" - ms = [[self._map,None]] + ms = [[self._map, None]] for name in iteratepath(path): try: - ms.append([ms[-1][0][name],None]) + ms.append([ms[-1][0][name], None]) except KeyError: raise KeyError(path) else: @@ -485,19 +482,19 @@ class PathMap(object): del ms[-1] del ms[-1][0][ms[-1][1]] - def get(self,path,default=None): + def get(self, path, default=None): """Get the value stored under the given path, or the given default.""" try: return self[path] except KeyError: return default - def pop(self,path,default=None): + def pop(self, path, default=None): """Pop the value stored under the given path, or the given default.""" - ms = [[self._map,None]] + ms = [[self._map, None]] for name in iteratepath(path): try: - ms.append([ms[-1][0][name],None]) + ms.append([ms[-1][0][name], None]) except KeyError: return default else: @@ -512,16 +509,16 @@ class PathMap(object): del ms[-1][0][ms[-1][1]] return val - def setdefault(self,path,value): + def setdefault(self, path, value): m = self._map for name in iteratepath(path): try: m = m[name] except KeyError: - m = m.setdefault(name,{}) - return m.setdefault("",value) + m = m.setdefault(name, {}) + return m.setdefault("", value) - def clear(self,root="/"): + def clear(self, root="/"): """Clear all entries beginning with the given root path.""" m = self._map for name in iteratepath(root): @@ -531,7 +528,7 @@ class PathMap(object): return m.clear() - def iterkeys(self,root="/",m=None): + def iterkeys(self, root="/", m=None): """Iterate over all keys beginning with the given root path.""" if m is None: m = self._map @@ -540,12 +537,12 @@ class PathMap(object): m = m[name] except KeyError: return - for (nm,subm) in m.iteritems(): + for (nm, subm) in m.iteritems(): if not nm: yield abspath(root) else: - k = pathcombine(root,nm) - for subk in self.iterkeys(k,subm): + k = pathcombine(root, nm) + for subk in self.iterkeys(k, subm): yield subk def __iter__(self): @@ -554,7 +551,7 @@ class PathMap(object): def keys(self,root="/"): return list(self.iterkeys(root)) - def itervalues(self,root="/",m=None): + def itervalues(self, root="/", m=None): """Iterate over all values whose keys begin with the given root path.""" root = normpath(root) if m is None: @@ -564,18 +561,18 @@ class PathMap(object): m = m[name] except KeyError: return - for (nm,subm) in m.iteritems(): + for (nm, subm) in m.iteritems(): if not nm: yield subm else: - k = pathcombine(root,nm) - 
for subv in self.itervalues(k,subm): + k = pathcombine(root, nm) + for subv in self.itervalues(k, subm): yield subv - def values(self,root="/"): + def values(self, root="/"): return list(self.itervalues(root)) - def iteritems(self,root="/",m=None): + def iteritems(self, root="/", m=None): """Iterate over all (key,value) pairs beginning with the given root.""" root = normpath(root) if m is None: @@ -585,18 +582,18 @@ class PathMap(object): m = m[name] except KeyError: return - for (nm,subm) in m.iteritems(): + for (nm, subm) in m.iteritems(): if not nm: - yield (abspath(normpath(root)),subm) + yield (abspath(normpath(root)), subm) else: - k = pathcombine(root,nm) - for (subk,subv) in self.iteritems(k,subm): - yield (subk,subv) + k = pathcombine(root, nm) + for (subk, subv) in self.iteritems(k, subm): + yield (subk, subv) - def items(self,root="/"): + def items(self, root="/"): return list(self.iteritems(root)) - def iternames(self,root="/"): + def iternames(self, root="/"): """Iterate over all names beneath the given root path. This is basically the equivalent of listdir() for a PathMap - it yields @@ -608,15 +605,17 @@ class PathMap(object): m = m[name] except KeyError: return - for (nm,subm) in m.iteritems(): + for (nm, subm) in m.iteritems(): if nm and subm: yield nm - def names(self,root="/"): + def names(self, root="/"): return list(self.iternames(root)) _wild_chars = frozenset('*?[]!{}') + + def iswildcard(path): """Check if a path ends with a wildcard @@ -627,8 +626,7 @@ def iswildcard(path): """ assert path is not None - base_chars = frozenset(basename(path)) - return bool(base_chars.intersection(_wild_chars)) + return not _wild_chars.isdisjoint(path) if __name__ == "__main__": print recursepath('a/b/c') diff --git a/fs/tempfs.py b/fs/tempfs.py index 4c63e56..8b5ec03 100644 --- a/fs/tempfs.py +++ b/fs/tempfs.py @@ -10,6 +10,7 @@ import os import os.path import time import tempfile +import platform from fs.osfs import OSFS from fs.errors import * @@ -20,7 +21,7 @@ class TempFS(OSFS): """Create a Filesystem in a temporary directory (with tempfile.mkdtemp), and removes it when the TempFS object is cleaned up.""" - + _meta = { 'thread_safe' : True, 'virtual' : False, 'read_only' : False, @@ -32,9 +33,14 @@ class TempFS(OSFS): 'atomic.copy' : True, 'atomic.makedir' : True, 'atomic.rename' : True, - 'atomic.setcontents' : False + 'atomic.setcontents' : False } + if platform.system() == 'Windows': + _meta["invalid_path_chars"] = ''.join(chr(n) for n in xrange(31)) + '\\:*?"<>|' + else: + _meta["invalid_path_chars"] = '\0' + def __init__(self, identifier=None, temp_dir=None, dir_mode=0700, thread_synchronize=_thread_synchronize_default): """Creates a temporary Filesystem @@ -56,21 +62,21 @@ class TempFS(OSFS): def __unicode__(self): return u'' % self._temp_dir - + def __getstate__(self): # If we are picking a TempFS, we want to preserve its contents, # so we *don't* do the clean state = super(TempFS, self).__getstate__() self._cleaned = True return state - - def __setstate__(self, state): - state = super(TempFS, self).__setstate__(state) - self._cleaned = False - #self._temp_dir = tempfile.mkdtemp(self.identifier or "TempFS", dir=self.temp_dir) + + def __setstate__(self, state): + state = super(TempFS, self).__setstate__(state) + self._cleaned = False + #self._temp_dir = tempfile.mkdtemp(self.identifier or "TempFS", dir=self.temp_dir) #super(TempFS, self).__init__(self._temp_dir, # dir_mode=self.dir_mode, - # thread_synchronize=self.thread_synchronize) + # 
thread_synchronize=self.thread_synchronize) def close(self): """Removes the temporary directory. diff --git a/fs/tests/__init__.py b/fs/tests/__init__.py index bbfeff1..73a6795 100644 --- a/fs/tests/__init__.py +++ b/fs/tests/__init__.py @@ -41,7 +41,7 @@ class FSTestCases(object): To apply the tests to your own FS implementation, simply use FSTestCase as a mixin for your own unittest.TestCase subclass and have the setUp method set self.fs to an instance of your FS implementation. - + NB. The Filesystem being tested must have a capacity of at least 3MB. This class is designed as a mixin so that it's not detected by test @@ -52,10 +52,19 @@ class FSTestCases(object): """Check that a file exists within self.fs""" return self.fs.exists(p) + def test_invalid_chars(self): + """Check paths validate ok""" + # Will have to be overriden selectively for custom validepath methods + self.assertEqual(self.fs.validatepath(''), None) + self.assertEqual(self.fs.validatepath('.foo'), None) + self.assertEqual(self.fs.validatepath('foo'), None) + self.assertEqual(self.fs.validatepath('foo/bar'), None) + self.assert_(self.fs.isvalidpath('foo/bar')) + def test_meta(self): """Checks getmeta / hasmeta are functioning""" # getmeta / hasmeta are hard to test, since there is no way to validate - # the implementations response + # the implementation's response meta_names = ["read_only", "network", "unicode_paths"] @@ -70,7 +79,7 @@ class FSTestCases(object): self.assertTrue(self.fs.hasmeta(meta_name)) except NoMetaError: self.assertFalse(self.fs.hasmeta(meta_name)) - + def test_root_dir(self): self.assertTrue(self.fs.isdir("")) @@ -108,7 +117,7 @@ class FSTestCases(object): else: f.close() assert False, "ResourceInvalidError was not raised" - + def test_writefile(self): self.assertRaises(ResourceNotFoundError,self.fs.open,"test1.txt") f = self.fs.open("test1.txt","wb") @@ -152,7 +161,7 @@ class FSTestCases(object): self.assertEquals(self.fs.getcontents("hello", "rb"), b("world")) # ...and a file-like object self.fs.setcontents_async("hello", StringIO(b("to you, good sir!")), chunk_size=2).wait() - self.assertEquals(self.fs.getcontents("hello", "rb"), b("to you, good sir!")) + self.assertEquals(self.fs.getcontents("hello", "rb"), b("to you, good sir!")) def test_isdir_isfile(self): self.assertFalse(self.fs.exists("dir1")) @@ -236,7 +245,7 @@ class FSTestCases(object): for (nm,info) in items: self.assertTrue(isinstance(nm,unicode)) def check_equal(items,target): - names = [nm for (nm,info) in items] + names = [nm for (nm,info) in items] self.assertEqual(sorted(names),sorted(target)) self.fs.setcontents(u"a", b('')) self.fs.setcontents("b", b('')) @@ -318,7 +327,7 @@ class FSTestCases(object): if "c" in files: found_c = True if "a.txt" in files: - break + break assert found_c, "depth search order was wrong: " + str(list(self.fs.walk(search="depth"))) def test_walk_wildcard(self): @@ -730,18 +739,18 @@ class FSTestCases(object): f.truncate() checkcontents("hello",b("12345")) - def test_truncate_to_larger_size(self): - with self.fs.open("hello","wb") as f: + def test_truncate_to_larger_size(self): + with self.fs.open("hello","wb") as f: f.truncate(30) - + self.assertEquals(self.fs.getsize("hello"), 30) - + # Some file systems (FTPFS) don't support both reading and writing if self.fs.getmeta('file.read_and_write', True): with self.fs.open("hello","rb+") as f: f.seek(25) f.write(b("123456")) - + with self.fs.open("hello","rb") as f: f.seek(25) self.assertEquals(f.read(),b("123456")) @@ -788,10 +797,10 @@ class 
FSTestCases(object): else: # Just make sure it doesn't throw an exception fs2 = pickle.loads(pickle.dumps(self.fs)) - + def test_big_file(self): - """Test handling of a big file (1MB)""" + """Test handling of a big file (1MB)""" chunk_size = 1024 * 256 num_chunks = 4 def chunk_stream(): @@ -821,19 +830,19 @@ class FSTestCases(object): finally: f.close() - def test_settimes(self): + def test_settimes(self): def cmp_datetimes(d1, d2): """Test datetime objects are the same to within the timestamp accuracy""" dts1 = time.mktime(d1.timetuple()) dts2 = time.mktime(d2.timetuple()) - return int(dts1) == int(dts2) + return int(dts1) == int(dts2) d1 = datetime.datetime(2010, 6, 20, 11, 0, 9, 987699) - d2 = datetime.datetime(2010, 7, 5, 11, 0, 9, 500000) - self.fs.setcontents('/dates.txt', b('check dates')) + d2 = datetime.datetime(2010, 7, 5, 11, 0, 9, 500000) + self.fs.setcontents('/dates.txt', b('check dates')) # If the implementation supports settimes, check that the times # can be set and then retrieved try: - self.fs.settimes('/dates.txt', d1, d2) + self.fs.settimes('/dates.txt', d1, d2) except UnsupportedError: pass else: @@ -847,7 +856,7 @@ class FSTestCases(object): # May be disabled - see end of file class ThreadingTestCases(object): """Testcases for thread-safety of FS implementations.""" - + # These are either too slow to be worth repeating, # or cannot possibly break cross-thread. _dont_retest = ("test_pickling","test_multiple_overwrite",) @@ -1026,7 +1035,7 @@ class ThreadingTestCases(object): self.fs.copydir("a","copy of a") def copydir_overwrite(): self._yield() - self.fs.copydir("a","copy of a",overwrite=True) + self.fs.copydir("a","copy of a",overwrite=True) # This should error out since we're not overwriting self.assertRaises(DestinationExistsError,self._runThreads,copydir,copydir) # This should run to completion and give a valid state, unless @@ -1059,4 +1068,4 @@ class ThreadingTestCases(object): # Uncomment to temporarily disable threading tests #class ThreadingTestCases(object): -# _dont_retest = () +# _dont_retest = () diff --git a/fs/tests/test_fs.py b/fs/tests/test_fs.py index 6414b2d..dd6c4de 100644 --- a/fs/tests/test_fs.py +++ b/fs/tests/test_fs.py @@ -31,7 +31,16 @@ class TestOSFS(unittest.TestCase,FSTestCases,ThreadingTestCases): return os.path.exists(os.path.join(self.temp_dir, relpath(p))) def test_invalid_chars(self): + self.assertEqual(self.fs.validatepath(''), None) + self.assertEqual(self.fs.validatepath('.foo'), None) + self.assertEqual(self.fs.validatepath('foo'), None) + self.assertEqual(self.fs.validatepath('foo/bar'), None) + self.assert_(self.fs.isvalidpath('foo/bar')) + self.assertRaises(errors.InvalidCharsInPathError, self.fs.open, 'invalid\0file', 'wb') + self.assertFalse(self.fs.isvalidpath('invalid\0file')) + self.assert_(self.fs.isvalidpath('validfile')) + self.assert_(self.fs.isvalidpath('completely_valid/path/foo.bar')) class TestSubFS(unittest.TestCase,FSTestCases,ThreadingTestCases): diff --git a/fs/tests/test_path.py b/fs/tests/test_path.py index 40ec742..b4e062f 100644 --- a/fs/tests/test_path.py +++ b/fs/tests/test_path.py @@ -138,6 +138,15 @@ class TestPathFunctions(unittest.TestCase): for path, test_basename in tests: self.assertEqual(basename(path), test_basename) + def test_iswildcard(self): + self.assert_(iswildcard('*')) + self.assert_(iswildcard('*.jpg')) + self.assert_(iswildcard('foo/*')) + self.assert_(iswildcard('foo/{}')) + self.assertFalse(iswildcard('foo')) + self.assertFalse(iswildcard('img.jpg')) + 
self.assertFalse(iswildcard('foo/bar')) + class Test_PathMap(unittest.TestCase): diff --git a/fs/wrapfs/__init__.py b/fs/wrapfs/__init__.py index df89f5b..e69d412 100644 --- a/fs/wrapfs/__init__.py +++ b/fs/wrapfs/__init__.py @@ -49,7 +49,7 @@ class WrapFS(FS): and/or contents of files in an FS. It could be used to implement e.g. compression or encryption in a relatively painless manner. - The following methods can be overridden to control how files are + The following methods can be overridden to control how files are accessed in the underlying FS object: * _file_wrap(file, mode): called for each file that is opened from @@ -66,10 +66,10 @@ class WrapFS(FS): """ def __init__(self, fs): - super(WrapFS, self).__init__() + super(WrapFS, self).__init__() try: self._lock = fs._lock - except (AttributeError,FSError): + except (AttributeError,FSError): self._lock = self._lock = threading.RLock() self.wrapped_fs = fs @@ -116,7 +116,7 @@ class WrapFS(FS): transparent file compression - in this case files from the wrapped FS cannot be opened in append mode. """ - return (mode,mode) + return (mode, mode) def __unicode__(self): return u"<%s: %s>" % (self.__class__.__name__,self.wrapped_fs,) @@ -128,18 +128,22 @@ class WrapFS(FS): @rewrite_errors def getmeta(self, meta_name, default=NoDefaultMeta): return self.wrapped_fs.getmeta(meta_name, default) - + @rewrite_errors def hasmeta(self, meta_name): return self.wrapped_fs.hasmeta(meta_name) + @rewrite_errors + def validatepath(self, path): + return self.wrapped_fs.validatepath(self._encode(path)) + @rewrite_errors def getsyspath(self, path, allow_none=False): - return self.wrapped_fs.getsyspath(self._encode(path),allow_none) + return self.wrapped_fs.getsyspath(self._encode(path), allow_none) @rewrite_errors def getpathurl(self, path, allow_none=False): - return self.wrapped_fs.getpathurl(self._encode(path),allow_none) + return self.wrapped_fs.getpathurl(self._encode(path), allow_none) @rewrite_errors def hassyspath(self, path): @@ -154,9 +158,9 @@ class WrapFS(FS): @rewrite_errors def setcontents(self, path, data, chunk_size=64*1024): # We can't pass setcontents() through to the wrapped FS if the - # wrapper has defined a _file_wrap method, as it would bypass + # wrapper has defined a _file_wrap method, as it would bypass # the file contents wrapping. 
- #if self._file_wrap.im_func is WrapFS._file_wrap.im_func: + #if self._file_wrap.im_func is WrapFS._file_wrap.im_func: if getattr(self.__class__, '_file_wrap', None) is getattr(WrapFS, '_file_wrap', None): return self.wrapped_fs.setcontents(self._encode(path), data, chunk_size=chunk_size) else: @@ -184,7 +188,7 @@ class WrapFS(FS): full=full, absolute=absolute, dirs_only=dirs_only, - files_only=files_only) + files_only=files_only) full = kwds.pop("full",False) absolute = kwds.pop("absolute",False) wildcard = kwds.pop("wildcard",None) @@ -192,7 +196,7 @@ class WrapFS(FS): wildcard = lambda fn:True elif not callable(wildcard): wildcard_re = re.compile(fnmatch.translate(wildcard)) - wildcard = lambda fn:bool (wildcard_re.match(fn)) + wildcard = lambda fn:bool (wildcard_re.match(fn)) entries = [] enc_path = self._encode(path) for e in self.wrapped_fs.listdir(enc_path,**kwds): @@ -203,7 +207,7 @@ class WrapFS(FS): e = pathcombine(path,e) elif absolute: e = abspath(pathcombine(path,e)) - entries.append(e) + entries.append(e) return entries @rewrite_errors @@ -212,7 +216,7 @@ class WrapFS(FS): full=full, absolute=absolute, dirs_only=dirs_only, - files_only=files_only) + files_only=files_only) full = kwds.pop("full",False) absolute = kwds.pop("absolute",False) wildcard = kwds.pop("wildcard",None) @@ -220,7 +224,7 @@ class WrapFS(FS): wildcard = lambda fn:True elif not callable(wildcard): wildcard_re = re.compile(fnmatch.translate(wildcard)) - wildcard = lambda fn:bool (wildcard_re.match(fn)) + wildcard = lambda fn:bool (wildcard_re.match(fn)) enc_path = self._encode(path) for e in self.wrapped_fs.ilistdir(enc_path,**kwds): e = basename(self._decode(pathcombine(enc_path,e))) @@ -238,7 +242,7 @@ class WrapFS(FS): full=full, absolute=absolute, dirs_only=dirs_only, - files_only=files_only) + files_only=files_only) full = kwds.pop("full",False) absolute = kwds.pop("absolute",False) wildcard = kwds.pop("wildcard",None) @@ -246,7 +250,7 @@ class WrapFS(FS): wildcard = lambda fn:True elif not callable(wildcard): wildcard_re = re.compile(fnmatch.translate(wildcard)) - wildcard = lambda fn:bool (wildcard_re.match(fn)) + wildcard = lambda fn:bool (wildcard_re.match(fn)) entries = [] enc_path = self._encode(path) for (nm,info) in self.wrapped_fs.listdirinfo(enc_path,**kwds): @@ -274,7 +278,7 @@ class WrapFS(FS): wildcard = lambda fn:True elif not callable(wildcard): wildcard_re = re.compile(fnmatch.translate(wildcard)) - wildcard = lambda fn:bool (wildcard_re.match(fn)) + wildcard = lambda fn:bool (wildcard_re.match(fn)) enc_path = self._encode(path) for (nm,info) in self.wrapped_fs.ilistdirinfo(enc_path,**kwds): nm = basename(self._decode(pathcombine(enc_path,nm))) @@ -299,7 +303,7 @@ class WrapFS(FS): else: if wildcard is not None and not callable(wildcard): wildcard_re = re.compile(fnmatch.translate(wildcard)) - wildcard = lambda fn:bool (wildcard_re.match(fn)) + wildcard = lambda fn:bool (wildcard_re.match(fn)) for (dirpath,filepaths) in self.wrapped_fs.walk(self._encode(path),search=search,ignore_errors=ignore_errors): filepaths = [basename(self._decode(pathcombine(dirpath,p))) for p in filepaths] @@ -321,7 +325,7 @@ class WrapFS(FS): else: if wildcard is not None and not callable(wildcard): wildcard_re = re.compile(fnmatch.translate(wildcard)) - wildcard = lambda fn:bool (wildcard_re.match(fn)) + wildcard = lambda fn:bool (wildcard_re.match(fn)) for filepath in self.wrapped_fs.walkfiles(self._encode(path),search=search,ignore_errors=ignore_errors): filepath = abspath(self._decode(filepath)) if wildcard 
is not None: @@ -469,7 +473,7 @@ def wrap_fs_methods(decorator, cls=None, exclude=[]): wrap_fs_methods.method_names = ["open","exists","isdir","isfile","listdir", "makedir","remove","setcontents","removedir","rename","getinfo","copy", "move","copydir","movedir","close","getxattr","setxattr","delxattr", - "listxattrs","getsyspath","createfile", "hasmeta", "getmeta","listdirinfo", + "listxattrs","validatepath","getsyspath","createfile", "hasmeta", "getmeta","listdirinfo", "ilistdir","ilistdirinfo"] -- cgit v1.2.1
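
Editorial note, not part of the patch: the sketch below shows how the new FS.validatepath() and FS.isvalidpath() methods introduced in fs/base.py are meant to be used, mirroring the behaviour exercised in fs/tests/test_fs.py above. TempFS is chosen only because it needs no pre-existing directory; the file names are made up.

    from fs.tempfs import TempFS
    from fs.errors import InvalidPathError

    fs = TempFS()
    # validatepath() returns None for acceptable paths...
    fs.validatepath('foo/bar')
    # ...and raises an InvalidPathError subclass otherwise.
    try:
        fs.validatepath('invalid\0file')
    except InvalidPathError:
        print 'rejected'
    # isvalidpath() is the non-raising convenience wrapper.
    print fs.isvalidpath('completely_valid/path/foo.bar')   # True
    print fs.isvalidpath('invalid\0file')                    # False
    fs.close()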
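
Because the generic validatepath() drives its check from the 'invalid_path_chars' meta value, a backend only has to declare that value to inherit the validation. The subclass below is hypothetical and assumes the default FS.getmeta() resolves meta names through the class-level _meta dict, as OSFS and TempFS do in the hunks above; the class name and the chosen characters are arbitrary.

    from fs.base import FS

    class ColonFreeFS(FS):
        # Hypothetical backend: declaring the invalid characters is enough,
        # the inherited FS.validatepath() builds the matching regex itself.
        _meta = {'invalid_path_chars': ':\0'}

    fs = ColonFreeFS()
    print fs.isvalidpath('reports/2012')    # True
    print fs.isvalidpath('reports:2012')    # False, ':' is declared invalid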
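
The "optimized normpath" half of the commit rests on the rewritten _requires_normalization regular expression in fs/path.py: a path that matches none of its alternatives can be returned from normpath() untouched. This snippet copies that regex and only exercises the fast-path test; the sample paths are made up.

    import re

    _requires_normalization = re.compile(r'/\.\.|\./|^\.$|\.$|//').search

    for p in ('a/b/c', 'foo.bar', 'a/./b', 'a//b', 'a/..', '.'):
        print p, '->', ('normalize' if _requires_normalization(p) else 'fast path')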