Add static typing

Showing 10 of 20 files from the diff.

- wcmatch/_wcmatch.py (changed)
- wcmatch/util.py (changed)
- wcmatch/__init__.py (changed)
- wcmatch/_wcparse.py (changed)
- wcmatch/fnmatch.py (changed)
- wcmatch/glob.py (changed)
- wcmatch/posix.py (changed)
- wcmatch/__meta__.py (changed)
- wcmatch/wcmatch.py (changed)
- wcmatch/pathlib.py (changed)

Other files ignored by Codecov:

- docs/src/markdown/about/changelog.md (changed)
- tox.ini (changed)
- requirements/test.txt (changed)
- tests/test_glob.py (changed)
- .pyspelling.yml (changed)
- tests/test_wcparse.py (changed)
- .github/workflows/build.yml (changed)
- tests/test_globmatch.py (changed)
- setup.py (changed)
- tests/test_fnmatch.py (changed)
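The changes below follow one pattern throughout: the pattern and match classes become generic over `AnyStr`, so `str` and `bytes` patterns keep their concrete type from the public API down to the compiled regular expressions, with `cast(...)` used where pre-typed module-level tables (for example `RE_WIN_DRIVE[util.BYTES]`) cannot express that relationship. A minimal sketch of the idea, using made-up names rather than code from this PR:

```python
from typing import AnyStr, Generic, Optional, Pattern, Tuple


class Matcher(Generic[AnyStr]):
    """Toy version of the Generic[AnyStr] shape applied to _Match/WcRegexp below."""

    def __init__(
        self,
        include: Tuple[Pattern[AnyStr], ...],
        exclude: Optional[Tuple[Pattern[AnyStr], ...]] = None
    ) -> None:
        self.include = include
        self.exclude = exclude

    def match(self, filename: AnyStr) -> bool:
        """Match if any include pattern hits and no exclude pattern does."""

        if not any(p.fullmatch(filename) for p in self.include):
            return False
        return not (self.exclude and any(p.fullmatch(filename) for p in self.exclude))
```

With this shape a type checker infers `Matcher[str]` from compiled `str` patterns and `Matcher[bytes]` from compiled `bytes` patterns, and mixing the two is rejected. That is also why several `self.is_bytes` flags in the diff become explicit `isinstance(pattern, bytes)` branches, which type checkers can narrow.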
@@ -4,6 +4,7 @@
 import stat
 import copyreg
 from . import util
+from typing import Pattern, Tuple, AnyStr, Optional, Generic, Any, Dict, cast

 # `O_DIRECTORY` may not always be defined
 DIR_FLAGS = os.O_RDONLY | getattr(os, 'O_DIRECTORY', 0)
@@ -21,22 +22,39 @@
 )


-class _Match:
+class _Match(Generic[AnyStr]):
     """Match the given pattern."""

-    def __init__(self, filename, include, exclude, real, path, follow):
+    def __init__(
+        self,
+        filename: AnyStr,
+        include: Tuple[Pattern[AnyStr], ...],
+        exclude: Optional[Tuple[Pattern[AnyStr], ...]],
+        real: bool,
+        path: bool,
+        follow: bool
+    ) -> None:
         """Initialize."""

-        self.filename = filename
-        self.include = include
-        self.exclude = exclude
+        self.filename = filename  # type: AnyStr
+        self.include = include  # type: Tuple[Pattern[AnyStr], ...]
+        self.exclude = exclude  # type: Optional[Tuple[Pattern[AnyStr], ...]]
         self.real = real
         self.path = path
         self.follow = follow
-        self.is_bytes = isinstance(self.filename, bytes)
-        self.ptype = util.BYTES if self.is_bytes else util.UNICODE
-
-    def _fs_match(self, pattern, filename, is_dir, sep, follow, symlinks, root, dir_fd):
+        self.ptype = util.BYTES if isinstance(self.filename, bytes) else util.UNICODE
+
+    def _fs_match(
+        self,
+        pattern: Pattern[AnyStr],
+        filename: AnyStr,
+        is_dir: bool,
+        sep: AnyStr,
+        follow: bool,
+        symlinks: Dict[Tuple[Optional[int], AnyStr], bool],
+        root: AnyStr,
+        dir_fd: Optional[int]
+    ) -> bool:
         """
         Match path against the pattern.

@@ -98,12 +116,19 @@
                 matched = False
         return matched

-    def _match_real(self, symlinks, root, dir_fd):
+    def _match_real(
+        self,
+        symlinks: Dict[Tuple[Optional[int], AnyStr], bool],
+        root: AnyStr,
+        dir_fd: Optional[int]
+    ) -> bool:
         """Match real filename includes and excludes."""

-        sep = '\\' if util.platform() == "windows" else '/'
+        temp = '\\' if util.platform() == "windows" else '/'
         if isinstance(self.filename, bytes):
-            sep = os.fsencode(sep)
+            sep = os.fsencode(temp)
+        else:
+            sep = temp

         is_dir = self.filename.endswith(sep)
         try:
@@ -140,11 +165,14 @@

         return matched

-    def match(self, root_dir=None, dir_fd=None):
+    def match(self, root_dir: Optional[AnyStr] = None, dir_fd: Optional[int] = None) -> bool:
         """Match."""

         if self.real:
-            root = root_dir if root_dir else (b'.' if self.is_bytes else '.')
+            if isinstance(self.filename, bytes):
+                root = root_dir if root_dir is not None else b'.'  # type: AnyStr
+            else:
+                root = root_dir if root_dir is not None else '.'

             if dir_fd is not None and not SUPPORT_DIR_FD:
                 dir_fd = None
@@ -163,9 +191,8 @@
                )
            )

-            is_abs = (
-                RE_WIN_MOUNT if util.platform() == "windows" else RE_MOUNT
-            )[self.ptype].match(self.filename) is not None
+            re_mount = cast(Pattern[AnyStr], (RE_WIN_MOUNT if util.platform() == "windows" else RE_MOUNT)[self.ptype])
+            is_abs = re_mount.match(self.filename) is not None

            if is_abs:
                exists = os.path.lexists(self.filename)
@@ -180,7 +207,7 @@
                exists = True

            if exists:
-                symlinks = {}
+                symlinks = {}  # type: Dict[Tuple[Optional[int], AnyStr], bool]
                return self._match_real(symlinks, root, dir_fd)
            else:
                return False
@@ -201,15 +228,29 @@
         return matched


-class WcRegexp(util.Immutable):
+class WcRegexp(util.Immutable, Generic[AnyStr]):
     """File name match object."""

+    _include: Tuple[Pattern[AnyStr], ...]
+    _exclude: Optional[Tuple[Pattern[AnyStr], ...]]
+    _real: bool
+    _path: bool
+    _follow: bool
+    _hash: int
+
     __slots__ = ("_include", "_exclude", "_real", "_path", "_follow", "_hash")

-    def __init__(self, include, exclude=None, real=False, path=False, follow=False):
+    def __init__(
+        self,
+        include: Tuple[Pattern[AnyStr], ...],
+        exclude: Optional[Tuple[Pattern[AnyStr], ...]] = None,
+        real: bool = False,
+        path: bool = False,
+        follow: bool = False
+    ):
         """Initialization."""

-        super(WcRegexp, self).__init__(
+        super().__init__(
             _include=include,
             _exclude=exclude,
             _real=real,
@@ -227,17 +268,17 @@
             )
         )

-    def __hash__(self):
+    def __hash__(self) -> int:
         """Hash."""

         return self._hash

-    def __len__(self):
+    def __len__(self) -> int:
         """Length."""

         return len(self._include) + (len(self._exclude) if self._exclude is not None else 0)

-    def __eq__(self, other):
+    def __eq__(self, other: Any) -> bool:
         """Equal."""

         return (
@@ -249,7 +290,7 @@
             self._follow == other._follow
         )

-    def __ne__(self, other):
+    def __ne__(self, other: Any) -> bool:
         """Equal."""

         return (
@@ -261,7 +302,7 @@
             self._follow != other._follow
         )

-    def match(self, filename, root_dir=None, dir_fd=None):
+    def match(self, filename: AnyStr, root_dir: Optional[AnyStr] = None, dir_fd: Optional[int] = None) -> bool:
         """Match filename."""

         return _Match(
@@ -277,7 +318,7 @@
         )


-def _pickle(p):
+def _pickle(p):  # type: ignore[no-untyped-def]
     return WcRegexp, (p._include, p._exclude, p._real, p._path, p._follow)

@@ -7,6 +7,7 @@
 import unicodedata
 from functools import wraps
 import warnings
+from typing import Any, Callable, Tuple, AnyStr, Match, Pattern, Optional, cast

 PY37 = (3, 7) <= sys.version_info
 PY310 = (3, 10) <= sys.version_info
@@ -64,25 +65,19 @@
     _PLATFORM = "linux"


-def platform():
+def platform() -> str:
     """Get platform."""

     return _PLATFORM


-def is_case_sensitive():
+def is_case_sensitive() -> bool:
     """Check if case sensitive."""

     return CASE_FS


-def to_tuple(values):
-    """Combine values."""
-
-    return (values,) if isinstance(values, (str, bytes)) else tuple(values)
-
-
-def norm_pattern(pattern, normalize, is_raw_chars, ignore_escape=False):
+def norm_pattern(pattern: AnyStr, normalize: Optional[bool], is_raw_chars: bool, ignore_escape: bool = False) -> AnyStr:
     r"""
     Normalize pattern.

@@ -92,59 +87,68 @@
     - If `normalize` is enabled, take care to convert \/ to \\\\.
     """

-    is_bytes = isinstance(pattern, bytes)
+    if isinstance(pattern, bytes):
+        is_bytes = True
+        slash = b'\\'
+        multi_slash = slash * 4
+        pat = RE_BNORM
+    else:
+        is_bytes = False
+        slash = '\\'
+        multi_slash = slash * 4
+        pat = RE_NORM

     if not normalize and not is_raw_chars and not ignore_escape:
         return pattern

-    def norm(m):
+    def norm(m: Match[AnyStr]) -> AnyStr:
         """Normalize the pattern."""

         if m.group(1):
             char = m.group(1)
-            if normalize:
-                char = br'\\\\' if is_bytes else r'\\\\' if len(char) > 1 else char
+            if normalize and len(char) > 1:
+                char = multi_slash
         elif m.group(2):
-            char = BACK_SLASH_TRANSLATION[m.group(2)] if is_raw_chars else m.group(2)
+            char = cast(AnyStr, BACK_SLASH_TRANSLATION[m.group(2)] if is_raw_chars else m.group(2))
         elif is_raw_chars and m.group(4):
-            char = bytes([int(m.group(4), 8) & 0xFF]) if is_bytes else chr(int(m.group(4), 8))
+            char = cast(AnyStr, bytes([int(m.group(4), 8) & 0xFF]) if is_bytes else chr(int(m.group(4), 8)))
         elif is_raw_chars and m.group(3):
-            char = bytes([int(m.group(3)[2:], 16)]) if is_bytes else chr(int(m.group(3)[2:], 16))
+            char = cast(AnyStr, bytes([int(m.group(3)[2:], 16)]) if is_bytes else chr(int(m.group(3)[2:], 16)))
         elif is_raw_chars and not is_bytes and m.group(5):
-            char = unicodedata.lookup(m.group(5)[3:-1])
+            char = unicodedata.lookup(m.group(5)[3:-1])  # type: ignore[assignment]
         elif not is_raw_chars or m.group(5 if is_bytes else 6):
             char = m.group(0)
             if ignore_escape:
-                char = (b'\\' if is_bytes else '\\') + char
+                char = slash + char
         else:
             value = m.group(6) if is_bytes else m.group(7)
             pos = m.start(6) if is_bytes else m.start(7)
-            raise SyntaxError("Could not convert character value {} at position {:d}".format(value, pos))
+            raise SyntaxError("Could not convert character value {!r} at position {:d}".format(value, pos))
         return char

-    return (RE_BNORM if is_bytes else RE_NORM).sub(norm, pattern)
+    return pat.sub(norm, pattern)


-class StringIter(object):
+class StringIter:
     """Preprocess replace tokens."""

-    def __init__(self, string):
+    def __init__(self, string: str) -> None:
         """Initialize."""

         self._string = string
         self._index = 0

-    def __iter__(self):
+    def __iter__(self) -> "StringIter":
         """Iterate."""

         return self

-    def __next__(self):
+    def __next__(self) -> str:
         """Python 3 iterator compatible next."""

         return self.iternext()

-    def match(self, pattern):
+    def match(self, pattern: Pattern[str]) -> Optional[Match[str]]:
         """Perform regex match at index."""

         m = pattern.match(self._string, self._index)
@@ -153,22 +157,22 @@
         return m

     @property
-    def index(self):
+    def index(self) -> int:
         """Get current index."""

         return self._index

-    def previous(self):  # pragma: no cover
+    def previous(self) -> str:  # pragma: no cover
         """Get previous char."""

         return self._string[self._index - 1]

-    def advance(self, count):  # pragma: no cover
+    def advance(self, count: int) -> None:  # pragma: no cover
         """Advanced the index."""

         self._index += count

-    def rewind(self, count):
+    def rewind(self, count: int) -> None:
         """Rewind index."""

         if count > self._index:  # pragma: no cover
@@ -176,7 +180,7 @@

         self._index -= count

-    def iternext(self):
+    def iternext(self) -> str:
         """Iterate through characters of the string."""

         try:
@@ -188,24 +192,24 @@
         return char


-class Immutable(object):
+class Immutable:
     """Immutable."""

-    __slots__ = tuple()
+    __slots__: Tuple[Any, ...] = tuple()

-    def __init__(self, **kwargs):
+    def __init__(self, **kwargs: Any) -> None:
         """Initialize."""

         for k, v in kwargs.items():
             super(Immutable, self).__setattr__(k, v)

-    def __setattr__(self, name, value):  # pragma: no cover
+    def __setattr__(self, name: str, value: Any) -> None:  # pragma: no cover
         """Prevent mutability."""

         raise AttributeError('Class is immutable!')


-def is_hidden(path):
+def is_hidden(path: AnyStr) -> bool:
     """Check if file is hidden."""

     hidden = False
@@ -213,19 +217,19 @@
     if f[:1] in ('.', b'.'):
         # Count dot file as hidden on all systems
         hidden = True
-    elif _PLATFORM == 'windows':
+    elif sys.platform == 'win32':
         # On Windows, look for `FILE_ATTRIBUTE_HIDDEN`
-        FILE_ATTRIBUTE_HIDDEN = 0x2
         results = os.lstat(path)
+        FILE_ATTRIBUTE_HIDDEN = 0x2
         hidden = bool(results.st_file_attributes & FILE_ATTRIBUTE_HIDDEN)
-    elif _PLATFORM == "osx":  # pragma: no cover
+    elif sys.platform == "darwin":  # pragma: no cover
         # On macOS, look for `UF_HIDDEN`
         results = os.lstat(path)
         hidden = bool(results.st_flags & stat.UF_HIDDEN)
     return hidden


-def deprecated(message, stacklevel=2):  # pragma: no cover
+def deprecated(message: str, stacklevel: int = 2) -> Callable[..., Any]:  # pragma: no cover
     """
     Raise a `DeprecationWarning` when wrapped function/method is called.

@@ -236,9 +240,9 @@
         pass
     """

-    def _wrapper(func):
+    def _wrapper(func: Callable[..., Any]) -> Callable[..., Any]:
         @wraps(func)
-        def _deprecated_func(*args, **kwargs):
+        def _deprecated_func(*args: Any, **kwargs: Any) -> Any:
             warnings.warn(
                 f"'{func.__name__}' is deprecated. {message}",
                 category=DeprecationWarning,
@@ -249,7 +253,7 @@
     return _wrapper


-def warn_deprecated(message, stacklevel=2):  # pragma: no cover
+def warn_deprecated(message: str, stacklevel: int = 2) -> None:  # pragma: no cover
     """Warn deprecated."""

     warnings.warn(
@@ -27,6 +27,7 @@
 from . import util
 from . import posix
 from . _wcmatch import WcRegexp
+from typing import List, Tuple, AnyStr, Iterable, Pattern, Generic, Optional, Set, Sequence, Union, cast

 UNICODE_RANGE = '\u0000-\U0010ffff'
 ASCII_RANGE = '\x00-\xff'
@@ -298,7 +299,16 @@
     """Pattern limit exception."""


-def escape(pattern, unix=None, pathname=True, raw=False):
+def to_str_sequence(patterns: Union[str, bytes, Sequence[AnyStr]]) -> Sequence[AnyStr]:
+    """Return a simple string sequence."""
+
+    if isinstance(patterns, (str, bytes)):
+        return cast(Sequence[AnyStr], [patterns])
+    else:
+        return patterns
+
+
+def escape(pattern: AnyStr, unix: Optional[bool] = None, pathname: bool = True, raw: bool = False) -> AnyStr:
     """
     Escape.

@@ -309,17 +319,17 @@
     """

     if isinstance(pattern, bytes):
-        drive_pat = RE_WIN_DRIVE[util.BYTES]
-        magic = RE_MAGIC_ESCAPE[util.BYTES]
-        drive_magic = RE_WIN_DRIVE_MAGIC[util.BYTES]
+        drive_pat = cast(Pattern[AnyStr], RE_WIN_DRIVE[util.BYTES])
+        magic = cast(Pattern[AnyStr], RE_MAGIC_ESCAPE[util.BYTES])
+        drive_magic = cast(Pattern[AnyStr], RE_WIN_DRIVE_MAGIC[util.BYTES])
         replace = br'\\\1'
         slash = b'\\'
         double_slash = b'\\\\'
         drive = b''
     else:
-        drive_pat = RE_WIN_DRIVE[util.UNICODE]
-        magic = RE_MAGIC_ESCAPE[util.UNICODE]
-        drive_magic = RE_WIN_DRIVE_MAGIC[util.UNICODE]
+        drive_pat = cast(Pattern[AnyStr], RE_WIN_DRIVE[util.UNICODE])
+        magic = cast(Pattern[AnyStr], RE_MAGIC_ESCAPE[util.UNICODE])
+        drive_magic = cast(Pattern[AnyStr], RE_WIN_DRIVE_MAGIC[util.UNICODE])
         replace = r'\\\1'
         slash = '\\'
         double_slash = '\\\\'
@@ -345,7 +355,11 @@
     return drive + magic.sub(replace, pattern)


-def _get_win_drive(pattern, regex=False, case_sensitive=False):
+def _get_win_drive(
+    pattern: str,
+    regex: bool = False,
+    case_sensitive: bool = False
+) -> Tuple[bool, Optional[str], bool, int]:
     """Get Windows drive."""

     drive = None
@@ -392,47 +406,56 @@
     return root_specified, drive, slash, end


-def _get_magic_symbols(ptype, unix, flags):
+def _get_magic_symbols(pattern: AnyStr, unix: bool, flags: int) -> Tuple[Set[AnyStr], Set[AnyStr]]:
     """Get magic symbols."""

-    if ptype == util.BYTES:
-        slash = b'\\'
+    if isinstance(pattern, bytes):
+        ptype = util.BYTES
+        slash = b'\\'  # type: AnyStr
     else:
+        ptype = util.UNICODE
         slash = '\\'

-    magic = set()
-    magic_drive = set() if unix else set(slash)
+    magic = set()  # type: Set[AnyStr]
+    if unix:
+        magic_drive = set()  # type: Set[AnyStr]
+    else:
+        magic_drive = set([slash])

-    magic |= MAGIC_DEF[ptype]
+    magic |= cast(Set[AnyStr], MAGIC_DEF[ptype])
     if flags & BRACE:
-        magic |= MAGIC_BRACE[ptype]
-        magic_drive |= MAGIC_BRACE[ptype]
+        magic |= cast(Set[AnyStr], MAGIC_BRACE[ptype])
+        magic_drive |= cast(Set[AnyStr], MAGIC_BRACE[ptype])
     if flags & SPLIT:
-        magic |= MAGIC_SPLIT[ptype]
-        magic_drive |= MAGIC_SPLIT[ptype]
+        magic |= cast(Set[AnyStr], MAGIC_SPLIT[ptype])
+        magic_drive |= cast(Set[AnyStr], MAGIC_SPLIT[ptype])
     if flags & GLOBTILDE:
-        magic |= MAGIC_TILDE[ptype]
+        magic |= cast(Set[AnyStr], MAGIC_TILDE[ptype])
     if flags & EXTMATCH:
-        magic |= MAGIC_EXTMATCH[ptype]
+        magic |= cast(Set[AnyStr], MAGIC_EXTMATCH[ptype])
     if flags & NEGATE:
         if flags & MINUSNEGATE:
-            magic |= MAGIC_MINUS_NEGATE[ptype]
+            magic |= cast(Set[AnyStr], MAGIC_MINUS_NEGATE[ptype])
         else:
-            magic |= MAGIC_NEGATE[ptype]
+            magic |= cast(Set[AnyStr], MAGIC_NEGATE[ptype])

     return magic, magic_drive


-def is_magic(pattern, flags=0):
+def is_magic(pattern: AnyStr, flags: int = 0) -> bool:
     """Check if pattern is magic."""

     magical = False
     unix = is_unix_style(flags)

-    ptype = util.BYTES if isinstance(pattern, bytes) else util.UNICODE
-    drive_pat = RE_WIN_DRIVE[ptype]
+    if isinstance(pattern, bytes):
+        ptype = util.BYTES
+    else:
+        ptype = util.UNICODE
+
+    drive_pat = cast(Pattern[AnyStr], RE_WIN_DRIVE[ptype])

-    magic, magic_drive = _get_magic_symbols(ptype, unix, flags)
+    magic, magic_drive = _get_magic_symbols(pattern, unix, flags)
     is_path = flags & PATHNAME

     length = 0
@@ -456,18 +479,18 @@
     return magical


-def is_negative(pattern, flags):
+def is_negative(pattern: AnyStr, flags: int) -> bool:
     """Check if negative pattern."""

     if flags & MINUSNEGATE:
-        return flags & NEGATE and pattern[0:1] in MINUS_NEGATIVE_SYM
+        return bool(flags & NEGATE and pattern[0:1] in MINUS_NEGATIVE_SYM)
     elif flags & EXTMATCH:
-        return flags & NEGATE and pattern[0:1] in NEGATIVE_SYM and pattern[1:2] not in ROUND_BRACKET
+        return bool(flags & NEGATE and pattern[0:1] in NEGATIVE_SYM and pattern[1:2] not in ROUND_BRACKET)
     else:
-        return flags & NEGATE and pattern[0:1] in NEGATIVE_SYM
+        return bool(flags & NEGATE and pattern[0:1] in NEGATIVE_SYM)


-def tilde_pos(pattern, flags):
+def tilde_pos(pattern: AnyStr, flags: int) -> int:
     """Is user folder."""

     pos = -1
@@ -482,7 +505,7 @@
     return pos


-def expand_braces(patterns, flags, limit):
+def expand_braces(patterns: AnyStr, flags: int, limit: int) -> Iterable[AnyStr]:
     """Expand braces."""

     if flags & BRACE:
@@ -502,24 +525,24 @@
             yield p


-def expand_tilde(pattern, is_unix, flags):
+def expand_tilde(pattern: AnyStr, is_unix: bool, flags: int) -> AnyStr:
     """Expand tilde."""

     pos = tilde_pos(pattern, flags)

     if pos > -1:
         string_type = util.BYTES if isinstance(pattern, bytes) else util.UNICODE
-        tilde = TILDE_SYM[string_type]
-        re_tilde = RE_WIN_TILDE[string_type] if not is_unix else RE_TILDE[string_type]
+        tilde = cast(AnyStr, TILDE_SYM[string_type])
+        re_tilde = cast(Pattern[AnyStr], RE_WIN_TILDE[string_type] if not is_unix else RE_TILDE[string_type])
         m = re_tilde.match(pattern, pos)
         if m:
             expanded = os.path.expanduser(m.group(0))
             if not expanded.startswith(tilde) and os.path.exists(expanded):
-                pattern = (pattern[0] if pos else pattern[0:0]) + escape(expanded, is_unix) + pattern[m.end(0):]
+                pattern = (pattern[0:1] if pos else pattern[0:0]) + escape(expanded, is_unix) + pattern[m.end(0):]
     return pattern


-def expand(pattern, flags, limit):
+def expand(pattern: AnyStr, flags: int, limit: int) -> Iterable[AnyStr]:
     """Expand and normalize."""

     for expanded in expand_braces(pattern, flags, limit):
@@ -527,7 +550,7 @@
         yield expand_tilde(splitted, is_unix_style(flags), flags)


-def is_case_sensitive(flags):
+def is_case_sensitive(flags: int) -> bool:
     """Is case sensitive."""

     if bool(flags & FORCEWIN):
@@ -539,7 +562,7 @@
     return case_sensitive


-def get_case(flags):
+def get_case(flags: int) -> bool:
     """Parse flags for case sensitivity settings."""

     if not bool(flags & CASE_FLAGS):
@@ -551,13 +574,13 @@
     return case_sensitive


-def escape_drive(drive, case):
+def escape_drive(drive: str, case: bool) -> str:
     """Escape drive."""

     return '(?i:{})'.format(re.escape(drive)) if case else re.escape(drive)


-def is_unix_style(flags):
+def is_unix_style(flags: int) -> bool:
     """Check if we should use Unix style."""

     return (
@@ -569,13 +592,15 @@
     )


-def translate(patterns, flags, limit=PATTERN_LIMIT):
+def translate(
+    patterns: Sequence[AnyStr],
+    flags: int,
+    limit: int = PATTERN_LIMIT
+) -> Tuple[List[AnyStr], List[AnyStr]]:
     """Translate patterns."""

-    positive = []
-    negative = []
-    if isinstance(patterns, (str, bytes)):
-        patterns = [patterns]
+    positive = []  # type: List[AnyStr]
+    negative = []  # type: List[AnyStr]

     flags = (flags | _TRANSLATE) & FLAG_MASK
     is_unix = is_unix_style(flags)
@@ -585,7 +610,7 @@
     current_limit = limit
     total = 0
     for pattern in patterns:
-        pattern = util.norm_pattern(pattern, not is_unix, flags & RAWCHARS)
+        pattern = util.norm_pattern(pattern, not is_unix, bool(flags & RAWCHARS))
         count = 0
         for count, expanded in enumerate(expand(pattern, flags, current_limit), 1):
             total += 1
@@ -601,20 +626,22 @@
     except bracex.ExpansionLimitException:
         raise PatternLimitException("Pattern limit exceeded the limit of {:d}".format(limit))

-    if patterns and negative and not positive:
+    if patterns is not None and negative and not positive:
         if flags & NEGATEALL:
             default = b'**' if isinstance(patterns[0], bytes) else '**'
-            positive.append(WcParse(default, flags | (GLOBSTAR if flags & PATHNAME else 0)).parse())
+            positive.append(
+                WcParse(default, flags | (GLOBSTAR if flags & PATHNAME else 0)).parse()
+            )

     if patterns and flags & NODIR:
         index = util.BYTES if isinstance(patterns[0], bytes) else util.UNICODE
-        exclude = _NO_NIX_DIR[index] if is_unix else _NO_WIN_DIR[index]
+        exclude = cast(AnyStr, _NO_NIX_DIR[index] if is_unix else _NO_WIN_DIR[index])
         negative.append(exclude)

     return positive, negative


-def split(pattern, flags):
+def split(pattern: AnyStr, flags: int) -> Iterable[AnyStr]:
     """Split patterns."""

     if flags & SPLIT:
@@ -623,13 +650,15 @@
         yield pattern


-def compile(patterns, flags, limit=PATTERN_LIMIT):  # noqa A001
+def compile(  # noqa: A001
+    patterns: Sequence[AnyStr],
+    flags: int,
+    limit: int = PATTERN_LIMIT
+) -> WcRegexp[AnyStr]:
     """Compile patterns."""

-    positive = []
-    negative = []
-    if isinstance(patterns, (str, bytes)):
-        patterns = [patterns]
+    positive = []  # type: List[Pattern[AnyStr]]
+    negative = []  # type: List[Pattern[AnyStr]]

     is_unix = is_unix_style(flags)
     seen = set()
@@ -638,7 +667,7 @@
     current_limit = limit
     total = 0
     for pattern in patterns:
-        pattern = util.norm_pattern(pattern, not is_unix, flags & RAWCHARS)
+        pattern = util.norm_pattern(pattern, not is_unix, bool(flags & RAWCHARS))
         count = 0
         for count, expanded in enumerate(expand(pattern, flags, current_limit), 1):
             total += 1
@@ -654,39 +683,41 @@
     except bracex.ExpansionLimitException:
         raise PatternLimitException("Pattern limit exceeded the limit of {:d}".format(limit))

-    if patterns and negative and not positive:
+    if patterns is not None and negative and not positive:
         if flags & NEGATEALL:
             default = b'**' if isinstance(patterns[0], bytes) else '**'
             positive.append(_compile(default, flags | (GLOBSTAR if flags & PATHNAME else 0)))

-    if patterns and flags & NODIR:
+    if patterns is not None and flags & NODIR:
         ptype = util.BYTES if isinstance(patterns[0], bytes) else util.UNICODE
-        negative.append(RE_NO_DIR[ptype] if is_unix else RE_WIN_NO_DIR[ptype])
+        negative.append(cast(Pattern[AnyStr], RE_NO_DIR[ptype] if is_unix else RE_WIN_NO_DIR[ptype]))

-    return WcRegexp(tuple(positive), tuple(negative), flags & REALPATH, flags & PATHNAME, flags & FOLLOW)
+    return WcRegexp(
+        tuple(positive), tuple(negative),
+        bool(flags & REALPATH), bool(flags & PATHNAME), bool(flags & FOLLOW)
+    )


 @functools.lru_cache(maxsize=256, typed=True)
-def _compile(pattern, flags):
+def _compile(pattern: AnyStr, flags: int) -> Pattern[AnyStr]:
     """Compile the pattern to regex."""

     return re.compile(WcParse(pattern, flags & FLAG_MASK).parse())


-class WcSplit(object):
+class WcSplit(Generic[AnyStr]):
     """Class that splits patterns on |."""

-    def __init__(self, pattern, flags):
+    def __init__(self, pattern: AnyStr, flags: int) -> None:
         """Initialize."""

-        self.pattern = pattern
-        self.is_bytes = isinstance(pattern, bytes)
+        self.pattern = pattern  # type: AnyStr
         self.pathname = bool(flags & PATHNAME)
         self.extend = bool(flags & EXTMATCH)
         self.unix = is_unix_style(flags)
         self.bslash_abort = not self.unix

-    def _sequence(self, i):
+    def _sequence(self, i: util.StringIter) -> None:
         """Handle character group."""

         c = next(i)
@@ -707,7 +738,7 @@
                 raise StopIteration
             c = next(i)

-    def _references(self, i, sequence=False):
+    def _references(self, i: util.StringIter, sequence: bool = False) -> None:
         """Handle references."""

         c = next(i)
@@ -723,7 +754,7 @@
            # \a, \b, \c, etc.
            pass

-    def parse_extend(self, c, i):
+    def parse_extend(self, c: str, i: util.StringIter) -> bool:
         """Parse extended pattern lists."""

         # Start list parsing
@@ -759,14 +790,12 @@

         return success

-    def split(self):
-        """Start parsing the pattern."""
-
-        pattern = self.pattern.decode('latin-1') if self.is_bytes else self.pattern
+    def _split(self, pattern: str) -> Iterable[str]:
+        """Split the pattern."""

         start = -1
         i = util.StringIter(pattern)
-        iter(i)
+
         for c in i:
             if self.extend and c in EXT_TYPES and self.parse_extend(c, i):
                 continue
@@ -774,7 +803,7 @@
             if c == '|':
                 split = i.index - 1
                 p = pattern[start + 1:split]
-                yield p.encode('latin-1') if self.is_bytes else p
+                yield p
                 start = split
             elif c == '\\':
                 index = i.index
@@ -790,17 +819,25 @@
                 i.rewind(i.index - index)

         if start < len(pattern):
-            p = pattern[start + 1:]
-            yield p.encode('latin-1') if self.is_bytes else p
+            yield pattern[start + 1:]
+
+    def split(self) -> Iterable[AnyStr]:
+        """Split the pattern."""
+
+        if isinstance(self.pattern, bytes):
+            for p in self._split(self.pattern.decode('latin-1')):
+                yield p.encode('latin-1')
+        else:
+            yield from self._split(self.pattern)


-class WcParse(object):
+class WcParse(Generic[AnyStr]):
     """Parse the wildcard pattern."""

-    def __init__(self, pattern, flags=0):
+    def __init__(self, pattern: AnyStr, flags: int = 0) -> None:
         """Initialize."""

-        self.pattern = pattern
+        self.pattern = pattern  # type: AnyStr
         self.no_abs = bool(flags & _NOABSOLUTE)
         self.braces = bool(flags & BRACE)
         self.is_bytes = isinstance(pattern, bytes)
@@ -827,7 +864,7 @@
         self.unix = is_unix_style(self.flags)
         if not self.unix:
             self.win_drive_detect = self.pathname
-            self.char_avoid = (ord('\\'), ord('/'), ord('.'))
+            self.char_avoid = (ord('\\'), ord('/'), ord('.'))  # type: Tuple[int, ...]
             self.bslash_abort = self.pathname
             sep = {"sep": re.escape('\\/')}
         else:
@@ -851,25 +888,25 @@
         else:
             self.need_char = _NEED_CHAR

-    def set_after_start(self):
+    def set_after_start(self) -> None:
         """Set tracker for character after the start of a directory."""

         self.after_start = True
         self.dir_start = False

-    def set_start_dir(self):
+    def set_start_dir(self) -> None:
         """Set directory start."""

         self.dir_start = True
         self.after_start = False

-    def reset_dir_track(self):
+    def reset_dir_track(self) -> None:
         """Reset directory tracker."""

         self.dir_start = False
         self.after_start = False

-    def update_dir_state(self):
+    def update_dir_state(self) -> None:
         """
         Update the directory state.

@@ -883,12 +920,12 @@
         elif not self.dir_start and self.after_start:
             self.reset_dir_track()

-    def _restrict_extended_slash(self):
+    def _restrict_extended_slash(self) -> str:
         """Restrict extended slash."""

         return self.seq_path if self.pathname else ''

-    def _restrict_sequence(self):
+    def _restrict_sequence(self) -> str:
         """Restrict sequence."""

         if self.pathname:
@@ -901,7 +938,7 @@

         return value

-    def _sequence_range_check(self, result, last):
+    def _sequence_range_check(self, result: List[str], last: str) -> bool:
         """
         If range backwards, remove it.

@@ -926,7 +963,7 @@
             result.append(last)
         return removed

-    def _handle_posix(self, i, result, end_range):
+    def _handle_posix(self, i: util.StringIter, result: List[str], end_range: int) -> bool:
         """Handle posix classes."""

         last_posix = False
@@ -941,7 +978,7 @@
             result.append(posix.get_posix_property(m.group(1), self.is_bytes))
         return last_posix

-    def _sequence(self, i):
+    def _sequence(self, i: util.StringIter) -> str:
         """Handle character group."""

         result = ['[']
@@ -1043,7 +1080,7 @@

         return ''.join(result)

-    def _references(self, i, sequence=False):
+    def _references(self, i: util.StringIter, sequence: bool = False) -> str:
         """Handle references."""

         value = ''
@@ -1083,7 +1120,7 @@

         return value

-    def _handle_dot(self, i, current):
+    def _handle_dot(self, i: util.StringIter, current: List[str]) -> None:
         """Handle dot."""

         is_current = True
@@ -1133,7 +1170,7 @@
         else:
             current.append(re.escape('.'))

-    def _handle_star(self, i, current):
+    def _handle_star(self, i: util.StringIter, current: List[str]) -> None:
         """Handle star."""

         if self.pathname:
@@ -1227,7 +1264,7 @@
         else:
             current.append(value)

-    def clean_up_inverse(self, current, nested=False):
+    def clean_up_inverse(self, current: List[str], nested: bool = False) -> None:
         """
         Clean up current.

@@ -1258,7 +1295,7 @@
             index -= 1
         self.inv_ext = 0

-    def parse_extend(self, c, i, current, reset_dot=False):
+    def parse_extend(self, c: str, i: util.StringIter, current: List[str], reset_dot: bool = False) -> bool:
         """Parse extended pattern lists."""

         # Save state
@@ -1277,7 +1314,7 @@
         success = True
         index = i.index
         list_type = c
-        extended = []
+        extended = []  # type: List[str]

         try:
             c = next(i)
@@ -1382,7 +1419,7 @@

         return success

-    def consume_path_sep(self, i):
+    def consume_path_sep(self, i: util.StringIter) -> None:
         """Consume any consecutive path separators as they count as one."""

         try:
@@ -1407,12 +1444,12 @@
         except StopIteration:
             pass

-    def root(self, pattern, current):
+    def root(self, pattern: str, current: List[str]) -> None:
         """Start parsing the pattern."""

         self.set_after_start()
         i = util.StringIter(pattern)
-        iter(i)
+
         root_specified = False
         if self.win_drive_detect:
             root_specified, drive, slash, end = _get_win_drive(pattern, True, self.case_sensitive)
@@ -1491,21 +1528,18 @@
         if self.pathname:
             current.append(_PATH_TRAIL.format(self.sep))

-    def parse(self):
-        """Parse pattern list."""
+    def _parse(self, p: str) -> str:
+        """Parse pattern."""

         result = ['']
         prepend = ['']
-        self.negative = False

-        p = self.pattern
+        self.negative = False

         if is_negative(p, self.flags):
             self.negative = True
             p = p[1:]

-        p = p.decode('latin-1') if self.is_bytes else p
-
         if self.negative:
             # TODO: Do we prevent `NODOTDIR` for negative patterns?
             self.globstar_capture = False
@@ -1559,7 +1593,14 @@
         # Strip out unnecessary regex comments
         pattern = pattern.replace('(?#)', '')

-        if self.is_bytes:
-            pattern = pattern.encode('latin-1')
+        return pattern
+
+    def parse(self) -> AnyStr:
+        """Parse pattern list."""
+
+        if isinstance(self.pattern, bytes):
+            pattern = self._parse(self.pattern.decode('latin-1')).encode('latin-1')
+        else:
+            pattern = self._parse(self.pattern)

         return pattern
@@ -21,6 +21,7 @@
 IN THE SOFTWARE.
 """
 from . import _wcparse
+from typing import Tuple, List, AnyStr, Iterable, Union, Sequence

 __all__ = (
     "CASE", "EXTMATCH", "IGNORECASE", "RAWCHARS",
@@ -59,7 +60,7 @@
 )


-def _flag_transform(flags):
+def _flag_transform(flags: int) -> int:
     """Transform flags to glob defaults."""

     # Enabling both cancels out
@@ -69,14 +70,25 @@
     return (flags & FLAG_MASK)


-def translate(patterns, *, flags=0, limit=_wcparse.PATTERN_LIMIT):
+def translate(
+    patterns: Union[str, bytes, Sequence[AnyStr]],
+    *,
+    flags: int = 0,
+    limit: int = _wcparse.PATTERN_LIMIT
+) -> Tuple[List[AnyStr], List[AnyStr]]:
     """Translate `fnmatch` pattern."""

     flags = _flag_transform(flags)
-    return _wcparse.translate(patterns, flags, limit)
+    return _wcparse.translate(_wcparse.to_str_sequence(patterns), flags, limit)


-def fnmatch(filename, patterns, *, flags=0, limit=_wcparse.PATTERN_LIMIT):
+def fnmatch(
+    filename: AnyStr,
+    patterns: Union[str, bytes, Sequence[AnyStr]],
+    *,
+    flags: int = 0,
+    limit: int = _wcparse.PATTERN_LIMIT
+) -> bool:
     """
     Check if filename matches pattern.

@@ -85,16 +97,22 @@
     """

     flags = _flag_transform(flags)
-    return _wcparse.compile(patterns, flags, limit).match(filename)
+    return bool(_wcparse.compile(_wcparse.to_str_sequence(patterns), flags, limit).match(filename))


-def filter(filenames, patterns, *, flags=0, limit=_wcparse.PATTERN_LIMIT):  # noqa A001
+def filter(  # noqa A001
+    filenames: Iterable[AnyStr],
+    patterns: Union[str, bytes, Sequence[AnyStr]],
+    *,
+    flags: int = 0,
+    limit: int = _wcparse.PATTERN_LIMIT
+) -> List[AnyStr]:
     """Filter names using pattern."""

     matches = []

     flags = _flag_transform(flags)
-    obj = _wcparse.compile(patterns, flags, limit)
+    obj = _wcparse.compile(_wcparse.to_str_sequence(patterns), flags, limit)

     for filename in filenames:
         if obj.match(filename):
@@ -102,13 +120,13 @@
     return matches


-def escape(pattern):
+def escape(pattern: AnyStr) -> AnyStr:
     """Escape."""

     return _wcparse.escape(pattern, pathname=False)


-def is_magic(pattern, *, flags=0):
+def is_magic(pattern: AnyStr, *, flags: int = 0) -> bool:
     """Check if the pattern is likely to be magic."""

     flags = _flag_transform(flags)
@@ -25,9 +25,14 @@
 import re
 import functools
 from collections import namedtuple
+import bracex
 from . import _wcparse
 from . import _wcmatch
 from . import util
+from typing import (
+    Optional, Iterator, Iterable, List, AnyStr, Union, Generic,
+    Tuple, Pattern, Callable, Any, Set, Sequence, cast
+)

 __all__ = (
     "CASE", "IGNORECASE", "RAWCHARS", "DOTGLOB", "DOTMATCH",
@@ -102,19 +107,18 @@
     _NOABSOLUTE
 )

-_RE_PATHLIB_DOT_NORM = [
+_RE_PATHLIB_DOT_NORM = (
     re.compile(r'(?:((?<=^)|(?<=/))\.(?:/|$))+'),
     re.compile(br'(?:((?<=^)|(?<=/))\.(?:/|$))+')
+)  # type: Tuple[Pattern[str], Pattern[bytes]]

-]
-
-_RE_WIN_PATHLIB_DOT_NORM = [
+_RE_WIN_PATHLIB_DOT_NORM = (
     re.compile(r'(?:((?<=^)|(?<=[\\/]))\.(?:[\\/]|$))+'),
     re.compile(br'(?:((?<=^)|(?<=[\\/]))\.(?:[\\/]|$))+')
-]
+)  # type: Tuple[Pattern[str], Pattern[bytes]]


-def _flag_transform(flags):
+def _flag_transform(flags: int) -> int:
     """Transform flags to glob defaults."""

     # Enabling both cancels out
@@ -135,11 +139,13 @@
     return flags


-class _GlobPart(namedtuple('_GlobPart', ['pattern', 'is_magic', 'is_globstar', 'dir_only', 'is_drive'])):
+class _GlobPart(
+    namedtuple('_GlobPart', ['pattern', 'is_magic', 'is_globstar', 'dir_only', 'is_drive']),
+):
     """File Glob."""


-class _GlobSplit(object):
+class _GlobSplit(Generic[AnyStr]):
     """
     Split glob pattern on "magic" file and directories.

@@ -166,12 +172,12 @@

     """

-    def __init__(self, pattern, flags):
+    def __init__(self, pattern: AnyStr, flags: int) -> None:
         """Initialize."""

+        self.pattern = pattern  # type: AnyStr
         self.unix = _wcparse.is_unix_style(flags)
         self.flags = flags
-        self.pattern = pattern
         self.no_abs = bool(flags & _wcparse._NOABSOLUTE)
         self.globstar = bool(flags & GLOBSTAR)
         self.matchbase = bool(flags & MATCHBASE)
@@ -185,7 +191,6 @@
         if flags & NEGATE:
             flags ^= NEGATE
             self.flags = flags
-        self.is_bytes = isinstance(pattern, bytes)
         self.extend = bool(flags & EXTMATCH)
         if not self.unix:
             self.win_drive_detect = True
@@ -197,10 +202,9 @@
             self.sep = '/'
         # Once split, Windows file names will never have `\\` in them,
         # so we can use the Unix magic detect
-        ptype = util.BYTES if self.is_bytes else util.UNICODE
-        self.magic_symbols = _wcparse._get_magic_symbols(ptype, self.unix, self.flags)[0]
+        self.magic_symbols = _wcparse._get_magic_symbols(pattern, self.unix, self.flags)[0]  # type: Set[AnyStr]

-    def is_magic(self, name):
+    def is_magic(self, name: AnyStr) -> bool:
         """Check if name contains magic characters."""

         for c in self.magic_symbols:
@@ -208,7 +212,7 @@
                 return True
         return False

-    def _sequence(self, i):
+    def _sequence(self, i: util.StringIter) -> None:
         """Handle character group."""

         c = next(i)
@@ -228,7 +232,7 @@
                 raise StopIteration
             c = next(i)

-    def _references(self, i, sequence=False):
+    def _references(self, i: util.StringIter, sequence: bool = False) -> str:
         """Handle references."""

         value = ''
@@ -249,7 +253,7 @@
             pass
         return value

-    def parse_extend(self, c, i):
+    def parse_extend(self, c: str, i: util.StringIter) -> bool:
         """Parse extended pattern lists."""

         # Start list parsing
@@ -285,7 +289,7 @@

         return success

-    def store(self, value, l, dir_only):
+    def store(self, value: AnyStr, l: List[_GlobPart], dir_only: bool) -> None:
         """Group patterns by literals and potential magic patterns."""

         if l and value in (b'', ''):
@@ -294,39 +298,43 @@
         globstar = value in (b'**', '**') and self.globstar
         magic = self.is_magic(value)
         if magic:
-            value = _wcparse._compile(value, self.flags)
+            v = cast(Pattern[AnyStr], _wcparse._compile(value, self.flags))  # type: Union[Pattern[AnyStr], AnyStr]
+        else:
+            v = value
         if globstar and l and l[-1].is_globstar:
-            l[-1] = _GlobPart(value, magic, globstar, dir_only, False)
+            l[-1] = _GlobPart(v, magic, globstar, dir_only, False)
         else:
-            l.append(_GlobPart(value, magic, globstar, dir_only, False))
+            l.append(_GlobPart(v, magic, globstar, dir_only, False))

-    def split(self):
+    def split(self) -> List[_GlobPart]:
         """Start parsing the pattern."""

         split_index = []
         parts = []
         start = -1

-        pattern = self.pattern.decode('latin-1') if self.is_bytes else self.pattern
+        if isinstance(self.pattern, bytes):
+            is_bytes = True
+            pattern = self.pattern.decode('latin-1')
+        else:
+            is_bytes = False
+            pattern = self.pattern

         i = util.StringIter(pattern)
-        iter(i)

         # Detect and store away windows drive as a literal
         if self.win_drive_detect:
             root_specified, drive, slash, end = _wcparse._get_win_drive(pattern)
             if drive is not None:
-                if self.is_bytes:
-                    drive = drive.encode('latin-1')
-                parts.append(_GlobPart(drive, False, False, True, True))
+                parts.append(_GlobPart(drive.encode('latin-1') if is_bytes else drive, False, False, True, True))
                 start = end - 1
                 i.advance(start)
             elif drive is None and root_specified:
-                parts.append(_GlobPart(b'\\' if self.is_bytes else '\\', False, False, True, True))
+                parts.append(_GlobPart(b'\\' if is_bytes else '\\', False, False, True, True))
                 start = 1
                 i.advance(2)
         elif not self.win_drive_detect and pattern.startswith('/'):
-            parts.append(_GlobPart(b'/' if self.is_bytes else '/', False, False, True, True))
+            parts.append(_GlobPart(b'/' if is_bytes else '/', False, False, True, True))
             start = 0
             i.advance(1)

@@ -353,30 +361,24 @@
                 i.rewind(i.index - index)

         for split, offset in split_index:
-            if self.is_bytes:
-                value = pattern[start + 1:split].encode('latin-1')
-            else:
-                value = pattern[start + 1:split]
-            self.store(value, parts, True)
+            value = pattern[start + 1:split]
+            self.store(cast(AnyStr, value.encode('latin-1') if is_bytes else value), parts, True)
             start = split + offset

         if start < len(pattern):
-            if self.is_bytes:
-                value = pattern[start + 1:].encode('latin-1')
-            else:
-                value = pattern[start + 1:]
+            value = pattern[start + 1:]
             if value:
-                self.store(value, parts, False)
+                self.store(cast(AnyStr, value.encode('latin-1') if is_bytes else value), parts, False)

         if len(pattern) == 0:
-            parts.append(_GlobPart(pattern.encode('latin-1') if self.is_bytes else pattern, False, False, False, False))
+            parts.append(_GlobPart(pattern.encode('latin-1') if is_bytes else pattern, False, False, False, False))

         if (
             (self.extmatchbase and not parts[0].is_drive) or
             (self.matchbase and len(parts) == 1 and not parts[0].dir_only)
         ):
             self.globstar = True
-            parts.insert(0, _GlobPart(b'**' if self.is_bytes else '**', True, True, True, False))
+            parts.insert(0, _GlobPart(b'**' if is_bytes else '**', True, True, True, False))

         if self.no_abs and parts and parts[0].is_drive:
             raise ValueError('The pattern must be a relative path pattern')
@@ -384,75 +386,92 @@
         return parts


-class Glob(object):
+class Glob(Generic[AnyStr]):
     """Glob patterns."""

-    def __init__(self, pattern, flags=0, root_dir=None, dir_fd=None, limit=_wcparse.PATTERN_LIMIT):
+    def __init__(
+        self,
+        pattern: Union[str, bytes, Sequence[AnyStr]],
+        flags: int = 0,
+        root_dir: Optional[Union[AnyStr, 'os.PathLike[AnyStr]']] = None,
+        dir_fd: Optional[int] = None,
+        limit: int = _wcparse.PATTERN_LIMIT
+    ) -> None:
         """Initialize the directory walker object."""

-        self.seen = set()
-        self.is_bytes = isinstance(pattern[0], bytes)
-        self.current = b'.' if self.is_bytes else '.'
-        self.dir_fd = dir_fd if SUPPORT_DIR_FD else None
-        self.root_dir = os.fspath(root_dir) if root_dir is not None else self.current
-        self.nounique = bool(flags & NOUNIQUE)
-        self.mark = bool(flags & MARK)
+        pats = _wcparse.to_str_sequence(pattern)
+
+        self.pattern = []  # type: List[List[_GlobPart]]
+        self.npatterns = []  # type: List[Pattern[AnyStr]]
+        self.seen = set()  # type: Set[AnyStr]
+        self.dir_fd = dir_fd if SUPPORT_DIR_FD else None  # type: Optional[int]
+        self.nounique = bool(flags & NOUNIQUE)  # type: bool
+        self.mark = bool(flags & MARK)  # type: bool
         # Only scan for `.` and `..` if it is specifically requested.
-        self.scandotdir = flags & SCANDOTDIR
+        self.scandotdir = bool(flags & SCANDOTDIR)  # type: bool
         if self.mark:
             flags ^= MARK
-        self.negateall = bool(flags & NEGATEALL)
+        self.negateall = bool(flags & NEGATEALL)  # type: bool
         if self.negateall:
             flags ^= NEGATEALL
-        self.nodir = bool(flags & NODIR)
+        self.nodir = bool(flags & NODIR)  # type: bool
         if self.nodir:
             flags ^= NODIR
-        self.pathlib = bool(flags & _PATHLIB)
+        self.pathlib = bool(flags & _PATHLIB)  # type: bool
         if self.pathlib:
             flags ^= _PATHLIB
         # Right to left searching is only for matching
         if flags & _RTL:  # pragma: no cover
             flags ^= _RTL
-        self.flags = _flag_transform(flags | REALPATH)
-        self.negate_flags = self.flags
+        self.flags = _flag_transform(flags | REALPATH)  # type: int
+        self.negate_flags = self.flags  # type: int
         if not self.scandotdir and not self.flags & NODOTDIR:
             self.flags |= NODOTDIR
-        self.raw_chars = bool(self.flags & RAWCHARS)
-        self.follow_links = bool(self.flags & FOLLOW)
-        self.dot = bool(self.flags & DOTMATCH)
-        self.unix = not bool(self.flags & FORCEWIN)
-        self.negate = bool(self.flags & NEGATE)
-        self.globstar = bool(self.flags & GLOBSTAR)
-        self.braces = bool(self.flags & BRACE)
-        self.matchbase = bool(self.flags & MATCHBASE)
-        self.case_sensitive = _wcparse.get_case(self.flags)
-        self.specials = (b'.', b'..') if self.is_bytes else ('.', '..')
-        self.empty = b'' if self.is_bytes else ''
-        self.stars = b'**' if self.is_bytes else '**'
-        self.limit = limit
-        if self.flags & FORCEWIN:
-            self.sep = b'\\' if self.is_bytes else '\\'
-            self.seps = (b'/' if self.is_bytes else '/', self.sep)
-            self.re_pathlib_norm = _RE_WIN_PATHLIB_DOT_NORM[util.BYTES if self.is_bytes else util.UNICODE]
|
437 | - | self.re_no_dir = _wcparse.RE_WIN_NO_DIR[util.BYTES if self.is_bytes else util.UNICODE] |
|
430 | + | self.raw_chars = bool(self.flags & RAWCHARS) # type: bool |
|
431 | + | self.follow_links = bool(self.flags & FOLLOW) # type: bool |
|
432 | + | self.dot = bool(self.flags & DOTMATCH) # type: bool |
|
433 | + | self.unix = not bool(self.flags & FORCEWIN) # type: bool |
|
434 | + | self.negate = bool(self.flags & NEGATE) # type: bool |
|
435 | + | self.globstar = bool(self.flags & GLOBSTAR) # type: bool |
|
436 | + | self.braces = bool(self.flags & BRACE) # type: bool |
|
437 | + | self.matchbase = bool(self.flags & MATCHBASE) # type: bool |
|
438 | + | self.case_sensitive = _wcparse.get_case(self.flags) # type: bool |
|
439 | + | self.limit = limit # type: int |
|
440 | + | ||
441 | + | forcewin = self.flags & FORCEWIN |
|
442 | + | if isinstance(pats[0], bytes): |
|
443 | + | ptype = util.BYTES |
|
444 | + | self.current = b'.' # type: AnyStr |
|
445 | + | self.specials = (b'.', b'..') # type: Tuple[AnyStr, ...] |
|
446 | + | self.empty = b'' # type: AnyStr |
|
447 | + | self.stars = b'**' # type: AnyStr |
|
448 | + | self.sep = b'\\' if forcewin else b'/' # type: AnyStr |
|
449 | + | self.seps = (b'/', self.sep) if forcewin else (self.sep,) # type: Tuple[AnyStr, ...] |
|
450 | + | self.re_pathlib_norm = cast(Pattern[AnyStr], _RE_WIN_PATHLIB_DOT_NORM[ptype]) # type: Pattern[AnyStr] |
|
451 | + | self.re_no_dir = cast(Pattern[AnyStr], _wcparse.RE_WIN_NO_DIR[ptype]) # type: Pattern[AnyStr] |
|
438 | 452 | else: |
|
439 | - | self.sep = b'/' if self.is_bytes else '/' |
|
440 | - | self.seps = (self.sep,) |
|
441 | - | self.re_pathlib_norm = _RE_PATHLIB_DOT_NORM[util.BYTES if self.is_bytes else util.UNICODE] |
|
442 | - | self.re_no_dir = _wcparse.RE_NO_DIR[util.BYTES if self.is_bytes else util.UNICODE] |
|
443 | - | self._parse_patterns(pattern) |
|
444 | - | ||
445 | - | if ( |
|
446 | - | (self.is_bytes and not isinstance(self.root_dir, bytes)) or |
|
447 | - | (not self.is_bytes and not isinstance(self.root_dir, str)) |
|
448 | - | ): |
|
453 | + | ptype = util.UNICODE |
|
454 | + | self.current = '.' |
|
455 | + | self.specials = ('.', '..') |
|
456 | + | self.empty = '' |
|
457 | + | self.stars = '**' |
|
458 | + | self.sep = '\\' if forcewin else '/' |
|
459 | + | self.seps = ('/', self.sep) if forcewin else (self.sep,) |
|
460 | + | self.re_pathlib_norm = cast(Pattern[AnyStr], _RE_WIN_PATHLIB_DOT_NORM[ptype]) |
|
461 | + | self.re_no_dir = cast(Pattern[AnyStr], _wcparse.RE_WIN_NO_DIR[ptype]) |
|
462 | + | ||
463 | + | temp = os.fspath(root_dir) if root_dir is not None else self.current |
|
464 | + | if not isinstance(temp, type(pats[0])): |
|
449 | 465 | raise TypeError( |
|
450 | 466 | 'Pattern and root_dir should be of the same type, not {} and {}'.format( |
|
451 | - | type(pattern[0]), type(self.root_dir) |
|
467 | + | type(pats[0]), type(temp) |
|
452 | 468 | ) |
|
453 | 469 | ) |
|
454 | 470 | ||
455 | - | def _iter_patterns(self, patterns): |
|
471 | + | self.root_dir = temp # type: AnyStr |
|
472 | + | self._parse_patterns(pats) |
|
473 | + | ||
474 | + | def _iter_patterns(self, patterns: Sequence[AnyStr]) -> Iterator[Tuple[bool, AnyStr]]: |
|
456 | 475 | """Iterate expanded patterns.""" |
|
457 | 476 | ||
458 | 477 | seen = set() |
@@ -482,22 +501,20 @@
482 | 501 | current_limit -= count |
|
483 | 502 | if current_limit < 1: |
|
484 | 503 | current_limit = 1 |
|
485 | - | except _wcparse.bracex.ExpansionLimitException: |
|
504 | + | except bracex.ExpansionLimitException: |
|
486 | 505 | raise _wcparse.PatternLimitException( |
|
487 | 506 | "Pattern limit exceeded the limit of {:d}".format(self.limit) |
|
488 | 507 | ) |
|
489 | 508 | ||
490 | - | def _parse_patterns(self, patterns): |
|
509 | + | def _parse_patterns(self, patterns: Sequence[AnyStr]) -> None: |
|
491 | 510 | """Parse patterns.""" |
|
492 | 511 | ||
493 | - | self.pattern = [] |
|
494 | - | self.npatterns = [] |
|
495 | 512 | for is_neg, p in self._iter_patterns(patterns): |
|
496 | 513 | if is_neg: |
|
497 | 514 | # Treat the inverse pattern as a normal pattern if it matches, we will exclude. |
|
498 | 515 | # This is faster as compiled patterns usually compare the include patterns first, |
|
499 | 516 | # and then the exclude, but glob will already know it wants to include the file. |
|
500 | - | self.npatterns.append(_wcparse._compile(p, self.negate_flags)) |
|
517 | + | self.npatterns.append(cast(Pattern[AnyStr], _wcparse._compile(p, self.negate_flags))) |
|
501 | 518 | else: |
|
502 | 519 | self.pattern.append(_GlobSplit(p, self.flags).split()) |
|
503 | 520 |
@@ -519,22 +536,22 @@
519 | 536 | ): |
|
520 | 537 | self.nounique = True |
|
521 | 538 | ||
522 | - | def _is_hidden(self, name): |
|
539 | + | def _is_hidden(self, name: AnyStr) -> bool: |
|
523 | 540 | """Check if is file hidden.""" |
|
524 | 541 | ||
525 | 542 | return not self.dot and name[0:1] == self.specials[0] |
|
526 | 543 | ||
527 | - | def _is_this(self, name): |
|
544 | + | def _is_this(self, name: AnyStr) -> bool: |
|
528 | 545 | """Check if "this" directory `.`.""" |
|
529 | 546 | ||
530 | 547 | return name == self.specials[0] or name == self.sep |
|
531 | 548 | ||
532 | - | def _is_parent(self, name): |
|
549 | + | def _is_parent(self, name: AnyStr) -> bool: |
|
533 | 550 | """Check if `..`.""" |
|
534 | 551 | ||
535 | 552 | return name == self.specials[1] |
|
536 | 553 | ||
537 | - | def _match_excluded(self, filename, is_dir): |
|
554 | + | def _match_excluded(self, filename: AnyStr, is_dir: bool) -> bool: |
|
538 | 555 | """Check if file should be excluded.""" |
|
539 | 556 | ||
540 | 557 | if is_dir and not filename.endswith(self.sep): |
@@ -548,21 +565,21 @@
548 | 565 | ||
549 | 566 | return matched |
|
550 | 567 | ||
551 | - | def _is_excluded(self, path, is_dir): |
|
568 | + | def _is_excluded(self, path: AnyStr, is_dir: bool) -> bool: |
|
552 | 569 | """Check if file is excluded.""" |
|
553 | 570 | ||
554 | - | return self.npatterns and self._match_excluded(path, is_dir) |
|
571 | + | return bool(self.npatterns and self._match_excluded(path, is_dir)) |
|
555 | 572 | ||
556 | - | def _match_literal(self, a, b=None): |
|
573 | + | def _match_literal(self, a: AnyStr, b: Optional[AnyStr] = None) -> bool: |
|
557 | 574 | """Match two names.""" |
|
558 | 575 | ||
559 | 576 | return a.lower() == b if not self.case_sensitive else a == b |
|
560 | 577 | ||
561 | - | def _get_matcher(self, target): |
|
578 | + | def _get_matcher(self, target: Optional[Union[AnyStr, Pattern[AnyStr]]]) -> Optional[Callable[..., Any]]: |
|
562 | 579 | """Get deep match.""" |
|
563 | 580 | ||
564 | 581 | if target is None: |
|
565 | - | matcher = None |
|
582 | + | matcher = None # type: Optional[Callable[..., Any]] |
|
566 | 583 | elif isinstance(target, (str, bytes)): |
|
567 | 584 | # Plain text match |
|
568 | 585 | if not self.case_sensitive: |
@@ -575,7 +592,7 @@
575 | 592 | matcher = target.match |
|
576 | 593 | return matcher |
|
577 | 594 | ||
578 | - | def _lexists(self, path): |
|
595 | + | def _lexists(self, path: AnyStr) -> bool: |
|
579 | 596 | """Check if file exists.""" |
|
580 | 597 | ||
581 | 598 | if not self.dir_fd: |
@@ -587,7 +604,7 @@
587 | 604 | else: |
|
588 | 605 | return True |
|
589 | 606 | ||
590 | - | def prepend_base(self, path): |
|
607 | + | def prepend_base(self, path: AnyStr) -> AnyStr: |
|
591 | 608 | """Join path to base if pattern is not absolute.""" |
|
592 | 609 | ||
593 | 610 | if self.is_abs_pattern: |
@@ -595,13 +612,13 @@
595 | 612 | else: |
|
596 | 613 | return os.path.join(self.root_dir, path) |
|
597 | 614 | ||
598 | - | def _iter(self, curdir, dir_only, deep): |
|
615 | + | def _iter(self, curdir: Optional[AnyStr], dir_only: bool, deep: bool) -> Iterator[Tuple[AnyStr, bool, bool, bool]]: |
|
599 | 616 | """Iterate the directory.""" |
|
600 | 617 | ||
601 | 618 | try: |
|
602 | - | fd = None |
|
619 | + | fd = None # type: Optional[int] |
|
603 | 620 | if self.is_abs_pattern and curdir: |
|
604 | - | scandir = curdir |
|
621 | + | scandir = curdir # type: Union[AnyStr, int] |
|
605 | 622 | elif self.dir_fd is not None: |
|
606 | 623 | fd = scandir = os.open( |
|
607 | 624 | os.path.join(self.root_dir, curdir) if curdir else self.root_dir, |
@@ -616,10 +633,10 @@
616 | 633 | yield special, True, True, False |
|
617 | 634 | ||
618 | 635 | try: |
|
619 | - | with os.scandir(scandir) as scan: |
|
636 | + | with os.scandir(scandir) as scan: # type: ignore[type-var] |
|
620 | 637 | for f in scan: |
|
621 | 638 | try: |
|
622 | - | hidden = self._is_hidden(f.name) |
|
639 | + | hidden = self._is_hidden(f.name) # type: ignore[arg-type] |
|
623 | 640 | is_dir = f.is_dir() |
|
624 | 641 | if is_dir: |
|
625 | 642 | is_link = f.is_symlink() |
@@ -627,7 +644,7 @@
627 | 644 | # We don't care if a file is a link |
|
628 | 645 | is_link = False |
|
629 | 646 | if (not dir_only or is_dir): |
|
630 | - | yield f.name, is_dir, hidden, is_link |
|
647 | + | yield f.name, is_dir, hidden, is_link # type: ignore[misc] |
|
631 | 648 | except OSError: # pragma: no cover |
|
632 | 649 | pass |
|
633 | 650 | finally: |
@@ -637,7 +654,13 @@
637 | 654 | except OSError: # pragma: no cover |
|
638 | 655 | pass |
|
639 | 656 | ||
640 | - | def _glob_dir(self, curdir, matcher, dir_only=False, deep=False): |
|
657 | + | def _glob_dir( |
|
658 | + | self, |
|
659 | + | curdir: AnyStr, |
|
660 | + | matcher: Optional[Callable[..., Any]], |
|
661 | + | dir_only: bool = False, |
|
662 | + | deep: bool = False |
|
663 | + | ) -> Iterator[Tuple[AnyStr, bool]]: |
|
641 | 664 | """Recursive directory glob.""" |
|
642 | 665 | ||
643 | 666 | files = list(self._iter(curdir, dir_only, deep)) |
@@ -655,7 +678,7 @@
655 | 678 | if deep and not hidden and is_dir and follow: |
|
656 | 679 | yield from self._glob_dir(path, matcher, dir_only, deep) |
|
657 | 680 | ||
658 | - | def _glob(self, curdir, this, rest): |
|
681 | + | def _glob(self, curdir: AnyStr, part: _GlobPart, rest: List[_GlobPart]) -> Iterator[Tuple[AnyStr, bool]]: |
|
659 | 682 | """ |
|
660 | 683 | Handle glob flow. |
|
661 | 684 |
@@ -669,10 +692,10 @@
669 | 692 | - `globstar` `**`. |
|
670 | 693 | """ |
|
671 | 694 | ||
672 | - | is_magic = this.is_magic |
|
673 | - | dir_only = this.dir_only |
|
674 | - | target = this.pattern |
|
675 | - | is_globstar = this.is_globstar |
|
695 | + | is_magic = part.is_magic |
|
696 | + | dir_only = part.dir_only |
|
697 | + | target = part.pattern |
|
698 | + | is_globstar = part.is_globstar |
|
676 | 699 | ||
677 | 700 | if is_magic and is_globstar: |
|
678 | 701 | # Glob star directory `**`. |
@@ -730,7 +753,7 @@
730 | 753 | else: |
|
731 | 754 | yield path, is_dir |
|
732 | 755 | ||
733 | - | def _get_starting_paths(self, curdir, dir_only): |
|
756 | + | def _get_starting_paths(self, curdir: AnyStr, dir_only: bool) -> List[Tuple[AnyStr, bool]]: |
|
734 | 757 | """ |
|
735 | 758 | Get the starting location. |
|
736 | 759 |
@@ -751,7 +774,7 @@
751 | 774 | results = [(curdir, True)] |
|
752 | 775 | return results |
|
753 | 776 | ||
754 | - | def is_unique(self, path): |
|
777 | + | def is_unique(self, path: AnyStr) -> bool: |
|
755 | 778 | """Test if path is unique.""" |
|
756 | 779 | ||
757 | 780 | if self.nounique: |
@@ -763,20 +786,20 @@
763 | 786 | unique = True |
|
764 | 787 | return unique |
|
765 | 788 | ||
766 | - | def _pathlib_norm(self, path): |
|
789 | + | def _pathlib_norm(self, path: AnyStr) -> AnyStr: |
|
767 | 790 | """Normalize path as `pathlib` does.""" |
|
768 | 791 | ||
769 | 792 | path = self.re_pathlib_norm.sub(self.empty, path) |
|
770 | 793 | return path[:-1] if len(path) > 1 and path[-1:] in self.seps else path |
|
771 | 794 | ||
772 | - | def format_path(self, path, is_dir, dir_only): |
|
795 | + | def format_path(self, path: AnyStr, is_dir: bool, dir_only: bool) -> Iterator[AnyStr]: |
|
773 | 796 | """Format path.""" |
|
774 | 797 | ||
775 | 798 | path = os.path.join(path, self.empty) if dir_only or (self.mark and is_dir) else path |
|
776 | 799 | if self.is_unique(self._pathlib_norm(path) if self.pathlib else path): |
|
777 | 800 | yield path |
|
778 | 801 | ||
779 | - | def glob(self): |
|
802 | + | def glob(self) -> Iterator[AnyStr]: |
|
780 | 803 | """Starts off the glob iterator.""" |
|
781 | 804 | ||
782 | 805 | curdir = self.current |
@@ -829,26 +852,56 @@
829 | 852 | yield from self.format_path(match, is_dir, dir_only) |
|
830 | 853 | ||
831 | 854 | ||
832 | - | def iglob(patterns, *, flags=0, root_dir=None, dir_fd=None, limit=_wcparse.PATTERN_LIMIT): |
|
855 | + | def iglob( |
|
856 | + | patterns: Union[str, bytes, Sequence[AnyStr]], |
|
857 | + | *, |
|
858 | + | flags: int = 0, |
|
859 | + | root_dir: Optional[Union[AnyStr, 'os.PathLike[AnyStr]']] = None, |
|
860 | + | dir_fd: Optional[int] = None, |
|
861 | + | limit: int = _wcparse.PATTERN_LIMIT |
|
862 | + | ) -> Iterator[AnyStr]: |
|
833 | 863 | """Glob.""" |
|
834 | 864 | ||
835 | - | yield from Glob(util.to_tuple(patterns), flags, root_dir, dir_fd, limit).glob() |
|
865 | + | if not isinstance(patterns, (str, bytes)) and not patterns: |
|
866 | + | return |
|
867 | + | ||
868 | + | yield from Glob(patterns, flags, root_dir, dir_fd, limit).glob() |
|
836 | 869 | ||
837 | 870 | ||
838 | - | def glob(patterns, *, flags=0, root_dir=None, dir_fd=None, limit=_wcparse.PATTERN_LIMIT): |
|
871 | + | def glob( |
|
872 | + | patterns: Union[str, bytes, Sequence[AnyStr]], |
|
873 | + | *, |
|
874 | + | flags: int = 0, |
|
875 | + | root_dir: Optional[Union[AnyStr, 'os.PathLike[AnyStr]']] = None, |
|
876 | + | dir_fd: Optional[int] = None, |
|
877 | + | limit: int = _wcparse.PATTERN_LIMIT |
|
878 | + | ) -> List[AnyStr]: |
|
839 | 879 | """Glob.""" |
|
840 | 880 | ||
841 | 881 | return list(iglob(patterns, flags=flags, root_dir=root_dir, dir_fd=dir_fd, limit=limit)) |
|
842 | 882 | ||
843 | 883 | ||
844 | - | def translate(patterns, *, flags=0, limit=_wcparse.PATTERN_LIMIT): |
|
884 | + | def translate( |
|
885 | + | patterns: Union[str, bytes, Sequence[AnyStr]], |
|
886 | + | *, |
|
887 | + | flags: int = 0, |
|
888 | + | limit: int = _wcparse.PATTERN_LIMIT |
|
889 | + | ) -> Tuple[List[AnyStr], List[AnyStr]]: |
|
845 | 890 | """Translate glob pattern.""" |
|
846 | 891 | ||
847 | 892 | flags = _flag_transform(flags) |
|
848 | - | return _wcparse.translate(patterns, flags, limit) |
|
849 | - | ||
850 | - | ||
851 | - | def globmatch(filename, patterns, *, flags=0, root_dir=None, dir_fd=None, limit=_wcparse.PATTERN_LIMIT): |
|
893 | + | return _wcparse.translate(_wcparse.to_str_sequence(patterns), flags, limit) |
|
894 | + | ||
895 | + | ||
896 | + | def globmatch( |
|
897 | + | filename: Union[AnyStr, 'os.PathLike[AnyStr]'], |
|
898 | + | patterns: Union[str, bytes, Sequence[AnyStr]], |
|
899 | + | *, |
|
900 | + | flags: int = 0, |
|
901 | + | root_dir: Optional[Union[AnyStr, 'os.PathLike[AnyStr]']] = None, |
|
902 | + | dir_fd: Optional[int] = None, |
|
903 | + | limit: int = _wcparse.PATTERN_LIMIT |
|
904 | + | ) -> bool: |
|
852 | 905 | """ |
|
853 | 906 | Check if filename matches pattern. |
|
854 | 907 |
@@ -856,45 +909,79 @@
856 | 909 | but if `case_sensitive` is set, respect that instead. |
|
857 | 910 | """ |
|
858 | 911 | ||
859 | - | if root_dir is not None: |
|
860 | - | root_dir = os.fspath(root_dir) |
|
912 | + | pats = _wcparse.to_str_sequence(patterns) |
|
861 | 913 | ||
914 | + | # Shortcut out if we have no patterns |
|
915 | + | if not pats: |
|
916 | + | return False |
|
917 | + | ||
918 | + | ptype = type(pats[0]) |
|
919 | + | rdir = os.fspath(root_dir) if root_dir is not None else root_dir |
|
862 | 920 | flags = _flag_transform(flags) |
|
863 | - | filename = os.fspath(filename) |
|
864 | - | return _wcparse.compile(patterns, flags, limit).match(filename, root_dir, dir_fd) |
|
921 | + | fname = os.fspath(filename) |
|
922 | + | ||
923 | + | # Ensure types are not mismatched |
|
924 | + | if (rdir is not None and not isinstance(rdir, ptype)): |
|
925 | + | raise TypeError( |
|
926 | + | "Pattern type of '{}' does not match the resolved type of '{}' of the root dir".format(ptype, type(rdir)) |
|
927 | + | ) |
|
928 | + | ||
929 | + | return bool(_wcparse.compile(pats, flags, limit).match(fname, rdir, dir_fd)) |
|
930 | + | ||
931 | + | ||
932 | + | def globfilter( |
|
933 | + | filenames: Iterable[Union[AnyStr, 'os.PathLike[AnyStr]']], |
|
934 | + | patterns: Union[str, bytes, Sequence[AnyStr]], |
|
935 | + | *, |
|
936 | + | flags: int = 0, |
|
937 | + | root_dir: Optional[Union[AnyStr, 'os.PathLike[AnyStr]']] = None, |
|
938 | + | dir_fd: Optional[int] = None, |
|
939 | + | limit: int = _wcparse.PATTERN_LIMIT |
|
940 | + | ) -> List[Union[AnyStr, 'os.PathLike[AnyStr]']]: |
|
941 | + | """Filter names using pattern.""" |
|
865 | 942 | ||
943 | + | pats = _wcparse.to_str_sequence(patterns) |
|
866 | 944 | ||
867 | - | def globfilter(filenames, patterns, *, flags=0, root_dir=None, dir_fd=None, limit=_wcparse.PATTERN_LIMIT): |
|
868 | - | """Filter names using pattern.""" |
|
945 | + | # Shortcut out if we have no patterns |
|
946 | + | if not pats: |
|
947 | + | return [] |
|
948 | + | ||
949 | + | ptype = type(pats[0]) |
|
950 | + | rdir = os.fspath(root_dir) if root_dir is not None else root_dir |
|
869 | 951 | ||
870 | - | if root_dir is not None: |
|
871 | - | root_dir = os.fspath(root_dir) |
|
952 | + | # Ensure types are not mismatched |
|
953 | + | if (rdir is not None and not isinstance(rdir, ptype)): |
|
954 | + | raise TypeError( |
|
955 | + | "Pattern type of '{}' does not match the resolved type of '{}' of the root dir".format(ptype, type(rdir)) |
|
956 | + | ) |
|
872 | 957 | ||
873 | - | matches = [] |
|
958 | + | matches = [] # type: List[Union[AnyStr, 'os.PathLike[AnyStr]']] |
|
874 | 959 | flags = _flag_transform(flags) |
|
875 | - | obj = _wcparse.compile(patterns, flags, limit) |
|
960 | + | obj = _wcparse.compile(pats, flags, limit) |
|
876 | 961 | ||
877 | 962 | for filename in filenames: |
|
878 | 963 | temp = os.fspath(filename) |
|
879 | - | if obj.match(temp, root_dir, dir_fd): |
|
964 | + | if obj.match(temp, rdir, dir_fd): |
|
880 | 965 | matches.append(filename) |
|
881 | 966 | return matches |
|
882 | 967 | ||
883 | 968 | ||
884 | 969 | @util.deprecated("This function will be removed in 9.0.") |
|
885 | - | def raw_escape(pattern, unix=None, raw_chars=True): |
|
970 | + | def raw_escape(pattern: AnyStr, unix: Optional[bool] = None, raw_chars: bool = True) -> AnyStr: |
|
886 | 971 | """Apply raw character transform before applying escape.""" |
|
887 | 972 | ||
888 | - | return _wcparse.escape(util.norm_pattern(pattern, False, raw_chars, True), unix=unix, pathname=True, raw=True) |
|
973 | + | return _wcparse.escape( |
|
974 | + | util.norm_pattern(pattern, False, raw_chars, True), unix=unix, pathname=True, raw=True |
|
975 | + | ) |
|
889 | 976 | ||
890 | 977 | ||
891 | - | def escape(pattern, unix=None): |
|
978 | + | def escape(pattern: AnyStr, unix: Optional[bool] = None) -> AnyStr: |
|
892 | 979 | """Escape.""" |
|
893 | 980 | ||
894 | 981 | return _wcparse.escape(pattern, unix=unix) |
|
895 | 982 | ||
896 | 983 | ||
897 | - | def is_magic(pattern, *, flags=0): |
|
984 | + | def is_magic(pattern: AnyStr, *, flags: int = 0) -> bool: |
|
898 | 985 | """Check if the pattern is likely to be magic.""" |
|
899 | 986 | ||
900 | 987 | flags = _flag_transform(flags) |
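For orientation, a minimal usage sketch of the typed `glob` API whose signatures appear above. It is illustrative only and not part of the diff; the patterns and file names are made up, and the flags used (`GLOBSTAR`, `NEGATE`) are assumed to be exported by `wcmatch.glob` as they are referenced in the code above.

from wcmatch import glob

# With str patterns the results are str; with bytes patterns they are bytes (AnyStr).
# NEGATE lets a '!' pattern exclude matches of the positive patterns.
matches = glob.glob(['**/*.py', '!**/__meta__.py'], flags=glob.GLOBSTAR | glob.NEGATE)

# globmatch() tests a single path against one or more patterns.
print(glob.globmatch('wcmatch/glob.py', '**/*.py', flags=glob.GLOBSTAR))  # True

# globfilter() keeps only the names that match.
print(glob.globfilter(['setup.py', 'README.md'], '*.py'))  # ['setup.py']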
@@ -80,7 +80,11 @@
80 | 80 | ||
81 | 81 | """ |
|
82 | 82 | ||
83 | - | def __new__(cls, major, minor, micro, release="final", pre=0, post=0, dev=0): |
|
83 | + | def __new__( |
|
84 | + | cls, |
|
85 | + | major: int, minor: int, micro: int, release: str = "final", |
|
86 | + | pre: int = 0, post: int = 0, dev: int = 0 |
|
87 | + | ) -> "Version": |
|
84 | 88 | """Validate version info.""" |
|
85 | 89 | ||
86 | 90 | # Ensure all parts are positive integers. |
@@ -116,27 +120,27 @@
116 | 120 | ||
117 | 121 | return super(Version, cls).__new__(cls, major, minor, micro, release, pre, post, dev) |
|
118 | 122 | ||
119 | - | def _is_pre(self): |
|
123 | + | def _is_pre(self) -> bool: |
|
120 | 124 | """Is prerelease.""" |
|
121 | 125 | ||
122 | - | return self.pre > 0 |
|
126 | + | return bool(self.pre > 0) |
|
123 | 127 | ||
124 | - | def _is_dev(self): |
|
128 | + | def _is_dev(self) -> bool: |
|
125 | 129 | """Is development.""" |
|
126 | 130 | ||
127 | 131 | return bool(self.release < "alpha") |
|
128 | 132 | ||
129 | - | def _is_post(self): |
|
133 | + | def _is_post(self) -> bool: |
|
130 | 134 | """Is post.""" |
|
131 | 135 | ||
132 | - | return self.post > 0 |
|
136 | + | return bool(self.post > 0) |
|
133 | 137 | ||
134 | - | def _get_dev_status(self): # pragma: no cover |
|
138 | + | def _get_dev_status(self) -> str: # pragma: no cover |
|
135 | 139 | """Get development status string.""" |
|
136 | 140 | ||
137 | 141 | return DEV_STATUS[self.release] |
|
138 | 142 | ||
139 | - | def _get_canonical(self): |
|
143 | + | def _get_canonical(self) -> str: |
|
140 | 144 | """Get the canonical output string.""" |
|
141 | 145 | ||
142 | 146 | # Assemble major, minor, micro version and append `pre`, `post`, or `dev` if needed.. |
@@ -154,7 +158,7 @@
154 | 158 | return ver |
|
155 | 159 | ||
156 | 160 | ||
157 | - | def parse_version(ver): |
|
161 | + | def parse_version(ver: str) -> Version: |
|
158 | 162 | """Parse version into a comparable Version tuple.""" |
|
159 | 163 | ||
160 | 164 | m = RE_VER.match(ver) |
@@ -189,5 +193,5 @@
189 | 193 | return Version(major, minor, micro, release, pre, post, dev) |
|
190 | 194 | ||
191 | 195 | ||
192 | - | __version_info__ = Version(8, 2, 0, "final") |
|
196 | + | __version_info__ = Version(8, 3, 0, "final") |
|
193 | 197 | __version__ = __version_info__._get_canonical() |
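A small, hedged sketch of the version helpers typed above. It assumes `Version` remains an ordinary named tuple, which the `super().__new__(cls, major, minor, micro, ...)` call above suggests.

from wcmatch.__meta__ import Version, parse_version, __version__

ver = parse_version(__version__)   # e.g. Version(major=8, minor=3, micro=0, release='final', ...)
print(ver._get_canonical())        # canonical string form, e.g. '8.3.0'
print(ver >= (8, 3, 0))            # plain tuple comparison works for a named tuple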
@@ -23,7 +23,10 @@
23 | 23 | import os |
|
24 | 24 | import re |
|
25 | 25 | from . import _wcparse |
|
26 | + | from . import _wcmatch |
|
26 | 27 | from . import util |
|
28 | + | from typing import Optional, Any, Iterator, List, Generic, AnyStr |
|
29 | + | ||
27 | 30 | ||
28 | 31 | __all__ = ( |
|
29 | 32 | "CASE", "IGNORECASE", "RAWCHARS", "FILEPATHNAME", "DIRPATHNAME", "PATHNAME", |
@@ -78,104 +81,62 @@
78 | 81 | ) |
|
79 | 82 | ||
80 | 83 | ||
81 | - | class _Mixin: # pragma: no cover |
|
82 | - | """ |
|
83 | - | DO NOT USE: Provide temporary methods to allow temporary, backwards compatibility for Rummage. |
|
84 | - | ||
85 | - | This is only a temporary solution to transition Rummage to a new way of overriding `glob` |
|
86 | - | style patterns with regular expressions (which is a niche need and not publicly supported). |
|
87 | - | It is advised to stick to the public, documented API. Anything else you use, you use at your |
|
88 | - | own risk. |
|
89 | - | ||
90 | - | Wildcard Match was originally written inside Rummage and moved out at a later point to be its |
|
91 | - | own library. The regular expression override of file patterns was something that didn't make |
|
92 | - | much sense in Wildcard match as a standalone project. The old way Rummage used to override the |
|
93 | - | file/folder checks was hacked in and was messy as it was based on internal knowledge of how |
|
94 | - | `WcMatch` worked. |
|
95 | - | ||
96 | - | NEW WAY: |
|
97 | - | We've provided a cleaner easier way to do this moving forward, and if we ever made the knowledge |
|
98 | - | of this public, this is the way we'd do it. If either `file_check` or `folder_exclude_check` is |
|
99 | - | initialized prior to us compiling the checks, we will skip compilation. This would happen in |
|
100 | - | `on_init`. Un-compiled patterns are stored in `pattern_file` and `pattern_folder_exclude`. |
|
101 | - | ||
102 | - | OLD WAY: |
|
103 | - | The reason we need these functions below is because the old way looked to see if `file_pattern` |
|
104 | - | or `exclude_pattern` already had a compiled object, and then would replace them with compiled |
|
105 | - | objects using the pattern that was already contained within. Rummage avoided recompiling |
|
106 | - | objects that were already compiled, but this was messy. |
|
107 | - | """ |
|
108 | - | ||
109 | - | @property |
|
110 | - | def file_pattern(self): |
|
111 | - | """DO NOT USE: only provided for Rummage backwards compatibility, and will be remove in the future.""" |
|
112 | - | ||
113 | - | return _wcparse.WcRegexp(tuple()) if not self.pattern_file else self.pattern_file |
|
114 | - | ||
115 | - | @property |
|
116 | - | def exclude_pattern(self): |
|
117 | - | """DO NOT USE: only provided for Rummage backwards compatibility, and will be remove in the future.""" |
|
118 | - | ||
119 | - | return _wcparse.WcRegexp(tuple()) if not self.pattern_folder_exclude else self.pattern_folder_exclude |
|
120 | - | ||
121 | - | @file_pattern.setter |
|
122 | - | def file_pattern(self, value): |
|
123 | - | """DO NOT USE: only provided for Rummage backwards compatibility, and will be remove in the future.""" |
|
124 | - | ||
125 | - | self.file_check = value |
|
126 | - | ||
127 | - | @exclude_pattern.setter |
|
128 | - | def exclude_pattern(self, value): |
|
129 | - | """DO NOT USE: only provided for Rummage backwards compatibility, and will be remove in the future.""" |
|
130 | - | ||
131 | - | self.folder_exclude_check = value |
|
132 | - | ||
133 | - | ||
134 | - | class WcMatch(_Mixin): |
|
84 | + | class WcMatch(Generic[AnyStr]): |
|
135 | 85 | """Finds files by wildcard.""" |
|
136 | 86 | ||
137 | - | def __init__(self, root_dir, file_pattern=None, exclude_pattern=None, flags=0, limit=_wcparse.PATHNAME, **kwargs): |
|
87 | + | def __init__( |
|
88 | + | self, |
|
89 | + | root_dir: AnyStr, |
|
90 | + | file_pattern: Optional[AnyStr] = None, |
|
91 | + | exclude_pattern: Optional[AnyStr] = None, |
|
92 | + | flags: int = 0, |
|
93 | + | limit: int = _wcparse.PATHNAME, |
|
94 | + | **kwargs: Any |
|
95 | + | ): |
|
138 | 96 | """Initialize the directory walker object.""" |
|
139 | 97 | ||
140 | 98 | self.is_bytes = isinstance(root_dir, bytes) |
|
99 | + | self._directory = self._norm_slash(root_dir) # type: AnyStr |
|
141 | 100 | self._abort = False |
|
142 | 101 | self._skipped = 0 |
|
143 | 102 | self._parse_flags(flags) |
|
144 | - | self._directory = self._norm_slash(root_dir) |
|
145 | - | self._sep = os.fsencode(os.sep) if self.is_bytes else os.sep |
|
146 | - | self._root_dir = self._add_sep(self._get_cwd(), True) |
|
103 | + | self._sep = os.fsencode(os.sep) if isinstance(root_dir, bytes) else os.sep # type: AnyStr |
|
104 | + | self._root_dir = self._add_sep(self._get_cwd(), True) # type: AnyStr |
|
147 | 105 | self.limit = limit |
|
148 | - | self.pattern_file = file_pattern if file_pattern else self._directory[0:0] |
|
149 | - | self.pattern_folder_exclude = exclude_pattern if exclude_pattern else self._directory[0:0] |
|
150 | - | self.file_check = None |
|
151 | - | self.folder_exclude_check = None |
|
106 | + | empty = os.fsencode('') if isinstance(root_dir, bytes) else '' |
|
107 | + | self.pattern_file = file_pattern if file_pattern is not None else empty # type: AnyStr |
|
108 | + | self.pattern_folder_exclude = exclude_pattern if exclude_pattern is not None else empty # type: AnyStr |
|
109 | + | self.file_check = None # type: Optional[_wcmatch.WcRegexp[AnyStr]] |
|
110 | + | self.folder_exclude_check = None # type: Optional[_wcmatch.WcRegexp[AnyStr]] |
|
152 | 111 | self.on_init(**kwargs) |
|
153 | 112 | self._compile(self.pattern_file, self.pattern_folder_exclude) |
|
154 | 113 | ||
155 | - | def _norm_slash(self, name): |
|
114 | + | def _norm_slash(self, name: AnyStr) -> AnyStr: |
|
156 | 115 | """Normalize path slashes.""" |
|
157 | 116 | ||
158 | - | if self.is_bytes: |
|
159 | - | return name.replace(b'/', b"\\") if not util.is_case_sensitive() else name |
|
117 | + | if util.is_case_sensitive(): |
|
118 | + | return name |
|
119 | + | elif isinstance(name, bytes): |
|
120 | + | return name.replace(b'/', b"\\") |
|
160 | 121 | else: |
|
161 | - | return name.replace('/', "\\") if not util.is_case_sensitive() else name |
|
122 | + | return name.replace('/', "\\") |
|
162 | 123 | ||
163 | - | def _add_sep(self, path, check=False): |
|
124 | + | def _add_sep(self, path: AnyStr, check: bool = False) -> AnyStr: |
|
164 | 125 | """Add separator.""" |
|
165 | 126 | ||
166 | 127 | return (path + self._sep) if not check or not path.endswith(self._sep) else path |
|
167 | 128 | ||
168 | - | def _get_cwd(self): |
|
129 | + | def _get_cwd(self) -> AnyStr: |
|
169 | 130 | """Get current working directory.""" |
|
170 | 131 | ||
171 | 132 | if self._directory: |
|
172 | 133 | return self._directory |
|
173 | - | elif self.is_bytes: |
|
134 | + | elif isinstance(self._directory, bytes): |
|
174 | 135 | return bytes(os.curdir, 'ASCII') |
|
175 | 136 | else: |
|
176 | 137 | return os.curdir |
|
177 | 138 | ||
178 | - | def _parse_flags(self, flags): |
|
139 | + | def _parse_flags(self, flags: int) -> None: |
|
179 | 140 | """Parse flags.""" |
|
180 | 141 | ||
181 | 142 | self.flags = flags & FLAG_MASK |
@@ -190,7 +151,7 @@
190 | 151 | self.flags |= _FORCEWIN |
|
191 | 152 | self.flags = self.flags & (_wcparse.FLAG_MASK ^ MATCHBASE) |
|
192 | 153 | ||
193 | - | def _compile_wildcard(self, pattern, pathname=False): |
|
154 | + | def _compile_wildcard(self, pattern: AnyStr, pathname: bool = False) -> Optional[_wcmatch.WcRegexp[AnyStr]]: |
|
194 | 155 | """Compile or format the wildcard inclusion/exclusion pattern.""" |
|
195 | 156 | ||
196 | 157 | flags = self.flags |
@@ -199,14 +160,14 @@
199 | 160 | if self.matchbase: |
|
200 | 161 | flags |= MATCHBASE |
|
201 | 162 | ||
202 | - | return _wcparse.compile(pattern, flags, self.limit) if pattern else None |
|
163 | + | return _wcparse.compile([pattern], flags, self.limit) if pattern else None |
|
203 | 164 | ||
204 | - | def _compile(self, file_pattern, folder_exclude_pattern): |
|
165 | + | def _compile(self, file_pattern: AnyStr, folder_exclude_pattern: AnyStr) -> None: |
|
205 | 166 | """Compile patterns.""" |
|
206 | 167 | ||
207 | 168 | if self.file_check is None: |
|
208 | 169 | if not file_pattern: |
|
209 | - | self.file_check = _wcparse.WcRegexp( |
|
170 | + | self.file_check = _wcmatch.WcRegexp( |
|
210 | 171 | (re.compile(br'^.*$' if isinstance(file_pattern, bytes) else r'^.*$', re.DOTALL),) |
|
211 | 172 | ) |
|
212 | 173 | else: |
@@ -214,11 +175,11 @@
214 | 175 | ||
215 | 176 | if self.folder_exclude_check is None: |
|
216 | 177 | if not folder_exclude_pattern: |
|
217 | - | self.folder_exclude_check = _wcparse.WcRegexp(tuple()) |
|
178 | + | self.folder_exclude_check = _wcmatch.WcRegexp(tuple()) |
|
218 | 179 | else: |
|
219 | 180 | self.folder_exclude_check = self._compile_wildcard(folder_exclude_pattern, self.dir_pathname) |
|
220 | 181 | ||
221 | - | def _valid_file(self, base, name): |
|
182 | + | def _valid_file(self, base: AnyStr, name: AnyStr) -> bool: |
|
222 | 183 | """Return whether a file can be searched.""" |
|
223 | 184 | ||
224 | 185 | valid = False |
@@ -229,17 +190,17 @@
229 | 190 | valid = False |
|
230 | 191 | return self.on_validate_file(base, name) if valid else valid |
|
231 | 192 | ||
232 | - | def compare_file(self, filename): |
|
193 | + | def compare_file(self, filename: AnyStr) -> bool: |
|
233 | 194 | """Compare filename.""" |
|
234 | 195 | ||
235 | - | return self.file_check.match(filename) |
|
196 | + | return self.file_check.match(filename) # type: ignore[union-attr] |
|
236 | 197 | ||
237 | - | def on_validate_file(self, base, name): |
|
198 | + | def on_validate_file(self, base: AnyStr, name: AnyStr) -> bool: |
|
238 | 199 | """Validate file override.""" |
|
239 | 200 | ||
240 | 201 | return True |
|
241 | 202 | ||
242 | - | def _valid_folder(self, base, name): |
|
203 | + | def _valid_folder(self, base: AnyStr, name: AnyStr) -> bool: |
|
243 | 204 | """Return whether a folder can be searched.""" |
|
244 | 205 | ||
245 | 206 | valid = True |
@@ -247,7 +208,7 @@
247 | 208 | if ( |
|
248 | 209 | not self.recursive or |
|
249 | 210 | ( |
|
250 | - | len(self.folder_exclude_check) and |
|
211 | + | self.folder_exclude_check and |
|
251 | 212 | not self.compare_directory(fullpath[self._base_len:] if self.dir_pathname else name) |
|
252 | 213 | ) |
|
253 | 214 | ): |
@@ -256,58 +217,60 @@
256 | 217 | valid = False |
|
257 | 218 | return self.on_validate_directory(base, name) if valid else valid |
|
258 | 219 | ||
259 | - | def compare_directory(self, directory): |
|
220 | + | def compare_directory(self, directory: AnyStr) -> bool: |
|
260 | 221 | """Compare folder.""" |
|
261 | 222 | ||
262 | - | return not self.folder_exclude_check.match(self._add_sep(directory) if self.dir_pathname else directory) |
|
223 | + | return not self.folder_exclude_check.match( # type: ignore[union-attr] |
|
224 | + | self._add_sep(directory) if self.dir_pathname else directory |
|
225 | + | ) |
|
263 | 226 | ||
264 | - | def on_init(self, **kwargs): |
|
227 | + | def on_init(self, **kwargs: Any) -> None: |
|
265 | 228 | """Handle custom initialization.""" |
|
266 | 229 | ||
267 | - | def on_validate_directory(self, base, name): |
|
230 | + | def on_validate_directory(self, base: AnyStr, name: AnyStr) -> bool: |
|
268 | 231 | """Validate folder override.""" |
|
269 | 232 | ||
270 | 233 | return True |
|
271 | 234 | ||
272 | - | def on_skip(self, base, name): |
|
235 | + | def on_skip(self, base: AnyStr, name: AnyStr) -> Any: |
|
273 | 236 | """On skip.""" |
|
274 | 237 | ||
275 | 238 | return None |
|
276 | 239 | ||
277 | - | def on_error(self, base, name): |
|
240 | + | def on_error(self, base: AnyStr, name: AnyStr) -> Any: |
|
278 | 241 | """On error.""" |
|
279 | 242 | ||
280 | 243 | return None |
|
281 | 244 | ||
282 | - | def on_match(self, base, name): |
|
245 | + | def on_match(self, base: AnyStr, name: AnyStr) -> Any: |
|
283 | 246 | """On match.""" |
|
284 | 247 | ||
285 | 248 | return os.path.join(base, name) |
|
286 | 249 | ||
287 | - | def on_reset(self): |
|
250 | + | def on_reset(self) -> None: |
|
288 | 251 | """On reset.""" |
|
289 | 252 | ||
290 | - | def get_skipped(self): |
|
253 | + | def get_skipped(self) -> int: |
|
291 | 254 | """Get number of skipped files.""" |
|
292 | 255 | ||
293 | 256 | return self._skipped |
|
294 | 257 | ||
295 | - | def kill(self): |
|
258 | + | def kill(self) -> None: |
|
296 | 259 | """Abort process.""" |
|
297 | 260 | ||
298 | 261 | self._abort = True |
|
299 | 262 | ||
300 | - | def is_aborted(self): |
|
263 | + | def is_aborted(self) -> bool: |
|
301 | 264 | """Check if process has been aborted.""" |
|
302 | 265 | ||
303 | 266 | return self._abort |
|
304 | 267 | ||
305 | - | def reset(self): |
|
268 | + | def reset(self) -> None: |
|
306 | 269 | """Revive class from a killed state.""" |
|
307 | 270 | ||
308 | 271 | self._abort = False |
|
309 | 272 | ||
310 | - | def _walk(self): |
|
273 | + | def _walk(self) -> Iterator[Any]: |
|
311 | 274 | """Start search for valid files.""" |
|
312 | 275 | ||
313 | 276 | self._base_len = len(self._root_dir) |
@@ -353,12 +316,12 @@
353 | 316 | if self.is_aborted(): |
|
354 | 317 | break |
|
355 | 318 | ||
356 | - | def match(self): |
|
319 | + | def match(self) -> List[Any]: |
|
357 | 320 | """Run the directory walker.""" |
|
358 | 321 | ||
359 | 322 | return list(self.imatch()) |
|
360 | 323 | ||
361 | - | def imatch(self): |
|
324 | + | def imatch(self) -> Iterator[Any]: |
|
362 | 325 | """Run the directory walker as iterator.""" |
|
363 | 326 | ||
364 | 327 | self.on_reset() |
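To show how the typed `WcMatch` class above is driven, a brief sketch. It is hedged: the directory, patterns, and the `RECURSIVE` flag are illustrative assumptions, not taken verbatim from this diff.

import os
from wcmatch import wcmatch

# Find Python files under the current directory, skipping any 'build' folders.
walker = wcmatch.WcMatch('.', '*.py', 'build', flags=wcmatch.RECURSIVE)
for found in walker.imatch():
    print(found)

# Hooks such as on_match() can be overridden to change what imatch() yields.
class FileSizes(wcmatch.WcMatch):
    def on_match(self, base, name):
        return os.path.getsize(os.path.join(base, name))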
@@ -4,6 +4,7 @@
4 | 4 | from . import glob |
|
5 | 5 | from . import _wcparse |
|
6 | 6 | from . import util |
|
7 | + | from typing import Iterable, Any, Union, Sequence, cast |
|
7 | 8 | ||
8 | 9 | __all__ = ( |
|
9 | 10 | "CASE", "IGNORECASE", "RAWCHARS", "DOTGLOB", "DOTMATCH", |
@@ -68,67 +69,19 @@
68 | 69 | ) |
|
69 | 70 | ||
70 | 71 | ||
71 | - | class Path(pathlib.Path): |
|
72 | - | """Special pathlike object (which accesses the filesystem) that uses our own glob methods.""" |
|
73 | - | ||
74 | - | __slots__ = () |
|
75 | - | ||
76 | - | def __new__(cls, *args, **kwargs): |
|
77 | - | """New.""" |
|
78 | - | ||
79 | - | if cls is Path: |
|
80 | - | cls = WindowsPath if os.name == 'nt' else PosixPath |
|
81 | - | if util.PY310: |
|
82 | - | self = cls._from_parts(args) |
|
83 | - | else: |
|
84 | - | self = cls._from_parts(args, init=False) |
|
85 | - | if not self._flavour.is_supported: |
|
86 | - | raise NotImplementedError("Cannot instantiate {!r} on your system".format(cls.__name__)) |
|
87 | - | if not util.PY310: |
|
88 | - | self._init() |
|
89 | - | return self |
|
90 | - | ||
91 | - | def glob(self, patterns, *, flags=0, limit=_wcparse.PATTERN_LIMIT): |
|
92 | - | """ |
|
93 | - | Search the file system. |
|
94 | - | ||
95 | - | `GLOBSTAR` is enabled by default in order match the default behavior of `pathlib`. |
|
96 | - | ||
97 | - | """ |
|
98 | - | ||
99 | - | if self.is_dir(): |
|
100 | - | scandotdir = flags & SCANDOTDIR |
|
101 | - | flags = self._translate_flags(flags | _NOABSOLUTE) | ((_PATHLIB | SCANDOTDIR) if scandotdir else _PATHLIB) |
|
102 | - | for filename in glob.iglob(patterns, flags=flags, root_dir=str(self), limit=limit): |
|
103 | - | yield self.joinpath(filename) |
|
104 | - | ||
105 | - | def rglob(self, patterns, *, flags=0, limit=_wcparse.PATTERN_LIMIT): |
|
106 | - | """ |
|
107 | - | Recursive glob. |
|
108 | - | ||
109 | - | This uses the same recursive logic that the default `pathlib` object uses. |
|
110 | - | Folders and files are essentially matched from right to left. |
|
111 | - | ||
112 | - | `GLOBSTAR` is enabled by default in order match the default behavior of `pathlib`. |
|
113 | - | ||
114 | - | """ |
|
115 | - | ||
116 | - | yield from self.glob(patterns, flags=flags | _EXTMATCHBASE, limit=limit) |
|
117 | - | ||
118 | - | ||
119 | 72 | class PurePath(pathlib.PurePath): |
|
120 | 73 | """Special pure pathlike object that uses our own glob methods.""" |
|
121 | 74 | ||
122 | 75 | __slots__ = () |
|
123 | 76 | ||
124 | - | def __new__(cls, *args): |
|
77 | + | def __new__(cls, *args: str) -> 'PurePath': |
|
125 | 78 | """New.""" |
|
126 | 79 | ||
127 | 80 | if cls is PurePath: |
|
128 | 81 | cls = PureWindowsPath if os.name == 'nt' else PurePosixPath |
|
129 | - | return cls._from_parts(args) |
|
82 | + | return cast('PurePath', cls._from_parts(args)) # type: ignore[attr-defined] |
|
130 | 83 | ||
131 | - | def _translate_flags(self, flags): |
|
84 | + | def _translate_flags(self, flags: int) -> int: |
|
132 | 85 | """Translate flags for the current `pathlib` object.""" |
|
133 | 86 | ||
134 | 87 | flags = (flags & FLAG_MASK) | _PATHNAME |
@@ -144,7 +97,7 @@
144 | 97 | flags |= _FORCEUNIX |
|
145 | 98 | return flags |
|
146 | 99 | ||
147 | - | def _translate_path(self): |
|
100 | + | def _translate_path(self) -> str: |
|
148 | 101 | """Translate the object to a path string and ensure trailing slash for non-pure paths that are directories.""" |
|
149 | 102 | ||
150 | 103 | sep = '' |
@@ -154,7 +107,13 @@
154 | 107 | ||
155 | 108 | return name + sep |
|
156 | 109 | ||
157 | - | def match(self, patterns, *, flags=0, limit=_wcparse.PATTERN_LIMIT): |
|
110 | + | def match( |
|
111 | + | self, |
|
112 | + | patterns: Union[str, Sequence[str]], |
|
113 | + | *, |
|
114 | + | flags: int = 0, |
|
115 | + | limit: int = _wcparse.PATTERN_LIMIT |
|
116 | + | ) -> bool: |
|
158 | 117 | """ |
|
159 | 118 | Match patterns using `globmatch`, but also using the same right to left logic that the default `pathlib` uses. |
|
160 | 119 |
@@ -167,7 +126,13 @@
167 | 126 | ||
168 | 127 | return self.globmatch(patterns, flags=flags | _RTL, limit=limit) |
|
169 | 128 | ||
170 | - | def globmatch(self, patterns, *, flags=0, limit=_wcparse.PATTERN_LIMIT): |
|
129 | + | def globmatch( |
|
130 | + | self, |
|
131 | + | patterns: Union[str, Sequence[str]], |
|
132 | + | *, |
|
133 | + | flags: int = 0, |
|
134 | + | limit: int = _wcparse.PATTERN_LIMIT |
|
135 | + | ) -> bool: |
|
171 | 136 | """ |
|
172 | 137 | Match patterns using `globmatch`, but without the right to left logic that the default `pathlib` uses. |
|
173 | 138 |
@@ -183,17 +148,79 @@
183 | 148 | ) |
|
184 | 149 | ||
185 | 150 | ||
151 | + | class Path(pathlib.Path): |
|
152 | + | """Special pathlike object (which accesses the filesystem) that uses our own glob methods.""" |
|
153 | + | ||
154 | + | __slots__ = () |
|
155 | + | ||
156 | + | def __new__(cls, *args: str, **kwargs: Any) -> 'Path': |
|
157 | + | """New.""" |
|
158 | + | ||
159 | + | if cls is Path: |
|
160 | + | cls = WindowsPath if os.name == 'nt' else PosixPath |
|
161 | + | if util.PY310: |
|
162 | + | self = cls._from_parts(args) # type: ignore[attr-defined] |
|
163 | + | else: |
|
164 | + | self = cls._from_parts(args, init=False) # type: ignore[attr-defined] |
|
165 | + | if not self._flavour.is_supported: |
|
166 | + | raise NotImplementedError("Cannot instantiate {!r} on your system".format(cls.__name__)) |
|
167 | + | if not util.PY310: |
|
168 | + | self._init() |
|
169 | + | return cast('Path', self) |
|
170 | + | ||
171 | + | def glob( # type: ignore[override] |
|
172 | + | self, |
|
173 | + | patterns: Union[str, Sequence[str]], |
|
174 | + | *, |
|
175 | + | flags: int = 0, |
|
176 | + | limit: int = _wcparse.PATTERN_LIMIT |
|
177 | + | ) -> Iterable['Path']: |
|
178 | + | """ |
|
179 | + | Search the file system. |
|
180 | + | ||
181 | + | `GLOBSTAR` is enabled by default in order match the default behavior of `pathlib`. |
|
182 | + | ||
183 | + | """ |
|
184 | + | ||
185 | + | if self.is_dir(): |
|
186 | + | scandotdir = flags & SCANDOTDIR |
|
187 | + | flags = self._translate_flags( # type: ignore[attr-defined] |
|
188 | + | flags | _NOABSOLUTE |
|
189 | + | ) | ((_PATHLIB | SCANDOTDIR) if scandotdir else _PATHLIB) |
|
190 | + | for filename in glob.iglob(patterns, flags=flags, root_dir=str(self), limit=limit): |
|
191 | + | yield self.joinpath(filename) |
|
192 | + | ||
193 | + | def rglob( # type: ignore[override] |
|
194 | + | self, |
|
195 | + | patterns: Union[str, Sequence[str]], |
|
196 | + | *, |
|
197 | + | flags: int = 0, |
|
198 | + | limit: int = _wcparse.PATTERN_LIMIT |
|
199 | + | ) -> Iterable['Path']: |
|
200 | + | """ |
|
201 | + | Recursive glob. |
|
202 | + | ||
203 | + | This uses the same recursive logic that the default `pathlib` object uses. |
|
204 | + | Folders and files are essentially matched from right to left. |
|
205 | + | ||
206 | + | `GLOBSTAR` is enabled by default in order match the default behavior of `pathlib`. |
|
207 | + | ||
208 | + | """ |
|
209 | + | ||
210 | + | yield from self.glob(patterns, flags=flags | _EXTMATCHBASE, limit=limit) |
|
211 | + | ||
212 | + | ||
186 | 213 | class PurePosixPath(PurePath): |
|
187 | 214 | """Pure Posix path.""" |
|
188 | 215 | ||
189 | - | _flavour = pathlib._posix_flavour |
|
216 | + | _flavour = pathlib._posix_flavour # type: ignore[attr-defined] |
|
190 | 217 | __slots__ = () |
|
191 | 218 | ||
192 | 219 | ||
193 | 220 | class PureWindowsPath(PurePath): |
|
194 | 221 | """Pure Windows path.""" |
|
195 | 222 | ||
196 | - | _flavour = pathlib._windows_flavour |
|
223 | + | _flavour = pathlib._windows_flavour # type: ignore[attr-defined] |
|
197 | 224 | __slots__ = () |
|
198 | 225 | ||
199 | 226 |
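Finally, a short sketch of the `pathlib` wrappers typed above. It is illustrative: the `GLOBSTAR` flag is assumed to be re-exported by `wcmatch.pathlib`, and the paths are placeholders.

from wcmatch import pathlib

# PurePath.globmatch() is a straight pattern match; match() applies right-to-left logic.
p = pathlib.PurePath('docs/src/markdown/index.md')
print(p.globmatch('**/*.md', flags=pathlib.GLOBSTAR))  # True
print(p.match('*.md'))                                 # True

# Path.glob()/Path.rglob() touch the filesystem and yield Path objects.
for f in pathlib.Path('.').rglob('*.py'):
    print(f)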
Files | Coverage |
---|---|
wcmatch | 100.00% |
Project Totals (10 files) | 100.00% |