29 | 29 | # stdlib |
30 | 30 | import ast |
31 | 31 | import re |
| 32 | +from collections import deque |
32 | 33 | from typing import List |
33 | 34 | |
34 | 35 | # 3rd party |
| 36 | +import tokenize_rt # type: ignore[import-untyped] |
35 | 37 | from domdf_python_tools.paths import PathPlus |
36 | 38 | from domdf_python_tools.stringlist import StringList |
37 | 39 | from domdf_python_tools.typing import PathLike |
38 | 40 | |
39 | 41 | # this package |
40 | 42 | from formate.config import wants_filename |
41 | 43 | |
42 | | -__all__ = ("noqa_reformat", "check_ast", "squish_stubs") |
| 44 | +__all__ = ("check_ast", "newline_after_equals", "noqa_reformat", "squish_stubs") |
43 | 45 | |
44 | 46 | |
45 | 47 | def noqa_reformat(source: str) -> str: |
@@ -246,3 +248,37 @@ def _reformat_blocks(blocks: List[List[str]]) -> StringList:
246 | 248 | output.blankline(ensure_single=True) |
247 | 249 | |
248 | 250 | return output |
| 251 | + |
| 252 | + |
 | 253 | +def newline_after_equals(source: str) -> str: |
 | 254 | +	""" |
 | 255 | +	Removes newlines immediately after equals signs. |
 | 256 | + |
 | 257 | +	.. versionadded:: 1.2.0 |
 | 258 | + |
 | 259 | +	:param source: The source to check. |
 | 260 | + |
 | 261 | +	:return: The reformatted source. |
 | 262 | +	""" |
 | 263 | + |
 | 264 | +	original_tokens = deque(tokenize_rt.src_to_tokens(source)) |
 | 265 | +	tokens = [] |
 | 266 | + |
 | 267 | +	while original_tokens: |
 | 268 | +		token = original_tokens.popleft() |
 | 269 | + |
 | 270 | +		if token.name == "OP" and token.src == '=': |
 | 271 | +			# TODO: handle spaces around equals (e.g. in function signature) |
 | 272 | +			# Look ahead until there's a non-whitespace token. |
 | 273 | +			while True: |
 | 274 | +				next_token = original_tokens.popleft() |
 | 275 | +				if next_token.name not in {"UNIMPORTANT_WS", "NL"}: |
 | 276 | +					break |
 | 277 | + |
 | 278 | +			tokens.append(token) |
 | 279 | +			tokens.append(next_token) |
 | 280 | + |
 | 281 | +		else: |
 | 282 | +			tokens.append(token) |
 | 283 | + |
 | 284 | +	return tokenize_rt.tokens_to_src(tokens) |
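
A usage sketch (assuming the function above is in scope; the input string is a hypothetical example, not taken from the commit):

    source = "foo(bar=\n    baz)\n"      # inside parentheses, the newline after '=' tokenizes as NL
    print(newline_after_equals(source))  # -> foo(bar=baz)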