Skip to content

Instantly share code, notes, and snippets.

@zhammer
Created August 18, 2019 00:01
Show Gist options
  • Save zhammer/4d8e8d3bea2f4da1ad7a842b0ed281c8 to your computer and use it in GitHub Desktop.
import os
import re
import sys
from pathlib import Path
from typing import List, Tuple
import libcst as cst
from libcst import CSTVisitorT
class UrlLibTransformer(cst.CSTTransformer):
    """Transformer that logs every import statement it visits.

    The ``leave_*`` hooks currently return the node unchanged, so running
    this transformer over a module is a no-op on the tree itself.
    """

    def visit_Import(self, node: cst.Import) -> None:
        imported_names = [alias.name.value for alias in node.names]
        print("import", imported_names)

    def leave_Import(self, node: cst.Import, updated_node: cst.Import) -> cst.Import:
        # No rewriting yet — pass the (possibly updated) node through untouched.
        return updated_node

    def visit_ImportFrom(self, node: cst.ImportFrom) -> None:
        imported_names = [alias.name.value for alias in node.names]
        print("from", node.module.value, "import", imported_names)

    def leave_ImportFrom(self, node: cst.ImportFrom, updated_node: cst.ImportFrom) -> cst.ImportFrom:
        # No rewriting yet — pass the (possibly updated) node through untouched.
        return updated_node
def lint_file(visitor: CSTVisitorT, filename: str) -> None:
    """Run *visitor* over the parsed source of *filename*.

    The file is rewritten on disk only when the visitor actually changed
    the tree, so untouched files keep their original mtime.
    """
    with open(filename, 'r') as python_file:
        python_source = python_file.read()
    source_tree = cst.parse_module(python_source)
    visited_tree = source_tree.visit(visitor)
    if visited_tree.deep_equals(source_tree):
        # Tree is unchanged — skip the write entirely.
        return
    with open(filename, 'w') as python_file:
        python_file.write(visited_tree.code)
def collect_files(base: str) -> Tuple[str, ...]:
    """Return every Python source file (.py / .pyi) reachable from *base*.

    If *base* is itself a Python file it is returned alone; if it is a
    directory, the whole tree under it is walked; anything else (missing
    path, non-Python file) yields an empty tuple.
    """
    def is_python_file(path: str) -> bool:
        # Regular file with a .py or .pyi extension.
        return os.path.isfile(path) and bool(re.search(r"\.pyi?$", path))

    if is_python_file(base):
        return (base,)
    if os.path.isdir(base):
        python_files: List[str] = []
        for root, _dirs, filenames in os.walk(base):
            # BUG FIX: the original joined root with a literal placeholder
            # ("{root}/(unknown)") and never used the walked filename, so a
            # directory scan could never find any file. Join each walked
            # filename onto its root instead.
            full_filenames = (os.path.join(root, filename) for filename in filenames)
            python_files += [path for path in full_filenames if is_python_file(path)]
        return tuple(python_files)
    return tuple()
if __name__ == '__main__':
    # Usage: python <script> <file-or-directory>
    target = sys.argv[1]
    for source_file in collect_files(target):
        lint_file(UrlLibTransformer(), source_file)
@zhammer
Copy link
Author

zhammer commented Aug 18, 2019

class BaseParser(Generic[_TokenT, _TokenTypeT, _NodeT]):
    """Parser engine.

    A Parser instance contains state pertaining to the current token
    sequence, and should not be used concurrently by different threads
    to parse separate token sequences.

    See python/tokenize.py for how to get input tokens by a string.
    """

    tokens: Iterable[_TokenT]
    lines: Sequence[str]  # used when generating parse errors
    _pgen_grammar: "Grammar[_TokenTypeT]"
    stack: List[StackNode[_TokenTypeT, _NodeT]]
    # Keep track of if parse was called. Because a parser may keep global mutable state,
    # each BaseParser instance should only be used once.
    __was_parse_called: bool

    def __init__(
        self,
        *,
        tokens: Iterable[_TokenT],
        lines: Sequence[str],
        pgen_grammar: "Grammar[_TokenTypeT]",
        start_nonterminal: str,
    ) -> None:
        self.tokens = tokens
        self.lines = lines
        # HACK: remove 'async' from the grammar's reserved-word table so the
        # tokenizer/parser treats it as an ordinary identifier. This mutates
        # the *shared* grammar object in place via __dict__, deliberately
        # bypassing any attribute protection on the Grammar class.
        # NOTE(review): this block is an excerpt quoted in a gist comment —
        # the original __init__ presumably continues past this line (e.g.
        # setting _pgen_grammar, stack, __was_parse_called); confirm against
        # the upstream libcst source before relying on it.
        pgen_grammar.__dict__['reserved_syntax_strings'].pop('async', None)

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment