chore: code style clean-up
One-off clean up by Sourcery
tserg authored Jul 12, 2023
2 parents 29853f0 + 0688c55 commit 7cb9e2c
Showing 7 changed files with 48 additions and 67 deletions.
3 changes: 1 addition & 2 deletions medusa/_cli/analyse.py
@@ -32,8 +32,7 @@ def main():
formatted_analysis = format_analysis(output_dict)

# Write to output file
output_file = args["--output"]
if output_file:
if output_file := args["--output"]:
write_analysis(formatted_analysis, output_file)

# Print analysis to console
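The CLI change above replaces a separate assignment and truthiness test with an assignment expression (PEP 572, the walrus operator). A minimal sketch of the same pattern; the args dict and file name here are placeholders, not the tool's real values:

# Sketch of the assignment-expression (walrus) pattern used above.
args = {"--output": "report.txt"}  # hypothetical stand-in for parsed CLI arguments

# Before: bind the value, then test it on a separate line.
output_file = args["--output"]
if output_file:
    print(f"writing analysis to {output_file}")

# After: bind and test in a single statement.
if output_file := args["--output"]:
    print(f"writing analysis to {output_file}")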
2 changes: 1 addition & 1 deletion medusa/analysis/analyse.py
@@ -9,7 +9,7 @@
def analyse(ast: vy_ast.Module) -> dict[BaseAnalyser, set[vy_ast.VyperNode]]:
analysis: dict[BaseAnalyser, set[vy_ast.VyperNode]] = {}

for k, v in PASSES.items():
for v in PASSES.values():
analyser = v()
analyser.analyse(ast, analysis)

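Since the loop above never reads the key, iterating over PASSES.values() removes the unused binding. A small sketch of the same idea with a stand-in registry; the class names are illustrative, not the project's real passes:

# Illustrative registry of analyser classes.
class DeadStore: ...
class UnusedParam: ...

PASSES = {"dead_store": DeadStore, "unused_param": UnusedParam}

# Before: "k" is bound on every iteration but never used.
for k, v in PASSES.items():
    analyser = v()

# After: iterate over the values, since only the class is needed.
for v in PASSES.values():
    analyser = v()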
2 changes: 1 addition & 1 deletion medusa/analysis/passes/dead_store.py
@@ -37,7 +37,7 @@ def analyse(self, ast: vy_ast.Module, analysis: dict[BaseAnalyser, set[vy_ast.Vy
if i != name_node
]

if len(used_by) == 0:
if not used_by:
temp = analysis.get(self, set())
temp.add(local_var)
analysis[self] = temp
2 changes: 1 addition & 1 deletion medusa/analysis/passes/unused_param.py
@@ -36,7 +36,7 @@ def analyse(self, ast: vy_ast.Module, analysis: dict[BaseAnalyser, set[vy_ast.Vy
for ref in fn_node.get_descendants(vy_ast.Name, {"id": arg_name})
if not isinstance(ref.get_ancestor, vy_ast.Attribute)
]
if len(arg_name_references) == 0:
if not arg_name_references:
temp = analysis.get(self, set())
temp.add(arg_node)
analysis[self] = temp
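Both passes above swap an explicit length comparison for the idiomatic emptiness check; empty sequences are falsy in Python, so the two tests are equivalent for lists. A quick sketch with placeholder data:

# Sketch of the emptiness-check idiom applied in both passes above.
used_by = []  # e.g. no remaining references to a variable

# Before: explicit length comparison.
if len(used_by) == 0:
    print("flagged")

# After: an empty list is falsy, so the truthiness test is equivalent.
if not used_by:
    print("flagged")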
93 changes: 41 additions & 52 deletions medusa/utils/docopt.py
@@ -153,8 +153,7 @@ def transform(pattern: "BranchPattern") -> "Either":
child = [c for c in children if type(c) in parents][0]
children.remove(child)
if type(child) is Either:
for c in child.children:
groups.append([c] + children)
groups.extend([c] + children for c in child.children)
elif type(child) is OneOrMore:
groups.append(child.children * 2 + children)
else:
@@ -190,11 +189,11 @@ def match(
return False, left, collected
left_ = left[:pos] + left[(pos + 1) :]
same_name = [a for a in collected if a.name == self.name]
if type(self.value) == int and len(same_name) > 0:
if type(self.value) == int and same_name:
if isinstance(same_name[0].value, int):
same_name[0].value += 1
return True, left_, collected
if type(self.value) == int and not same_name:
if type(self.value) == int:
match.value = 1
return True, left_, collected + [match]
if same_name and type(self.value) == list:
@@ -253,20 +252,24 @@ def fix_repeating_arguments(self) -> "BranchPattern":
return self

def __repr__(self) -> str:
return "%s(%s)" % (self.__class__.__name__, ", ".join(repr(a) for a in self.children))
return f'{self.__class__.__name__}({", ".join(repr(a) for a in self.children)})'

def flat(self, *types) -> Any:
if type(self) in types:
return [self]
return sum([child.flat(*types) for child in self.children], [])
return sum((child.flat(*types) for child in self.children), [])


class Argument(LeafPattern):
def single_match(self, left: List[LeafPattern]) -> TSingleMatch:
for n, pattern in enumerate(left):
if type(pattern) is Argument:
return n, Argument(self.name, pattern.value)
return None, None
return next(
(
(n, Argument(self.name, pattern.value))
for n, pattern in enumerate(left)
if type(pattern) is Argument
),
(None, None),
)


class Command(Argument):
@@ -291,12 +294,12 @@ def __init__(
argcount: int = 0,
value: Union[List[str], str, int, None] = False,
) -> None:
assert argcount in (0, 1)
assert argcount in {0, 1}
self.short, self.longer, self.argcount = short, longer, argcount
self.value = None if value is False and argcount else value

@classmethod
def parse(class_, option_description: str) -> "Option":
def parse(cls, option_description: str) -> "Option":
short, longer, argcount, value = None, None, 0, False
options, _, description = option_description.strip().partition(" ")
options = options.replace(",", " ").replace("=", " ")
@@ -310,13 +313,13 @@ def parse(class_, option_description: str) -> "Option":
if argcount:
matched = re.findall(r"\[default: (.*)\]", description, flags=re.I)
value = matched[0] if matched else None
return class_(short, longer, argcount, value)
return cls(short, longer, argcount, value)

def single_match(self, left: List[LeafPattern]) -> TSingleMatch:
for n, pattern in enumerate(left):
if self.name == pattern.name:
return n, pattern
return None, None
return next(
((n, pattern) for n, pattern in enumerate(left) if self.name == pattern.name),
(None, None),
)

@property
def name(self) -> Optional[str]:
@@ -390,10 +393,7 @@ def __init__(
source: Union[List[str], str],
error: Union[Type[DocoptExit], Type[DocoptLanguageError]] = DocoptExit,
) -> None:
if isinstance(source, list):
self += source
else:
self += source.split()
self += source if isinstance(source, list) else source.split()
self.error = error

@staticmethod
Expand All @@ -419,10 +419,7 @@ def parse_longer(
f"parse_longer got what appears to be an invalid token: {current_token}"
) # pragma: no cover
longer, maybe_eq, maybe_value = current_token.partition("=")
if maybe_eq == maybe_value == "":
value = None
else:
value = maybe_value
value = None if maybe_eq == maybe_value == "" else maybe_value
similar = [o for o in options if o.longer and longer == o.longer]
start_collision = (
len([o for o in options if o.longer and longer in o.longer and o.longer.startswith(longer)])
@@ -442,7 +439,7 @@ def parse_longer(
similar = [correct for (original, correct) in corrected]
if len(similar) > 1:
raise tokens.error(f"{longer} is not a unique prefix: {similar}?") # pragma: no cover
elif len(similar) < 1:
elif not similar:
argcount = 1 if maybe_eq == "=" else 0
o = Option(None, longer, argcount)
options.append(o)
@@ -452,11 +449,11 @@ def parse_longer(
o = Option(similar[0].short, similar[0].longer, similar[0].argcount, similar[0].value)
if o.argcount == 0:
if value is not None:
raise tokens.error("%s must not have an argument" % o.longer)
else:
if value is None:
if tokens.current() in [None, "--"]:
raise tokens.error("%s requires argument" % o.longer)
raise tokens.error(f"{o.longer} must not have an argument")
elif value is None:
if tokens.current() in [None, "--"]:
raise tokens.error(f"{o.longer} requires argument")
else:
value = tokens.move()
if tokens.error is DocoptExit:
o.value = value if value is not None else True
@@ -473,7 +470,7 @@ def parse_shorts(tokens: Tokens, options: List[Option], more_magic: bool = False
left = token.lstrip("-")
parsed: List[Pattern] = []
while left != "":
short, left = "-" + left[0], left[1:]
short, left = f"-{left[0]}", left[1:]
transformations: Dict[Union[None, str], Callable[[str], str]] = {None: lambda x: x}
if more_magic:
transformations["lowercase"] = lambda x: x.lower()
@@ -483,7 +480,7 @@ def parse_shorts(tokens: Tokens, options: List[Option], more_magic: bool = False
similar: List[Option] = []
de_abbreviated = False
for transform_name, transform in transformations.items():
transformed = list(set([transform(o.short) for o in options if o.short]))
transformed = list({transform(o.short) for o in options if o.short})
no_collisions = len(
[o for o in options if o.short and transformed.count(transform(o.short)) == 1]
) # == len(transformed)
@@ -527,17 +524,14 @@ def parse_shorts(tokens: Tokens, options: List[Option], more_magic: bool = False
if tokens.error is DocoptExit:
o = Option(short, None, 0, True)
else:
if de_abbreviated:
option_short_value = None
else:
option_short_value = transform(short)
option_short_value = None if de_abbreviated else transform(short)
o = Option(option_short_value, similar[0].longer, similar[0].argcount, similar[0].value)
value = None
current_token = tokens.current()
if o.argcount != 0:
if left == "":
if current_token is None or current_token == "--":
raise tokens.error("%s requires argument" % short)
raise tokens.error(f"{short} requires argument")
else:
value = tokens.move()
else:
@@ -570,10 +564,7 @@ def parse_expr(tokens: Tokens, options: List[Option]) -> List[Pattern]:
while tokens.current() == "|":
tokens.move()
seq_1 = parse_seq(tokens, options)
if len(seq_1) > 1:
result += [Required(*seq_1)]
else:
result += seq_1
result += [Required(*seq_1)] if len(seq_1) > 1 else seq_1
return [Either(*result)]


@@ -602,7 +593,7 @@ def parse_atom(tokens: Tokens, options: List[Option]) -> List[Pattern]:
pattern = {"(": Required, "[": NotRequired}[token]
matched_pattern = pattern(*parse_expr(tokens, options))
if tokens.move() != matching:
raise tokens.error("unmatched '%s'" % token)
raise tokens.error(f"unmatched '{token}'")
return [matched_pattern]
elif token == "options":
tokens.move()
@@ -677,8 +668,7 @@ def parse_section(name: str, source: str) -> List[str]:
pattern = re.compile(
"^([^\n]*" + name + "[^\n]*\n?(?:[ \t].*?(?:\n|$))*)", re.IGNORECASE | re.MULTILINE
)
r = [s.strip() for s in pattern.findall(source) if s.strip().lower() != name.lower()]
return r
return [s.strip() for s in pattern.findall(source) if s.strip().lower() != name.lower()]


def formal_usage(section: str) -> str:
@@ -779,8 +769,7 @@ def docopt(
"""
argv = sys.argv[1:] if argv is None else argv
maybe_frame = inspect.currentframe()
if maybe_frame:
if maybe_frame := inspect.currentframe():
parent_frame = doc_parent_frame = magic_parent_frame = maybe_frame.f_back
if not more_magic: # make sure 'magic' isn't in the calling name
while not more_magic and magic_parent_frame:
@@ -798,11 +787,11 @@
docstring = doc_parent_frame.f_locals.get("__doc__")
if not docstring:
doc_parent_frame = doc_parent_frame.f_back
if not docstring:
raise DocoptLanguageError(
"Either __doc__ must be defined in the scope of a parent or ",
"passed as the first argument.",
)
if not docstring:
raise DocoptLanguageError(
"Either __doc__ must be defined in the scope of a parent or ",
"passed as the first argument.",
)
output_value_assigned = False
if more_magic and parent_frame:
import dis
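Several docopt.py loops that returned the first match and fell through to a (None, None) default are rewritten above as next() over a generator expression with an explicit default. A minimal sketch of that pattern, using simplified stand-in tuples rather than the real Argument and Option classes:

# Sketch of the next()-with-default pattern used for single_match above.
left = [("option", "--verbose"), ("argument", "input.vy")]  # placeholder parsed patterns

def first_argument(left):
    # Before-style loop:
    #   for n, (kind, value) in enumerate(left):
    #       if kind == "argument":
    #           return n, value
    #   return None, None
    # After: next() yields the first matching item, or the default tuple.
    return next(
        ((n, value) for n, (kind, value) in enumerate(left) if kind == "argument"),
        (None, None),
    )

print(first_argument(left))  # (1, 'input.vy')
print(first_argument([]))    # (None, None)

The explicit default matters: without it, next() raises StopIteration when nothing in the list matches, whereas the original loops quietly returned (None, None).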
2 changes: 1 addition & 1 deletion setup.py
@@ -1,6 +1,6 @@
from setuptools import find_packages, setup

with open("README.md", "r") as fh:
with open("README.md") as fh:
long_description = fh.read()


11 changes: 2 additions & 9 deletions tests/utils.py
@@ -5,17 +5,10 @@


def extract_errors(analysis: dict[str, set[vy_ast.VyperNode]]) -> dict[str, int]:
ret = {}

for analyser, errors_flagged in analysis.items():
error_count = len(errors_flagged)
ret[str(analyser)] = error_count

return ret
return {str(analyser): len(errors_flagged) for analyser, errors_flagged in analysis.items()}


def get_contract_analysis(path: str) -> dict[str, int]:
vyper_ast = get_vyper_ast(path)
output_dict = analyse(vyper_ast)
ret = extract_errors(output_dict)
return ret
return extract_errors(output_dict)

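extract_errors above now builds its result in a single dict comprehension instead of an explicit loop. A short sketch of the equivalence, with placeholder analyser names and node sets rather than real Vyper AST data:

# Sketch of the dict-comprehension rewrite of extract_errors.
analysis = {"DeadStore": {"node_a", "node_b"}, "UnusedParam": set()}

# Before: build the mapping with an explicit loop.
ret = {}
for analyser, errors_flagged in analysis.items():
    ret[str(analyser)] = len(errors_flagged)

# After: the comprehension expresses the same mapping in one statement.
ret = {str(analyser): len(errors_flagged) for analyser, errors_flagged in analysis.items()}

print(ret)  # {'DeadStore': 2, 'UnusedParam': 0}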