# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def n_return(self, node):
    """Emit the text for a ``return`` statement node and record, via
    ``set_pos_info``, the span of output text it occupies.

    Writes to the output buffer ``self.f`` and prunes traversal when done.
    NOTE(review): indentation reconstructed from a whitespace-mangled
    paste; the nesting below mirrors the statement order as given.
    """
    # Position where this statement's text will begin in the output buffer.
    start = len(self.f.getvalue()) + len(self.indent)
    if self.params["is_lambda"]:
        # Inside a lambda there is no "return" keyword: emit only the
        # returned expression.
        self.preorder(node[0])
        if hasattr(node[-1], "offset"):
            self.set_pos_info(node[-1], start, len(self.f.getvalue()))
        self.prune()
    else:
        start = len(self.f.getvalue()) + len(self.indent)
        self.write(self.indent, "return")
        # Write the return value unless this is a bare "return None"
        # that should be rendered as just "return".
        if self.return_none or node != SyntaxTree(
            "return", [SyntaxTree("ret_expr", [NONE]), Token("RETURN_VALUE")]
        ):
            self.write(" ")
            self.last_finish = len(self.f.getvalue())
            self.preorder(node[0])
            if hasattr(node[-1], "offset"):
                self.set_pos_info(node[-1], start, len(self.f.getvalue()))
                pass
            pass
        else:
            # Bare "return": still record positions for every child node.
            for n in node:
                self.set_pos_info_recurse(n, start, len(self.f.getvalue()))
                pass
            pass
        self.set_pos_info(node, start, len(self.f.getvalue()))
        self.println()
        self.prune()  # stop recursing
# NOTE(review): orphan fragment — this is the tail of a build_ast-style
# method whose ``def`` line is not in this chunk; ``tokens``,
# ``noneInNames``, ``isTopLevel``, ``customize`` and ``self`` come from
# the enclosing scope.  Kept byte-for-byte; indentation reconstructed.
#
# return statement instructions before parsing.
# But here we want to keep these instructions at the expense of
# a fully runnable Python program because we
# may be queried about the role of one of those instructions.
#
# NOTE: this differs from behavior in pysource.py
if len(tokens) >= 2 and not noneInNames:
    if tokens[-1].kind in ("RETURN_VALUE", "RETURN_VALUE_LAMBDA"):
        # Python 3.4's classes can add a "return None" which is
        # invalid syntax.
        if tokens[-2].kind == "LOAD_CONST":
            # Drop the trailing LOAD_CONST/RETURN_VALUE pair at top
            # level or when the constant is None; otherwise keep it
            # but tag the end with RETURN_LAST.
            if isTopLevel or tokens[-2].pattr is None:
                del tokens[-2:]
            else:
                tokens.append(Token("RETURN_LAST"))
        else:
            tokens.append(Token("RETURN_LAST"))
if len(tokens) == 0:
    return PASS
# Build parse tree from tokenized and massaged disassembly.
try:
    # FIXME: have p.insts update in a better way
    # modularity is broken here
    p_insts = self.p.insts
    self.p.insts = self.scanner.insts
    ast = parser.parse(self.p, tokens, customize)
    self.p.insts = p_insts
except (parser.ParserError, AssertionError) as e:
    raise ParserError(e, tokens)
def n_return_if_stmt(self, node):
    """Emit the text for a ``return`` that ends an ``if`` branch
    (RETURN_END_IF) and record its output-text span via ``set_pos_info``.

    NOTE(review): indentation reconstructed from a whitespace-mangled
    paste; the nesting of the ``hasattr`` check mirrors ``n_return`` —
    confirm against the upstream source.
    """
    # Position where this statement's text will begin in the output buffer.
    start = len(self.f.getvalue()) + len(self.indent)
    if self.params['isLambda']:
        # In a lambda body only the returned expression is emitted.
        node[0].parent = node
        self.preorder(node[0])
    else:
        start = len(self.f.getvalue()) + len(self.indent)
        self.write(self.indent, 'return')
        # Write the return value unless this is a bare "return None"
        # that should be rendered as just "return".
        if self.return_none or node != AST('return_stmt', [AST('ret_expr', [NONE]), Token('RETURN_END_IF')]):
            self.write(' ')
            self.preorder(node[0])
            if hasattr(node[-1], 'offset'):
                self.set_pos_info(node[-1], start, len(self.f.getvalue()))
        self.println()
    self.set_pos_info(node, start, len(self.f.getvalue()))
    self.prune() # stop recursing
# "loop" tag last so the grammar rule matches that properly.
for jump_offset in sorted(jump_targets[inst.offset], reverse=True):
come_from_name = "COME_FROM"
opname = self.opname_for_offset(jump_offset)
if opname == "EXTENDED_ARG":
j = xdis.next_offset(op, self.opc, jump_offset)
opname = self.opname_for_offset(j)
if opname.startswith("SETUP_"):
come_from_type = opname[len("SETUP_") :]
come_from_name = "COME_FROM_%s" % come_from_type
pass
elif inst.offset in self.except_targets:
come_from_name = "COME_FROM_EXCEPT_CLAUSE"
tokens.append(
Token(
come_from_name,
jump_offset,
repr(jump_offset),
offset="%s_%s" % (inst.offset, jump_idx),
has_arg=True,
opc=self.opc,
)
)
jump_idx += 1
pass
pass
# NOTE(review): the two ``elif`` branches below are orphaned duplicates —
# their ``if`` heads are not in this chunk, so as pasted they cannot
# parse.  The first variant's ``Token(...)`` call also looks garbled: it
# passes "ELSE" *and* ``come_from_name``/``jump_offset`` (one positional
# argument more than the second variant).  Kept byte-for-byte; confirm
# against the upstream scanner before reusing.
elif inst.offset in self.else_start:
    end_offset = self.else_start[inst.offset]
    tokens.append(
        Token(
            "ELSE",
            come_from_name,
            jump_offset,
            repr(jump_offset),
            offset="%s_%s" % (inst.offset, jump_idx),
            has_arg=True,
            opc=self.opc,
        ),
    )
    jump_idx += 1
    pass
pass
elif inst.offset in self.else_start:
    end_offset = self.else_start[inst.offset]
    j = tokens_append(
        j,
        Token(
            "ELSE",
            None,
            repr(end_offset),
            offset="%s" % (inst.offset),
            has_arg=True,
            opc=self.opc,
        ),
    )
    pass
# NOTE(review): orphan fragment from an older (Python-2 era) scanner's
# disassembly loop; ``inst``, ``argval``, ``offset``, ``jump_targets``,
# ``tokens``, ``jump_idx``, ``extended_arg``, ``co`` and ``op`` come from
# the enclosing scope not visible in this chunk.
pattr = inst.argrepr
opname = inst.opname
if op in self.opc.CONST_OPS:
    const = argval
# offset order so we have the larger range or biggest instruction interval
# last. (I think they are sorted in increasing order, but for safety
# we sort them). That way, specific COME_FROM tags will match up
# properly. For example, a "loop" with an "if" nested in it should have the
# "loop" tag last so the grammar rule matches that properly.
last_jump_offset = -1
for jump_offset in sorted(jump_targets[offset], reverse=True):
    # Emit one COME_FROM per *distinct* jump source targeting `offset`.
    if jump_offset != last_jump_offset:
        tokens.append(Token(
            'COME_FROM', jump_offset, repr(jump_offset),
            offset="%s_%d" % (offset, jump_idx),
            has_arg = True))
        jump_idx += 1
        last_jump_offset = jump_offset
# NOTE(review): orphaned ``elif`` — its ``if`` head is outside this chunk.
elif offset in self.thens:
    tokens.append(Token(
        'THEN', None, self.thens[offset],
        offset="%s_0" % offset,
        has_arg = True))
has_arg = (op >= self.opc.HAVE_ARGUMENT)
if has_arg:
    oparg = self.get_argument(offset) + extended_arg
    extended_arg = 0
    if op == self.opc.EXTENDED_ARG:
        # ``L65536`` is presumably a module-level constant equal to 65536,
        # the EXTENDED_ARG multiplier — TODO confirm in the scanner module.
        extended_arg = oparg * L65536
        continue
    if op in self.opc.CONST_OPS:
        const = co.co_consts[oparg]
# We can't use inspect.iscode() because we may be
# using a different version of Python than the
# one that this was byte-compiled on. So the code
def build_ast(
    self, tokens, customize, is_lambda=False, noneInNames=False, isTopLevel=False
):
    """Build a parse tree from tokenized and massaged disassembly.

    Parameters:
        tokens: list of disassembly Tokens, possibly rewritten below.
        customize: grammar customizations passed through to the parser.
        is_lambda: True when parsing a lambda body; RETURN_* tokens are
            renamed so the lambda grammar rules match.
        noneInNames: when True, skip the trailing "return None" cleanup.
        isTopLevel: True for module top level, where a trailing
            LOAD_CONST/RETURN_VALUE pair is dropped outright.

    Returns the parse tree, or PASS when no tokens remain.
    Raises ParserError when the grammar cannot parse the tokens.

    NOTE(review): the mangled paste had lost the ``if is_lambda:``
    nesting, which made the lambda path unconditional and its early
    ``return ast`` left everything after it unreachable; structure
    restored here.
    """
    # FIXME: DRY with pysource.py
    # assert isinstance(tokens[0], Token)
    if is_lambda:
        # Lambdas use dedicated RETURN_* token names so the lambda
        # grammar rules match.
        for t in tokens:
            if t.kind == "RETURN_END_IF":
                t.kind = "RETURN_END_IF_LAMBDA"
            elif t.kind == "RETURN_VALUE":
                t.kind = "RETURN_VALUE_LAMBDA"
        tokens.append(Token("LAMBDA_MARKER"))
        try:
            # FIXME: have p.insts update in a better way
            # modularity is broken here
            p_insts = self.p.insts
            self.p.insts = self.scanner.insts
            ast = python_parser.parse(self.p, tokens, customize)
            self.p.insts = p_insts
        except (python_parser.ParserError, AssertionError) as e:
            raise ParserError(e, tokens)
        maybe_show_tree(self, ast)
        return ast

    # The bytecode for the end of the main routine has a
    # "return None". However you can't issue a "return" statement in
    # main. In the other build_ast routine we eliminate the
    # return statement instructions before parsing.
    # But here we want to keep these instructions at the expense of
    # a fully runnable Python program because we
    # may be queried about the role of one of those instructions.
    #
    # NOTE: this differs from behavior in pysource.py
    if len(tokens) >= 2 and not noneInNames:
        if tokens[-1].kind in ("RETURN_VALUE", "RETURN_VALUE_LAMBDA"):
            # Python 3.4's classes can add a "return None" which is
            # invalid syntax.
            if tokens[-2].kind == "LOAD_CONST":
                # Drop the trailing pair at top level or when the
                # constant is None; otherwise tag the end RETURN_LAST.
                if isTopLevel or tokens[-2].pattr is None:
                    del tokens[-2:]
                else:
                    tokens.append(Token("RETURN_LAST"))
            else:
                tokens.append(Token("RETURN_LAST"))
    if len(tokens) == 0:
        return PASS

    # Build parse tree from tokenized and massaged disassembly.
    try:
        # FIXME: have p.insts update in a better way
        # modularity is broken here
        p_insts = self.p.insts
        self.p.insts = self.scanner.insts
        ast = parser.parse(self.p, tokens, customize)
        self.p.insts = p_insts
    except (parser.ParserError, AssertionError) as e:
        raise ParserError(e, tokens)
    maybe_show_tree(self, ast)
    # Mirrors the lambda path above; the paste appears truncated here,
    # and upstream returns the tree.
    return ast