# convert to a bytecode object and remove the leading and
# trailing ops: STORE_NAME LOAD_CONST RETURN_VALUE
outer_ops = bc.Bytecode.from_code(code)[0:-3]
# the stack now looks like the following:
# ...
# ...
# LOAD_CONST (<code>)
# LOAD_CONST (qualified name)
# MAKE_FUNCTION (num defaults) // TOS
# extract the inner code object which represents the actual
# function code and update its flags
inner = bc.Bytecode.from_code(outer_ops[-3].arg)
inner.flags ^= (inner.flags & bc.CompilerFlags.NEWLOCALS)
# On Python 3 all comprehensions use a function call. To avoid scoping
# issues the function call is run in the dynamic scope.
if has_defs:
    run_in_dynamic_scope(inner, global_vars)
else:
    rewrite_globals_access(inner, global_vars)
outer_ops[-3].arg = inner.to_code()
# inline the modified code ops into the code generator
cg.code_ops.extend(outer_ops)
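# The `flags ^= (flags & FLAG)` pattern above is a bit-clear: XOR-ing the value
# with its own masked bits removes NEWLOCALS while leaving every other flag
# intact, i.e. it is equivalent to `flags &= ~FLAG`. A minimal sketch of the
# idiom, assuming only the bytecode package is available:
import bytecode as bc

flags = bc.CompilerFlags.OPTIMIZED | bc.CompilerFlags.NEWLOCALS
flags ^= (flags & bc.CompilerFlags.NEWLOCALS)
assert flags == bc.CompilerFlags.OPTIMIZED
assert not (flags & bc.CompilerFlags.NEWLOCALS)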
getter_code.append(Instr('BINARY_SUBSCR'))
getter_code.append(Instr('RETURN_VALUE'))
getter_code.flags = CompilerFlags.OPTIMIZED | CompilerFlags.NEWLOCALS | CompilerFlags.NOFREE
getter_fn = property(get_func_from_code(getter_code.to_code(), k))
setter_code = Bytecode()
setter_code.filename = filepath
setter_code.argcount = 2
setter_code.argnames.extend(['self', 'value'])
setter_code.append(Instr('LOAD_FAST', 'value'))
setter_code.append(Instr('LOAD_FAST', 'self'))
setter_code.append(Instr('LOAD_CONST', i))
setter_code.append(Instr('STORE_SUBSCR'))
setter_code.append(Instr('LOAD_CONST', None))
setter_code.append(Instr('RETURN_VALUE'))
setter_code.flags = CompilerFlags.OPTIMIZED | CompilerFlags.NEWLOCALS | CompilerFlags.NOFREE
setter_fn = getter_fn.setter(get_func_from_code(setter_code.to_code(), k))
namespace[k] = setter_fn
init_code = Bytecode()
init_code.name = '__init__'
init_code.filename = filepath
ary_num = len(annotations)
args = list(annotations)
init_code.argcount = ary_num + 1
init_code.argnames.extend(['self', *args])
if ary_num:
    init_code.append(Instr('LOAD_FAST', 'self'))
    if ary_num >= 4:
        init_code.append(Instr('DUP_TOP'))
        for i in range((ary_num - 2) // 2):
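# For reference, the getter/setter pair assembled above (BINARY_SUBSCR to read
# self[i], STORE_SUBSCR to write it) behaves like this plain-Python equivalent.
# A sketch with a hypothetical helper name; the bytecode version avoids the
# closure and the extra call overhead:
def _make_index_property(i):
    def fget(self):
        return self[i]

    def fset(self, value):
        self[i] = value

    return property(fget, fset)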
def _build_attrs(attrs: Dict[str, Any], bases: Tuple[Type], fields: Dict[str, Any], noinit: Set[str]) -> Dict[str, Any]:
    asm = Assembler('')
    asm.instrs.flags = CompilerFlags.VARARGS | CompilerFlags.VARKEYWORDS
    asm.instrs.argcount = 1
    asm.instrs.argnames = ['self', 'args', 'kwargs']
    # add all attributes
    for name, vtype in fields.items():
        if name not in noinit:
            init = _make_init(asm, name, vtype, _real_type(vtype))
            init and init()
    # set slots and attributes
    attrs['__attrs__'] = set(sorted(attrs.keys()))
    attrs['__slots__'] = tuple(sorted(fields.keys()))
    # create a new `__init__` only when fields present
    if not asm.instrs:
        return attrs
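# A rough sketch of how an attrs dict like the one built above can be turned
# into a class; `Point` and its fields are made-up names for illustration only:
def _point_init(self, x, y):
    self.x, self.y = x, y

attrs = {'__slots__': ('x', 'y'), '__init__': _point_init}
Point = type('Point', (), attrs)
p = Point(1, 2)
assert (p.x, p.y) == (1, 2)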
    The code object created by the Enaml compiler.
f_globals : dict
    The global scope for the returned function.

Returns
-------
result : FunctionType
    A new function with optimized local variable access
    and instrumentation for inverting the operation.
"""
bc_code = bc.Bytecode.from_code(code)
optimize_locals(bc_code)
bc_code = inject_inversion(bc_code)
bc_code.flags ^= (bc_code.flags & bc.CompilerFlags.NEWLOCALS)
bc_code.argnames = ['_[inverter]', '_[value]'] + bc_code.argnames
bc_code.argcount += 2
new_code = bc_code.to_code()
return FunctionType(new_code, f_globals)
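# FunctionType(code, globals) is all that is needed to turn a finished code
# object back into a callable; a minimal self-contained illustration using a
# hand-compiled expression:
from types import FunctionType

demo_code = compile("x + 1", "<demo>", "eval")
demo_fn = FunctionType(demo_code, {"x": 41})
assert demo_fn() == 42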
def code_info(cls, code: Bytecode) -> PyCodeInfo[Repr]:
    cfg = ControlFlowGraph.from_bytecode(code)
    current = cls.empty()
    run_machine(
        Interpreter(code.first_lineno).abs_i_cfg(cfg), current)
    glob_deps = tuple(current.globals)
    instrs = current.instrs
    instrs = current.pass_push_pop_inline(instrs)
    return PyCodeInfo(code.name, tuple(glob_deps), code.argnames,
                      code.freevars, code.cellvars, code.filename,
                      code.first_lineno, code.argcount,
                      code.kwonlyargcount,
                      bool(code.flags & CompilerFlags.GENERATOR),
                      bool(code.flags & CompilerFlags.VARKEYWORDS),
                      bool(code.flags & CompilerFlags.VARARGS),
                      instrs)
bc_code.argcount = (len(self.args) -
                    self.kwonlyargs -
                    self.posonlyargs -
                    self.varargs -
                    self.varkwargs)
if POS_ONLY_ARGS:
    bc_code.posonlyargcount = self.posonlyargs
bc_code.kwonlyargcount = self.kwonlyargs
for name in ("name", "filename", "firstlineno", "docstring"):
    setattr(bc_code, name, getattr(self, name))
# Set flags appropriately and update flags based on the instructions
for setting, flag in zip((self.varargs, self.varkwargs, self.newlocals),
                         (bc.CompilerFlags.VARARGS,
                          bc.CompilerFlags.VARKEYWORDS,
                          bc.CompilerFlags.NEWLOCALS)
                         ):
    # Set the flag
    if setting:
        bc_code.flags |= flag
    # Unset the flag if it was set
    else:
        bc_code.flags ^= (bc_code.flags & flag)
bc_code.update_flags()
return bc_code.to_code()
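# End-to-end sketch of the Bytecode -> code object -> function round trip the
# method above performs; an identity function keeps the instruction stream
# trivial. A sketch assuming CPython <= 3.10, where no RESUME prologue is
# required, and the bytecode package's Instr/Bytecode API:
import bytecode as bc
from types import FunctionType

ident = bc.Bytecode([bc.Instr("LOAD_FAST", "x"), bc.Instr("RETURN_VALUE")])
ident.name = "ident"
ident.argcount = 1
ident.argnames = ["x"]
ident.flags |= bc.CompilerFlags.OPTIMIZED | bc.CompilerFlags.NEWLOCALS
ident_fn = FunctionType(ident.to_code(), {})
assert ident_fn(42) == 42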
def get_func_from_code(code_object, fn_name):
    executor_code = Bytecode()
    executor_code.append(Instr('LOAD_CONST', code_object))
    executor_code.append(Instr('LOAD_CONST', fn_name))
    executor_code.append(Instr('MAKE_FUNCTION', 0))
    executor_code.append(Instr('RETURN_VALUE'))
    executor_code.flags = CompilerFlags.OPTIMIZED | CompilerFlags.NEWLOCALS | CompilerFlags.NOFREE
    return eval(executor_code.to_code())
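# The LOAD_CONST / MAKE_FUNCTION trampoline above can also be expressed
# without assembling any bytecode; a rough equivalent using types.FunctionType
# (hypothetical name, ignoring details such as __qualname__ and closures):
from types import FunctionType

def get_func_from_code_direct(code_object, fn_name):
    return FunctionType(code_object, globals(), fn_name)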
"""
matches = []
n_args = len(args)
argspec = None
for spec in self.specializations:
    # Before scoring for a match, rule out incompatible specs
    # based on the number of arguments. Too few arguments is no
    # match, and too many is no match unless the specialization
    # accepts variadic arguments.
    n_params = len(spec.paramspec)
    if n_args < n_params:
        continue
    n_total = n_params + len(spec.func.__defaults__ or ())
    variadic = spec.func.__code__.co_flags & CompilerFlags.VARARGS
    if n_args > n_total and not variadic:
        continue
    # Defer creating the argspec until needed
    if argspec is None:
        argspec = self.make_paramspec(args)
    # Scoring a match is done by ranking the arguments using a
    # closeness measure. If an argument is an exact match to
    # the parameter, it gets a score of 0. If an argument is a
    # subtype of a type parameter, it gets a score equal to the
    # index of the type in the mro of the subtype. If the arg
    # is not an exact match or a subtype, the specialization is
    # not a match. If the parameter has no specialization, the
    # argument gets a score of 1 << 16, which is arbitrary but
    # large enough that it's highly unlikely to be outweighed
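# A small worked example of the closeness measure described in the comment
# above (hypothetical helper; `None` stands in for an unspecialized parameter):
def _closeness(arg_type, param_type):
    if param_type is None:
        return 1 << 16                                 # unspecialized parameter
    if arg_type is param_type:
        return 0                                       # exact match
    if issubclass(arg_type, param_type):
        return arg_type.__mro__.index(param_type)      # distance in the MRO
    return None                                        # not a match

assert _closeness(bool, bool) == 0
assert _closeness(bool, int) == 1      # int sits at index 1 in bool's MRO
assert _closeness(str, None) == 1 << 16
assert _closeness(str, int) is None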
        inserts[idx] = tracing_code
    elif i_name == "RETURN_VALUE":
        tracing_code = [
            bc.Instr("DUP_TOP"),                    # obj
            bc.Instr(tracer_op, '_[tracer]'),       # obj -> obj -> tracer
            bc.Instr("LOAD_ATTR", 'return_value'),  # obj -> obj -> tracefunc
            bc.Instr("ROT_TWO"),                    # obj -> tracefunc -> obj
            bc.Instr("CALL_FUNCTION", 0x0001),      # obj -> retval
            bc.Instr("POP_TOP"),                    # obj
        ]
        inserts[idx] = tracing_code
    elif isinstance(i_arg, CodeType):
        # Inject tracing in nested code objects if they use their parent's
        # locals.
        inner = bc.Bytecode.from_code(i_arg)
        if not inner.flags & bc.CompilerFlags.NEWLOCALS:
            instr.arg = inject_tracing(inner, nested=True).to_code()
# Create a new code list which interleaves the generated code with
# the original code at the appropriate location.
new_code = bytecode.copy()
new_code.clear()
for idx, code_op in enumerate(bytecode):
    if idx in inserts:
        new_code.extend(inserts[idx])
    new_code.append(code_op)
return new_code
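# Minimal sketch of the interleave step above: `inserts` maps an instruction
# index to the instructions that should run just before it (illustrative
# values; nothing is executed here, only the code list is rebuilt):
import bytecode as bc

original = bc.Bytecode([bc.Instr("LOAD_CONST", 1), bc.Instr("RETURN_VALUE")])
inserts = {1: [bc.Instr("POP_TOP"), bc.Instr("LOAD_CONST", 2)]}
patched = original.copy()
patched.clear()
for idx, instr in enumerate(original):
    if idx in inserts:
        patched.extend(inserts[idx])
    patched.append(instr)
assert len(patched) == 4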