def _op_divmod(self, args):
    if self.is_signed:
        # widen the divisor to the dividend's width, then divide signed
        quotient = args[0].SDiv(claripy.SignExt(self._from_size - self._to_size, args[1]))
        remainder = args[0].SMod(claripy.SignExt(self._from_size - self._to_size, args[1]))
        quotient_size = self._to_size
        remainder_size = self._to_size
        # pack the remainder above the quotient into a single result
        return claripy.Concat(
            claripy.Extract(remainder_size - 1, 0, remainder),
            claripy.Extract(quotient_size - 1, 0, quotient)
        )
    else:
        # unsigned case: zero-extend the divisor instead
        quotient = args[0] // claripy.ZeroExt(self._from_size - self._to_size, args[1])
        remainder = args[0] % claripy.ZeroExt(self._from_size - self._to_size, args[1])
        quotient_size = self._to_size
        remainder_size = self._to_size
        return claripy.Concat(
            claripy.Extract(remainder_size - 1, 0, remainder),
            claripy.Extract(quotient_size - 1, 0, quotient)
        )
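# Illustration (not from the original module): a minimal sketch of what the
# unsigned branch of _op_divmod computes, assuming from_size=64 and to_size=32
# (a DivModU64to32-style operation). The divisor is widened to the dividend's
# width and the 32-bit remainder is concatenated above the 32-bit quotient.
import claripy

dividend = claripy.BVV(1000, 64)                 # 64-bit numerator
divisor = claripy.BVV(7, 32)                     # 32-bit denominator
quotient = dividend // claripy.ZeroExt(32, divisor)
remainder = dividend % claripy.ZeroExt(32, divisor)
packed = claripy.Concat(claripy.Extract(31, 0, remainder),
                        claripy.Extract(31, 0, quotient))
# 1000 = 7 * 142 + 6, so the packed result is (6 << 32) | 142
assert claripy.Solver().eval(packed, 1)[0] == (6 << 32) | 142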
def _op_extract(self, args):
    return claripy.Extract(self._to_size - 1, 0, args[0])
def generic_shift_thing(self, args, op):
    if self._vector_size is not None:
        shifted = []
        if args[1].length != self._vector_size:
            shift_by = args[1].zero_extend(self._vector_size - args[1].length)
        else:
            shift_by = args[1]
        for i in reversed(range(self._vector_count)):
            left = claripy.Extract((i + 1) * self._vector_size - 1,
                                   i * self._vector_size,
                                   args[0])
            shifted.append(op(left, shift_by))
        return claripy.Concat(*shifted)
    else:
        raise SimOperationError("you done fucked")
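# Illustration (not from the original module): a sketch of the per-lane
# shifting generic_shift_thing performs, assuming a 2-lane vector of 8-bit
# elements (vector_size=8, vector_count=2) and a left shift. Each lane is
# extracted, shifted independently, and the lanes are re-concatenated in order.
import claripy

vector = claripy.BVV(0x0103, 16)                 # lanes: 0x01 (high), 0x03 (low)
shift_by = claripy.BVV(2, 8)
lanes = []
for i in reversed(range(2)):                     # high lane first, as in the loop above
    lane = claripy.Extract((i + 1) * 8 - 1, i * 8, vector)
    lanes.append(lane << shift_by)
shifted = claripy.Concat(*lanes)
assert claripy.Solver().eval(shifted, 1)[0] == 0x040C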
def _op_generic_Ctz(self, args):
    """Count the trailing zeroes"""
    wtf_expr = claripy.BVV(self._from_size, self._from_size)
    for a in reversed(range(self._from_size)):
        bit = claripy.Extract(a, a, args[0])
        wtf_expr = claripy.If(bit == 1, claripy.BVV(a, self._from_size), wtf_expr)
    return wtf_expr
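# Illustration (not from the original module): a concrete check of the
# count-trailing-zeroes construction above, assuming an 8-bit operand. The
# If-chain is built from the most significant bit downward, so the last
# rebinding wins and the lowest set bit determines the result.
import claripy

value = claripy.BVV(0b00101000, 8)
result = claripy.BVV(8, 8)                       # default: no bit is set
for a in reversed(range(8)):
    bit = claripy.Extract(a, a, value)
    result = claripy.If(bit == 1, claripy.BVV(a, 8), result)
assert claripy.Solver().eval(result, 1)[0] == 3  # lowest set bit is bit 3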
def _consolidate_expr(e0):
    if hasattr(e0, 'op') and e0.op == 'Reverse':
        e1 = e0.args[0]
        if e1.op == 'Extract':
            p0 = e1.args[0]
            p1 = e1.args[1]
            e2 = e1.args[2]
            if e2.op == 'Reverse':
                return claripy.Extract(e2.size() - 1 - p1, e2.size() - 1 - p0, e2.args[0])
    return e0
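# Illustration (not from the original module): a hedged check of the rewrite
# performed by _consolidate_expr, assuming a byte-aligned extract (the pattern
# it is meant to simplify). Reverse(Extract(p0, p1, Reverse(x))) is replaced
# by Extract(n-1-p1, n-1-p0, x), where n is the width of the reversed value.
import claripy

x = claripy.BVS('x', 32)
original = claripy.Reverse(claripy.Extract(23, 8, claripy.Reverse(x)))
consolidated = claripy.Extract(31 - 8, 31 - 23, x)   # Extract(23, 8, x)
s = claripy.Solver()
assert not s.satisfiable(extra_constraints=[original != consolidated])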
def armg_calculate_flags_nzcv(state, cc_op, cc_dep1, cc_dep2, cc_dep3):
    # NOTE: adding constraints afterwards works here *only* because the constraints are
    # actually useless, because we require cc_op to be unique. If we didn't, we'd need to
    # pass the constraints into any functions called after the constraints were created.
    n = armg_calculate_flag_n(state, cc_op, cc_dep1, cc_dep2, cc_dep3)
    z = armg_calculate_flag_z(state, cc_op, cc_dep1, cc_dep2, cc_dep3)
    c = armg_calculate_flag_c(state, cc_op, cc_dep1, cc_dep2, cc_dep3)
    v = armg_calculate_flag_v(state, cc_op, cc_dep1, cc_dep2, cc_dep3)
    vec = [(ARMG_CC_SHIFT_N, claripy.Extract(0, 0, n)),
           (ARMG_CC_SHIFT_Z, claripy.Extract(0, 0, z)),
           (ARMG_CC_SHIFT_C, claripy.Extract(0, 0, c)),
           (ARMG_CC_SHIFT_V, claripy.Extract(0, 0, v))]
    return _concat_flags(ARMG_NBITS, vec)
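# Illustration (not from the original module): a plausible stand-in for the
# flag packing that _concat_flags performs, assuming it ORs each single-bit
# flag into place at its (shift, bit) position and that the ARM CPSR positions
# N=31, Z=30, C=29, V=28 match VEX's ARMG_CC_SHIFT_* constants.
import claripy

def pack_nzcv(n, z, c, v, nbits=32):
    word = claripy.BVV(0, nbits)
    for shift, flag in [(31, n), (30, z), (29, c), (28, v)]:
        bit = claripy.Extract(0, 0, flag)              # keep only the flag's low bit
        word = word | (claripy.ZeroExt(nbits - 1, bit) << shift)
    return word

nzcv = pack_nzcv(claripy.BVV(1, 32), claripy.BVV(0, 32),
                 claripy.BVV(1, 32), claripy.BVV(0, 32))
assert claripy.Solver().eval(nzcv, 1)[0] == (1 << 31) | (1 << 29)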
def generic_compare(self, args, comparison):
    if self._vector_size is not None:
        res_comps = []
        for i in reversed(range(self._vector_count)):
            a_comp = claripy.Extract((i + 1) * self._vector_size - 1,
                                     i * self._vector_size,
                                     args[0])
            b_comp = claripy.Extract((i + 1) * self._vector_size - 1,
                                     i * self._vector_size,
                                     args[1])
            res_comps.append(claripy.If(comparison(a_comp, b_comp),
                                        claripy.BVV(-1, self._vector_size),
                                        claripy.BVV(0, self._vector_size)))
        return claripy.Concat(*res_comps)
    else:
        return claripy.If(comparison(args[0], args[1]), claripy.BVV(1, 1), claripy.BVV(0, 1))
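# Illustration (not from the original module): a concrete sketch of the
# vectorized branch of generic_compare, assuming two 2-lane vectors of 8-bit
# elements compared for equality. Matching lanes become all-ones (-1) and
# non-matching lanes become zero, mirroring SIMD compare instructions.
import claripy
import operator

a = claripy.BVV(0x1234, 16)
b = claripy.BVV(0x1299, 16)
lanes = []
for i in reversed(range(2)):
    a_lane = claripy.Extract((i + 1) * 8 - 1, i * 8, a)
    b_lane = claripy.Extract((i + 1) * 8 - 1, i * 8, b)
    lanes.append(claripy.If(operator.eq(a_lane, b_lane),
                            claripy.BVV(-1, 8), claripy.BVV(0, 8)))
result = claripy.Concat(*lanes)
assert claripy.Solver().eval(result, 1)[0] == 0xFF00  # only the high lanes match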