Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_index_mode_detection(self, indexed, expected):
    """
    Test detection of IterationInstance access modes (AFFINE vs IRREGULAR).
    Proper detection of access mode is a prerequisite to any sort of
    data dependence analysis.

    Parameters
    ----------
    indexed : str
        Source-level access expression, turned into an object via ``eval``
        against the local symbols created below.
    expected : object
        The value ``IterationInstance.index_mode`` is expected to report.
    """
    grid = Grid(shape=(4, 4, 4))
    # The names below look unused, but they must stay in scope (and keep
    # these exact names) because `eval(indexed)` resolves them from this
    # frame's locals.
    x, y, z = grid.dimensions  # noqa
    sx = SubDimension.middle('sx', x, 1, 1)  # noqa
    u = Function(name='u', grid=grid)  # noqa
    c = Constant(name='c')  # noqa
    sc = Scalar(name='sc', is_const=True)  # noqa
    s = Scalar(name='s')  # noqa
    ii = IterationInstance(eval(indexed))
    assert ii.index_mode == expected
@pytest.mark.parametrize('exprs', [
    ['Eq(ti0[x,y,z], ti0[x,y,z] + t0*2.)', 'Eq(ti0[0,0,z], 0.)'],
    ['Eq(ti0[x,y,z], ti0[x,y,z-1] + t0*2.)', 'Eq(ti0[0,0,z], 0.)'],
    ['Eq(ti0[x,y,z], ti0[x,y,z] + t0*2.)', 'Eq(ti0[0,y,0], 0.)'],
    ['Eq(ti0[x,y,z], ti0[x,y,z] + t0*2.)', 'Eq(ti0[0,y,z], 0.)'],
])
def test_directly_indexed_expression(self, ti0, t0, exprs):
    """
    Test that equations using integer indices are inserted in the right
    loop nest, at the right loop nest depth.
    """
    grid = Grid(shape=(4, 4, 4))
    # `x`, `y`, `z` must keep these names: the eval'd strings reference them.
    x, y, z = grid.dimensions  # noqa
    # Shadows the `ti0` fixture parameter with a locally built Function.
    ti0 = Function(name='ti0', grid=grid, space_order=0)  # noqa
    # NOTE(review): this local is named `f0` but never referenced; the
    # eval'd expressions use `t0`, which resolves to the fixture parameter
    # instead. Possibly a mistyped binding (`t0 = ...`?) — confirm intent.
    f0 = Scalar(name='t0')  # noqa
    eqs = [eval(exprs[0]), eval(exprs[1])]
    op = Operator(eqs, dse='noop', dle='noop')
    trees = retrieve_iteration_tree(op)
    # Expect one loop nest per equation: the constant-indexed Eq must land
    # in its own nest, not be fused with the stencil Eq.
    assert len(trees) == 2
    assert trees[0][-1].nodes[0].exprs[0].expr.rhs == eqs[0].rhs
    assert trees[1][-1].nodes[0].exprs[0].expr.rhs == eqs[1].rhs
def test_cse(exprs, expected):
    """
    Test common subexpressions elimination.

    Parameters
    ----------
    exprs : list of str
        Source-level expressions, turned into DummyEq objects via ``eval``
        against the local symbols created below.
    expected : list of str
        String form of the right-hand sides ``_cse`` is expected to
        produce, in order.
    """
    grid = Grid((3, 3, 3))
    dims = grid.dimensions
    # These locals look unused but must stay in scope (with these exact
    # names) for the `eval` calls below.
    tu = TimeFunction(name="tu", grid=grid, space_order=2)  # noqa
    tv = TimeFunction(name="tv", grid=grid, space_order=2)  # noqa
    tw = TimeFunction(name="tw", grid=grid, space_order=2)  # noqa
    tz = TimeFunction(name="tz", grid=grid, space_order=2)  # noqa
    ti0 = Array(name='ti0', shape=(3, 5, 7), dimensions=dims).indexify()  # noqa
    ti1 = Array(name='ti1', shape=(3, 5, 7), dimensions=dims).indexify()  # noqa
    t0 = Scalar(name='t0')  # noqa
    t1 = Scalar(name='t1')  # noqa
    t2 = Scalar(name='t2')  # noqa
    # A list comprehension would need explicit locals/globals mappings for
    # `eval` (a comprehension body runs in its own scope). Accumulate into
    # a fresh list instead of overwriting `exprs` in place: the incoming
    # list object is shared by pytest's parametrization, so mutating it
    # leaks already-evaluated objects into later uses of the same params.
    evaluated = []
    for e in exprs:
        evaluated.append(DummyEq(indexify(diffify(eval(e).evaluate))))

    counter = generator()

    def make():
        # Fresh temporary (r0, r1, ...) for each extracted subexpression.
        return Scalar(name='r%d' % counter()).indexify()

    processed = _cse(evaluated, make)

    assert len(processed) == len(expected)
    assert all(str(i.rhs) == j for i, j in zip(processed, expected))
def test_makeit_ssa(exprs, exp_u, exp_v):
    """
    A test building Operators with non-trivial sequences of input expressions
    that push hard on the `makeit_ssa` utility function.

    Parameters
    ----------
    exprs : list of str
        Source-level equations, turned into objects via ``eval`` against
        the local symbols created below.
    exp_u, exp_v : array_like
        Expected contents of ``u.data`` and ``v.data`` after ``op.apply()``.
    """
    grid = Grid(shape=(4, 4))
    # The names below must stay in scope (with these exact names) for the
    # `eval` calls below.
    x, y = grid.dimensions  # noqa
    u = Function(name='u', grid=grid)  # noqa
    v = Function(name='v', grid=grid)  # noqa
    s = Scalar(name='s')  # noqa
    # A list comprehension would need explicit locals/globals mappings for
    # `eval`. Accumulate into a fresh list rather than mutating `exprs` in
    # place -- the parametrize list object is shared across test runs.
    eqns = []
    for e in exprs:
        eqns.append(eval(e))
    op = Operator(eqns)
    op.apply()
    assert np.all(u.data == exp_u)
    assert np.all(v.data == exp_v)
def test_collection(self, exprs, expected):
    """
    Unit test for the detection and collection of aliases out of a series
    of input expressions.

    Parameters
    ----------
    exprs : list of str
        Source-level expressions, rewritten in place into DummyEq objects
        via ``eval`` against the local symbols created below.
    expected : list of str
        Expected aliases, also ``eval``uated in place.
    """
    grid = Grid(shape=(4, 4))
    # All locals below must stay in scope (with these exact names) for the
    # `eval` calls further down.
    x, y = grid.dimensions  # noqa
    xi, yi = grid.interior.dimensions  # noqa
    t0 = Scalar(name='t0')  # noqa
    t1 = Scalar(name='t1')  # noqa
    t2 = Scalar(name='t2')  # noqa
    t3 = Scalar(name='t3')  # noqa
    fa = Function(name='fa', grid=grid, shape=(4,), dimensions=(x,), space_order=4)  # noqa
    fb = Function(name='fb', grid=grid, shape=(4,), dimensions=(x,), space_order=4)  # noqa
    fc = Function(name='fc', grid=grid, space_order=4)  # noqa
    fd = Function(name='fd', grid=grid, space_order=4)  # noqa
    # List/dict comprehension would need explicit locals/globals mappings to eval.
    # NOTE(review): these loops mutate the pytest-shared parametrize lists
    # in place -- harmless for a single run, but worth confirming.
    for i, e in enumerate(list(exprs)):
        exprs[i] = DummyEq(indexify(eval(e).evaluate))
    for i, e in enumerate(list(expected)):
        expected[i] = eval(e)
    # NOTE(review): `aliases` is computed but never compared against
    # `expected` in the visible code -- the assertions may have been
    # truncated at this chunk boundary; confirm against the full file.
    aliases = collect(exprs, False, lambda i: False)
make = lambda: Scalar(name=template(), dtype=cluster.dtype).indexify()
rule = iq_timevarying(cluster.trace)
def __new_stage2__(cls, name, spacing=None):
    """
    Second stage of the two-phase construction: build the Symbol via
    sympy's cached `__xnew__` and attach its spacing symbol, defaulting
    to a Scalar named ``h_<name>`` when none is supplied.
    """
    obj = sympy.Symbol.__xnew__(cls, name)
    if not spacing:
        # Any falsy `spacing` (notably None) gets the default symbol,
        # mirroring the original `spacing or Scalar(...)` semantics.
        spacing = Scalar(name='h_%s' % name)
    obj._spacing = spacing
    return obj
@cached_property
def symbolic_start(self):
    """
    The symbol defining the iteration start for this dimension.

    Returns
    -------
    Scalar
        A 32-bit integer Scalar named after ``self.min_name``. Cached, so
        repeated accesses return the same object.
    """
    return Scalar(name=self.min_name, dtype=np.int32)
make = lambda: Scalar(name=template(), dtype=cluster.dtype).indexify()
rule = iq_timeinvariant(cluster.trace)
make = lambda: Scalar(name=template(), dtype=cluster.dtype).indexify()
rule = q_sum_of_product