Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def init_configuration(configuration=configuration, env_vars_mapper=env_vars_mapper,
                       env_vars_deprecated=env_vars_deprecated):
    """
    Populate `configuration` with user-provided options read from the
    environment.

    Parameters
    ----------
    configuration : Parameters-like, optional
        The configuration object to populate (defaults to the module-level
        object of the same name).
    env_vars_mapper : dict, optional
        Maps environment variable names to configuration keys.
    env_vars_deprecated : dict, optional
        Maps deprecated environment variable names to
        ``(replacement_var, deprecation_message)`` pairs.

    Raises
    ------
    NotImplementedError
        If `DEVITO_CONFIG` is set, since file-based configuration is not
        supported yet.

    NOTE(review): this block is truncated at the end of the visible region
    (the `try` in the validation loop is cut off before its `except`).
    """
    # Populate `configuration` with user-provided options
    if environ.get('DEVITO_CONFIG') is None:
        # It is important to configure `platform`, `compiler` and `backend` in this order
        process_order = filter_ordered(['platform', 'compiler', 'backend'] +
                                       list(env_vars_mapper.values()))
        # Process env vars in `process_order`; unset variables fall back to
        # the configuration defaults
        queue = sorted(env_vars_mapper.items(), key=lambda i: process_order.index(i[1]))
        unprocessed = OrderedDict([(v, environ.get(k, configuration._defaults[v]))
                                   for k, v in queue])
        # Handle deprecated env vars: honour a deprecated variable only when
        # its replacement is not also set
        mapper = dict(queue)
        for k, (v, msg) in env_vars_deprecated.items():
            if environ.get(k):
                warning("`%s` is deprecated. %s" % (k, msg))
                if environ.get(v):
                    warning("Both `%s` and `%s` set. Ignoring `%s`" % (k, v, k))
                else:
                    warning("Setting `%s=%s`" % (v, environ[k]))
                    unprocessed[mapper[v]] = environ[k]
    else:
        # Attempt reading from the specified configuration file
        raise NotImplementedError("Devito doesn't support configuration via file yet.")
    # Parameters validation
    for k, v in unprocessed.items():
        try:
            items = v.split(';')
            # Env variable format: 'var=k1:v1;k2:v2:k3:v3:...'
            keys, values = zip(*[i.split(':') for i in items])
            # Casting
# NOTE(review): fragment of a larger CPU-detection routine. The enclosing
# loop (presumably over /proc/cpuinfo lines) and the definitions of `line`,
# `current_info`, `mapper` and `cpu_info` are outside this view.
        else:
            # Ongoing section: only the fields needed to count physical
            # cores are recorded
            if (line.startswith('physical id') or line.startswith('cpu cores')):
                key, value = line.split('\t:', 1)
                current_info[key] = int(value)
    # Aggregate the per-section core counts accumulated in `mapper`
    physical = sum(mapper.values())
    if not physical:
        # Fallback 1: it should now be fine to use psutil
        physical = psutil.cpu_count(logical=False)
        if not physical:
            # Fallback 2: we might end up here on more exotic platforms such as Power8
            # Hopefully we can rely on `lscpu`
            try:
                physical = lscpu()['Core(s) per socket'] * lscpu()['Socket(s)']
            except KeyError:
                # Give up: assume a single physical core rather than failing
                warning("Physical core count autodetection failed")
                physical = 1
    cpu_info['physical'] = physical
    return cpu_info
    Field data to plot.
xmax : int, optional
    Length of the x-axis.
ymax : int, optional
    Length of the y-axis.
view : int, optional
    View point to initialise.
"""
# NOTE(review): fragment of a 3D surface-plotting utility; the signature,
# the opening of the docstring, and the rest of the body (including the
# uses of `zmin`, `view` and `linewidth`) are outside this view.
# Reject inverted axis bounds up front
if xmin > xmax or ymin > ymax:
    raise ValueError("Dimension min cannot be larger than dimension max.")
if (zmin is not None and zmax is not None):
    if zmin > zmax:
        raise ValueError("Dimension min cannot be larger than dimension max.")
elif (zmin is None and zmax is not None):
    # Only an upper z-bound given: warn if it would hide the whole field
    if np.min(field) >= zmax:
        warning("zmax is less than field's minima. Figure deceptive.")
elif (zmin is not None and zmax is None):
    # Only a lower z-bound given: warn if it would hide the whole field
    if np.max(field) <= zmin:
        warning("zmin is larger than field's maxima. Figure deceptive.")
# Physical coordinates spanning the field's grid shape
x_coord = np.linspace(xmin, xmax, field.shape[0])
y_coord = np.linspace(ymin, ymax, field.shape[1])
fig = pyplot.figure(figsize=(11, 7), dpi=100)
ax = fig.gca(projection='3d')
# 'ij' indexing keeps axis 0 of `field` on the x-axis
X, Y = np.meshgrid(x_coord, y_coord, indexing='ij')
ax.plot_surface(X, Y, field[:], cmap=cm.viridis, rstride=1, cstride=1,
                linewidth=linewidth, antialiased=False)
# Enforce axis measures and set view if given
ax.set_xlim(xmin, xmax)
ax.set_ylim(ymin, ymax)
if zmin is None:
    zmin = np.min(field)
def load(self):
    """
    Load timing results from individually keyed files.

    For each parameter combination produced by `self.sweep()`, read the
    corresponding JSON results file from `self.resultsdir` and populate
    `self.timings` and `self.meta`, keyed by the parameter-items tuple.
    Files that are missing, unreadable or malformed are skipped with a
    warning, preserving the best-effort semantics of the original code.
    """
    for params in self.sweep():
        filename = '%s_%s.json' % (self.name, self.param_string(params.items()))
        try:
            with open(path.join(self.resultsdir, filename), 'r') as f:
                datadict = json.loads(f.read())
            self.timings[tuple(params.items())] = datadict['timings']
            self.meta[tuple(params.items())] = datadict['meta']
        except (OSError, ValueError, KeyError):
            # OSError: file missing/unreadable; ValueError: invalid JSON
            # (json.JSONDecodeError subclasses it); KeyError: missing
            # 'timings'/'meta' fields. A bare `except:` previously also
            # swallowed KeyboardInterrupt/SystemExit and genuine bugs.
            warning("Could not load file: %s" % filename)
# NOTE(review): fragment of a larger script; `nzinds`, `model`, `shape`,
# `source_mask` and the opening of the truncated call on the first line
# below are defined outside this view.
                     dtype=np.int32)
source_id = Function(name='source_id', grid=model.grid, dtype=np.int32, time_order=2,
                     space_order=2)
# Give every nonzero mask point a unique, 1-based identifier
source_id.data[nzinds[0], nzinds[1], nzinds[2]] = tuple(np.arange(1, len(nzinds[0])+1))
source_mask.data[nzinds[0], nzinds[1], nzinds[2]] = 1
# import pdb; pdb.set_trace()
print("Number of unique affected points is:", len(nzinds[0]))
# Sanity checks: ids run from 1 at the first nonzero point to N at the last
# (the third assert re-checks the last point via an explicit index)
assert(source_id.data[nzinds[0][0], nzinds[1][0], nzinds[2][0]] == 1)
assert(source_id.data[nzinds[0][-1], nzinds[1][-1], nzinds[2][-1]] == len(nzinds[0]))
assert(source_id.data[nzinds[0][len(nzinds[0])-1], nzinds[1][len(nzinds[0])-1],
       nzinds[2][len(nzinds[0])-1]] == len(nzinds[0]))
warning("---Source_mask and source_id is built here-------")
nnz_shape = (model.grid.shape[0], model.grid.shape[1])  # Change only 3rd dim
x, y, z = model.grid.dimensions
# Per-(x, y) column: number of nonzero mask entries along z
nnz_sp_source_mask = Function(name='nnz_sp_source_mask', shape=shape[:2],
                              dimensions=(x, y), dtype=np.int32)
nnz_sp_source_mask.data[:, :] = source_mask.data.sum(2)
inds = np.where(source_mask.data == 1)
# = nnz_sp_source_mask.data[:,:].max()
maxz = len(np.unique(inds[2]))
sparse_shape = (model.grid.shape[0], model.grid.shape[1], maxz)  # Change only 3rd dim
assert(len(nnz_sp_source_mask.dimensions) == 2)
# NOTE(review): the following call is truncated at the end of this view
sp_source_mask = Function(name='sp_source_mask', shape=sparse_shape,
# NOTE(review): tail of an unpickling routine (likely an Operator
# `__setstate__`); the signature and the definition of `soname` are
# outside this view.
binary = state.pop('binary', None)
# Restore every remaining pickled attribute verbatim
for k, v in state.items():
    setattr(self, k, v)
# If the `sonames` don't match, there *might* be a hidden bug as the
# unpickled Operator might be generating code that differs from that
# generated by the pickled Operator. For example, a stupid bug that we
# had to fix was due to rebuilding SymPy expressions which weren't
# automatically getting the flag `evaluate=False`, thus producing x+2
# on the unpickler instead of x+1+1). However, different `sonames`
# doesn't necessarily means there's a bug: if the unpickler and the
# pickler are two distinct processes and the unpickler runs with a
# different `configuration` dictionary, then the `sonames` might indeed
# be different, depending on which entries in `configuration` differ.
if soname is not None:
    if soname != self._soname:
        warning("The pickled and unpickled Operators have different .sonames; "
                "this might be a bug, or simply a harmless difference in "
                "`configuration`. You may check they produce the same code.")
    # Persist the pickled shared object and reload it so the Operator is
    # immediately callable after unpickling
    self._compiler.save(self._soname, binary)
    self._lib = self._compiler.load(self._soname)
    self._lib.name = self._soname
def __call__(cls, *args, **kwargs):
    """
    Instantiate the backend-specific counterpart of this class.

    A class of the exact same name is looked up in the currently selected
    backend module; all positional and keyword arguments are forwarded to
    its constructor.
    """
    backend = cls._backend
    # Missing entry means the selected backend does not provide this class
    try:
        impl = backend.__dict__[cls.__name__]
    except KeyError as e:
        warning('Backend %s does not appear to implement class %s'
                % (backend.__name__, cls.__name__))
        raise e
    return impl(*args, **kwargs)
def grid(self):
    """
    The Grid attached to this expression's arguments.

    Collects the (distinct, non-None) `grid` attributes of the objects in
    `self._args_diff`. If more than one grid is found, a warning is emitted
    and an arbitrary one of them is returned.

    Raises
    ------
    ValueError
        If none of the arguments carries a grid.
    """
    grids = {getattr(i, 'grid', None) for i in self._args_diff} - {None}
    if not grids:
        # Raise directly instead of catching KeyError from `set.pop`, so
        # callers see a clean ValueError without a chained KeyError context
        raise ValueError("No grid found")
    if len(grids) > 1:
        warning("Expression contains multiple grids, returning first found")
    return grids.pop()
def set_backend(backend):
    """
    Select `backend` as the active Devito backend.

    The backend submodule is imported, registered in `backends`, and
    installed on `_BackendSelector`. Switching away from an
    already-selected backend triggers a warning.
    """
    global _BackendSelector
    already_selected = _BackendSelector._backend != void
    if already_selected:
        warning("WARNING: Switching backend to %s" % backend)
    try:
        # A non-empty `fromlist` makes `__import__` return the submodule
        # (i.e. the backend) rather than the top-level package
        module = __import__('devito.%s' % backend, fromlist=['None'])
    except ImportError as e:
        warning('Unable to import backend %s' % backend)
        raise e
    # Register and activate the freshly imported backend
    backends[backend] = module
    _BackendSelector._backend = module
def _arg_check(self, args, intervals):
    """
    Check that ``args`` contains legal runtime values bound to ``self``.

    Raises
    ------
    InvalidArgument
        If ``args`` carries no value for this symbol. A dtype mismatch
        only produces a warning, never an exception.
    """
    if self.name not in args:
        raise InvalidArgument("No runtime value for %s" % self.name)
    runtime_value = args[self.name]
    try:
        dtype_mismatch = runtime_value.dtype != self.dtype
    except AttributeError:
        # Plain numbers carry no `dtype` attribute; nothing to check
        return
    if dtype_mismatch:
        warning("Data type %s of runtime value `%s` does not match the "
                "Constant data type %s" % (runtime_value.dtype, self.name,
                                           self.dtype))