----
system : `System`
Parent `System` object.
"""
if not system.is_active():
return
# allocate and cache the ksp problem for each voi
for voi in iterkeys(system.dumat):
sizes = system._local_unknown_sizes[voi]
lsize = np.sum(sizes[system.comm.rank, :])
size = np.sum(sizes)
if trace: debug("creating petsc matrix of size (%d,%d)" % (lsize, size))
jac_mat = PETSc.Mat().createPython([(lsize, size), (lsize, size)],
comm=system.comm)
if trace: debug("petsc matrix creation DONE for %s" % voi)
jac_mat.setPythonContext(self)
jac_mat.setUp()
if trace: # pragma: no cover
debug("creating KSP object for system", system.pathname)
ksp = self.ksp[voi] = PETSc.KSP().create(comm=system.comm)
if trace: debug("KSP creation DONE")
ksp.setOperators(jac_mat)
ksp.setType(self.options['ksp_type'])
ksp.setGMRESRestart(1000)
ksp.setPCSide(PETSc.PC.Side.RIGHT)
ksp.setMonitor(Monitor(self))
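# A minimal sketch (assumption, not from the original source) of the kind of
# Python context that setPythonContext() expects above: an object providing a
# mult() method that applies the operator matrix-free; `apply_jacobian` is a
# hypothetical placeholder for that action.
class _ShellJacobianContext(object):
    def mult(self, mat, x, y):
        # y <- J @ x, computed without forming J explicitly
        y.setArray(apply_jacobian(x.getArray()))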
self._bc = '0'
# non-dimensionalization for spatial variables
self.R = self.wavelength/(2*np.pi)
# Solve problem of the form Ax = lBx
# define A and B matrices here
# factor of 3 due to 3 field components
self._A = PETSc.Mat()
self._A.create(PETSc.COMM_WORLD)
self._A.setSizes([3*self._N, 3*self._N])
self._A.setType('aij')
self._A.setUp()
self._B = PETSc.Mat()
self._B.create(PETSc.COMM_WORLD)
self._B.setSizes([3*self._N, 3*self._N])
self._B.setType('aij')
self._B.setUp()
# setup the solver
self._solver = SLEPc.EPS()
self._solver.create()
# we need to set up the spectral transformation so that it doesn't try
# to invert
st = self._solver.getST()
st.setType('sinvert')
# Let's use MUMPS for any system solving since it is fast
ksp = st.getKSP()
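# One way to realize the "use MUMPS" comment above (a sketch, assuming a PETSc
# build with MUMPS and a petsc4py version exposing PC.setFactorSolverType):
ksp.setType('preonly')
pc = ksp.getPC()
pc.setType('lu')
pc.setFactorSolverType('mumps')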
options.setValue('snes_linesearch_type', 'basic')
# options.setValue('snes_linesearch_type', 'l2')
# options.setValue('snes_linesearch_type', 'cp')
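# These database entries only take effect once a solver reads them from the
# options database, e.g. (sketch; the SNES object is not part of this excerpt):
snes = PETSc.SNES().create()
snes.setFromOptions()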
pressure_mesh = graph.pressure_mesh
ns_x_mesh = graph.ns_x_mesh
ns_y_mesh = graph.ns_y_mesh
X = _create_X(ns_x_mesh.phi, ns_y_mesh.phi, pressure_mesh.phi, graph)
if PLOT_JACOBIAN:
_plot_jacobian(graph, X)
# Creates the Jacobian matrix structure.
COMM = PETSc.COMM_WORLD
N = len(X)
J = PETSc.Mat().createAIJ(N, comm=COMM)
J.setPreallocationNNZ(N)
logger.info("Building J...")
if FULL_JACOBIAN:
for i in range(N):
for j in range(N):
J.setValue(i, j, 0.0)
else:
j_structure = _calculate_jacobian_mask(N, graph)
for i, j in zip(*np.nonzero(j_structure)):
J.setValue(i, j, 1.0)
logger.info("Done.")
J.setUp()
J.assemble()
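# Sketch of how the preallocated structure could be handed to a nonlinear
# solver (assumption, not from the original source; `form_function` and
# `form_jacobian` are hypothetical callbacks):
snes = PETSc.SNES().create(comm=COMM)
snes.setFunction(form_function, J.createVecLeft())
snes.setJacobian(form_jacobian, J)
snes.setFromOptions()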
def assemble_matrix(mask):
"""Assemble the matrix corresponding to the standard 5-point stencil
approximation of the Laplace operator on the domain defined by
mask == True, where mask is a 2D NumPy array. The stencil wraps
around the grid, i.e. this is an approximation of the Laplacian
on a torus.
The grid spacing is ignored, which is equivalent to assuming equal
spacing in x and y directions.
"""
PETSc.Sys.Print("Assembling the matrix...")
# grid size
nrow, ncol = mask.shape
# create sparse matrix
A = PETSc.Mat()
A.create(PETSc.COMM_WORLD)
A.setSizes([nrow * ncol, nrow * ncol])
A.setType('aij') # sparse
A.setPreallocationNNZ(5)
# precompute values for setting
# diagonal and non-diagonal entries
diagonal = 4.0
offdx = - 1.0
offdy = - 1.0
def R(i, j):
"Map from the (row,column) pair to the linear row number."
return i * ncol + j
# loop over owned block of rows on this process
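# A sketch of how the assembly could continue (not from the original source;
# handling of mask == False cells is omitted for brevity): fill the diagonal
# and the four periodically wrapped neighbours for each owned row, then
# assemble.
rstart, rend = A.getOwnershipRange()
for row in range(rstart, rend):
    i, j = divmod(row, ncol)
    A.setValue(row, row, diagonal)
    A.setValue(row, R((i - 1) % nrow, j), offdx)
    A.setValue(row, R((i + 1) % nrow, j), offdx)
    A.setValue(row, R(i, (j - 1) % ncol), offdy)
    A.setValue(row, R(i, (j + 1) % ncol), offdy)
A.assemblyBegin()
A.assemblyEnd()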
residuals = []
sol = ml.solve(b, tol=self.iterative_solver_tolerance, accel=self.solver_subtype, residuals=residuals)
print("AMG solver time is {}".format(time() - t_solve))
elif self.solver_type == "petsc" and self.has_petsc:
if self.solver_subtype not in ("gmres", "minres", "cg"):
    self.solver_subtype = "cg"
if self.iterative_solver_tolerance < 1e-9:
self.iterative_solver_tolerance = 1e-7
from petsc4py import PETSc
t_solve = time()
pA = PETSc.Mat().createAIJ(size=A.shape, csr=(A.indptr, A.indices, A.data))
pb = PETSc.Vec().createWithArray(b)
ksp = PETSc.KSP()
ksp.create(PETSc.COMM_WORLD)
# ksp.create()
ksp.setType(self.solver_subtype)
ksp.setTolerances(atol=self.iterative_solver_tolerance,
rtol=self.iterative_solver_tolerance)
# Incomplete Cholesky (ICC) preconditioner
ksp.getPC().setType('icc')
# CREATE INITIAL GUESS
psol = PETSc.Vec().createWithArray(np.ones(b.shape[0]))
# SOLVE
ksp.setOperators(pA)
ksp.setFromOptions()
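# A minimal continuation sketch (not part of the original excerpt): run the
# solve and copy the result back into a NumPy array.
ksp.solve(pb, psol)
sol = psol.getArray()
print("PETSc solver time is {}".format(time() - t_solve))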
"""
Function to return the Jacobian matrix
Args:
snes: nonlinear solver object
X: input vector
J: Jacobian matrix
P: matrix that is assembled here and used to build the preconditioner
Returns:
matrix structure flag
"""
self.da.globalToLocal(X, self.localX)
x = self.da.getVecArray(self.localX)
P.zeroEntries()
row = PETSc.Mat.Stencil()
col = PETSc.Mat.Stencil()
(xs, xe), (ys, ye) = self.da.getRanges()
for j in range(ys, ye):
for i in range(xs, xe):
row.index = (i, j)
col.index = (i, j)
row.field = 0
col.field = 0
P.setValueStencil(row, col, 1.0 - self.factor * (-x[i, j, 1] ** 2 - self.params.A))
row.field = 0
col.field = 1
P.setValueStencil(row, col, self.factor * 2.0 * x[i, j, 0] * x[i, j, 1])
row.field = 1
col.field = 1
P.setValueStencil(row, col, 1.0 - self.factor * (2.0 * x[i, j, 0] * x[i, j, 1] - self.params.B))
row.field = 1
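# Sketch of how such a SNES Jacobian callback typically finishes (not part of
# the original excerpt; the remaining cross-term entry is omitted): assemble
# the matrices and return the nonzero-structure flag.
P.assemble()
if J != P:
    J.assemble()
return PETSc.Mat.Structure.SAME_NONZERO_PATTERN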
# Determine the rows owned by this process and insert entry values (for parallel computing).
self.Istart, self.Iend = self.petsc_A.getOwnershipRange()
# Assign values to the coefficient matrix from the scipy sparse CSR one:
#   petscMat = PETSc.Mat().createAIJ(size=existingMat.shape,
#                                    csr=(existingMat.indptr, existingMat.indices, existingMat.data))
size_tmp = self.A.shape
csr1 = (self.A.indptr[self.Istart:self.Iend+1] -
self.A.indptr[self.Istart])
ind1 = self.A.indptr[self.Istart]
ind2 = self.A.indptr[self.Iend]
csr2 = self.A.indices[ind1:ind2]
csr3 = self.A.data[ind1:ind2]
self.petsc_A = PETSc.Mat().createAIJ(size=size_tmp,
csr=(csr1, csr2, csr3))
# Communicate off-processor values
# and setup internal data structures
# for performing parallel operations
self.petsc_A.assemblyBegin()
self.petsc_A.assemblyEnd()
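# A small usage sketch (assumption, not from the original source): once
# assembled, the matrix can be applied to compatible PETSc vectors.
x, y = self.petsc_A.createVecs()
x.set(1.0)
self.petsc_A.mult(x, y)  # y = A @ x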
self.dCoefdx[index+j,col_counter] = 1.0
col_counter += 1
# end for
# end for
# end for
if USE_PETSC:
self.dCoefdx.assemblyBegin()
self.dCoefdx.assemblyEnd()
# end if
# Now do the matrix multiplication
if USE_PETSC:
if self.dPtdCoef:
if self.dPtdx is None:
self.dPtdx = PETSc.Mat()
# end
self.dPtdCoef.matMult(self.dCoefdx,result=self.dPtdx)
# end
else:
if self.dPtdCoef:
self.dPtdx = dot(self.dPtdCoef,self.dCoefdx)
# end
# end if
return
def _DatMat(sparsity, dat=None):
"""A :class:`PETSc.Mat` with global size nx1 or nx1 implemented as a
:class:`.Dat`"""
if isinstance(sparsity.dsets[0], GlobalDataSet):
dset = sparsity.dsets[1]
sizes = ((None, 1), (dset.size*dset.cdim, None))
elif isinstance(sparsity.dsets[1], GlobalDataSet):
dset = sparsity.dsets[0]
sizes = ((dset.size * dset.cdim, None), (None, 1))
else:
raise ValueError("Not a DatMat")
A = PETSc.Mat().createPython(sizes, comm=sparsity.comm)
A.setPythonContext(_DatMatPayload(sparsity, dat))
A.setUp()
return A
self.mu = mu
if backwards:
self._dir = -1.0
else:
self._dir = 1.0