
Commit b72465f

Merge pull request #2635 from devitocodes/petsc_mpi
compiler: Add Devito+PETSc MPI test
2 parents: 857eb9c + 72e8222

5 files changed: 91 additions & 16 deletions

.github/workflows/pytest-petsc.yml

Lines changed: 2 additions & 0 deletions
@@ -11,10 +11,12 @@ on:
     branches:
       - main
       - petsc
+      - biharmonic
   pull_request:
     branches:
       - main
       - petsc
+      - biharmonic
 
 jobs:
   pytest:

devito/logger.py

Lines changed: 2 additions & 1 deletion
@@ -76,9 +76,10 @@ def set_log_level(level, comm=None):
     used, for example, if one wants to log to one file per rank.
     """
     from devito import configuration
+    from devito.mpi.distributed import MPI
 
     if comm is not None and configuration['mpi']:
-        if comm.rank != 0:
+        if comm != MPI.COMM_NULL and comm.rank != 0:
             logger.removeHandler(stream_handler)
             logger.addHandler(logging.NullHandler())
         else:
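
Why the extra guard is needed: when a sub-communicator is passed in, ranks outside its group hold MPI.COMM_NULL, and querying .rank on a null communicator raises an MPI error. A minimal sketch of the failure mode, using mpi4py directly rather than Devito (the Split usage here is illustrative):

from mpi4py import MPI

world = MPI.COMM_WORLD

# Ranks that pass MPI.UNDEFINED to Split receive MPI.COMM_NULL back
color = 0 if world.rank == 0 else MPI.UNDEFINED
sub = world.Split(color, key=world.rank)

# Without the COMM_NULL check, `sub.rank` would raise on the excluded ranks
if sub != MPI.COMM_NULL and sub.rank != 0:
    pass  # e.g. detach the stream handler on non-root ranks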

devito/petsc/iet/passes.py

Lines changed: 10 additions & 10 deletions
@@ -52,17 +52,20 @@ def lower_petsc(iet, **kwargs):
     # Assumption is that all solves are on the same grid
     if len(unique_grids) > 1:
         raise ValueError("All PETScSolves must use the same Grid, but multiple found.")
+    grid = unique_grids.pop()
+    devito_mpi = kwargs['options'].get('mpi', False)
+    comm = grid.distributor._obj_comm if devito_mpi else 'PETSC_COMM_WORLD'
 
     # Create core PETSc calls (not specific to each PETScSolve)
-    core = make_core_petsc_calls(objs, **kwargs)
+    core = make_core_petsc_calls(objs, comm)
 
     setup = []
     subs = {}
     efuncs = {}
 
     for iters, (injectsolve,) in injectsolve_mapper.items():
 
-        builder = Builder(injectsolve, objs, iters, **kwargs)
+        builder = Builder(injectsolve, objs, iters, comm, **kwargs)
 
         setup.extend(builder.solversetup.calls)

@@ -108,9 +111,8 @@ def finalize(iet):
     return iet._rebuild(body=finalize_body)
 
 
-def make_core_petsc_calls(objs, **kwargs):
-    call_mpi = petsc_call_mpi('MPI_Comm_size', [objs['comm'], Byref(objs['size'])])
-
+def make_core_petsc_calls(objs, comm):
+    call_mpi = petsc_call_mpi('MPI_Comm_size', [comm, Byref(objs['size'])])
     return call_mpi, BlankLine
 
 
@@ -123,16 +125,18 @@ class Builder:
     returning subclasses of the objects initialised in __init__,
     depending on the properties of `injectsolve`.
     """
-    def __init__(self, injectsolve, objs, iters, **kwargs):
+    def __init__(self, injectsolve, objs, iters, comm, **kwargs):
         self.injectsolve = injectsolve
         self.objs = objs
         self.iters = iters
+        self.comm = comm
         self.kwargs = kwargs
         self.coupled = isinstance(injectsolve.expr.rhs.fielddata, MultipleFieldData)
         self.args = {
             'injectsolve': self.injectsolve,
             'objs': self.objs,
             'iters': self.iters,
+            'comm': self.comm,
             **self.kwargs
         }
         self.args['solver_objs'] = self.objbuilder.solver_objs

@@ -190,9 +194,6 @@ def populate_matrix_context(efuncs, objs):
     )
 
 
-# TODO: Devito MPI + PETSc testing
-# if kwargs['options']['mpi'] -> communicator = grid.distributor._obj_comm
-communicator = 'PETSC_COMM_WORLD'
 subdms = PointerDM(name='subdms')
 fields = PointerIS(name='fields')
 submats = PointerMat(name='submats')

@@ -208,7 +209,6 @@
 # they are semantically identical.
 objs = frozendict({
     'size': PetscMPIInt(name='size'),
-    'comm': communicator,
     'err': PetscErrorCode(name='err'),
     'block': CallbackMat('block'),
     'submat_arr': PointerMat(name='submat_arr'),
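
The net effect of this change: the communicator is resolved once in lower_petsc and threaded through make_core_petsc_calls and Builder, instead of being baked into the module-level objs dict. A minimal sketch of the selection logic, with opts standing in for kwargs['options']:

def pick_comm(grid, opts):
    # With Devito-level MPI enabled, reuse the grid's domain-decomposition
    # communicator so PETSc and Devito agree on the rank layout; otherwise
    # fall back to the global PETSc communicator.
    if opts.get('mpi', False):
        return grid.distributor._obj_comm
    return 'PETSC_COMM_WORLD'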

devito/petsc/iet/routines.py

Lines changed: 11 additions & 5 deletions
@@ -934,7 +934,7 @@ def _submat_callback_body(self):
 
         ptr = DummyExpr(objs['submat_arr']._C_symbol, Deref(objs['Submats']), init=True)
 
-        mat_create = petsc_call('MatCreate', [self.objs['comm'], Byref(objs['block'])])
+        mat_create = petsc_call('MatCreate', [sobjs['comm'], Byref(objs['block'])])
 
         mat_set_sizes = petsc_call(
             'MatSetSizes', [

@@ -1041,6 +1041,7 @@ def __init__(self, **kwargs):
         self.injectsolve = kwargs.get('injectsolve')
         self.objs = kwargs.get('objs')
         self.sregistry = kwargs.get('sregistry')
+        self.comm = kwargs.get('comm')
         self.fielddata = self.injectsolve.expr.rhs.fielddata
         self.solver_objs = self._build()
 

@@ -1080,6 +1081,7 @@ def _build(self):
             'dmda': DM(sreg.make_name(prefix='da'), dofs=len(targets)),
             'callbackdm': CallbackDM(sreg.make_name(prefix='dm')),
         }
+        base_dict['comm'] = self.comm
         self._target_dependent(base_dict)
         return self._extend_build(base_dict)
 

@@ -1235,7 +1237,7 @@ def _setup(self):
 
         solver_params = self.injectsolve.expr.rhs.solver_parameters
 
-        snes_create = petsc_call('SNESCreate', [objs['comm'], Byref(sobjs['snes'])])
+        snes_create = petsc_call('SNESCreate', [sobjs['comm'], Byref(sobjs['snes'])])
 
         snes_set_dm = petsc_call('SNESSetDM', [sobjs['snes'], dmda])
 

@@ -1261,7 +1263,7 @@ def _setup(self):
             v for v, dim in zip(target.shape_allocated, target.dimensions) if dim.is_Space
         )
         local_x = petsc_call('VecCreateMPIWithArray',
-                             ['PETSC_COMM_WORLD', 1, local_size, 'PETSC_DECIDE',
+                             [sobjs['comm'], 1, local_size, 'PETSC_DECIDE',
                               field_from_ptr, Byref(sobjs['xlocal'])])
 
         # TODO: potentially also need to set the DM and local/global map to xlocal

@@ -1364,11 +1366,12 @@ def _create_dmda_calls(self, dmda):
 
     def _create_dmda(self, dmda):
         objs = self.objs
+        sobjs = self.solver_objs
         grid = self.fielddata.grid
         nspace_dims = len(grid.dimensions)
 
         # MPI communicator
-        args = [objs['comm']]
+        args = [sobjs['comm']]
 
         # Type of ghost nodes
         args.extend(['DM_BOUNDARY_GHOSTED' for _ in range(nspace_dims)])

@@ -1386,7 +1389,10 @@ def _create_dmda(self, dmda):
         # Number of degrees of freedom per node
         args.append(dmda.dofs)
         # "Stencil width" -> size of overlap
+        # TODO: Instead, this probably should be
+        # extracted from fielddata.target._size_outhalo?
         stencil_width = self.fielddata.space_order
+
         args.append(stencil_width)
         args.extend([objs['Null']]*nspace_dims)
 

@@ -1409,7 +1415,7 @@ def _setup(self):
 
         solver_params = self.injectsolve.expr.rhs.solver_parameters
 
-        snes_create = petsc_call('SNESCreate', [objs['comm'], Byref(sobjs['snes'])])
+        snes_create = petsc_call('SNESCreate', [sobjs['comm'], Byref(sobjs['snes'])])
 
         snes_set_dm = petsc_call('SNESSetDM', [sobjs['snes'], dmda])
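
For orientation, the DMDA assembled by _create_dmda corresponds roughly to the petsc4py sketch below (an illustration, not the generated C; nx, space_order and the communicator choice are stand-ins for the lowered values):

from petsc4py import PETSc

nx, space_order = 65, 2    # stand-ins for the grid shape and stencil order
comm = PETSc.COMM_WORLD    # or the grid's communicator when Devito MPI is on

da = PETSc.DMDA().create(
    dim=1,
    sizes=(nx,),
    dof=1,                 # degrees of freedom per node
    boundary_type=(PETSc.DMDA.BoundaryType.GHOSTED,),
    # "Stencil width" = halo size; the TODO above suggests deriving it
    # from fielddata.target._size_outhalo instead of the space order
    stencil_width=space_order,
    comm=comm)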

tests/test_petsc.py

Lines changed: 66 additions & 0 deletions
@@ -20,6 +20,7 @@
 
 
 @skipif('petsc')
+@pytest.fixture(scope='session', autouse=True)
 def test_petsc_initialization():
     # TODO: Temporary workaround until PETSc is automatically
     # initialized

@@ -28,6 +29,14 @@ def test_petsc_initialization():
     PetscInitialize()
 
 
+@skipif('petsc')
+@pytest.mark.parallel(mode=[1, 2, 4, 6])
+def test_petsc_initialization_parallel(mode):
+    configuration['compiler'] = 'custom'
+    os.environ['CC'] = 'mpicc'
+    PetscInitialize()
+
+
 @skipif('petsc')
 def test_petsc_local_object():
     """

@@ -1311,3 +1320,60 @@ def define(self, dimensions):
            + 'MATOP_MULT,(void (*)(void))J00_MatMult0)' in str(create)
 
 # TODO: Test mixed, time dependent solvers
+
+
+class TestMPI:
+    # TODO: Add test for DMDACreate() in parallel
+
+    @pytest.mark.parametrize('nx, unorm', [
+        (17, 7.441506654790017),
+        (33, 10.317652759863675),
+        (65, 14.445123374862874),
+        (129, 20.32492895656658),
+        (257, 28.67050632840985)
+    ])
+    @skipif('petsc')
+    @pytest.mark.parallel(mode=[2, 4, 8])
+    def test_laplacian_1d(self, nx, unorm, mode):
+        """
+        """
+        configuration['compiler'] = 'custom'
+        os.environ['CC'] = 'mpicc'
+        PetscInitialize()
+
+        class SubSide(SubDomain):
+            def __init__(self, side='left', grid=None):
+                self.side = side
+                self.name = f'sub{side}'
+                super().__init__(grid=grid)
+
+            def define(self, dimensions):
+                x, = dimensions
+                return {x: (self.side, 1)}
+
+        grid = Grid(shape=(nx,), dtype=np.float64)
+        sub1, sub2 = [SubSide(side=s, grid=grid) for s in ('left', 'right')]
+
+        u = Function(name='u', grid=grid, space_order=2)
+        f = Function(name='f', grid=grid, space_order=2)
+
+        u0 = Constant(name='u0', value=-1.0, dtype=np.float64)
+        u1 = Constant(name='u1', value=-np.exp(1.0), dtype=np.float64)
+
+        eqn = Eq(-u.laplace, f, subdomain=grid.interior)
+
+        X = np.linspace(0, 1.0, nx).astype(np.float64)
+        f.data[:] = np.float64(np.exp(X))
+
+        # Create boundary condition expressions using subdomains
+        bcs = [EssentialBC(u, u0, subdomain=sub1)]
+        bcs += [EssentialBC(u, u1, subdomain=sub2)]
+
+        petsc = PETScSolve([eqn] + bcs, target=u, solver_parameters={'ksp_rtol': 1e-10})
+
+        op = Operator(petsc, language='petsc')
+        op.apply()
+
+        # Expected norm computed "manually" from sequential run
+        # What rtol and atol should be used?
+        assert np.isclose(norm(u), unorm, rtol=1e-13, atol=1e-13)
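
A note on the hard-coded norms: the manufactured solution for this setup is u(x) = -exp(x), since -u'' = exp(x) with u(0) = -1 and u(1) = -exp(1). The reference values are therefore close to the l2 norm of the exact solution sampled on each grid, differing only by the second-order discretization error, which can be sanity-checked with:

import numpy as np

for nx in (17, 33, 65, 129, 257):
    X = np.linspace(0, 1.0, nx)
    # l2 norm of the exact solution u(x) = -exp(x) on the grid;
    # e.g. ~7.4413 for nx=17 against the tabulated 7.4415...
    print(nx, np.sqrt(np.sum(np.exp(X)**2)))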
