Test with single precision #4006

Draft · wants to merge 6 commits into base: master
12 changes: 11 additions & 1 deletion .github/workflows/build.yml
@@ -59,12 +59,12 @@ jobs:

- name: Build Firedrake
run: |
unset PETSC_DIR PETSC_ARCH SLEPC_DIR
cd ..
# Linting should ignore unquoted shell variable $COMPLEX
# shellcheck disable=SC2086
./firedrake/scripts/firedrake-install \
$COMPLEX \
--honour-petsc-dir \
--mpicc="$MPICH_DIR"/mpicc \
--mpicxx="$MPICH_DIR"/mpicxx \
--mpif90="$MPICH_DIR"/mpif90 \
@@ -92,6 +92,7 @@ jobs:
- name: Install test dependencies
id: build
run: |
unset PETSC_DIR PETSC_ARCH SLEPC_DIR
sudo apt update
sudo apt -y install parallel
. ../firedrake_venv/bin/activate
@@ -101,6 +102,7 @@

- name: Run tests (nprocs = 1)
run: |
unset PETSC_DIR PETSC_ARCH SLEPC_DIR
: # Use pytest-xdist here so we can have a single collated output (not possible
: # for parallel tests)
. ../firedrake_venv/bin/activate
@@ -110,42 +112,49 @@
# Run even if earlier tests failed
if: ${{ success() || steps.build.conclusion == 'success' }}
run: |
unset PETSC_DIR PETSC_ARCH SLEPC_DIR
. ../firedrake_venv/bin/activate
firedrake-run-split-tests 2 6 "$EXTRA_PYTEST_ARGS --junit-xml=firedrake2_{#}.xml"

- name: Run tests (nprocs = 3)
if: ${{ success() || steps.build.conclusion == 'success' }}
run: |
unset PETSC_DIR PETSC_ARCH SLEPC_DIR
. ../firedrake_venv/bin/activate
firedrake-run-split-tests 3 4 "$EXTRA_PYTEST_ARGS --junit-xml=firedrake3_{#}.xml"

- name: Run tests (nprocs = 4)
if: ${{ success() || steps.build.conclusion == 'success' }}
run: |
unset PETSC_DIR PETSC_ARCH SLEPC_DIR
. ../firedrake_venv/bin/activate
firedrake-run-split-tests 4 3 "$EXTRA_PYTEST_ARGS --junit-xml=firedrake4_{#}.xml"

- name: Run tests (nprocs = 5)
if: ${{ success() || steps.build.conclusion == 'success' }}
run: |
unset PETSC_DIR PETSC_ARCH SLEPC_DIR
. ../firedrake_venv/bin/activate
firedrake-run-split-tests 5 2 "$EXTRA_PYTEST_ARGS --junit-xml=firedrake5_{#}.xml"

- name: Run tests (nprocs = 6)
if: ${{ success() || steps.build.conclusion == 'success' }}
run: |
unset PETSC_DIR PETSC_ARCH SLEPC_DIR
. ../firedrake_venv/bin/activate
firedrake-run-split-tests 6 2 "$EXTRA_PYTEST_ARGS --junit-xml=firedrake6_{#}.xml"

- name: Run tests (nprocs = 7)
if: ${{ success() || steps.build.conclusion == 'success' }}
run: |
unset PETSC_DIR PETSC_ARCH SLEPC_DIR
. ../firedrake_venv/bin/activate
firedrake-run-split-tests 7 1 "$EXTRA_PYTEST_ARGS --junit-xml=firedrake7_{#}.xml"

- name: Run tests (nprocs = 8)
if: ${{ success() || steps.build.conclusion == 'success' }}
run: |
unset PETSC_DIR PETSC_ARCH SLEPC_DIR
. ../firedrake_venv/bin/activate
firedrake-run-split-tests 8 1 "$EXTRA_PYTEST_ARGS --junit-xml=firedrake8_{#}.xml"

@@ -171,6 +180,7 @@ jobs:
- name: Test pyadjoint
if: ${{ matrix.scalar-type == 'real' }}
run: |
unset PETSC_DIR PETSC_ARCH SLEPC_DIR
. ../firedrake_venv/bin/activate
cd ../firedrake_venv/src/pyadjoint
python -m pytest \
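
Note on the workflow changes above: each test step now runs unset PETSC_DIR PETSC_ARCH SLEPC_DIR so that the PETSc built inside firedrake_venv is used rather than an externally configured build. A minimal sketch of how a step could verify this at runtime (illustrative only, not part of this PR; assumes petsc4py is importable):

```python
# Hedged sketch: check that no external PETSc selection variables are set and
# report which PETSc this petsc4py was built against.
import os
import petsc4py

for var in ("PETSC_DIR", "PETSC_ARCH", "SLEPC_DIR"):
    assert var not in os.environ, f"{var} is set and may shadow the venv PETSc"

cfg = petsc4py.get_config()  # dict with the PETSC_DIR/PETSC_ARCH used at build time
print(petsc4py.__file__)     # expected to live under firedrake_venv
print(cfg["PETSC_DIR"], cfg.get("PETSC_ARCH", ""))
```
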
8 changes: 4 additions & 4 deletions firedrake/assemble.py
@@ -26,7 +26,7 @@
from firedrake.petsc import PETSc
from firedrake.slate import slac, slate
from firedrake.slate.slac.kernel_builder import CellFacetKernelArg, LayerCountKernelArg
from firedrake.utils import ScalarType, assert_empty, tuplify
from firedrake.utils import ScalarType, assert_empty, tuplify, IntType
from pyop2 import op2
from pyop2.exceptions import MapValueError, SparsityFormatError
from pyop2.types.mat import _GlobalMatPayload, _DatMatPayload
@@ -1699,8 +1699,8 @@ def _make_mat_global_kernel_arg(self, Vrow, Vcol):
else:
rmap_arg, cmap_arg = (V.topological.entity_node_map(self._mesh.topology, self._integral_type, self._subdomain_id, self._all_integer_subdomain_ids)._global_kernel_arg for V in [Vrow, Vcol])
# PyOP2 matrix objects have scalar dims so we flatten them here
rdim = numpy.prod(self._get_dim(relem), dtype=int)
cdim = numpy.prod(self._get_dim(celem), dtype=int)
rdim = numpy.prod(self._get_dim(relem), dtype=IntType)
cdim = numpy.prod(self._get_dim(celem), dtype=IntType)
return op2.MatKernelArg((((rdim, cdim),),), (rmap_arg, cmap_arg), unroll=self._unroll)

@staticmethod
@@ -1777,7 +1777,7 @@ def _as_global_kernel_arg_coefficient(_, self):
@_as_global_kernel_arg.register(kernel_args.ConstantKernelArg)
def _as_global_kernel_arg_constant(_, self):
const = next(self._constants)
value_size = numpy.prod(const.ufl_shape, dtype=int)
value_size = numpy.prod(const.ufl_shape, dtype=IntType)
return op2.GlobalKernelArg((value_size,))


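
The dtype changes in assemble.py (and in the files below) all follow one pattern: sizes and arrays handed to PyOP2/PETSc are built with the types PETSc was configured with (IntType, RealType) instead of hard-coded int or np.double. A small illustrative sketch of the distinction (assumes petsc4py; the shapes are made up, not taken from the PR):

```python
# Hedged sketch: PETSc.IntType and PETSc.RealType follow the PETSc build
# configuration (e.g. 32- vs 64-bit indices, single vs double precision),
# whereas dtype=int / np.double fix the platform defaults regardless of how
# PETSc was built.
import numpy as np
from petsc4py import PETSc

shape = (3, 2)                                   # hypothetical element value shape
dim = np.prod(shape, dtype=PETSc.IntType)        # width matches PetscInt
coords = np.zeros((4, 3), dtype=PETSc.RealType)  # precision matches PetscReal
print(dim.dtype, coords.dtype)
```
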
4 changes: 2 additions & 2 deletions firedrake/cython/supermeshimpl.pyx
@@ -160,8 +160,8 @@ def intersection_finder(mesh_A, mesh_B):

libsupermesh_tree_intersection_finder_query_output(&nindices)

indices = numpy.empty((nindices,), dtype=int)
indptr = numpy.empty((mesh_A.num_cells() + 1,), dtype=int)
indices = numpy.empty((nindices,), dtype=IntType)
indptr = numpy.empty((mesh_A.num_cells() + 1,), dtype=IntType)

libsupermesh_tree_intersection_finder_get_output(&ncells_A, &nindices, <long*>indices.data, <long*>indptr.data)

2 changes: 1 addition & 1 deletion firedrake/functionspaceimpl.py
@@ -509,7 +509,7 @@ def __init__(self, mesh, element, name=None):
the number of components of their
:attr:`finat.ufl.finiteelementbase.FiniteElementBase.value_shape`."""

self.block_size = int(numpy.prod(self.shape, dtype=int))
self.block_size = int(numpy.prod(self.shape, dtype=PETSc.IntType))
r"""The total number of degrees of freedom at each function
space node."""
self.name = name
4 changes: 2 additions & 2 deletions firedrake/interpolation.py
@@ -1014,9 +1014,9 @@ def callable():
# Make sure we have an expression of the right length i.e. a value for
# each component in the value shape of each function space
loops = []
if numpy.prod(expr.ufl_shape, dtype=int) != V.value_size:
if numpy.prod(expr.ufl_shape, dtype=PETSc.IntType) != V.value_size:
raise RuntimeError('Expression of length %d required, got length %d'
% (V.value_size, numpy.prod(expr.ufl_shape, dtype=int)))
% (V.value_size, numpy.prod(expr.ufl_shape, dtype=PETSc.IntType)))

if len(V) == 1:
loops.extend(_interpolator(V, tensor, expr, subset, arguments, access, bcs=bcs))
48 changes: 24 additions & 24 deletions firedrake/mesh.py
@@ -422,7 +422,7 @@ def _from_triangle(filename, dim, comm):
nodecount = header[0]
nodedim = header[1]
assert nodedim == dim
coordinates = np.loadtxt(nodefile, usecols=list(range(1, dim+1)), skiprows=1, dtype=np.double)
coordinates = np.loadtxt(nodefile, usecols=list(range(1, dim+1)), skiprows=1, dtype=PETSc.RealType)
assert nodecount == coordinates.shape[0]

with open(basename+".ele") as elefile:
@@ -477,7 +477,7 @@ def plex_from_cell_list(dim, cells, coords, comm, name=None):
# and double (not PetscInt, PetscReal).
if comm.rank == 0:
cells = np.asarray(cells, dtype=np.int32)
coords = np.asarray(coords, dtype=np.double)
coords = np.asarray(coords, dtype=PETSc.RealType)
comm.bcast(cells.shape, root=0)
comm.bcast(coords.shape, root=0)
# Provide the actual data on rank 0.
@@ -491,7 +491,7 @@ def plex_from_cell_list(dim, cells, coords, comm, name=None):
# A subsequent call to plex.distribute() takes care of parallel partitioning
plex = PETSc.DMPlex().createFromCellList(dim,
np.zeros(cell_shape, dtype=np.int32),
np.zeros(coord_shape, dtype=np.double),
np.zeros(coord_shape, dtype=PETSc.RealType),
comm=icomm)
if name is not None:
plex.setName(name)
@@ -522,7 +522,7 @@ def _from_cell_list(dim, cells, coords, comm, name=None):
# and double (not PetscInt, PetscReal).
if comm.rank == 0:
cells = np.asarray(cells, dtype=np.int32)
coords = np.asarray(coords, dtype=np.double)
coords = np.asarray(coords, dtype=PETSc.RealType)
comm.bcast(cells.shape, root=0)
comm.bcast(coords.shape, root=0)
# Provide the actual data on rank 0.
@@ -536,7 +536,7 @@ def _from_cell_list(dim, cells, coords, comm, name=None):
# A subsequent call to plex.distribute() takes care of parallel partitioning
plex = PETSc.DMPlex().createFromCellList(dim,
np.zeros(cell_shape, dtype=np.int32),
np.zeros(coord_shape, dtype=np.double),
np.zeros(coord_shape, dtype=PETSc.RealType),
comm=comm)
if name is not None:
plex.setName(name)
@@ -619,7 +619,7 @@ def callback(self):
# Mark OP2 entities and derive the resulting Plex renumbering
with PETSc.Log.Event("Mesh: numbering"):
self._mark_entity_classes()
self._entity_classes = dmcommon.get_entity_classes(self.topology_dm).astype(int)
self._entity_classes = dmcommon.get_entity_classes(self.topology_dm).astype(IntType)
if perm_is:
self._dm_renumbering = perm_is
else:
@@ -1816,13 +1816,13 @@ def node_classes(self, nodes_per_entity, real_tensorproduct=False):
:returns: the number of nodes in each of core, owned, and ghost classes.
"""
if real_tensorproduct:
nodes = np.asarray(nodes_per_entity)
nodes = np.asarray(nodes_per_entity, dtype=IntType)
nodes_per_entity = sum(nodes[:, i] for i in range(2))
return super(ExtrudedMeshTopology, self).node_classes(nodes_per_entity)
elif self.variable_layers:
return extnum.node_classes(self, nodes_per_entity)
else:
nodes = np.asarray(nodes_per_entity)
nodes = np.asarray(nodes_per_entity, dtype=IntType)
if self.extruded_periodic:
nodes_per_entity = sum(nodes[:, i]*(self.layers - 1) for i in range(2))
else:
@@ -2482,8 +2482,8 @@ def spatial_index(self):

# Calculate the bounding boxes for all cells by running a kernel
V = functionspace.VectorFunctionSpace(self, "DG", 0, dim=gdim)
coords_min = function.Function(V, dtype=RealType)
coords_max = function.Function(V, dtype=RealType)
coords_min = function.Function(V, dtype=PETSc.RealType)
coords_max = function.Function(V, dtype=PETSc.RealType)

coords_min.dat.data.fill(np.inf)
coords_max.dat.data.fill(-np.inf)
@@ -2493,7 +2493,7 @@ def spatial_index(self):
raise ValueError("Coordinate field has non-zero imaginary part")
coords = function.Function(self.coordinates.function_space(),
val=self.coordinates.dat.data_ro_with_halos.real.copy(),
dtype=RealType)
dtype=PETSc.RealType)
else:
coords = self.coordinates

@@ -2584,7 +2584,7 @@ def locate_cell_and_reference_coordinate(self, x, tolerance=None, cell_ignore=No
(cell number, reference coordinates) of type (int, numpy array),
or, when point is not in the domain, (None, None).
"""
x = np.asarray(x)
x = np.asarray(x, dtype=PETSc.RealType)
if x.size != self.geometric_dimension():
raise ValueError("Point must have the same geometric dimension as the mesh")
x = x.reshape((1, self.geometric_dimension()))
@@ -2621,7 +2621,7 @@ def locate_cells_ref_coords_and_dists(self, xs, tolerance=None, cells_ignore=Non
tolerance = self.tolerance
else:
self.tolerance = tolerance
xs = np.asarray(xs, dtype=utils.ScalarType)
xs = np.asarray(xs, dtype=PETSc.RealType)
xs = xs.real.copy()
if xs.shape[1] != self.geometric_dimension():
raise ValueError("Point coordinate dimension does not match mesh geometric dimension")
@@ -3349,7 +3349,7 @@ def VertexOnlyMesh(mesh, vertexcoords, reorder=None, missing_points_behaviour='e
else:
mesh.tolerance = tolerance
mesh.init()
vertexcoords = np.asarray(vertexcoords, dtype=RealType)
vertexcoords = np.asarray(vertexcoords, dtype=PETSc.RealType)
if reorder is None:
reorder = parameters["reorder_meshes"]
gdim = mesh.geometric_dimension()
@@ -3578,7 +3578,7 @@ def _pic_swarm_in_mesh(
parent_mesh.tolerance = tolerance

# Check coords
coords = np.asarray(coords, dtype=RealType)
coords = np.asarray(coords, dtype=PETSc.RealType)

plex = parent_mesh.topology.topology_dm
tdim = parent_mesh.topological_dimension()
@@ -4094,10 +4094,10 @@ def _parent_mesh_embedding(
ncoords_local = coords_local.shape[0]
coords_global = coords_local
ncoords_global = coords_global.shape[0]
global_idxs_global = np.arange(coords_global.shape[0])
global_idxs_global = np.arange(coords_global.shape[0], dtype=IntType)
input_coords_idxs_local = np.arange(ncoords_local)
input_coords_idxs_global = input_coords_idxs_local
input_ranks_local = np.zeros(ncoords_local, dtype=int)
input_ranks_local = np.zeros(ncoords_local, dtype=IntType)
input_ranks_global = input_ranks_local
else:
# Here, we have to assume that all points we can see are unique.
@@ -4113,7 +4113,7 @@ def _parent_mesh_embedding(
# The below code looks complicated but it's just an allgather of the
# (variable length) coords_local array such that they are concatenated.
coords_local_size = np.array(coords_local.size)
coords_local_sizes = np.empty(parent_mesh._comm.size, dtype=int)
coords_local_sizes = np.empty(parent_mesh._comm.size, dtype=IntType)
parent_mesh._comm.Allgatherv(coords_local_size, coords_local_sizes)
coords_global = np.empty(
(ncoords_global, coords.shape[1]), dtype=coords_local.dtype
@@ -4127,12 +4127,12 @@ def _parent_mesh_embedding(
# global_idxs_global = np.arange(startidx, endidx)
global_idxs_global = np.arange(coords_global.shape[0])
input_coords_idxs_local = np.arange(ncoords_local)
input_coords_idxs_global = np.empty(ncoords_global, dtype=int)
input_coords_idxs_global = np.empty(ncoords_global, dtype=IntType)
parent_mesh._comm.Allgatherv(
input_coords_idxs_local, (input_coords_idxs_global, ncoords_local_allranks)
)
input_ranks_local = np.full(ncoords_local, parent_mesh._comm.rank, dtype=int)
input_ranks_global = np.empty(ncoords_global, dtype=int)
input_ranks_local = np.full(ncoords_local, parent_mesh._comm.rank, dtype=IntType)
input_ranks_global = np.empty(ncoords_global, dtype=IntType)
parent_mesh._comm.Allgatherv(
input_ranks_local, (input_ranks_global, ncoords_local_allranks)
)
@@ -4222,7 +4222,7 @@ def _parent_mesh_embedding(
# point.
changed_ranks_tied = changed_ranks & ~changed_ref_cell_dists_l1
if any(changed_ranks_tied):
cells_ignore_T = np.asarray([np.copy(parent_cell_nums)])
cells_ignore_T = np.asarray([np.copy(parent_cell_nums)], dtype=IntType)
while any(changed_ranks_tied):
(
parent_cell_nums[changed_ranks_tied],
@@ -4347,8 +4347,8 @@ def _swarm_original_ordering_preserve(
(plex_parent_cell_nums_global, ncoords_local_allranks),
)

reference_coords_local_size = np.array(reference_coords_local.size)
reference_coords_local_sizes = np.empty(comm.size, dtype=int)
reference_coords_local_size = np.array(reference_coords_local.size, dtype=IntType)
reference_coords_local_sizes = np.empty(comm.size, dtype=IntType)
comm.Allgatherv(reference_coords_local_size, reference_coords_local_sizes)
reference_coords_global = np.empty(
(ncoords_global, reference_coords_local.shape[1]),
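
The mesh.py changes above also make the buffers passed to collective MPI calls (Allgatherv) consistent: counts, index arrays and rank arrays are all created with IntType so that every rank agrees on the element type. A hedged sketch of the pattern (mpi4py, meant to be run under mpiexec; the variable names are illustrative, not from the PR):

```python
# Hedged sketch: send and receive buffers in Allgatherv must share an element
# type, so both sides are allocated with PETSc's integer type.
import numpy as np
from mpi4py import MPI
from petsc4py import PETSc

comm = MPI.COMM_WORLD
local_count = np.array(comm.rank + 1, dtype=PETSc.IntType)  # one value per rank
all_counts = np.empty(comm.size, dtype=PETSc.IntType)
comm.Allgatherv(local_count, all_counts)                     # mirrors coords_local_sizes above
print(comm.rank, all_counts)
```
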
2 changes: 1 addition & 1 deletion firedrake/mg/kernels.py
@@ -506,7 +506,7 @@ def set_coordinates(self, domain):
def _coefficient(self, coefficient, name):
element = create_element(coefficient.ufl_element())
shape = self.shape + element.index_shape
size = numpy.prod(shape, dtype=int)
size = numpy.prod(shape, dtype=IntType)
funarg = lp.GlobalArg(name, dtype=ScalarType, shape=(size,))
expression = gem.reshape(gem.Variable(name, (size, )), shape)
expression = gem.partial_indexed(expression, self.indices)
4 changes: 2 additions & 2 deletions firedrake/pointquery_utils.py
@@ -149,10 +149,10 @@ def to_reference_coords_newton_step(ufl_coordinate_element, parameters, x0_dtype
x0_expr = builder._coefficient(x0, "x0")
loopy_args = [
lp.GlobalArg(
"C", dtype=ScalarType, shape=(numpy.prod(Cexpr.shape, dtype=int),)
"C", dtype=ScalarType, shape=(numpy.prod(Cexpr.shape, dtype=IntType),)
),
lp.GlobalArg(
"x0", dtype=x0_dtype, shape=(numpy.prod(x0_expr.shape, dtype=int),)
"x0", dtype=x0_dtype, shape=(numpy.prod(x0_expr.shape, dtype=IntType),)
),
]
