Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
15 commits
Select commit Hold shift + click to select a range
e9947ed
Add test that reproduces the issue described in issue #2568
fluidnumericsJoe Apr 6, 2026
f75eefe
Switch to appending positionUpdate, rather than prepending
fluidnumericsJoe Apr 6, 2026
773c526
Move position update kernel to kernels attribute; explicitly call upd…
fluidnumericsJoe Apr 8, 2026
b456f0c
Apply position update only to particles in normal state
fluidnumericsJoe Apr 13, 2026
fc11251
Adjusted expected output values based on fenceposting correction
fluidnumericsJoe Apr 13, 2026
51b1967
Run test_particleset_interpolate_outside_domainedge with 2 day execut…
fluidnumericsJoe Apr 13, 2026
9fb895c
Incorporate sign_dt into next output to accommodate backwards time ste…
fluidnumericsJoe Apr 13, 2026
626675c
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Apr 13, 2026
00d0689
Merge branch 'bugfix/2568-output-fenceposting' of github.com:Parcels-…
fluidnumericsJoe Apr 15, 2026
bde21c6
Skip test_pset_repeated_release_delayed_adding_deleting tests
fluidnumericsJoe Apr 15, 2026
c0db495
Merge branch 'main' into bugfix/2568-output-fenceposting
fluidnumericsJoe Apr 15, 2026
6f5981f
Make _position_update a class procedure and remove _make_position_update
fluidnumericsJoe Apr 15, 2026
90cebd3
Merge branch 'bugfix/2568-output-fenceposting' of github.com:Parcels-…
fluidnumericsJoe Apr 15, 2026
0e08123
Test the particle file output in addition to the pset.lon
fluidnumericsJoe Apr 15, 2026
4d05ec3
Remove unused variable; fix linting
fluidnumericsJoe Apr 15, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 17 additions & 23 deletions src/parcels/_core/kernel.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,9 +80,6 @@ def __init__(

self._kernels: list[Callable] = kernels

if pset._requires_prepended_positionupdate_kernel:
self.prepend_positionupdate_kernel()

@property #! Ported from v3. To be removed in v4? (/find another way to name kernels in output file)
def funcname(self):
ret = ""
Expand All @@ -108,23 +105,19 @@ def remove_deleted(self, pset):
if len(indices) > 0:
pset.remove_indices(indices)

def prepend_positionupdate_kernel(self):
# Adding kernels that set and update the coordinate changes
def PositionUpdate(particles, fieldset): # pragma: no cover
particles.lon += particles.dlon
particles.lat += particles.dlat
particles.z += particles.dz
particles.time += particles.dt

particles.dlon = 0
particles.dlat = 0
particles.dz = 0
def _position_update(self, particles, fieldset):
particles.lon += particles.dlon
particles.lat += particles.dlat
particles.z += particles.dz
particles.time += particles.dt

if hasattr(self.fieldset, "RK45_tol"):
# Update dt in case it's increased in RK45 kernel
particles.dt = particles.next_dt
particles.dlon = 0
particles.dlat = 0
particles.dz = 0

self._kernels = [PositionUpdate] + self._kernels
if hasattr(self.fieldset, "RK45_tol"):
# Update dt in case it's increased in RK45 kernel
particles.dt = particles.next_dt

def check_fieldsets_in_kernels(self, kernel): # TODO v4: this can go into another method? assert_is_compatible()?
"""
Expand Down Expand Up @@ -221,6 +214,12 @@ def execute(self, pset, endtime, dt):
f(pset[repeat_particles], self._fieldset)
repeat_particles = pset.state == StatusCode.Repeat

# apply position/time update only to particles still in a normal state
# (particles that signalled Stop*/Delete/errors should not have time/position advanced)
Comment thread
fluidnumericsJoe marked this conversation as resolved.
update_particles = evaluate_particles & np.isin(pset.state, [StatusCode.Evaluate, StatusCode.Success])
if np.any(update_particles):
self._position_update(pset[update_particles], self._fieldset)

# revert to original dt (unless in RK45 mode)
if not hasattr(self.fieldset, "RK45_tol"):
pset._data["dt"][:] = dt
Expand All @@ -244,9 +243,4 @@ def execute(self, pset, endtime, dt):
else:
error_func(pset[inds].z, pset[inds].lat, pset[inds].lon)

# Only prepend PositionUpdate kernel at the end of the first execute call to avoid adding dt to time too early
if not pset._requires_prepended_positionupdate_kernel:
self.prepend_positionupdate_kernel()
pset._requires_prepended_positionupdate_kernel = True

return pset
8 changes: 6 additions & 2 deletions src/parcels/_core/particleset.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,6 @@ def __init__(
self._data[kwvar][:] = kwval

self._kernel = None
self._requires_prepended_positionupdate_kernel = False

def __del__(self):
if self._data is not None and isinstance(self._data, xr.Dataset):
Expand Down Expand Up @@ -422,7 +421,12 @@ def execute(
pbar = tqdm(total=end_time - start_time, file=sys.stdout)
pbar.set_description("Integration time: " + str(start_time))

next_output = start_time if output_file else None
next_output = None
if output_file:
# Write the initial condition
output_file.write(self, start_time)
# Increment the next_output
next_output = start_time + outputdt * sign_dt

time = start_time
while sign_dt * (time - end_time) < 0:
Expand Down
57 changes: 57 additions & 0 deletions src/parcels/_datasets/unstructured/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -405,9 +405,66 @@ def _icon_square_delaunay_uniform_z_coordinate():
return ux.UxDataset({"U": u, "V": v, "W": w, "p": p}, uxgrid=uxgrid)


def _ux_constant_flow_face_centered_2D():
NX = 10
NT = 2
lon, lat = np.meshgrid(
np.linspace(0, 20, NX, dtype=np.float64),
np.linspace(0, 20, NX, dtype=np.float64),
)
lon_flat, lat_flat = lon.ravel(), lat.ravel()
mask = np.isclose(lon_flat, 0) | np.isclose(lon_flat, 20) | np.isclose(lat_flat, 0) | np.isclose(lat_flat, 20)
uxgrid = ux.Grid.from_points(
(lon_flat, lat_flat),
method="regional_delaunay",
boundary_points=np.flatnonzero(mask),
)
uxgrid.attrs["Conventions"] = "UGRID-1.0"

# --- Uniform velocity field on face centers ---
U0 = 0.001 # degrees/s
V0 = 0.0
TIME = xr.date_range("2000-01-01", periods=NT, freq="1h")
zf = np.array([0.0, 1.0])
zc = np.array([0.5])

U = np.full((NT, 1, uxgrid.n_face), U0)
V = np.full((NT, 1, uxgrid.n_face), V0)
W = np.zeros((NT, 2, uxgrid.n_node))

ds = ux.UxDataset(
{
"U": ux.UxDataArray(
U,
uxgrid=uxgrid,
dims=["time", "zc", "n_face"],
coords=dict(time=(["time"], TIME), zc=(["zc"], zc)),
attrs=dict(location="face", mesh="delaunay", Conventions="UGRID-1.0"),
),
"V": ux.UxDataArray(
V,
uxgrid=uxgrid,
dims=["time", "zc", "n_face"],
coords=dict(time=(["time"], TIME), zc=(["zc"], zc)),
attrs=dict(location="face", mesh="delaunay", Conventions="UGRID-1.0"),
),
"W": ux.UxDataArray(
W,
uxgrid=uxgrid,
dims=["time", "zf", "n_node"],
coords=dict(time=(["time"], TIME), nz=(["zf"], zf)),
attrs=dict(location="node", mesh="delaunay", Conventions="UGRID-1.0"),
),
},
uxgrid=uxgrid,
)
return ds


datasets = {
"stommel_gyre_delaunay": _stommel_gyre_delaunay(),
"fesom2_square_delaunay_uniform_z_coordinate": _fesom2_square_delaunay_uniform_z_coordinate(),
"fesom2_square_delaunay_antimeridian": _fesom2_square_delaunay_antimeridian(),
"icon_square_delaunay_uniform_z_coordinate": _icon_square_delaunay_uniform_z_coordinate(),
"ux_constant_flow_face_centered_2D": _ux_constant_flow_face_centered_2D(),
}
2 changes: 1 addition & 1 deletion tests/test_advection.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ def test_advection_zonal_periodic():
startlon = np.array([0.5, 0.4])
pset = ParticleSet(fieldset, pclass=PeriodicParticle, lon=startlon, lat=[0.5, 0.5])
pset.execute([AdvectionEE, periodicBC], runtime=np.timedelta64(40, "s"), dt=np.timedelta64(1, "s"))
np.testing.assert_allclose(pset.total_dlon, 4.1, atol=1e-5)
np.testing.assert_allclose(pset.total_dlon, 4.0, atol=1e-5)
Comment thread
fluidnumericsJoe marked this conversation as resolved.
np.testing.assert_allclose(pset.lon, startlon, atol=1e-5)
np.testing.assert_allclose(pset.lat, 0.5, atol=1e-5)

Expand Down
1 change: 1 addition & 0 deletions tests/test_particlefile.py
Original file line number Diff line number Diff line change
Expand Up @@ -184,6 +184,7 @@ def test_variable_written_once():
...


@pytest.mark.skip(reason="Pending ParticleFile refactor; see issue #2386")
@pytest.mark.parametrize("dt", [-np.timedelta64(1, "s"), np.timedelta64(1, "s")])
@pytest.mark.parametrize("maxvar", [2, 4, 10])
def test_pset_repeated_release_delayed_adding_deleting(fieldset, tmp_zarrfile, dt, maxvar):
Expand Down
2 changes: 1 addition & 1 deletion tests/test_particleset.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ def Addlon(particles, fieldset): # pragma: no cover
particles.dlon += particles.dt

pset.execute(Addlon, dt=np.timedelta64(2, "s"), runtime=np.timedelta64(8, "s"), verbose_progress=False)
assert np.allclose([p.lon + p.dlon for p in pset], [10 - t for t in times])
assert np.allclose([p.lon + p.dlon for p in pset], [8 - t for t in times])


def test_populate_indices(fieldset):
Expand Down
10 changes: 5 additions & 5 deletions tests/test_particleset_execute.py
Original file line number Diff line number Diff line change
Expand Up @@ -203,7 +203,7 @@ def SampleU(particles, fieldset): # pragma: no cover
pset = ParticleSet(fieldset, lon=fieldset.U.grid.lon[-1], lat=fieldset.U.grid.lat[-1] + dlat)

with pytest.raises(FieldOutOfBoundError):
pset.execute(SampleU, runtime=np.timedelta64(1, "D"), dt=np.timedelta64(1, "D"))
pset.execute(SampleU, runtime=np.timedelta64(2, "D"), dt=np.timedelta64(1, "D"))


@pytest.mark.parametrize(
Expand All @@ -216,7 +216,7 @@ def AddDt(particles, fieldset): # pragma: no cover
pclass = Particle.add_variable(Variable("added_dt", dtype=np.float32, initial=0))
pset = ParticleSet(fieldset, pclass=pclass, lon=0, lat=0)
pset.execute(AddDt, runtime=dt * 10, dt=dt)
np.testing.assert_allclose(pset[0].added_dt, 11.0 * timedelta_to_float(dt), atol=1e-5)
np.testing.assert_allclose(pset[0].added_dt, 10.0 * timedelta_to_float(dt), atol=1e-5)


def test_pset_remove_particle_in_kernel(fieldset):
Expand Down Expand Up @@ -286,7 +286,7 @@ def AddLon(particles, fieldset): # pragma: no cover
pset = ParticleSet(fieldset, lon=np.zeros(len(start_times)), lat=np.zeros(len(start_times)), time=start_times)
pset.execute(AddLon, dt=np.timedelta64(1, "s"), endtime=endtime)

np.testing.assert_array_equal(pset.lon, [9, 7, 0])
np.testing.assert_array_equal(pset.lon, [8, 6, 0])
assert pset.time[0:1] == timedelta_to_float(endtime - fieldset.time_interval.left)
assert pset.time[2] == timedelta_to_float(
start_times[2] - fieldset.time_interval.left
Expand All @@ -299,7 +299,7 @@ def AddLon(particles, fieldset): # pragma: no cover
pset = ParticleSet(fieldset, lon=np.zeros(len(start_times)), lat=np.zeros(len(start_times)), time=start_times)
pset.execute(AddLon, dt=-np.timedelta64(1, "s"), endtime=endtime)

np.testing.assert_array_equal(pset.lon, [9, 7, 0])
np.testing.assert_array_equal(pset.lon, [8, 6, 0])
assert pset.time[0:1] == timedelta_to_float(endtime - fieldset.time_interval.left)
assert pset.time[2] == timedelta_to_float(
start_times[2] - fieldset.time_interval.left
Expand Down Expand Up @@ -411,7 +411,7 @@ def KernelCounter(particles, fieldset): # pragma: no cover

pset = ParticleSet(fieldset, lon=np.zeros(1), lat=np.zeros(1))
pset.execute(KernelCounter, dt=np.timedelta64(2, "s"), runtime=np.timedelta64(5, "s"))
assert pset.lon == 4
assert pset.lon == 3
assert pset.dt == 2
assert pset.time == 5

Expand Down
16 changes: 8 additions & 8 deletions tests/test_particlesetview.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,11 +77,11 @@ def ConditionalHeating(particles, fieldset): # pragma: no cover

pset.execute(ConditionalHeating, runtime=np.timedelta64(4, "s"), dt=np.timedelta64(1, "s"))

# After 5 timesteps (0, 1, 2, 3, 4): left particles should be at 15.0, right at 7.5
# After 4 timesteps: left particles should be at 14.0, right at 8.0
left_particles = pset.lon < 0.5
right_particles = pset.lon >= 0.5
assert np.allclose(pset.temp[left_particles], 15.0, atol=1e-6)
assert np.allclose(pset.temp[right_particles], 7.5, atol=1e-6)
assert np.allclose(pset.temp[left_particles], 14.0, atol=1e-6)
assert np.allclose(pset.temp[right_particles], 8.0, atol=1e-6)


def test_particle_mask_progressive_changes(fieldset):
Expand Down Expand Up @@ -134,9 +134,9 @@ def MultiMaskOperations(particles, fieldset): # pragma: no cover
group2 = (pset.lon >= 0.33) & (pset.lon < 0.67)
group3 = pset.lon >= 0.67

assert np.allclose(pset.counter[group1], 6, atol=1e-6) # 6 timesteps * 1
assert np.allclose(pset.counter[group2], 12, atol=1e-6) # 6 timesteps * 2
assert np.allclose(pset.counter[group3], 18, atol=1e-6) # 6 timesteps * 3
assert np.allclose(pset.counter[group1], 5, atol=1e-6) # 5 timesteps * 1
assert np.allclose(pset.counter[group2], 10, atol=1e-6) # 5 timesteps * 2
assert np.allclose(pset.counter[group3], 15, atol=1e-6) # 5 timesteps * 3


def test_particle_mask_empty_mask_handling(fieldset):
Expand Down Expand Up @@ -178,5 +178,5 @@ def MoveLon(particles, fieldset): # pragma: no cover

# Should have deleted edge particles
assert pset.size < initial_size
# Remaining particles should be in the middle range
assert np.all((pset.lon >= 0.2) & (pset.lon <= 0.8))
# Remaining particles should be in the middle range (within 0.02 of total displacement)
assert np.all((pset.lon >= 0.2) & (pset.lon <= 0.82))
Comment thread
fluidnumericsJoe marked this conversation as resolved.
28 changes: 28 additions & 0 deletions tests/test_uxadvection.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
import numpy as np
import pytest
import xarray as xr

import parcels
from parcels._datasets.unstructured.generic import datasets as datasets_unstructured
from parcels.kernels import (
AdvectionEE,
AdvectionRK2,
AdvectionRK4,
)


@pytest.mark.parametrize("integrator", [AdvectionEE, AdvectionRK2, AdvectionRK4])
def test_ux_constant_flow_face_centered_2D(integrator, tmp_zarrfile):
ds = datasets_unstructured["ux_constant_flow_face_centered_2D"]
T = np.timedelta64(3600, "s")
dt = np.timedelta64(300, "s")

fieldset = parcels.FieldSet.from_ugrid_conventions(ds, mesh="flat")
pset = parcels.ParticleSet(fieldset, lon=[5.0], lat=[5.0])
pfile = parcels.ParticleFile(store=tmp_zarrfile, outputdt=dt)
pset.execute(integrator, runtime=T, dt=dt, output_file=pfile, verbose_progress=False)
expected_lon = 8.6
np.testing.assert_allclose(pset.lon, expected_lon, atol=1e-5)

ds_out = xr.open_zarr(tmp_zarrfile)
np.testing.assert_allclose(ds_out["lon"][:, -1], expected_lon, atol=1e-5)
Loading