From 4b1e4e6f6e2edf9661f4c069509d036ae2893f13 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Wed, 21 Jan 2026 15:19:29 +0100 Subject: [PATCH 1/5] Renaming `pyfunc` to `kernels` --- docs/getting_started/explanation_concepts.md | 2 +- .../examples/tutorial_interaction.ipynb | 4 +- .../tutorial_stommel_uxarray.ipynb | 2 +- src/parcels/_core/kernel.py | 52 +++++++++---------- src/parcels/_core/particleset.py | 25 +++++---- tests/test_kernel.py | 20 +++---- tests/test_particleset_execute.py | 8 +-- 7 files changed, 56 insertions(+), 57 deletions(-) diff --git a/docs/getting_started/explanation_concepts.md b/docs/getting_started/explanation_concepts.md index 8153149725..35d91ec69b 100644 --- a/docs/getting_started/explanation_concepts.md +++ b/docs/getting_started/explanation_concepts.md @@ -181,7 +181,7 @@ dt = np.timedelta64(5, "m") runtime = np.timedelta64(1, "D") # Run the simulation -pset.execute(pyfunc=kernels, dt=dt, runtime=runtime) +pset.execute(kernels=kernels, dt=dt, runtime=runtime) ``` ### Output diff --git a/docs/user_guide/examples/tutorial_interaction.ipynb b/docs/user_guide/examples/tutorial_interaction.ipynb index 09d8079e7c..defac279df 100644 --- a/docs/user_guide/examples/tutorial_interaction.ipynb +++ b/docs/user_guide/examples/tutorial_interaction.ipynb @@ -139,7 +139,7 @@ "]\n", "\n", "pset.execute(\n", - " pyfunc=kernels,\n", + " kernels=kernels,\n", " runtime=np.timedelta64(60, \"s\"),\n", " dt=np.timedelta64(1, \"s\"),\n", " output_file=output_file,\n", @@ -331,7 +331,7 @@ "]\n", "\n", "pset.execute(\n", - " pyfunc=kernels,\n", + " kernels=kernels,\n", " runtime=np.timedelta64(60, \"s\"),\n", " dt=np.timedelta64(1, \"s\"),\n", " output_file=output_file,\n", diff --git a/docs/user_guide/examples_v3/tutorial_stommel_uxarray.ipynb b/docs/user_guide/examples_v3/tutorial_stommel_uxarray.ipynb index 4d41f8b75f..e987401d75 100644 --- a/docs/user_guide/examples_v3/tutorial_stommel_uxarray.ipynb +++ b/docs/user_guide/examples_v3/tutorial_stommel_uxarray.ipynb @@ -334,7 +334,7 @@ " pset.execute(\n", " endtime=endtime,\n", " dt=timedelta(seconds=60),\n", - " pyfunc=AdvectionEE,\n", + " kernels=AdvectionEE,\n", " verbose_progress=False,\n", " )\n", " except FieldOutOfBoundError:\n", diff --git a/src/parcels/_core/kernel.py b/src/parcels/_core/kernel.py index 0f6493e44a..653cec2586 100644 --- a/src/parcels/_core/kernel.py +++ b/src/parcels/_core/kernel.py @@ -49,8 +49,8 @@ class Kernel: FieldSet object providing the field information (possibly None) ptype : PType object for the kernel particle - pyfunc : - (aggregated) Kernel function + kernels : + list of Kernel functions Notes ----- @@ -62,30 +62,30 @@ def __init__( self, fieldset, ptype, - pyfuncs: list[types.FunctionType], + kernels: list[types.FunctionType], ): - for f in pyfuncs: + for f in kernels: if not isinstance(f, types.FunctionType): - raise TypeError(f"Argument pyfunc should be a function or list of functions. Got {type(f)}") + raise TypeError(f"Argument `kernels` should be a function or list of functions. 
Got {type(f)}") assert_same_function_signature(f, ref=AdvectionRK4, context="Kernel") - if len(pyfuncs) == 0: - raise ValueError("List of `pyfuncs` should have at least one function.") + if len(kernels) == 0: + raise ValueError("List of `kernels` should have at least one function.") self._fieldset = fieldset self._ptype = ptype self._positionupdate_kernel_added = False - for f in pyfuncs: + for f in kernels: self.check_fieldsets_in_kernels(f) - self._pyfuncs: list[Callable] = pyfuncs + self._kernels: list[Callable] = kernels @property #! Ported from v3. To be removed in v4? (/find another way to name kernels in output file) def funcname(self): ret = "" - for f in self._pyfuncs: + for f in self._kernels: ret += f.__name__ return ret @@ -123,21 +123,21 @@ def PositionUpdate(particles, fieldset): # pragma: no cover # Update dt in case it's increased in RK45 kernel particles.dt = particles.next_dt - self._pyfuncs = (PositionUpdate + self)._pyfuncs + self._kernels = (PositionUpdate + self)._kernels - def check_fieldsets_in_kernels(self, pyfunc): # TODO v4: this can go into another method? assert_is_compatible()? + def check_fieldsets_in_kernels(self, kernel): # TODO v4: this can go into another method? assert_is_compatible()? """ Checks the integrity of the fieldset with the kernels. - This function is to be called from the derived class when setting up the 'pyfunc'. + This function is to be called from the derived class when setting up the 'kernel'. """ if self.fieldset is not None: - if pyfunc is AdvectionAnalytical: + if kernel is AdvectionAnalytical: if self._fieldset.U.interp_method != "cgrid_velocity": raise NotImplementedError("Analytical Advection only works with C-grids") if self._fieldset.U.grid._gtype not in [GridType.CurvilinearZGrid, GridType.RectilinearZGrid]: raise NotImplementedError("Analytical Advection only works with Z-grids in the vertical") - elif pyfunc is AdvectionRK45: + elif kernel is AdvectionRK45: if "next_dt" not in [v.name for v in self.ptype.variables]: raise ValueError('ParticleClass requires a "next_dt" for AdvectionRK45 Kernel.') if not hasattr(self.fieldset, "RK45_tol"): @@ -176,21 +176,21 @@ def merge(self, kernel): return type(self)( self.fieldset, self.ptype, - pyfuncs=self._pyfuncs + kernel._pyfuncs, + kernels=self._kernels + kernel._kernels, ) def __add__(self, kernel): if isinstance(kernel, types.FunctionType): - kernel = type(self)(self.fieldset, self.ptype, pyfuncs=[kernel]) + kernel = type(self)(self.fieldset, self.ptype, kernels=[kernel]) return self.merge(kernel) def __radd__(self, kernel): if isinstance(kernel, types.FunctionType): - kernel = type(self)(self.fieldset, self.ptype, pyfuncs=[kernel]) + kernel = type(self)(self.fieldset, self.ptype, kernels=[kernel]) return kernel.merge(self) @classmethod - def from_list(cls, fieldset, ptype, pyfunc_list): + def from_list(cls, fieldset, ptype, kernels_list): """Create a combined kernel from a list of functions. Takes a list of functions, converts them to kernels, and joins them @@ -202,19 +202,19 @@ def from_list(cls, fieldset, ptype, pyfunc_list): FieldSet object providing the field information (possibly None) ptype : PType object for the kernel particle - pyfunc_list : list of functions + kernels_list : list of functions List of functions to be combined into a single kernel. *args : Additional arguments passed to first kernel during construction. **kwargs : Additional keyword arguments passed to first kernel during construction. 
""" - if not isinstance(pyfunc_list, list): - raise TypeError(f"Argument `pyfunc_list` should be a list of functions. Got {type(pyfunc_list)}") - if not all([isinstance(f, types.FunctionType) for f in pyfunc_list]): - raise ValueError("Argument `pyfunc_list` should be a list of functions.") + if not isinstance(kernels_list, list): + raise TypeError(f"Argument `kernels_list` should be a list of functions. Got {type(kernels_list)}") + if not all([isinstance(f, types.FunctionType) for f in kernels_list]): + raise ValueError("Argument `kernels_list` should be a list of functions.") - return cls(fieldset, ptype, pyfunc_list) + return cls(fieldset, ptype, kernels_list) def execute(self, pset, endtime, dt): """Execute this Kernel over a ParticleSet for several timesteps. @@ -248,7 +248,7 @@ def execute(self, pset, endtime, dt): pset.dt = np.minimum(np.maximum(pset.dt, -time_to_endtime), 0) # run kernels for all particles that need to be evaluated - for f in self._pyfuncs: + for f in self._kernels: f(pset[evaluate_particles], self._fieldset) # check for particles that have to be repeated diff --git a/src/parcels/_core/particleset.py b/src/parcels/_core/particleset.py index c4cd5ffd81..903fe074c9 100644 --- a/src/parcels/_core/particleset.py +++ b/src/parcels/_core/particleset.py @@ -290,27 +290,27 @@ def from_particlefile(cls, fieldset, pclass, filename, restart=True, restarttime "ParticleSet.from_particlefile is not yet implemented in v4." ) # TODO implement this when ParticleFile is implemented in v4 - def Kernel(self, pyfunc): - """Wrapper method to convert a `pyfunc` into a :class:`parcels.kernel.Kernel` object. + def Kernel(self, kernels): + """Wrapper method to convert a kernel or list of kernels into a :class:`parcels.kernel.Kernel` object. Conversion is based on `fieldset` and `ptype` of the ParticleSet. Parameters ---------- - pyfunc : function or list of functions + kernels : kernel function or list of kernels functions Python function to convert into kernel. If a list of functions is provided, the functions will be converted to kernels and combined into a single kernel. """ - if isinstance(pyfunc, list): + if isinstance(kernels, list): return Kernel.from_list( self.fieldset, self._ptype, - pyfunc, + kernels, ) return Kernel( self.fieldset, self._ptype, - pyfuncs=[pyfunc], + kernels=[kernels], ) def data_indices(self, variable_name, compare_values, invert=False): @@ -376,7 +376,7 @@ def set_variable_write_status(self, var, write_status): def execute( self, - pyfunc, + kernels, dt: datetime.timedelta | np.timedelta64 | float, endtime: np.timedelta64 | np.datetime64 | None = None, runtime: datetime.timedelta | np.timedelta64 | float | None = None, @@ -390,10 +390,9 @@ def execute( Parameters ---------- - pyfunc : - Kernel function to execute. This can be the name of a + kernels : + List of Kernel functions to execute. This can be the name of a defined Python function or a :class:`parcels.kernel.Kernel` object. - Kernels can be concatenated using the + operator. dt (np.timedelta64 or float): Timestep interval (as a np.timedelta64 object of float in seconds) to be passed to the kernel. Use a negative value for a backward-in-time simulation. 
@@ -417,10 +416,10 @@ def execute( if len(self) == 0: return - if not isinstance(pyfunc, Kernel): - pyfunc = self.Kernel(pyfunc) + if not isinstance(kernels, Kernel): + kernels = self.Kernel(kernels) - self._kernel = pyfunc + self._kernel = kernels if output_file is not None: output_file.set_metadata(self.fieldset.gridset[0]._mesh) diff --git a/tests/test_kernel.py b/tests/test_kernel.py index 3474cdc47b..49f895efba 100644 --- a/tests/test_kernel.py +++ b/tests/test_kernel.py @@ -35,22 +35,22 @@ def ErrorKernel(particles, fieldset): # pragma: no cover def test_kernel_init(fieldset): - Kernel(fieldset, ptype=Particle, pyfuncs=[AdvectionRK4]) + Kernel(fieldset, ptype=Particle, kernels=[AdvectionRK4]) def test_kernel_merging(fieldset): - k1 = Kernel(fieldset, ptype=Particle, pyfuncs=[AdvectionRK4]) - k2 = Kernel(fieldset, ptype=Particle, pyfuncs=[MoveEast, MoveNorth]) + k1 = Kernel(fieldset, ptype=Particle, kernels=[AdvectionRK4]) + k2 = Kernel(fieldset, ptype=Particle, kernels=[MoveEast, MoveNorth]) merged_kernel = k1 + k2 assert merged_kernel.funcname == "AdvectionRK4MoveEastMoveNorth" - assert len(merged_kernel._pyfuncs) == 3 - assert merged_kernel._pyfuncs == [AdvectionRK4, MoveEast, MoveNorth] + assert len(merged_kernel._kernels) == 3 + assert merged_kernel._kernels == [AdvectionRK4, MoveEast, MoveNorth] merged_kernel = k2 + k1 assert merged_kernel.funcname == "MoveEastMoveNorthAdvectionRK4" - assert len(merged_kernel._pyfuncs) == 3 - assert merged_kernel._pyfuncs == [MoveEast, MoveNorth, AdvectionRK4] + assert len(merged_kernel._kernels) == 3 + assert merged_kernel._kernels == [MoveEast, MoveNorth, AdvectionRK4] def test_kernel_from_list(fieldset): @@ -77,13 +77,13 @@ def test_kernel_from_list_error_checking(fieldset): """ pset = ParticleSet(fieldset, lon=[0.5], lat=[0.5]) - with pytest.raises(ValueError, match="List of `pyfuncs` should have at least one function."): + with pytest.raises(ValueError, match="List of `kernels` should have at least one function."): pset.Kernel([]) - with pytest.raises(ValueError, match="Argument `pyfunc_list` should be a list of functions."): + with pytest.raises(ValueError, match="Argument `kernels_list` should be a list of functions."): pset.Kernel([AdvectionRK4, "something else"]) - with pytest.raises(ValueError, match="Argument `pyfunc_list` should be a list of functions."): + with pytest.raises(ValueError, match="Argument `kernels_list` should be a list of functions."): kernels_mixed = pset.Kernel([pset.Kernel(AdvectionRK4), MoveEast, MoveNorth]) assert kernels_mixed.funcname == "AdvectionRK4MoveEastMoveNorth" diff --git a/tests/test_particleset_execute.py b/tests/test_particleset_execute.py index 84e09a4836..5473fcbbb3 100644 --- a/tests/test_particleset_execute.py +++ b/tests/test_particleset_execute.py @@ -127,7 +127,7 @@ def test_pset_execute_invalid_arguments(fieldset, fieldset_no_time_interval): def test_particleset_runtime_type(fieldset, runtime, expectation): pset = ParticleSet(fieldset, lon=[0.2], lat=[5.0], z=[50.0], pclass=Particle) with expectation: - pset.execute(runtime=runtime, dt=np.timedelta64(10, "s"), pyfunc=DoNothing) + pset.execute(runtime=runtime, dt=np.timedelta64(10, "s"), kernels=DoNothing) @pytest.mark.parametrize( @@ -143,7 +143,7 @@ def test_particleset_runtime_type(fieldset, runtime, expectation): def test_particleset_endtime_type(fieldset, endtime, expectation): pset = ParticleSet(fieldset, lon=[0.2], lat=[5.0], z=[50.0], pclass=Particle) with expectation: - pset.execute(endtime=endtime, dt=np.timedelta64(10, "m"), 
pyfunc=DoNothing) + pset.execute(endtime=endtime, dt=np.timedelta64(10, "m"), kernels=DoNothing) def test_particleset_run_to_endtime(fieldset): @@ -559,7 +559,7 @@ def test_uxstommelgyre_multiparticle_pset_execute(): pset.execute( runtime=np.timedelta64(10, "m"), dt=np.timedelta64(60, "s"), - pyfunc=AdvectionRK4_3D, + kernels=AdvectionRK4_3D, ) @@ -600,5 +600,5 @@ def test_uxstommelgyre_pset_execute_output(): outputdt=np.timedelta64(5, "m"), # the time step of the outputs ) pset.execute( - runtime=np.timedelta64(10, "m"), dt=np.timedelta64(60, "s"), pyfunc=AdvectionEE, output_file=output_file + runtime=np.timedelta64(10, "m"), dt=np.timedelta64(60, "s"), kernels=AdvectionEE, output_file=output_file ) From 2497376153383182dc16ee832a7227f2e7a23948 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Wed, 21 Jan 2026 16:16:20 +0100 Subject: [PATCH 2/5] Removing pset.Kernel method --- docs/user_guide/v4-migration.md | 1 + src/parcels/_core/kernel.py | 38 ++++++------------------------- src/parcels/_core/particleset.py | 26 +-------------------- tests-v3/test_kernel_language.py | 2 +- tests/test_diffusion.py | 4 ++-- tests/test_kernel.py | 36 ++++++++++++++++------------- tests/test_particlefile.py | 4 ++-- tests/test_particleset_execute.py | 5 ++-- 8 files changed, 37 insertions(+), 79 deletions(-) diff --git a/docs/user_guide/v4-migration.md b/docs/user_guide/v4-migration.md index 7db6451cc2..c142bd4ec8 100644 --- a/docs/user_guide/v4-migration.md +++ b/docs/user_guide/v4-migration.md @@ -17,6 +17,7 @@ Version 4 of Parcels is unreleased at the moment. The information in this migrat - The `InteractionKernel` class has been removed. Since normal Kernels now have access to _all_ particles, particle-particle interaction can be performed within normal Kernels. - Users need to explicitly use `convert_z_to_sigma_croco` in sampling kernels (such as the `AdvectionRK4_3D_CROCO` or `SampleOMegaCroco` kernels) when working with CROCO data, as the automatic conversion from depth to sigma grids under the hood has been removed. - We added a new AdvectionRK2 Kernel. The AdvectionRK4 kernel is still available, but RK2 is now the recommended default advection scheme as it is faster while the accuracy is comparable for most applications. See also the Choosing an integration method tutorial. +- Functions shouldn't be converted to Kernels before adding to a pset.execute() call. Instead, simply pass the function(s) as a list to pset.execute(), which will convert them to Kernels internally. ## FieldSet diff --git a/src/parcels/_core/kernel.py b/src/parcels/_core/kernel.py index 653cec2586..7edcf014dd 100644 --- a/src/parcels/_core/kernel.py +++ b/src/parcels/_core/kernel.py @@ -60,10 +60,13 @@ class Kernel: def __init__( self, + kernels: list[types.FunctionType], fieldset, ptype, - kernels: list[types.FunctionType], ): + if not isinstance(kernels, list): + kernels = [kernels] + for f in kernels: if not isinstance(f, types.FunctionType): raise TypeError(f"Argument `kernels` should be a function or list of functions. 
Got {type(f)}") @@ -174,48 +177,21 @@ def merge(self, kernel): assert self.ptype == kernel.ptype, "Cannot merge kernels with different particle types" return type(self)( + self._kernels + kernel._kernels, self.fieldset, self.ptype, - kernels=self._kernels + kernel._kernels, ) def __add__(self, kernel): if isinstance(kernel, types.FunctionType): - kernel = type(self)(self.fieldset, self.ptype, kernels=[kernel]) + kernel = type(self)([kernel], self.fieldset, self.ptype) return self.merge(kernel) def __radd__(self, kernel): if isinstance(kernel, types.FunctionType): - kernel = type(self)(self.fieldset, self.ptype, kernels=[kernel]) + kernel = type(self)([kernel], self.fieldset, self.ptype) return kernel.merge(self) - @classmethod - def from_list(cls, fieldset, ptype, kernels_list): - """Create a combined kernel from a list of functions. - - Takes a list of functions, converts them to kernels, and joins them - together. - - Parameters - ---------- - fieldset : parcels.Fieldset - FieldSet object providing the field information (possibly None) - ptype : - PType object for the kernel particle - kernels_list : list of functions - List of functions to be combined into a single kernel. - *args : - Additional arguments passed to first kernel during construction. - **kwargs : - Additional keyword arguments passed to first kernel during construction. - """ - if not isinstance(kernels_list, list): - raise TypeError(f"Argument `kernels_list` should be a list of functions. Got {type(kernels_list)}") - if not all([isinstance(f, types.FunctionType) for f in kernels_list]): - raise ValueError("Argument `kernels_list` should be a list of functions.") - - return cls(fieldset, ptype, kernels_list) - def execute(self, pset, endtime, dt): """Execute this Kernel over a ParticleSet for several timesteps. diff --git a/src/parcels/_core/particleset.py b/src/parcels/_core/particleset.py index 903fe074c9..677030e32e 100644 --- a/src/parcels/_core/particleset.py +++ b/src/parcels/_core/particleset.py @@ -290,29 +290,6 @@ def from_particlefile(cls, fieldset, pclass, filename, restart=True, restarttime "ParticleSet.from_particlefile is not yet implemented in v4." ) # TODO implement this when ParticleFile is implemented in v4 - def Kernel(self, kernels): - """Wrapper method to convert a kernel or list of kernels into a :class:`parcels.kernel.Kernel` object. - - Conversion is based on `fieldset` and `ptype` of the ParticleSet. - - Parameters - ---------- - kernels : kernel function or list of kernels functions - Python function to convert into kernel. If a list of functions is provided, - the functions will be converted to kernels and combined into a single kernel. - """ - if isinstance(kernels, list): - return Kernel.from_list( - self.fieldset, - self._ptype, - kernels, - ) - return Kernel( - self.fieldset, - self._ptype, - kernels=[kernels], - ) - def data_indices(self, variable_name, compare_values, invert=False): """Get the indices of all particles where the value of `variable_name` equals (one of) `compare_values`. 
@@ -417,8 +394,7 @@ def execute( return if not isinstance(kernels, Kernel): - kernels = self.Kernel(kernels) - + kernels = Kernel(kernels, self.fieldset, self._ptype) self._kernel = kernels if output_file is not None: diff --git a/tests-v3/test_kernel_language.py b/tests-v3/test_kernel_language.py index b75397e92d..a28da2c5b5 100644 --- a/tests-v3/test_kernel_language.py +++ b/tests-v3/test_kernel_language.py @@ -18,7 +18,7 @@ def expr_kernel(name, pset, expr): pycode = (f"def {name}(particle, fieldset, time):\n" f" particle.p = {expr}") # fmt: skip - return Kernel(pset.fieldset, pset.particledata.ptype, pyfunc=None, funccode=pycode, funcname=name) + return Kernel(kernels=None, fieldset=pset.fieldset, ptype=pset._ptype, funccode=pycode, funcname=name) @pytest.fixture diff --git a/tests/test_diffusion.py b/tests/test_diffusion.py index 873bfea22c..75dd850f96 100644 --- a/tests/test_diffusion.py +++ b/tests/test_diffusion.py @@ -34,7 +34,7 @@ def test_fieldKh_Brownian(mesh): np.random.seed(1234) pset = ParticleSet(fieldset=fieldset, lon=np.zeros(npart), lat=np.zeros(npart)) - pset.execute(pset.Kernel(DiffusionUniformKh), runtime=runtime, dt=np.timedelta64(1, "h")) + pset.execute(DiffusionUniformKh, runtime=runtime, dt=np.timedelta64(1, "h")) expected_std_lon = np.sqrt(2 * kh_zonal * mesh_conversion**2 * timedelta_to_float(runtime)) expected_std_lat = np.sqrt(2 * kh_meridional * mesh_conversion**2 * timedelta_to_float(runtime)) @@ -70,7 +70,7 @@ def test_fieldKh_SpatiallyVaryingDiffusion(mesh, kernel): np.random.seed(1636) pset = ParticleSet(fieldset=fieldset, lon=np.zeros(npart), lat=np.zeros(npart)) - pset.execute(pset.Kernel(kernel), runtime=np.timedelta64(3, "h"), dt=np.timedelta64(1, "h")) + pset.execute(kernel, runtime=np.timedelta64(3, "h"), dt=np.timedelta64(1, "h")) tol = 2000 * mesh_conversion # effectively 2000 m errors (because of low numbers of particles) assert np.allclose(np.mean(pset.lon), 0, atol=tol) diff --git a/tests/test_kernel.py b/tests/test_kernel.py index 49f895efba..480475c724 100644 --- a/tests/test_kernel.py +++ b/tests/test_kernel.py @@ -35,12 +35,12 @@ def ErrorKernel(particles, fieldset): # pragma: no cover def test_kernel_init(fieldset): - Kernel(fieldset, ptype=Particle, kernels=[AdvectionRK4]) + Kernel(kernels=[AdvectionRK4], fieldset=fieldset, ptype=Particle) def test_kernel_merging(fieldset): - k1 = Kernel(fieldset, ptype=Particle, kernels=[AdvectionRK4]) - k2 = Kernel(fieldset, ptype=Particle, kernels=[MoveEast, MoveNorth]) + k1 = Kernel(kernels=[AdvectionRK4], fieldset=fieldset, ptype=Particle) + k2 = Kernel(kernels=[MoveEast, MoveNorth], fieldset=fieldset, ptype=Particle) merged_kernel = k1 + k2 assert merged_kernel.funcname == "AdvectionRK4MoveEastMoveNorth" @@ -61,8 +61,8 @@ def test_kernel_from_list(fieldset): mixed functions and kernel objects. 
""" pset = ParticleSet(fieldset, lon=[0.5], lat=[0.5]) - kernels_single = pset.Kernel([AdvectionRK4]) - kernels_functions = pset.Kernel([AdvectionRK4, MoveEast, MoveNorth]) + kernels_single = Kernel(kernels=[AdvectionRK4], fieldset=fieldset, ptype=pset._ptype) + kernels_functions = Kernel(kernels=[AdvectionRK4, MoveEast, MoveNorth], fieldset=fieldset, ptype=pset._ptype) # Check if the kernels were combined correctly assert kernels_single.funcname == "AdvectionRK4" @@ -78,13 +78,17 @@ def test_kernel_from_list_error_checking(fieldset): pset = ParticleSet(fieldset, lon=[0.5], lat=[0.5]) with pytest.raises(ValueError, match="List of `kernels` should have at least one function."): - pset.Kernel([]) + Kernel(kernels=[], fieldset=fieldset, ptype=pset._ptype) - with pytest.raises(ValueError, match="Argument `kernels_list` should be a list of functions."): - pset.Kernel([AdvectionRK4, "something else"]) + with pytest.raises(TypeError, match=r"Argument `kernels` should be a function or list of functions.*"): + Kernel(kernels=[AdvectionRK4, "something else"], fieldset=fieldset, ptype=pset._ptype) - with pytest.raises(ValueError, match="Argument `kernels_list` should be a list of functions."): - kernels_mixed = pset.Kernel([pset.Kernel(AdvectionRK4), MoveEast, MoveNorth]) + with pytest.raises(TypeError, match=r"Argument `kernels` should be a function or list of functions.*"): + kernels_mixed = Kernel( + kernels=[Kernel(kernels=[AdvectionRK4], fieldset=fieldset, ptype=pset._ptype), MoveEast, MoveNorth], + fieldset=fieldset, + ptype=pset._ptype, + ) assert kernels_mixed.funcname == "AdvectionRK4MoveEastMoveNorth" @@ -93,7 +97,7 @@ def test_RK45Kernel_error_no_next_dt(fieldset): pset = ParticleSet(fieldset, lon=[0.5], lat=[0.5]) with pytest.raises(ValueError, match='ParticleClass requires a "next_dt" for AdvectionRK45 Kernel.'): - pset.Kernel(AdvectionRK45) + Kernel(kernels=AdvectionRK45, fieldset=fieldset, ptype=pset._ptype) def test_kernel_signature(fieldset): @@ -114,23 +118,23 @@ def kernel_switched_args(fieldset, particle): def kernel_with_forced_kwarg(particles, *, fieldset=0): pass - pset.Kernel(good_kernel) + Kernel(kernels=good_kernel, fieldset=fieldset, ptype=pset._ptype) with pytest.raises(ValueError, match="Kernel function must have 2 parameters, got 3"): - pset.Kernel(version_3_kernel) + Kernel(kernels=version_3_kernel, fieldset=fieldset, ptype=pset._ptype) with pytest.raises( ValueError, match="Parameter 'particle' has incorrect name. Expected 'particles', got 'particle'" ): - pset.Kernel(version_3_kernel_without_time) + Kernel(kernels=version_3_kernel_without_time, fieldset=fieldset, ptype=pset._ptype) with pytest.raises( ValueError, match="Parameter 'fieldset' has incorrect name. Expected 'particles', got 'fieldset'" ): - pset.Kernel(kernel_switched_args) + Kernel(kernels=kernel_switched_args, fieldset=fieldset, ptype=pset._ptype) with pytest.raises( ValueError, match="Parameter 'fieldset' has incorrect parameter kind. 
Expected POSITIONAL_OR_KEYWORD, got KEYWORD_ONLY", ): - pset.Kernel(kernel_with_forced_kwarg) + Kernel(kernels=kernel_with_forced_kwarg, fieldset=fieldset, ptype=pset._ptype) diff --git a/tests/test_particlefile.py b/tests/test_particlefile.py index 8d559eb450..d642a544c7 100755 --- a/tests/test_particlefile.py +++ b/tests/test_particlefile.py @@ -356,7 +356,7 @@ def Update_lon(particles, fieldset): # pragma: no cover particle = get_default_particle(np.float64) pset = ParticleSet(fieldset, pclass=particle, lon=[0], lat=[0]) ofile = ParticleFile(tmp_zarrfile, outputdt=np.timedelta64(50, "s")) - pset.execute(pset.Kernel(Update_lon), runtime=5 * dt, dt=dt, output_file=ofile) + pset.execute(Update_lon, runtime=5 * dt, dt=dt, output_file=ofile) assert np.allclose(pset.lon, 0.6) @@ -370,7 +370,7 @@ def Update_lon(particles, fieldset): # pragma: no cover particle = get_default_particle(np.float64) pset = ParticleSet(fieldset, pclass=particle, lon=[0], lat=[0]) ofile = ParticleFile(tmp_zarrfile, outputdt=np.timedelta64(3, "s")) - pset.execute(pset.Kernel(Update_lon), runtime=np.timedelta64(11, "s"), dt=np.timedelta64(2, "s"), output_file=ofile) + pset.execute(Update_lon, runtime=np.timedelta64(11, "s"), dt=np.timedelta64(2, "s"), output_file=ofile) ds = xr.open_zarr(tmp_zarrfile) assert np.allclose(ds.lon.values, [0, 3, 6, 9]) diff --git a/tests/test_particleset_execute.py b/tests/test_particleset_execute.py index 5473fcbbb3..0c13b46e51 100644 --- a/tests/test_particleset_execute.py +++ b/tests/test_particleset_execute.py @@ -9,6 +9,7 @@ FieldInterpolationError, FieldOutOfBoundError, FieldSet, + Kernel, OutsideTimeInterval, Particle, ParticleFile, @@ -224,7 +225,7 @@ def test_pset_remove_particle_in_kernel(fieldset): def DeleteKernel(particles, fieldset): # pragma: no cover particles.state = np.where((particles.lon >= 0.4) & (particles.lon <= 0.6), StatusCode.Delete, particles.state) - pset.execute(pset.Kernel(DeleteKernel), runtime=np.timedelta64(1, "s"), dt=np.timedelta64(1, "s")) + pset.execute(DeleteKernel, runtime=np.timedelta64(1, "s"), dt=np.timedelta64(1, "s")) indices = [i for i in range(npart) if not (40 <= i < 60)] assert [p.trajectory for p in pset] == indices assert pset[70].trajectory == 90 @@ -250,7 +251,7 @@ def test_pset_multi_execute(fieldset, with_delete, npart=10, n=5): def AddLat(particles, fieldset): # pragma: no cover particles.dlat += 0.1 - k_add = pset.Kernel(AddLat) + k_add = Kernel(kernels=AddLat, fieldset=fieldset, ptype=pset._ptype) for _ in range(n): pset.execute(k_add, runtime=np.timedelta64(1, "s"), dt=np.timedelta64(1, "s")) if with_delete: From 5fb6178189ba12b4c2cd30db88475bdb3f16abe5 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 22 Jan 2026 13:59:15 +0100 Subject: [PATCH 3/5] Review feedback --- docs/user_guide/v4-migration.md | 2 +- src/parcels/_core/kernel.py | 10 ++++++---- src/parcels/_core/particleset.py | 9 ++++++--- tests/test_kernel.py | 14 +++++++------- tests/test_particleset_execute.py | 2 +- 5 files changed, 21 insertions(+), 16 deletions(-) diff --git a/docs/user_guide/v4-migration.md b/docs/user_guide/v4-migration.md index c142bd4ec8..7e33be83ce 100644 --- a/docs/user_guide/v4-migration.md +++ b/docs/user_guide/v4-migration.md @@ -17,7 +17,7 @@ Version 4 of Parcels is unreleased at the moment. The information in this migrat - The `InteractionKernel` class has been removed. Since normal Kernels now have access to _all_ particles, particle-particle interaction can be performed within normal Kernels. 
- Users need to explicitly use `convert_z_to_sigma_croco` in sampling kernels (such as the `AdvectionRK4_3D_CROCO` or `SampleOMegaCroco` kernels) when working with CROCO data, as the automatic conversion from depth to sigma grids under the hood has been removed. - We added a new AdvectionRK2 Kernel. The AdvectionRK4 kernel is still available, but RK2 is now the recommended default advection scheme as it is faster while the accuracy is comparable for most applications. See also the Choosing an integration method tutorial. -- Functions shouldn't be converted to Kernels before adding to a pset.execute() call. Instead, simply pass the function(s) as a list to pset.execute(), which will convert them to Kernels internally. +- Functions shouldn't be converted to Kernels before adding to a pset.execute() call. Instead, simply pass the function(s) as a list to pset.execute(). ## FieldSet diff --git a/src/parcels/_core/kernel.py b/src/parcels/_core/kernel.py index 7edcf014dd..ad4a4c5d60 100644 --- a/src/parcels/_core/kernel.py +++ b/src/parcels/_core/kernel.py @@ -45,12 +45,12 @@ class Kernel: Parameters ---------- + kernels : + list of Kernel functions fieldset : parcels.Fieldset FieldSet object providing the field information (possibly None) ptype : PType object for the kernel particle - kernels : - list of Kernel functions Notes ----- @@ -65,10 +65,12 @@ def __init__( ptype, ): if not isinstance(kernels, list): - kernels = [kernels] + raise ValueError(f"kernels must be a list. Got {kernels=!r}") for f in kernels: - if not isinstance(f, types.FunctionType): + if isinstance(f, Kernel): + f = f._kernels # unwrap + elif not isinstance(f, types.FunctionType): raise TypeError(f"Argument `kernels` should be a function or list of functions. Got {type(f)}") assert_same_function_signature(f, ref=AdvectionRK4, context="Kernel") diff --git a/src/parcels/_core/particleset.py b/src/parcels/_core/particleset.py index 677030e32e..715c4c50f8 100644 --- a/src/parcels/_core/particleset.py +++ b/src/parcels/_core/particleset.py @@ -393,9 +393,12 @@ def execute( if len(self) == 0: return - if not isinstance(kernels, Kernel): - kernels = Kernel(kernels, self.fieldset, self._ptype) - self._kernel = kernels + if isinstance(kernels, Kernel): + self._kernel = kernels + else: + if not isinstance(kernels, list): + kernels = [kernels] + self._kernel = Kernel(kernels, self.fieldset, self._ptype) if output_file is not None: output_file.set_metadata(self.fieldset.gridset[0]._mesh) diff --git a/tests/test_kernel.py b/tests/test_kernel.py index 480475c724..bcc5ed1b7c 100644 --- a/tests/test_kernel.py +++ b/tests/test_kernel.py @@ -83,7 +83,7 @@ def test_kernel_from_list_error_checking(fieldset): with pytest.raises(TypeError, match=r"Argument `kernels` should be a function or list of functions.*"): Kernel(kernels=[AdvectionRK4, "something else"], fieldset=fieldset, ptype=pset._ptype) - with pytest.raises(TypeError, match=r"Argument `kernels` should be a function or list of functions.*"): + with pytest.raises(TypeError, match=r".*is not a callable object"): kernels_mixed = Kernel( kernels=[Kernel(kernels=[AdvectionRK4], fieldset=fieldset, ptype=pset._ptype), MoveEast, MoveNorth], fieldset=fieldset, @@ -97,7 +97,7 @@ def test_RK45Kernel_error_no_next_dt(fieldset): pset = ParticleSet(fieldset, lon=[0.5], lat=[0.5]) with pytest.raises(ValueError, match='ParticleClass requires a "next_dt" for AdvectionRK45 Kernel.'): - Kernel(kernels=AdvectionRK45, fieldset=fieldset, ptype=pset._ptype) + Kernel(kernels=[AdvectionRK45], 
fieldset=fieldset, ptype=pset._ptype) def test_kernel_signature(fieldset): @@ -118,23 +118,23 @@ def kernel_switched_args(fieldset, particle): def kernel_with_forced_kwarg(particles, *, fieldset=0): pass - Kernel(kernels=good_kernel, fieldset=fieldset, ptype=pset._ptype) + Kernel(kernels=[good_kernel], fieldset=fieldset, ptype=pset._ptype) with pytest.raises(ValueError, match="Kernel function must have 2 parameters, got 3"): - Kernel(kernels=version_3_kernel, fieldset=fieldset, ptype=pset._ptype) + Kernel(kernels=[version_3_kernel], fieldset=fieldset, ptype=pset._ptype) with pytest.raises( ValueError, match="Parameter 'particle' has incorrect name. Expected 'particles', got 'particle'" ): - Kernel(kernels=version_3_kernel_without_time, fieldset=fieldset, ptype=pset._ptype) + Kernel(kernels=[version_3_kernel_without_time], fieldset=fieldset, ptype=pset._ptype) with pytest.raises( ValueError, match="Parameter 'fieldset' has incorrect name. Expected 'particles', got 'fieldset'" ): - Kernel(kernels=kernel_switched_args, fieldset=fieldset, ptype=pset._ptype) + Kernel(kernels=[kernel_switched_args], fieldset=fieldset, ptype=pset._ptype) with pytest.raises( ValueError, match="Parameter 'fieldset' has incorrect parameter kind. Expected POSITIONAL_OR_KEYWORD, got KEYWORD_ONLY", ): - Kernel(kernels=kernel_with_forced_kwarg, fieldset=fieldset, ptype=pset._ptype) + Kernel(kernels=[kernel_with_forced_kwarg], fieldset=fieldset, ptype=pset._ptype) diff --git a/tests/test_particleset_execute.py b/tests/test_particleset_execute.py index 0c13b46e51..ed98c93386 100644 --- a/tests/test_particleset_execute.py +++ b/tests/test_particleset_execute.py @@ -251,7 +251,7 @@ def test_pset_multi_execute(fieldset, with_delete, npart=10, n=5): def AddLat(particles, fieldset): # pragma: no cover particles.dlat += 0.1 - k_add = Kernel(kernels=AddLat, fieldset=fieldset, ptype=pset._ptype) + k_add = Kernel(kernels=[AddLat], fieldset=fieldset, ptype=pset._ptype) for _ in range(n): pset.execute(k_add, runtime=np.timedelta64(1, "s"), dt=np.timedelta64(1, "s")) if with_delete: From b1e2e2f9902bcd91cea7be582187f7ae25783c48 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 22 Jan 2026 14:10:49 +0100 Subject: [PATCH 4/5] Removing add on Kernels --- src/parcels/_core/kernel.py | 12 +----------- tests/test_kernel.py | 7 ++----- 2 files changed, 3 insertions(+), 16 deletions(-) diff --git a/src/parcels/_core/kernel.py b/src/parcels/_core/kernel.py index ad4a4c5d60..336b59f372 100644 --- a/src/parcels/_core/kernel.py +++ b/src/parcels/_core/kernel.py @@ -128,7 +128,7 @@ def PositionUpdate(particles, fieldset): # pragma: no cover # Update dt in case it's increased in RK45 kernel particles.dt = particles.next_dt - self._kernels = (PositionUpdate + self)._kernels + self._kernels = [PositionUpdate] + self._kernels def check_fieldsets_in_kernels(self, kernel): # TODO v4: this can go into another method? assert_is_compatible()? """ @@ -184,16 +184,6 @@ def merge(self, kernel): self.ptype, ) - def __add__(self, kernel): - if isinstance(kernel, types.FunctionType): - kernel = type(self)([kernel], self.fieldset, self.ptype) - return self.merge(kernel) - - def __radd__(self, kernel): - if isinstance(kernel, types.FunctionType): - kernel = type(self)([kernel], self.fieldset, self.ptype) - return kernel.merge(self) - def execute(self, pset, endtime, dt): """Execute this Kernel over a ParticleSet for several timesteps. 
diff --git a/tests/test_kernel.py b/tests/test_kernel.py index bcc5ed1b7c..d1edc5da0a 100644 --- a/tests/test_kernel.py +++ b/tests/test_kernel.py @@ -39,15 +39,12 @@ def test_kernel_init(fieldset): def test_kernel_merging(fieldset): - k1 = Kernel(kernels=[AdvectionRK4], fieldset=fieldset, ptype=Particle) - k2 = Kernel(kernels=[MoveEast, MoveNorth], fieldset=fieldset, ptype=Particle) - - merged_kernel = k1 + k2 + merged_kernel = Kernel(kernels=[AdvectionRK4, MoveEast, MoveNorth], fieldset=fieldset, ptype=Particle) assert merged_kernel.funcname == "AdvectionRK4MoveEastMoveNorth" assert len(merged_kernel._kernels) == 3 assert merged_kernel._kernels == [AdvectionRK4, MoveEast, MoveNorth] - merged_kernel = k2 + k1 + merged_kernel = Kernel(kernels=[MoveEast, MoveNorth, AdvectionRK4], fieldset=fieldset, ptype=Particle) assert merged_kernel.funcname == "MoveEastMoveNorthAdvectionRK4" assert len(merged_kernel._kernels) == 3 assert merged_kernel._kernels == [MoveEast, MoveNorth, AdvectionRK4] From 0bf0e42d1e30b9a7ccbd2a0db2bbbb61cc9a54d9 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 22 Jan 2026 14:13:09 +0100 Subject: [PATCH 5/5] Making Kernel private --- src/parcels/__init__.py | 2 -- src/parcels/_core/kernel.py | 2 -- tests/test_kernel.py | 2 +- tests/test_particleset_execute.py | 2 +- 4 files changed, 2 insertions(+), 6 deletions(-) diff --git a/src/parcels/__init__.py b/src/parcels/__init__.py index 5134cf2dce..c13850a333 100644 --- a/src/parcels/__init__.py +++ b/src/parcels/__init__.py @@ -11,7 +11,6 @@ from parcels._core.fieldset import FieldSet from parcels._core.particleset import ParticleSet -from parcels._core.kernel import Kernel from parcels._core.particlefile import ParticleFile from parcels._core.particle import ( Variable, @@ -45,7 +44,6 @@ # Core classes "FieldSet", "ParticleSet", - "Kernel", "ParticleFile", "Variable", "Particle", diff --git a/src/parcels/_core/kernel.py b/src/parcels/_core/kernel.py index 336b59f372..a596213651 100644 --- a/src/parcels/_core/kernel.py +++ b/src/parcels/_core/kernel.py @@ -27,8 +27,6 @@ if TYPE_CHECKING: from collections.abc import Callable -__all__ = ["Kernel"] - ErrorsToThrow = { StatusCode.ErrorOutsideTimeInterval: _raise_outside_time_interval_error, diff --git a/tests/test_kernel.py b/tests/test_kernel.py index d1edc5da0a..07e1e74760 100644 --- a/tests/test_kernel.py +++ b/tests/test_kernel.py @@ -4,11 +4,11 @@ from parcels import ( Field, FieldSet, - Kernel, Particle, ParticleSet, XGrid, ) +from parcels._core.kernel import Kernel from parcels._datasets.structured.generic import datasets as datasets_structured from parcels.interpolators import XLinear from parcels.kernels import AdvectionRK4, AdvectionRK45 diff --git a/tests/test_particleset_execute.py b/tests/test_particleset_execute.py index ed98c93386..e6b6d07afb 100644 --- a/tests/test_particleset_execute.py +++ b/tests/test_particleset_execute.py @@ -9,7 +9,6 @@ FieldInterpolationError, FieldOutOfBoundError, FieldSet, - Kernel, OutsideTimeInterval, Particle, ParticleFile, @@ -20,6 +19,7 @@ VectorField, XGrid, ) +from parcels._core.kernel import Kernel from parcels._core.utils.time import timedelta_to_float from parcels._datasets.structured.generated import simple_UV_dataset from parcels._datasets.structured.generic import datasets as datasets_structured
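
For reference, a minimal sketch of the calling convention after this patch series, based on the usages shown in the updated docs and tests (the `fieldset` object is assumed to already exist and is not constructed here; the `AddLat` kernel body and the runtime/dt values are illustrative, taken from the tests):

```python
import numpy as np

from parcels import ParticleSet
from parcels.kernels import AdvectionRK4


def AddLat(particles, fieldset):  # custom kernels keep the (particles, fieldset) signature
    particles.dlat += 0.1


# `fieldset` is a placeholder for an existing parcels.FieldSet; its construction is not shown.
pset = ParticleSet(fieldset, lon=[0.5], lat=[0.5])

# Kernel functions are now passed directly, as a list, via the `kernels` argument;
# `pyfunc=`, `pset.Kernel(...)` and kernel concatenation with `+` are no longer used.
pset.execute(
    kernels=[AdvectionRK4, AddLat],
    runtime=np.timedelta64(1, "h"),
    dt=np.timedelta64(5, "m"),
)
```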