Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
76 commits
Select commit Hold shift + click to select a range
ffe0ebf
Update .gitignore
VeckoTheGecko Apr 17, 2026
9ebb653
Disable zarr writing
VeckoTheGecko Apr 17, 2026
0218c28
Fix parquet writing
VeckoTheGecko Apr 17, 2026
4e7de3e
Remove test_vriable_write_double
VeckoTheGecko Apr 17, 2026
bc653f1
Fix all "uses_old_zarr" tests
VeckoTheGecko Apr 17, 2026
5daec3b
Remove test_variable_write_double
VeckoTheGecko Apr 17, 2026
2a07ced
Fixing tests
VeckoTheGecko Apr 17, 2026
b8c5477
More test fixing
VeckoTheGecko Apr 20, 2026
2d438c5
Fix last tests
VeckoTheGecko Apr 20, 2026
32a82fa
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Apr 20, 2026
19fbd8d
Remove old fixtures
VeckoTheGecko Apr 20, 2026
e74672f
Fix pre-commit errors
VeckoTheGecko Apr 20, 2026
db9f983
Cleanup
VeckoTheGecko Apr 20, 2026
ac2a830
Add pandas and pyarrow as explicit dependencies
VeckoTheGecko Apr 20, 2026
de464e5
Add assert_cftime_like_particlefile
VeckoTheGecko Apr 22, 2026
57ccf6f
MAINT: Cleanup create_particle_data
VeckoTheGecko Apr 20, 2026
b2bde50
Add cftime metadata serialization
VeckoTheGecko Apr 22, 2026
55493a9
Add np.timedelta64 support
VeckoTheGecko Apr 22, 2026
bab4d5d
Fix assert_cftime_like_particlefile
VeckoTheGecko Apr 22, 2026
b28665c
Move imports
VeckoTheGecko Apr 22, 2026
7184e1f
Fixing tests
VeckoTheGecko Apr 22, 2026
e7e37ef
Fix test_time_is_age test
VeckoTheGecko Apr 22, 2026
54c829a
Refactor assert_cftime_like_particlefile
VeckoTheGecko Apr 23, 2026
8626d48
Self-review feedback
VeckoTheGecko Apr 23, 2026
3693329
Fix test_particle_schema
VeckoTheGecko Apr 23, 2026
81f127b
Make read_particlefile public
VeckoTheGecko Apr 23, 2026
9fcb5bf
Add docstring to read_particlefile
VeckoTheGecko Apr 23, 2026
41ed3d8
Updating Argo tutorial to use parquet
erikvansebille Apr 23, 2026
b64a00e
Updating tutorial_nemo to use parquet output
erikvansebille Apr 23, 2026
5e0fc7f
Update tutorial_diffusion to use parquet
erikvansebille Apr 23, 2026
511ce10
Update tutorial_output to use parquet
erikvansebille Apr 24, 2026
04d8676
Review feedback
VeckoTheGecko Apr 24, 2026
6695187
Update migration guide
VeckoTheGecko Apr 24, 2026
b4e2214
Remove obs_written
VeckoTheGecko Apr 24, 2026
a5bdd31
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Apr 24, 2026
663f80c
Update migration guide
VeckoTheGecko Apr 24, 2026
5be22aa
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Apr 24, 2026
002b8f2
Revert from extra_metadata to metadata
VeckoTheGecko Apr 24, 2026
3c52647
Fix test_pfile_array_remove_particles
VeckoTheGecko Apr 24, 2026
39ab122
Merge branch 'main' into push-zlxyoyvlpoqm
VeckoTheGecko Apr 24, 2026
ae375b3
Fix numpy warning
VeckoTheGecko Apr 24, 2026
ebf2b54
Merge branch 'push-zlxyoyvlpoqm' of https://github.com/VeckoTheGecko/…
erikvansebille Apr 24, 2026
8379a88
Merge branch 'pr/2583' into update_parquet_docs
erikvansebille Apr 24, 2026
c10e741
Using polars in tutorial_output
erikvansebille Apr 24, 2026
b986c81
Update explanation_concepts.md
erikvansebille Apr 24, 2026
4d5d670
update tutorial_croco to use parquet
erikvansebille Apr 24, 2026
e005595
using polars for read_particlefile
erikvansebille Apr 28, 2026
f738ca7
Update tutorial_quickstart to use parquet
erikvansebille Apr 28, 2026
ea9fe51
Update tutorial_output to use polars
erikvansebille Apr 28, 2026
4a222e1
Merge branch 'main' into update_parquet_docs
erikvansebille Apr 28, 2026
8b380a0
Update v4-migration.md
erikvansebille Apr 28, 2026
b049a73
Fix using polars in tutorial_output
erikvansebille Apr 28, 2026
59ed170
Fixing read_parquet to use polars
erikvansebille Apr 28, 2026
3f326ce
Update tutorial_delaystart to use parquet
erikvansebille Apr 29, 2026
1a48b44
Update tutorial_dt_integrators to use parquet
erikvansebille Apr 30, 2026
9e3b88a
Fixing parcels.read_particlefile for timedelta time
erikvansebille Apr 30, 2026
9084910
Update tutorial_interaction to use parquet
erikvansebille Apr 30, 2026
3ad3f10
Update tutorial_manipulating_field_data to use parquet
erikvansebille Apr 30, 2026
daad8c9
Update tutorial_mitgcm to use parquet
erikvansebille Apr 30, 2026
f811b8d
Update tutorial_nestedgrids to use parquet
erikvansebille Apr 30, 2026
4dd08b9
Update tutorial_sampling to use parquet (and remove to_write="once" s…
erikvansebille Apr 30, 2026
b06a051
Removing old attributes from particlefile.repr
erikvansebille Apr 30, 2026
aa9fbd1
Using more intuitive variable names for polars subsetting
erikvansebille Apr 30, 2026
b57e78f
Fixing repr of particleset
erikvansebille Apr 30, 2026
9aa1459
Update tutorial_Argofloats.ipynb
erikvansebille Apr 30, 2026
d521ad3
Update tutorial_quickstart to use parquet
erikvansebille Apr 30, 2026
3d0c55d
Update tutorial_croco_3D.ipynb
erikvansebille Apr 30, 2026
580d5fb
Using polars in tutorial_diffusion
erikvansebille Apr 30, 2026
4571bca
Use polars in tutorial_nemo
erikvansebille Apr 30, 2026
3d29c72
Use parquet in explanation_kernelloop
erikvansebille Apr 30, 2026
45c9cf0
Update policies.md
erikvansebille Apr 30, 2026
d47202b
Fixing unit tests to use polars in parcels.read_particlefile
erikvansebille Apr 30, 2026
31dabc0
Merge branch 'main' into update_parquet_docs
erikvansebille Apr 30, 2026
31a49e8
Merge branch 'main' into update_parquet_docs
VeckoTheGecko May 1, 2026
3b2a166
Merge branch 'main' into update_parquet_docs
VeckoTheGecko May 1, 2026
1b35bf9
Doc fix: docs/user_guide/examples/tutorial_dt_integrators.ipynb
VeckoTheGecko May 1, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion docs/development/policies.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ Parcels follows [Intended Effort Versioning (EffVer)](https://jacobtomlinson.dev

When making backward incompatible changes, we will make sure these changes and instructions to upgrade are communicated to the user via change logs or migration guides, and (where applicable) informative error messaging.

Note when conducting research we highly recommend documenting which version of Parcels (and other packages) you are using. This can be as easy as doing `conda env export > environment.yml` alongside your project code. The Parcels version used to generate an output file is also stored as metadata entry in the `.zarr` output file.
Note when conducting research we highly recommend documenting which version of Parcels (and other packages) you are using. This can be as easy as doing `conda env export > environment.yml` alongside your project code. The Parcels version used to generate an output file is also stored as metadata entry in the `.parquet` output file.

## Changes in policies

Expand Down
2 changes: 1 addition & 1 deletion docs/getting_started/explanation_concepts.md
Original file line number Diff line number Diff line change
Expand Up @@ -186,7 +186,7 @@ pset.execute(kernels=kernels, dt=dt, runtime=runtime)

### Output

To analyse the particle data generated in the simulation, we need to define a `parcels.ParticleFile` and add it as an argument to `parcels.ParticleSet.execute()`. The output will be written in a [zarr format](https://zarr.readthedocs.io/en/stable/), which can be opened as an `xarray.Dataset`. The dataset will contain the particle data with at least `time`, `z`, `lat` and `lon`, for each particle at timesteps defined by the `outputdt` argument.
To analyse the particle data generated in the simulation, we need to define a `parcels.ParticleFile` and add it as an argument to `parcels.ParticleSet.execute()`. The output will be written in a [parquet format](https://parquet.apache.org/), which can be opened as a `polars.DataFrame`. The dataset will contain the particle data with at least `time`, `z`, `lat` and `lon`, for each particle at timesteps defined by the `outputdt` argument.

There are many ways to analyze particle output, and although we provide [a short tutorial to get started](./tutorial_output.ipynb), we recommend writing your own analysis code and checking out [related Lagrangian analysis projects in our community page](../community/index.md#analysis-code).

Expand Down
385 changes: 165 additions & 220 deletions docs/getting_started/tutorial_output.ipynb
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I've gone through this - really nice update! I think its quite clear

Large diffs are not rendered by default.

40 changes: 20 additions & 20 deletions docs/getting_started/tutorial_quickstart.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,12 +13,12 @@ read more, we have a [concepts overview](./explanation_concepts.md) discussing t

## Imports

Parcels depends on `xarray`, expecting inputs in the form of [`xarray.Dataset`](https://docs.xarray.dev/en/stable/generated/xarray.Dataset.html)
and writing output files that can be read with xarray.
Parcels depends on `xarray`, expecting inputs in the form of [`xarray.Dataset`](https://docs.xarray.dev/en/stable/generated/xarray.Dataset.html). Output files can be read with `pandas`.
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
Parcels depends on `xarray`, expecting inputs in the form of [`xarray.Dataset`](https://docs.xarray.dev/en/stable/generated/xarray.Dataset.html). Output files can be read with `pandas`.
Parcels depends on `xarray`, expecting inputs in the form of [`xarray.Dataset`](https://docs.xarray.dev/en/stable/generated/xarray.Dataset.html). Output files can be read with `polars`.


```{code-cell}
import numpy as np
import xarray as xr
import polars as pl
import parcels
import parcels.tutorial
```
Expand Down Expand Up @@ -123,11 +123,11 @@ Before starting the simulation, we must define where and how frequent we want to
We can define this in a {py:obj}`parcels.ParticleFile` object:

```{code-cell}
output_file = parcels.ParticleFile("output-quickstart.zarr", outputdt=np.timedelta64(1, "h"))
output_file = parcels.ParticleFile("output-quickstart.parquet", outputdt=np.timedelta64(1, "h"))
```

The output files are in `.zarr` [format](https://zarr.readthedocs.io/en/stable/), which can be read by `xarray`.
See the [Parcels output tutorial](./tutorial_output.ipynb) for more information on the zarr format. We want to choose
The output files are in `.parquet` [format](https://parquet.apache.org/), which can be read by `polars`.
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Would this be a good place to link to Polars?

Suggested change
The output files are in `.parquet` [format](https://parquet.apache.org/), which can be read by `polars`.
The output files are in `.parquet` [format](https://parquet.apache.org/), which can be read by [Polars](https://pola.rs/).

I don't think we link to it yet in the docs here

See the [Parcels output tutorial](./tutorial_output.ipynb) for more information on the parquet format. We want to choose
the `outputdt` argument so that it captures the smallest timescales of our interest.

## Run Simulation: `ParticleSet.execute()`
Expand Down Expand Up @@ -155,23 +155,22 @@ pset.execute(
To start analyzing the trajectories computed by **Parcels**, we can open the `ParticleFile` using `parcels.read_particlefile`:
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This needs to be updated from "xarray"


```{code-cell}
ds_particles = xr.open_zarr("output-quickstart.zarr")
ds_particles
df = parcels.read_particlefile("output-quickstart.parquet")
df
```

The 10 particle trajectories are stored along the `trajectory` dimension, and each trajectory contains 25 observations
(initial values + 24 hourly timesteps) along the `obs` dimension. The [working with Parcels output tutorial](./tutorial_output.ipynb)
provides more detail about the dataset and how to analyse it.
The file contains 250 rows: 25 observations for the 10 particle trajectories.
The [working with Parcels output tutorial](./tutorial_output.ipynb) provides more detail about the dataset and how to analyse it.

Let's verify that Parcels has computed the advection of the virtual particles!

```{code-cell}
import matplotlib.pyplot as plt

# plot positions and color particles by number of observation
scatter = plt.scatter(ds_particles.lon.T, ds_particles.lat.T, c=np.repeat(ds_particles.obs.values,npart))
plt.scatter(ds_particles.lon[:,0],ds_particles.lat[:,0],facecolors="none",edgecolors='r') # starting positions
plt.scatter(lon,lat,facecolors="none",edgecolors='r') # starting positions
# plot positions and color particles by time
scatter = plt.scatter(df['lon'], df['lat'], c=df['time'])
plt.scatter(df['lon'][:npart], df['lat'][:npart], facecolors="none", edgecolors='r') # starting positions
plt.scatter(lon, lat, facecolors="none", edgecolors='r') # starting positions
plt.xlim(31,33)
plt.ylabel("Latitude [deg N]")
plt.ylim(-33,-30)
Expand All @@ -196,7 +195,7 @@ location!
```{code-cell}
:tags: [hide-output]
# set up output file
output_file = parcels.ParticleFile("output-backwards.zarr", outputdt=np.timedelta64(1, "h"))
output_file = parcels.ParticleFile("output-backwards.parquet", outputdt=np.timedelta64(1, "h"))

# execute simulation in backwards time
pset.execute(
Expand All @@ -210,10 +209,11 @@ pset.execute(
When we check the output, we can see that the particles have returned to their original position!

```{code-cell}
ds_particles_back = xr.open_zarr("output-backwards.zarr")
df_back = parcels.read_particlefile("output-backwards.parquet")

scatter = plt.scatter(ds_particles_back.lon.T, ds_particles_back.lat.T, c=np.repeat(ds_particles_back.obs.values,npart))
plt.scatter(ds_particles_back.lon[:,0],ds_particles_back.lat[:,0],facecolors="none",edgecolors='r') # starting positions
scatter = plt.scatter(df_back['lon'], df_back['lat'], c=df_back['time'])
particles_at_start = df_back.filter(pl.col("time") == df_back["time"].min())
plt.scatter(particles_at_start['lon'], particles_at_start['lat'], facecolors="none", edgecolors='r') # starting positions
plt.xlabel("Longitude [deg E]")
plt.xlim(31,33)
plt.ylabel("Latitude [deg N]")
Expand All @@ -226,6 +226,6 @@ Using Euler forward advection, the final positions are equal to the original pos

```{code-cell}
# testing that final location == original location
np.testing.assert_almost_equal(ds_particles_back['lat'].values[:,-1],ds_particles['lat'].values[:,0], 2)
np.testing.assert_almost_equal(ds_particles_back['lon'].values[:,-1],ds_particles['lon'].values[:,0], 2)
np.testing.assert_almost_equal(particles_at_start["lat"], lat, 2)
np.testing.assert_almost_equal(particles_at_start['lon'], lon, 2)
```
16 changes: 10 additions & 6 deletions docs/user_guide/examples/explanation_kernelloop.md
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,7 @@ lats = np.linspace(-32.5, -30.5, npart)

pset = parcels.ParticleSet(fieldset, pclass=parcels.Particle, z=z, lat=lats, lon=lons)
output_file = parcels.ParticleFile(
store="advection_then_wind.zarr", outputdt=np.timedelta64(6,'h')
path="advection_then_wind.parquet", outputdt=np.timedelta64(6,'h')
)
pset.execute(
[parcels.kernels.AdvectionRK2, wind_kernel],
Expand All @@ -126,7 +126,7 @@ pset_reverse = parcels.ParticleSet(
fieldset, pclass=parcels.Particle, z=z, lat=lats, lon=lons
)
output_file_reverse = parcels.ParticleFile(
store="wind_then_advection.zarr", outputdt=np.timedelta64(6,"h")
path="wind_then_advection.parquet", outputdt=np.timedelta64(6,"h")
)
pset_reverse.execute(
[wind_kernel, parcels.kernels.AdvectionRK2],
Expand All @@ -140,10 +140,14 @@ Finally, plot the trajectories to show that they are identical in the two simula

```{code-cell}
# Plot the resulting particle trajectories overlapped for both cases
advection_then_wind = xr.open_zarr("advection_then_wind.zarr")
wind_then_advection = xr.open_zarr("wind_then_advection.zarr")
plt.plot(wind_then_advection.lon.T, wind_then_advection.lat.T, "-")
plt.plot(advection_then_wind.lon.T, advection_then_wind.lat.T, "--", c="k", alpha=0.7)
advection_then_wind = parcels.read_particlefile("advection_then_wind.parquet")
wind_then_advection = parcels.read_particlefile("wind_then_advection.parquet")

fig, ax = plt.subplots(figsize=(5, 3))
for traj in wind_then_advection.partition_by("particle_id", maintain_order=True):
ax.plot(traj["lon"], traj["lat"], "-")
for traj in advection_then_wind.partition_by("particle_id", maintain_order=True):
ax.plot(traj["lon"], traj["lat"], "--", c="k", alpha=0.7)
plt.show()
```

Expand Down
31 changes: 9 additions & 22 deletions docs/user_guide/examples/tutorial_Argofloats.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -159,9 +159,8 @@
"\n",
"# Create a ParticleFile object to store the output\n",
"output_file = parcels.ParticleFile(\n",
" store=\"argo_float.zarr\",\n",
" \"argo_float.parquet\",\n",
" outputdt=timedelta(minutes=15),\n",
" chunks=(1, 500), # setting to write in chunks of 500 observations\n",
")\n",
"\n",
"# Now execute the Kernels for 30 days, saving data every 30 minutes\n",
Expand All @@ -183,20 +182,6 @@
"First plot the depth as a function of time, with the temperature as color (only on the upcast)."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ds_particles = xr.open_zarr(output_file.store)\n",
"x = ds_particles[\"lon\"][:].squeeze()\n",
"y = ds_particles[\"lat\"][:].squeeze()\n",
"z = ds_particles[\"z\"][:].squeeze()\n",
"time = ds_particles[\"time\"][:].squeeze()\n",
"temp = ds_particles[\"temp\"][:].squeeze()"
]
},
{
"cell_type": "code",
"execution_count": null,
Expand All @@ -205,10 +190,12 @@
"source": [
"import matplotlib.pyplot as plt\n",
"\n",
"df = parcels.read_particlefile(\"argo_float.parquet\")\n",
"\n",
"fig = plt.figure(figsize=(13, 6))\n",
"ax = plt.axes()\n",
"ax.plot(time, z, color=\"gray\")\n",
"cb = ax.scatter(time, z, c=temp, s=20, marker=\"o\", zorder=2)\n",
"ax.plot(df[\"time\"], df[\"z\"], color=\"gray\")\n",
"cb = ax.scatter(df[\"time\"], df[\"z\"], c=df[\"temp\"], s=20, marker=\"o\", zorder=2)\n",
"ax.set_xlabel(\"Time [days]\")\n",
"ax.set_ylabel(\"Depth (m)\")\n",
"ax.invert_yaxis()\n",
Expand All @@ -234,12 +221,12 @@
"fig = plt.figure(figsize=(13, 8))\n",
"ax = plt.axes(projection=\"3d\")\n",
"ax.view_init(azim=-145)\n",
"ax.plot3D(x, y, z, color=\"gray\")\n",
"cb = ax.scatter(x, y, z, c=temp, s=20, marker=\"o\", zorder=2)\n",
"ax.plot3D(df[\"lon\"], df[\"lat\"], df[\"z\"], color=\"gray\")\n",
"cb = ax.scatter(df[\"lon\"], df[\"lat\"], df[\"z\"], c=df[\"temp\"], s=20, marker=\"o\", zorder=2)\n",
"ax.set_xlabel(\"Longitude\")\n",
"ax.set_ylabel(\"Latitude\")\n",
"ax.set_zlabel(\"Depth (m)\")\n",
"ax.set_zlim(np.max(z), 0)\n",
"ax.set_zlim(df[\"z\"].max(), 0)\n",
"fig.colorbar(cb, label=\"Temperature (°C)\")\n",
"plt.show()"
]
Expand All @@ -261,7 +248,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.14.2"
"version": "3.14.4"
}
},
"nbformat": 4,
Expand Down
28 changes: 20 additions & 8 deletions docs/user_guide/examples/tutorial_croco_3D.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,17 @@
"metadata": {},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt\nimport numpy as np\nimport xarray as xr\n\nimport parcels\nimport parcels.tutorial\n\nds_fields = parcels.tutorial.open_dataset(\"CROCOidealized_data/data\")\n\nds_fields.load(); # Preload data to speed up access"
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import polars as pl\n",
"import xarray as xr\n",
"\n",
"import parcels\n",
"import parcels.tutorial\n",
"\n",
"ds_fields = parcels.tutorial.open_dataset(\"CROCOidealized_data/data\")\n",
"\n",
"ds_fields.load(); # Preload data to speed up access"
]
},
{
Expand Down Expand Up @@ -119,7 +129,7 @@
")\n",
"\n",
"outputfile = parcels.ParticleFile(\n",
" store=\"croco_particles3D.zarr\",\n",
" path=\"croco_particles3D.parquet\",\n",
" outputdt=np.timedelta64(5000, \"s\"),\n",
")\n",
"\n",
Expand Down Expand Up @@ -149,10 +159,11 @@
"outputs": [],
"source": [
"fig, ax = plt.subplots(1, 1, figsize=(6, 4))\n",
"ds = xr.open_zarr(\"croco_particles3D.zarr\")\n",
"df = pl.read_parquet(\"croco_particles3D.parquet\")\n",
"\n",
"ax.plot(X / 1e3, Z, \"k.\", label=\"Initial positions\")\n",
"ax.plot(ds.lon.T / 1e3, ds.z.T, \".-\")\n",
"for traj in df.partition_by(\"particle_id\", maintain_order=True):\n",
" ax.plot(traj[\"lon\"] / 1e3, traj[\"z\"], \".-\")\n",
"\n",
"for z in ds_fields.s_w.values:\n",
" ax.plot(\n",
Expand Down Expand Up @@ -208,7 +219,7 @@
")\n",
"\n",
"outputfile = parcels.ParticleFile(\n",
" store=\"croco_particles_noW.zarr\", outputdt=np.timedelta64(5000, \"s\")\n",
" path=\"croco_particles_noW.parquet\", outputdt=np.timedelta64(5000, \"s\")\n",
")\n",
"\n",
"pset_noW.execute(\n",
Expand All @@ -219,10 +230,11 @@
")\n",
"\n",
"fig, ax = plt.subplots(1, 1, figsize=(6, 4))\n",
"ds = xr.open_zarr(\"croco_particles_noW.zarr\")\n",
"df = pl.read_parquet(\"croco_particles_noW.parquet\")\n",
"\n",
"ax.plot(X / 1e3, Z, \"k.\", label=\"Initial positions\")\n",
"ax.plot(ds.lon.T / 1e3, ds.z.T, \".-\")\n",
"for traj in df.partition_by(\"particle_id\", maintain_order=True):\n",
" ax.plot(traj[\"lon\"] / 1e3, traj[\"z\"], \".-\")\n",
"\n",
"for z in ds_fields.s_w.values:\n",
" ax.plot(\n",
Expand Down Expand Up @@ -306,7 +318,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.14.2"
"version": "3.14.4"
}
},
"nbformat": 4,
Expand Down
Loading
Loading