Skip to content

Commit 5f2e8ec

Browse files
Merge pull request #2362 from Parcels-code/MyST-quickstart
Quickstart Guide and MyST-NB implementation
2 parents 0504d40 + 1ddb76d commit 5f2e8ec

File tree

14 files changed

+461
-194
lines changed

14 files changed

+461
-194
lines changed

.gitignore

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
build/*
22
docs/_build/*
33
docs/_downloads
4+
docs/jupyter_execute/*
5+
docs/.jupyter_cache/*
46
output
57

68
*.log

docs/conf.py

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -36,8 +36,7 @@
3636
"sphinx.ext.linkcode",
3737
"sphinx.ext.mathjax",
3838
"sphinx.ext.napoleon",
39-
"myst_parser",
40-
"nbsphinx",
39+
"myst_nb",
4140
"numpydoc",
4241
"sphinxcontrib.mermaid",
4342
"sphinx_design",
@@ -114,7 +113,13 @@
114113

115114
# List of patterns, relative to source directory, that match files and
116115
# directories to ignore when looking for source files.
117-
exclude_patterns = ["_build", "**.ipynb_checkpoints", "user_guide/examples_v3"]
116+
exclude_patterns = [
117+
"_build",
118+
"jupyter_execute",
119+
"**.ipynb_checkpoints",
120+
"user_guide/examples_v3",
121+
".jupyter_cache",
122+
]
118123

119124
# The reST default role (used for this markup: `text`) to use for all
120125
# documents.
@@ -294,7 +299,7 @@ def linkcode_resolve(domain, info):
294299

295300
# Custom sidebar templates, maps document names to template names.
296301

297-
html_sidebars = {"**": ["sidebar-nav-bs"], "documentation/additional_examples": []}
302+
html_sidebars = {"**": ["sidebar-nav-bs"]}
298303

299304
# Additional templates that should be rendered to pages, maps page names to
300305
# template names.
@@ -343,17 +348,6 @@ def linkcode_resolve(domain, info):
343348
# Output file base name for HTML help builder.
344349
htmlhelp_basename = "parcelsdoc"
345350

346-
nbsphinx_thumbnails = {
347-
"examples/tutorial_parcels_structure": "_images/parcels_user_diagram.png",
348-
"examples/tutorial_timestamps": "_static/calendar-icon.jpg",
349-
"examples/documentation_homepage_animation": "_images/homepage.gif",
350-
"examples/tutorial_interaction": "_static/pulled_particles_twoatractors_line.gif",
351-
"examples/documentation_LargeRunsOutput": "_static/harddrive.png",
352-
"examples/tutorial_unitconverters": "_static/globe-icon.jpg",
353-
"examples/documentation_geospatial": "_images/tutorial_geospatial_google_earth.png",
354-
"examples/tutorial_kernelloop": "_static/loop-icon.jpeg",
355-
}
356-
nbsphinx_execute = "never"
357351
# -- Options for LaTeX output ---------------------------------------------
358352

359353
BRANCH = (
@@ -528,3 +522,9 @@ def linkcode_resolve(domain, info):
528522
myst_heading_anchors = 3
529523

530524
myst_enable_extensions = ["substitution"]
525+
526+
# -- Options for MyST-nb --------------------------------------------------
527+
nb_execution_mode = "cache"
528+
nb_execution_excludepatterns = ["jupyter_execute", ".jupyter_cache"]
529+
nb_execution_raise_on_error = True
530+
nb_execution_timeout = 75

docs/development/docsguide.md

Lines changed: 26 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,9 @@
22

33
## Vision
44

5-
We believe a clear documentation is important to community building, reproducibility, and transparency in our open-source project. To make it easier to write our documentation in a consistent way, here we outline a brief vision for our documentation based heavily on a few common resources.
5+
We believe clear documentation is important to community building, reproducibility, and transparency in our open-source
6+
project. To make it easier to write our documentation in a consistent way, here we outline a brief vision for our
7+
documentation based heavily on a few common resources.
68

79
```{note}
810
TODO: outline functions of the documentation based on resources
@@ -15,6 +17,28 @@ TODO: outline functions of the documentation based on resources
1517
- [Write the Docs Guide](https://www.writethedocs.org/guide/)
1618
- [NumPy Documentation Article](https://labs.quansight.org/blog/2020/03/documentation-as-a-way-to-build-community)
1719

20+
## Notebook execution
21+
22+
We run the notebooks in our documentation using [MyST-NB](https://myst-nb.readthedocs.io/en/latest/index.html). Here is
23+
a table showing the latest notebook execution:
24+
25+
```{nb-exec-table}
26+
27+
```
28+
1829
## Style guide
1930

20-
- Write documentation in first person plural ("we"). In our open source code, tutorials and guides can be written by any developer or user, so the documentation teaches all of us how to do something with Parcels.
31+
- **Prefer `import parcels` over `from parcels import class` in tutorials and how-to guides** so it's obvious in later
32+
code cells which classes and methods are part of Parcels.
33+
- [**Avoid too much Repetition In Documentation**](https://www.writethedocs.org/guide/writing/docs-principles/#arid):
34+
tutorial and how-to guide notebooks will often have repetition of the general **Parcels** steps (e.g., imports) —
35+
this is needed so that users have complete examples that they can copy and experiment with. We try to limit each page
36+
in the documentation to a small number of examples.
37+
- Introduce links and cross-references to maximize discoverability of documentation. This also reduces the necessity for
38+
repetition in notebooks.
39+
- **Import packages at the top of the section in which they are first used** to show what they are used for.
40+
- **Write documentation in first person plural ("we").** In our open source code, tutorials and guides can be written
41+
by any developer or user, so the documentation teaches all of us how to do something with Parcels. Sometimes it can be
42+
more natural to take on the tone of a teacher, writing to a student/learner, in which case it is okay to use "you".
43+
Please refrain from using impersonal subjects such as "the user".
44+
- We recommend hard wrapping prose in markdown so that reading it becomes easier in any editor.

docs/getting_started/index.md

Lines changed: 3 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -3,25 +3,18 @@
33
Getting started with parcels is easy; here you will find:
44

55
```{toctree}
6+
:maxdepth: 1
67
Installation guide <installation.md>
78
Quickstart tutorial <tutorial_quickstart.md>
89
Parcels concepts explainer <concepts_overview.md>
9-
<!-- Simple output tutorial <../examples_v3/tutorial_output.ipynb> -->
10+
Simple output tutorial <tutorial_output.ipynb>
1011
1112
```
1213

1314
```{note}
14-
TODO: Include one line conda installation for most common use
15-
```
16-
17-
```{note}
18-
TODO: Write quickstart tutorial. This should focus on getting users familiar with the different steps to take to run a simulation. Implicitly we should introduce them to the important concepts, but it should not be an in depth explanation. Instead we should reference to the concept overview and the reference API
15+
TODO: Add links to Reference API in quickstart tutorial
1916
```
2017

2118
```{note}
2219
TODO: Rewrite parcels concepts overview. This .md file should contain most of what is currently in the tutorial_parcels_structure notebook.
2320
```
24-
25-
```{note}
26-
TODO: Move new output tutorial here
27-
```

docs/user_guide/examples/tutorial_output.ipynb renamed to docs/getting_started/tutorial_output.ipynb

Lines changed: 36 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -15,11 +15,11 @@
1515
"source": [
1616
"This tutorial covers the format of the trajectory output exported by Parcels. **Parcels does not include advanced analysis or plotting functionality**, which users are suggested to write themselves to suit their research goals. Here we provide some starting points to explore the parcels output files yourself.\n",
1717
"\n",
18-
"- [**Reading the output file**](#Reading-the-output-file)\n",
19-
"- [**Trajectory data structure**](#Trajectory-data-structure)\n",
20-
"- [**Analysis**](#Analysis)\n",
21-
"- [**Plotting**](#Plotting)\n",
22-
"- [**Animations**](#Animations)\n",
18+
"- [**Reading the output file**](#reading-the-output-file)\n",
19+
"- [**Trajectory data structure**](#trajectory-data-structure)\n",
20+
"- [**Analysis**](#analysis)\n",
21+
"- [**Plotting**](#plotting)\n",
22+
"- [**Animations**](#animations)\n",
2323
"\n",
2424
"For more advanced reading and tutorials on the analysis of Lagrangian trajectories, we recommend checking out the [Lagrangian Diagnostics Analysis Cookbook](https://lagrangian-diags.readthedocs.io/en/latest/tutorials.html) and the project in general. The [TrajAn package](https://opendrift.github.io/trajan/index.html) can be used to read and plot datasets of Lagrangian trajectories."
2525
]
@@ -56,10 +56,10 @@
5656
" \"CopernicusMarine_data_for_Argo_tutorial\"\n",
5757
")\n",
5858
"\n",
59-
"ds = xr.open_mfdataset(f\"{example_dataset_folder}/*.nc\", combine=\"by_coords\")\n",
60-
"ds.load() # load the dataset into memory\n",
59+
"ds_fields = xr.open_mfdataset(f\"{example_dataset_folder}/*.nc\", combine=\"by_coords\")\n",
60+
"ds_fields.load() # load the dataset into memory\n",
6161
"\n",
62-
"fieldset = parcels.FieldSet.from_copernicusmarine(ds)"
62+
"fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields)"
6363
]
6464
},
6565
{
@@ -72,14 +72,14 @@
7272
"npart = 10 # number of particles to be released\n",
7373
"lon = 32 * np.ones(npart)\n",
7474
"lat = np.linspace(-32.5, -30.5, npart, dtype=np.float32)\n",
75-
"time = ds.time.values[0] + np.arange(0, npart) * np.timedelta64(2, \"h\")\n",
76-
"z = np.repeat(ds.depth.values[0], npart)\n",
75+
"time = ds_fields.time.values[0] + np.arange(0, npart) * np.timedelta64(2, \"h\")\n",
76+
"z = np.repeat(ds_fields.depth.values[0], npart)\n",
7777
"\n",
7878
"pset = parcels.ParticleSet(\n",
7979
" fieldset=fieldset, pclass=parcels.Particle, lon=lon, lat=lat, time=time, z=z\n",
8080
")\n",
8181
"\n",
82-
"output_file = parcels.ParticleFile(\"Output.zarr\", outputdt=np.timedelta64(2, \"h\"))"
82+
"output_file = parcels.ParticleFile(\"output.zarr\", outputdt=np.timedelta64(2, \"h\"))"
8383
]
8484
},
8585
{
@@ -109,7 +109,11 @@
109109
{
110110
"cell_type": "code",
111111
"execution_count": null,
112-
"metadata": {},
112+
"metadata": {
113+
"tags": [
114+
"hide-output"
115+
]
116+
},
113117
"outputs": [],
114118
"source": [
115119
"pset.execute(\n",
@@ -136,9 +140,9 @@
136140
"metadata": {},
137141
"outputs": [],
138142
"source": [
139-
"data_xarray = xr.open_zarr(\"Output.zarr\")\n",
143+
"ds_particles = xr.open_zarr(\"output.zarr\")\n",
140144
"\n",
141-
"print(data_xarray)"
145+
"print(ds_particles)"
142146
]
143147
},
144148
{
@@ -171,7 +175,7 @@
171175
"source": [
172176
"np.set_printoptions(linewidth=160)\n",
173177
"one_hour = np.timedelta64(1, \"h\") # Define timedelta object to help with conversion\n",
174-
"time_from_start = data_xarray[\"time\"].values - fieldset.time_interval.left\n",
178+
"time_from_start = ds_particles[\"time\"].values - fieldset.time_interval.left\n",
175179
"\n",
176180
"print(time_from_start / one_hour) # timedelta / timedelta -> float number of hours"
177181
]
@@ -202,8 +206,8 @@
202206
"source": [
203207
"import matplotlib.pyplot as plt\n",
204208
"\n",
205-
"x = data_xarray[\"lon\"].values\n",
206-
"y = data_xarray[\"lat\"].values\n",
209+
"x = ds_particles[\"lon\"].values\n",
210+
"y = ds_particles[\"lat\"].values\n",
207211
"distance = np.cumsum(\n",
208212
" np.sqrt(np.square(np.diff(x)) + np.square(np.diff(y))), axis=1\n",
209213
") # d = (dx^2 + dy^2)^(1/2)\n",
@@ -262,7 +266,7 @@
262266
"source": [
263267
"### Conditional selection\n",
264268
"\n",
265-
"In other cases, the processing of the data itself however depends on the absolute time at which the observations are made, e.g. studying seasonal phenomena. In that case the array structure is not as simple: the data must be selected by their `time` value. Here we show how the mean location of the particles evolves through time. This also requires the trajectory data to be aligned in time. The data are selected using `xr.DataArray.where()` which compares the time variable to a specific time. This type of selecting data with a condition (`data_xarray['time']==time`) is a powerful tool to analyze trajectory data.\n"
269+
"In other cases, the processing of the data itself however depends on the absolute time at which the observations are made, e.g. studying seasonal phenomena. In that case the array structure is not as simple: the data must be selected by their `time` value. Here we show how the mean location of the particles evolves through time. This also requires the trajectory data to be aligned in time. The data are selected using `xr.DataArray.where()` which compares the time variable to a specific time. This type of selecting data with a condition (`ds_particles['time']==time`) is a powerful tool to analyze trajectory data.\n"
266270
]
267271
},
268272
{
@@ -276,20 +280,20 @@
276280
"mean_lat_x = []\n",
277281
"\n",
278282
"timerange = np.arange(\n",
279-
" np.nanmin(data_xarray[\"time\"].values),\n",
280-
" np.nanmax(data_xarray[\"time\"].values) + np.timedelta64(timedelta(hours=2)),\n",
283+
" np.nanmin(ds_particles[\"time\"].values),\n",
284+
" np.nanmax(ds_particles[\"time\"].values) + np.timedelta64(timedelta(hours=2)),\n",
281285
" timedelta(hours=2),\n",
282286
") # timerange in nanoseconds\n",
283287
"\n",
284288
"for time in timerange:\n",
285289
" # if all trajectories share an observation at time\n",
286-
" if np.all(np.any(data_xarray[\"time\"] == time, axis=1)):\n",
290+
" if np.all(np.any(ds_particles[\"time\"] == time, axis=1)):\n",
287291
" # find the data that share the time\n",
288292
" mean_lon_x += [\n",
289-
" np.nanmean(data_xarray[\"lon\"].where(data_xarray[\"time\"] == time).values)\n",
293+
" np.nanmean(ds_particles[\"lon\"].where(ds_particles[\"time\"] == time).values)\n",
290294
" ]\n",
291295
" mean_lat_x += [\n",
292-
" np.nanmean(data_xarray[\"lat\"].where(data_xarray[\"time\"] == time).values)\n",
296+
" np.nanmean(ds_particles[\"lat\"].where(ds_particles[\"time\"] == time).values)\n",
293297
" ]"
294298
]
295299
},
@@ -344,16 +348,16 @@
344348
"\n",
345349
"###-Points-###\n",
346350
"ax1.set_title(\"Points\")\n",
347-
"ax1.scatter(data_xarray[\"lon\"].T, data_xarray[\"lat\"].T)\n",
351+
"ax1.scatter(ds_particles[\"lon\"].T, ds_particles[\"lat\"].T)\n",
348352
"###-Lines-###\n",
349353
"ax2.set_title(\"Lines\")\n",
350-
"ax2.plot(data_xarray[\"lon\"].T, data_xarray[\"lat\"].T)\n",
354+
"ax2.plot(ds_particles[\"lon\"].T, ds_particles[\"lat\"].T)\n",
351355
"###-Points + Lines-###\n",
352356
"ax3.set_title(\"Points + Lines\")\n",
353-
"ax3.plot(data_xarray[\"lon\"].T, data_xarray[\"lat\"].T, marker=\"o\")\n",
357+
"ax3.plot(ds_particles[\"lon\"].T, ds_particles[\"lat\"].T, marker=\"o\")\n",
354358
"###-Not Transposed-###\n",
355359
"ax4.set_title(\"Not transposed\")\n",
356-
"ax4.plot(data_xarray[\"lon\"], data_xarray[\"lat\"], marker=\"o\")\n",
360+
"ax4.plot(ds_particles[\"lon\"], ds_particles[\"lat\"], marker=\"o\")\n",
357361
"\n",
358362
"plt.show()"
359363
]
@@ -373,7 +377,7 @@
373377
"source": [
374378
"Trajectory plots like the ones above can become very cluttered for large sets of particles. To better see patterns, it's a good idea to create an animation in time and space. To do this, matplotlib offers an [animation package](https://matplotlib.org/stable/api/animation_api.html). Here we show how to use the [**FuncAnimation**](https://matplotlib.org/3.3.2/api/_as_gen/matplotlib.animation.FuncAnimation.html#matplotlib.animation.FuncAnimation) class to animate parcels trajectory data, based on [this visualisation tutorial](https://github.com/Parcels-code/10year-anniversary-session5/blob/eaf7ac35f43c222280fa5577858be81dc346c06b/animations_tutorial.ipynb) from 10-years Parcels. \n",
375379
"\n",
376-
"To correctly reveal the patterns in time we must remember that the `obs` dimension does not necessarily correspond to the `time` variable ([see the section of Trajectory data structure above](#Trajectory-data-structure)). In the animation of the particles, we usually want to draw the points at each consecutive moment in time, not necessarily at each moment since the start of the trajectory. To do this we must [select the correct data](#Conditional-selection) in each rendering.\n"
380+
"To correctly reveal the patterns in time we must remember that the `obs` dimension does not necessarily correspond to the `time` variable ([see the section of Trajectory data structure above](#trajectory-data-structure)). In the animation of the particles, we usually want to draw the points at each consecutive moment in time, not necessarily at each moment since the start of the trajectory. To do this we must [select the correct data](#conditional-selection) in each rendering.\n"
377381
]
378382
},
379383
{
@@ -411,7 +415,7 @@
411415
"\n",
412416
"# Set up the colors and associated trajectories:\n",
413417
"# get release times for each particle (first valid obs for each trajectory)\n",
414-
"release_times = data_xarray[\"time\"].min(dim=\"obs\", skipna=True).values\n",
418+
"release_times = ds_particles[\"time\"].min(dim=\"obs\", skipna=True).values\n",
415419
"\n",
416420
"# get unique release times and assign colors\n",
417421
"unique_release_times = np.unique(release_times[~np.isnat(release_times)])\n",
@@ -431,9 +435,9 @@
431435
"print(\"Pre-computing all particle positions...\")\n",
432436
"all_particles_data = []\n",
433437
"for i, target_time in enumerate(timerange):\n",
434-
" time_id = np.where(data_xarray[\"time\"] == target_time)\n",
435-
" lons = data_xarray[\"lon\"].values[time_id]\n",
436-
" lats = data_xarray[\"lat\"].values[time_id]\n",
438+
" time_id = np.where(ds_particles[\"time\"] == target_time)\n",
439+
" lons = ds_particles[\"lon\"].values[time_id]\n",
440+
" lats = ds_particles[\"lat\"].values[time_id]\n",
437441
" particle_indices = time_id[0]\n",
438442
" valid = ~np.isnan(lons) & ~np.isnan(lats)\n",
439443
"\n",

0 commit comments

Comments
 (0)