From 3701531fb1f805bfb83b5942d3d6862dbf68293b Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Fri, 21 Nov 2025 15:35:54 +0100
Subject: [PATCH 01/63] move old examples to separate folder
---
examples/{ => old_examples}/maxwell_2d_multi_patch.py | 0
examples/{ => old_examples}/poisson_1d.py | 0
.../poisson_2d_jacobi_iteration/plot_cart_poisson.py | 0
.../poisson_2d_jacobi_iteration/test_cart_poisson.py | 0
examples/{ => old_examples}/poisson_2d_mapping.py | 0
examples/{ => old_examples}/poisson_2d_multi_patch.py | 0
examples/{ => old_examples}/poisson_3d_multi_patch.py | 0
examples/{ => old_examples}/poisson_3d_target_torus.py | 0
examples/{ => old_examples}/sample_multi_patch_parallel.py | 0
examples/{ => old_examples}/test_cg_perf.py | 0
examples/{ => old_examples}/test_mass_matrix.py | 0
examples/{ => old_examples}/visualize_matrices.py | 0
12 files changed, 0 insertions(+), 0 deletions(-)
rename examples/{ => old_examples}/maxwell_2d_multi_patch.py (100%)
rename examples/{ => old_examples}/poisson_1d.py (100%)
rename examples/{ => old_examples}/poisson_2d_jacobi_iteration/plot_cart_poisson.py (100%)
rename examples/{ => old_examples}/poisson_2d_jacobi_iteration/test_cart_poisson.py (100%)
rename examples/{ => old_examples}/poisson_2d_mapping.py (100%)
rename examples/{ => old_examples}/poisson_2d_multi_patch.py (100%)
rename examples/{ => old_examples}/poisson_3d_multi_patch.py (100%)
rename examples/{ => old_examples}/poisson_3d_target_torus.py (100%)
rename examples/{ => old_examples}/sample_multi_patch_parallel.py (100%)
rename examples/{ => old_examples}/test_cg_perf.py (100%)
rename examples/{ => old_examples}/test_mass_matrix.py (100%)
rename examples/{ => old_examples}/visualize_matrices.py (100%)
diff --git a/examples/maxwell_2d_multi_patch.py b/examples/old_examples/maxwell_2d_multi_patch.py
similarity index 100%
rename from examples/maxwell_2d_multi_patch.py
rename to examples/old_examples/maxwell_2d_multi_patch.py
diff --git a/examples/poisson_1d.py b/examples/old_examples/poisson_1d.py
similarity index 100%
rename from examples/poisson_1d.py
rename to examples/old_examples/poisson_1d.py
diff --git a/examples/poisson_2d_jacobi_iteration/plot_cart_poisson.py b/examples/old_examples/poisson_2d_jacobi_iteration/plot_cart_poisson.py
similarity index 100%
rename from examples/poisson_2d_jacobi_iteration/plot_cart_poisson.py
rename to examples/old_examples/poisson_2d_jacobi_iteration/plot_cart_poisson.py
diff --git a/examples/poisson_2d_jacobi_iteration/test_cart_poisson.py b/examples/old_examples/poisson_2d_jacobi_iteration/test_cart_poisson.py
similarity index 100%
rename from examples/poisson_2d_jacobi_iteration/test_cart_poisson.py
rename to examples/old_examples/poisson_2d_jacobi_iteration/test_cart_poisson.py
diff --git a/examples/poisson_2d_mapping.py b/examples/old_examples/poisson_2d_mapping.py
similarity index 100%
rename from examples/poisson_2d_mapping.py
rename to examples/old_examples/poisson_2d_mapping.py
diff --git a/examples/poisson_2d_multi_patch.py b/examples/old_examples/poisson_2d_multi_patch.py
similarity index 100%
rename from examples/poisson_2d_multi_patch.py
rename to examples/old_examples/poisson_2d_multi_patch.py
diff --git a/examples/poisson_3d_multi_patch.py b/examples/old_examples/poisson_3d_multi_patch.py
similarity index 100%
rename from examples/poisson_3d_multi_patch.py
rename to examples/old_examples/poisson_3d_multi_patch.py
diff --git a/examples/poisson_3d_target_torus.py b/examples/old_examples/poisson_3d_target_torus.py
similarity index 100%
rename from examples/poisson_3d_target_torus.py
rename to examples/old_examples/poisson_3d_target_torus.py
diff --git a/examples/sample_multi_patch_parallel.py b/examples/old_examples/sample_multi_patch_parallel.py
similarity index 100%
rename from examples/sample_multi_patch_parallel.py
rename to examples/old_examples/sample_multi_patch_parallel.py
diff --git a/examples/test_cg_perf.py b/examples/old_examples/test_cg_perf.py
similarity index 100%
rename from examples/test_cg_perf.py
rename to examples/old_examples/test_cg_perf.py
diff --git a/examples/test_mass_matrix.py b/examples/old_examples/test_mass_matrix.py
similarity index 100%
rename from examples/test_mass_matrix.py
rename to examples/old_examples/test_mass_matrix.py
diff --git a/examples/visualize_matrices.py b/examples/old_examples/visualize_matrices.py
similarity index 100%
rename from examples/visualize_matrices.py
rename to examples/old_examples/visualize_matrices.py
From 0096b01f95484b1f1da8f111cde545c6faeec9d1 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Fri, 21 Nov 2025 15:37:32 +0100
Subject: [PATCH 02/63] move performance scripts to examples folder
---
.../performance}/compare_3d_matrix_assembly_speed.py | 0
.../performance}/matrix_assembly_speed_log.md | 0
{performance => examples/performance}/test_perf_1d.py | 0
{performance => examples/performance}/test_perf_2d.py | 0
{performance => examples/performance}/test_perf_2d_parallel.py | 0
{performance => examples/performance}/test_perf_3d.py | 0
6 files changed, 0 insertions(+), 0 deletions(-)
rename {performance => examples/performance}/compare_3d_matrix_assembly_speed.py (100%)
rename {performance => examples/performance}/matrix_assembly_speed_log.md (100%)
rename {performance => examples/performance}/test_perf_1d.py (100%)
rename {performance => examples/performance}/test_perf_2d.py (100%)
rename {performance => examples/performance}/test_perf_2d_parallel.py (100%)
rename {performance => examples/performance}/test_perf_3d.py (100%)
diff --git a/performance/compare_3d_matrix_assembly_speed.py b/examples/performance/compare_3d_matrix_assembly_speed.py
similarity index 100%
rename from performance/compare_3d_matrix_assembly_speed.py
rename to examples/performance/compare_3d_matrix_assembly_speed.py
diff --git a/performance/matrix_assembly_speed_log.md b/examples/performance/matrix_assembly_speed_log.md
similarity index 100%
rename from performance/matrix_assembly_speed_log.md
rename to examples/performance/matrix_assembly_speed_log.md
diff --git a/performance/test_perf_1d.py b/examples/performance/test_perf_1d.py
similarity index 100%
rename from performance/test_perf_1d.py
rename to examples/performance/test_perf_1d.py
diff --git a/performance/test_perf_2d.py b/examples/performance/test_perf_2d.py
similarity index 100%
rename from performance/test_perf_2d.py
rename to examples/performance/test_perf_2d.py
diff --git a/performance/test_perf_2d_parallel.py b/examples/performance/test_perf_2d_parallel.py
similarity index 100%
rename from performance/test_perf_2d_parallel.py
rename to examples/performance/test_perf_2d_parallel.py
diff --git a/performance/test_perf_3d.py b/examples/performance/test_perf_3d.py
similarity index 100%
rename from performance/test_perf_3d.py
rename to examples/performance/test_perf_3d.py
From 7c75c0eb279d0c909dbae01be1b4f306a168c2d5 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Tue, 25 Nov 2025 10:03:57 +0100
Subject: [PATCH 03/63] try nbsphinx
---
docs/requirements.txt | 2 ++
docs/source/conf.py | 1 +
docs/source/examples.rst | 6 ++++++
3 files changed, 9 insertions(+)
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 2cfa37e55..b8a28b763 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -3,3 +3,5 @@ pydata_sphinx_theme
numpydoc
tomli
sphinx-math-dollar
+nbsphinx
+ipykernel
\ No newline at end of file
diff --git a/docs/source/conf.py b/docs/source/conf.py
index ca5f7955d..02ab5a0a0 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -52,6 +52,7 @@ def fixed_init(self, app):
'sphinx.ext.githubpages',
'sphinx_math_dollar',
'sphinx.ext.mathjax',
+'nbsphinx',
]
from docutils.nodes import FixedTextElement, literal,math
diff --git a/docs/source/examples.rst b/docs/source/examples.rst
index d05be7e0f..a09375165 100644
--- a/docs/source/examples.rst
+++ b/docs/source/examples.rst
@@ -4,3 +4,9 @@ Examples
+------------------------------------------------------------------------------------------------------------------------+
| Here you will find examples of how to use PSYDAC and explanations thereof as well as links to notebooks in the future. |
+------------------------------------------------------------------------------------------------------------------------+
+
+.. toctree::
+ :maxdepth: 1
+ :caption: Notebooks:
+
+ ../examples/notebooks/Poisson_non_periodic.ipynb
From f5dc200e830e8b529dd8647dd98d9fc34f90888b Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Tue, 25 Nov 2025 10:21:01 +0100
Subject: [PATCH 04/63] copy notebooks into docs by CI
---
.github/workflows/documentation.yml | 3 +++
docs/source/examples.rst | 6 ++++--
2 files changed, 7 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml
index 41896bbb9..71c1417c8 100644
--- a/.github/workflows/documentation.yml
+++ b/.github/workflows/documentation.yml
@@ -39,6 +39,9 @@ jobs:
- name: Install Python dependencies
run: |
python -m pip install -r docs/requirements.txt
+ - name: Copy Notebooks
+ run: |
+ cp -r examples/notebooks/* docs/source/
- name: Make the sphinx doc
run: |
rm -rf docs/source/modules/STUBDIR
diff --git a/docs/source/examples.rst b/docs/source/examples.rst
index a09375165..451ec90f8 100644
--- a/docs/source/examples.rst
+++ b/docs/source/examples.rst
@@ -3,10 +3,12 @@ Examples
+------------------------------------------------------------------------------------------------------------------------+
| Here you will find examples of how to use PSYDAC and explanations thereof as well as links to notebooks in the future. |
-+------------------------------------------------------------------------------------------------------------------------+
++------------------------------------------------------------------------------------------------------------------------+
+.. The notebooks get copied into the source directory by the continuous integration pipeline.
.. toctree::
:maxdepth: 1
:caption: Notebooks:
- ../examples/notebooks/Poisson_non_periodic.ipynb
+ Poisson_non_periodic
+ Helmholtz_non_periodic
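Note: nbsphinx resolves the bare toctree entries above ("Poisson_non_periodic", "Helmholtz_non_periodic") relative to docs/source/, which is why the notebooks are copied there before the Sphinx build. To reproduce that copy step locally before building the docs, a minimal Python sketch (paths taken from the hunk above; running from the repository root is assumed):

    import shutil
    from pathlib import Path

    # Mirror the CI step "Copy Notebooks": place the .ipynb files next to
    # examples.rst so the bare toctree entries resolve.
    src = Path("examples/notebooks")
    dst = Path("docs/source")
    for nb in src.glob("*.ipynb"):
        shutil.copy2(nb, dst / nb.name)
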
From a6dbd86b57f134492fddd7ff1f876b2b1808a8d5 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Tue, 25 Nov 2025 10:22:57 +0100
Subject: [PATCH 05/63] also install pandoc
---
docs/requirements.txt | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/docs/requirements.txt b/docs/requirements.txt
index b8a28b763..073a2a0bb 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -4,4 +4,5 @@ numpydoc
tomli
sphinx-math-dollar
nbsphinx
-ipykernel
\ No newline at end of file
+ipykernel
+pandoc
\ No newline at end of file
From 06559fe616854a51ae4f0e5dde938d2c334551b4 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Tue, 25 Nov 2025 10:26:03 +0100
Subject: [PATCH 06/63] install pandoc globally
---
.github/workflows/documentation.yml | 2 +-
docs/requirements.txt | 3 +--
2 files changed, 2 insertions(+), 3 deletions(-)
diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml
index 71c1417c8..66c0e37c4 100644
--- a/.github/workflows/documentation.yml
+++ b/.github/workflows/documentation.yml
@@ -35,7 +35,7 @@ jobs:
- name: Install non-Python dependencies on Ubuntu
run: |
sudo apt update
- sudo apt install graphviz
+ sudo apt install graphviz pandoc
- name: Install Python dependencies
run: |
python -m pip install -r docs/requirements.txt
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 073a2a0bb..b8a28b763 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -4,5 +4,4 @@ numpydoc
tomli
sphinx-math-dollar
nbsphinx
-ipykernel
-pandoc
\ No newline at end of file
+ipykernel
\ No newline at end of file
From 6bb798d326c19415232da4005f5d32c9c794a9d1 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Tue, 25 Nov 2025 10:34:13 +0100
Subject: [PATCH 07/63] clear notebook output
---
docs/source/examples.rst | 1 +
.../notebooks/Helmholtz_non_periodic.ipynb | 42 +++++++------------
examples/notebooks/Poisson_non_periodic.ipynb | 41 ++++--------------
3 files changed, 24 insertions(+), 60 deletions(-)
diff --git a/docs/source/examples.rst b/docs/source/examples.rst
index 451ec90f8..9a68a1869 100644
--- a/docs/source/examples.rst
+++ b/docs/source/examples.rst
@@ -6,6 +6,7 @@ Examples
+------------------------------------------------------------------------------------------------------------------------+
.. The notebooks get copied into the source directory by the continuous integration pipeline.
+.. The notebooks should have all output cleared before being committed to the repository.
.. toctree::
:maxdepth: 1
:caption: Notebooks:
diff --git a/examples/notebooks/Helmholtz_non_periodic.ipynb b/examples/notebooks/Helmholtz_non_periodic.ipynb
index e680e0eb7..1a02f2e01 100644
--- a/examples/notebooks/Helmholtz_non_periodic.ipynb
+++ b/examples/notebooks/Helmholtz_non_periodic.ipynb
@@ -18,7 +18,7 @@
},
{
"cell_type": "code",
- "execution_count": 1,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -37,7 +37,7 @@
},
{
"cell_type": "code",
- "execution_count": 2,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -95,7 +95,7 @@
},
{
"cell_type": "code",
- "execution_count": 3,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -145,21 +145,9 @@
},
{
"cell_type": "code",
- "execution_count": 4,
+ "execution_count": null,
"metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "> CG info :: {'niter': 46, 'success': True, 'res_norm': 5.330478803717119e-15}\n",
- "> L2 error :: 2.07e-07\n",
- "> H1 error :: 3.80e-05\n",
- "> Solution time :: 3.76e+00s\n",
- "> Evaluat. time :: 4.00e-01s \n"
- ]
- }
- ],
+ "outputs": [],
"source": [
"# Set the solver parameters\n",
"# 'cbig' -> Biconjugate gradient method\n",
@@ -193,7 +181,7 @@
},
{
"cell_type": "code",
- "execution_count": 5,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -228,7 +216,7 @@
},
{
"cell_type": "code",
- "execution_count": 6,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -283,13 +271,18 @@
" \n",
"
"
]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": []
}
],
"metadata": {
"kernelspec": {
- "display_name": "v_psydac",
+ "display_name": "venv3.13",
"language": "python",
- "name": "v_psydac"
+ "name": "python3"
},
"language_info": {
"codemirror_mode": {
@@ -301,12 +294,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.10.6"
- },
- "vscode": {
- "interpreter": {
- "hash": "e7370f93d1d0cde622a1f8e1c04877d8463912d04d973331ad4851f04de6915a"
- }
+ "version": "3.13.7"
}
},
"nbformat": 4,
diff --git a/examples/notebooks/Poisson_non_periodic.ipynb b/examples/notebooks/Poisson_non_periodic.ipynb
index 873b73374..c042b68b8 100644
--- a/examples/notebooks/Poisson_non_periodic.ipynb
+++ b/examples/notebooks/Poisson_non_periodic.ipynb
@@ -18,22 +18,9 @@
},
{
"cell_type": "code",
- "execution_count": 1,
+ "execution_count": null,
"metadata": {},
- "outputs": [
- {
- "data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAMEAAAD4CAYAAABVN4L5AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAAgAElEQVR4nOydd3hcV5n/P2eqRqPeu2R1yarultxt2U4jYYElIWxYFjYLCcuGBZ5QAhsgsIEfpMCySQgkhA2EBVLtOO523GVJlqxmWdao997rzNzfH/K9qzJzJceOI9v6Ps88ku49995zR+c95z1v+b5CkiQWsYhbGZqPugOLWMRHjUUhWMQtj0UhWMQtj0UhWMQtj0UhWMQtD91H3YEPAj8/PykqKuqj7sYibiAUFBR0SpLk7+jcDSkEUVFR5Ofnf9TdWMQNBCFEnbNzi+rQIm55LArBIm55LArBIm55LArBIm55LArBIm55XBMhEEK8JIRoF0KUOjkvhBC/FEJUCSGKhRDLppzbKYS4ePnct65FfxaxiCvBtVoJfg/sVDl/GxB3+fMg8ByAEEIL/Pry+WTgPiFE8jXq0yIWMS9cEz+BJEnHhBBRKk3uBv4gTcZtnxFCeAkhgoEooEqSpGoAIcSfL7ctv9I+bN++nfr6enJyctDr9ej1elxcXPD09MTHxwcfHx/8/f2JjIwkKCgIjWZRE1zEJK6XsywUaJjyd+PlY46Or3Z0AyHEg0yuIkRERMw6f+HCBRobG7l48eKcndHpdPj4+BAQEEBkZCTR0dHExcWRkZHB8uXLcXV1ne97LeImwPUSAuHgmKRyfPZBSfoN8BuAFStWzGrj6+uLj48PeXl5jI2NMT4+zuDgIN3d3XR3d9PT00NbWxtNTU20tLQov58+fZp3331XuY9WqyUsLIzIyEiWL19OTk4OGzZswGw2f6AXX8TCx/USgkYgfMrfYUAzYHBy/ANBCIHBYMBgMACTghEZGTnndf39/ZSUlFBUVERRURGlpaWUlZVx7Ngxnn76aXQ6HbGxsWRlZbFz50527tyJu7v7B+3mIhYYrpcQvAN85bLOvxrokySpRQjRAcQJIZYATcC9wGeuU58UeHh4kJ2dTXZ2NgDV1dXk5+eTlJTE2bNnOXnyJHl5ebzyyiu89NJL6HQ6UlNT2blzJ/feey9paWnXu8uLuJaQJOmqP8BrQAswweSs/wXgS8CXLp8XTFqBLEAJsGLKtbcDlZfPfXc+z1u+fLk0E+np6VJ6evqs4x8E+fn50uuvvy7Z7fZpx7u7u6VXX31V+od/+AcpMjJSYlJ1kyIiIqQHH3xQOn369DV5/iKuPYB8ycl4EtINmGi/YsUKaWYUaUZGBgBFRUVXff+DBw+i1WrZvHmzaruSkhL+/Oc/895771FUVIQkSURFRXHPPffw0EMPERcXd9V9WcS1gRCiQJKkFY7OLdoJZ8But9PX14e3t/ecbVNTU/nxj3/MuXPnsFgsfO9738NsNvPMM8+QkJBAVlYWv/nNbxgZGbkOPV/EB8WiEMxAf38/NpttXkIwFUuWLOGHP/whpaWlFBcX88UvfpGLFy/yL//yL4SGhvKv//qvNDQ0zH2jRVx33DTqUFpaGjabjZ///OdoNBo0Gg1arVZxnBkMBlxcXHBxccFkMuHq6orZbEanm24bqKmpIS8vj507d+Lh4XFV/RwdHeWHP/whe/fupbCwEL1ez2233cZjjz3GypUrr+rei7gyqKlDN2RmmTMIIfD09ESSJOx2O1arlZGREfr7+xkfH2diYmLWNSaTCTc3Nzw9PfH09KS1tRWtVoubm9tV96ejo4OMjAy+8pWv0NDQwM9+9jN2797Nrl272Lx5M48//jjr16+/6ucs4upw0wiBPPtnZWU5bWOz2RgdHWVkZITh4WEGBwcZHBxkYGCAurq6aUJy8OBBfH198fX1JSAg4AN5kauqqjCbzQQFBRESEsLrr79OU1MTP/jBD/jjH//Ihg0bWLduHU8++aRinl3E9cdNow5drXVIkiQGBwfZt28fXl5e6PV6uru7FcFwc3MjICCA4OBgAgMDZ6lRM9HX18e+fftITU0lKSlp1vn29nZ+9KMf8fLLLzM8PMzOnTv5+c9/TnLyYvzgh4FbRh26GgghsNvt2O124uLiiIyMRJIk+vr6aG9vp62tjfr6eqqrq9FoNAQGBhISEkJYWBhGo3HW/SwWCxqNhujoaIfPCwgI4Fe/+hXf+c53+Pa3v81rr71Geno6999/P7/4xS/w9fX9sF95EZexaB2agp6eHgDFMiSEwMvLi/j4eNavX88999zDxo0biYmJob+/n4KCAt555x2OHz9ObW0tVqsVgImJCWprawkPD3coIFMRHBzM73//e0pLS9mxYwevvPIKcXFxPPXUU9hstg/3hRcBLArBNHR3d6PT6ZxuiuUVIDMzk9tvv52cnBzi4+Pp6+vj7NmzvPPOO+Tn51NeXo7VaiUmJmbez46Li2P37t3s37+fwMBAvv71r5ORkUFeXt61er1FOMGiEExBT08PXl5e88o1EELg7e1Neno6d9xxB5s3byY0NJS6ujouXryIVqtlcHDwimfznJwcSktLefLJJ6mtrSUrK4uHH36YgYGBD/pai5gDN40QyHEgPT099PX1MTAwwMjICFarlfls/u12O729vVfsJINJgfD392f16tWKdUqv13P27FneffddysvLGR8fn/f9tFotjz76KGVlZWzevJn//u//ZunSpezfv/+K+7aIuXHTbIwlScJms3HgwIFZ5zQaDUajUXGWubq64ubmhru7O25ubri5uTEwMPCBPMUzUVdXpzjFOjs7qayspLS0lIsXLxIXF0dcXNyc+wQZERERvP322zzxxBM899xz3H777fzrv/4rP/vZz9Dr9VfVz0X8H24aIRBCoNVqyc7OVqw8ExMTymd0dJTR0VGGh4fp7Oyc5hPQarXKwBwcHKS3txcPD48rTsEcHR2lsbGRmJgY9Ho9wcHBBAcH09PTQ3l5OeXl5VRWVhIfH09CQsKcA1mSJAoKCsjMzOTMmTN8+ctf5plnnuHgwYP89a9/JTEx8cq/qEXMwk0lBEIIQkND52wrSZKSedbf309fXx+NjY0AymDV6XT4+/sTEBBAYGAgnp6eCOEoEe7/UF1djd1un7Uh9vb2Jjs7m76+PsrKyigvL8disZCUlERsbKxTYauvr6e5uZm0tDTi4+M5cOAAv/zlL/nOd77DypUr+fWvf80DDzwwz29oEc5w0wjBlUAIgdFoxGg0Kvb47u5uXFxcWL16Nd3d3XR0dNDe3k5LSwsArq6uBAcHExoaSkBAwKyBa7fbqa6uJiAgwGnMkaenJ1lZWXR1dSmZbBaLhczMTIKCgqa1HRkZobCwEF9fX+Lj44FJte6RRx5h48aNfPKTn+Rzn/scR48e5fnnn1ey6RZx5bglhWAm5E1xVFQU7u7uuLu7K2mZQ0NDtLW10dzcTG1tLRaLBYPBQFhYGBEREfj7+yO
EoKWlheHhYcVzrQZfX182btxIS0sLRUVFHDt2jJCQEDIzMzGbzYoaZLPZWLly5SyBy8zMpKioiM997nO8/PLLlJSUsHv3bgIDAz+U7+dmxzURAiHETuBZQAv8VpKkJ2ec/yZw/5RnJgH+kiR1CyFqgQHABlidubY/TAwODmK1WvHx8Zl1zmw2Ex0dTXR0NFarlba2NhoaGqirq6O6uhqz2cySJUtoa2vDZDIREhIyr2cKIQgJCSEwMJDKykouXLighFnodDqam5tJT093uqq4u7vzxhtv8NOf/pTvfve7LFu2jLfeemsxOvUD4KqFYAqBVg6TqZV5Qoh3JElSuIMkSfp/wP+73P4u4GuSJHVPuc1mSZI6r7YvHxQzPcXOoNPpCA0NJTQ0FKvVSlNTEzU1NZSWThLveXh40NXVhZ+f35z7BxlarZakpCQiIiIoKCigsLBQ8VTPJzPt0UcfJSUlhfvvv59Nmzbx0ksv8elPf3pez17EJK6Fn2AVlwm0JEkaB2QCLWe4j8mc5AWD7u5utFrtFTFI6HQ6IiMj2bRpE3LVnJGREY4cOcLBgwdpaGiYl39ChtlsZt26dXh5eSFJEv39/Vgslnnd44477uDMmTMEBgZy//3389RTT837uYu4NurQlRBouTJJ1/iVKYclYL8QQgJekCb5hRxdq0q+JecQnD9/XjGXTk2oMZlMip9gpo59JZ7imbBarTQ3NxMWFsaqVauora3l0qVLnD59Gg8PD5KSkggPD5/Xvevr6+nt7SU5OZnu7m4KCwtpaWlh1apVuLi4qF6bmJhIbm4uO3bs4Otf/zqNjY1KgtEi1HEthGDeBFrAXcDJGapQtiRJzUKIAOCAEKJCkqRjs244B/mWLARVVVXK746g0WgUZ5mHhweenp709PTwQWugNTQ0MD4+TmxsrMJPFB0dTWNjI+Xl5eTm5lJRUUFqairBwcFO1STZGuTn50dycjJCCCwWC+fPn2f//v2sXbsWf3+HJbcU+Pv7c/LkSe655x6efvppurq6ePnllxcFYQ5cCyFwRqzlCPcyQxWSJKn58s92IcSbTKpXs4RgLshJNZ/4xCcApjnLxsfHGRkZYXR0lKGhISWZpqOjQ4ntqa6upq+vT0mi8fPzm5dX1mKx4OHhMW2AajQaIiIiCA8Pp6GhgdLSUk6cOIGfnx+ZmZmz9h6SJJGfn4/dbp9mDYqNjcXPz49Tp05x9OhRUlJSSExMVN1vmEwm9uzZw2c+8xn+8Ic/MDIywmuvvYZWq73Sr/SWwbUQgjzmQaAlhPAENgKfnXLMDGgkSRq4/Pt24IfXoE9KqIRaiILdbqeyspLi4mIiIiIYHBzk0qVLXLx4ESEEfn5+BAcHExIS4tBKI1M8ZmZmOhyYQggiIiIICwujurqasrIyDhw4QHR0NKmpqUrf6urqaGlpISMjY9a+xMvLi5ycHPLz8ykpKaG3t5eVK1eqJvVotVpee+01XFxc+MMf/kBfXx+7d+9eDLVwgqsWAkmSrEKIrwD7mDSRviRJUpkQ4kuXzz9/uenHgf2SJA1NuTwQePPyANIBf5Ikae/V9mm+0Gg0jI6OotVqlRnYarXS1dVFW1sbra2tFBcXU1xcjIeHB+Hh4URERCgDtaqqCp1ON6cqpdFoiI2NJSIigrKyMqqqqmhsbCQzMxM/Pz9FDXJmDdLr9axZswYvLy9KSkoYHBwkOztbNeVTo9Hw9NNPMzw8zN/+9jc+9alP8cYbbyyqRg5wy6dXHjlyBLvdztatWx2eHxoaorm5mYaGBjo7J624fn5+hIeHc/78eaKiolix4spcG319feTn59PV1YXRaGRiYoIdO3bMyzrV3NzMmTNn0Ov1rF+/Hi8vL4ftent7OXr0KBqNhrfeeouXX36Z++67j1dfffWWFIRF8i0nkEOvnQ0kmDRdxsXFsWXLFu68807S0tIYGxujsLAQu92OzWZjaGjI6fWO4OnpyebNm4mIiGBsbAyYVK3mMyGFhIQoAnv48GHa2tpmtZEFQKfTsWXLFn77299y33338dprr/GlL33pivp6K+CWFoKBgQGsVuu8w6ddXV1JTExkx44dmEwmDAYD9fX17Nmzh9OnT9Pb2zvvZ4+OjtLS0oK3tzc+Pj7k5uZy5syZeeUdeHp6snXrVsxmM8ePH59G6jVVADZt2oSbmxsajYZXX32Vu+++mxdffJEf/OAH8+7nrYBbOnZI9hQ7CpdQQ3t7OyMjI6xevRp/f38uXbqExWKhoaGBkJAQUlJSVFeXqdagNWvWYDabuXjxIqWlpXR3d5OVlTWnYLq6urJ582ZOnDjBmTNnsNlseHl5zRIAGRqNhr/85S9s3ryZH/zgB4SFhfGFL3zhit77ZsVNIwSyarJr1y6EEOh0umllm2TWOTmJxtXVlZ6eHjQazRUzzVVVVWE0GgkLC0Or1ZKenk5iYiJVVVVUVlayf/9+IiMjSUlJcVjco6amhtbW1mnWoKSkJPz9/Tl9+jSHDh0iMzNzzhxlg8HAhg0bOHHiBGfPnlXyImYKwNT2e/bsYc2aNTz00EOEh4ezffv2K3r3mxE3jRAIIdBoNAQFBSkCMTExwdjYGH19fYyOjk7TufV6PZIkodfraWxsxNfXd17VaIaGhmhpaSEhIWGa7d1oNLJ06VJiY2OpqKhQLEAJCQkkJiYqJs3h4WHOnz+Pv7//LGuQn58f27dvJzc3l4KCAvr6+sjIyFDdyMq1Eg4fPozNZiMmJkaVPc/T05P9+/ezcuVK7r33Xs6ePUtsbOyc730z46YSAtnU6Qh2u52RkRGGhoaURJrq6mpsNhtnzpwBJjfBAQEBBAUFERQU5NCuXl1dDeB0ljYajaSnpxMXF0dxcTHl5eXU1taybNkygoODpznFHPkWjEYj69ato7i4mMrKSvr7+8nKynKaL9Db28vx48dxcXHBbDZTUlKC2Wx2GFoiIzw8nDfffFPZ7Ofl5d3SlXduGiGYCxqNBrPZrAz0gYEBLBYLy5cvx8fHh87OTtrb22lsbKSmpgaNRkNAQADh4eGEhoZiMBiw2WxUV1cTHBw856rh6urKmjVriImJoaCggBMnTuDt7U1PTw+ZmZmqs7VGoyEjIwMvLy/y8/M5fPgwGzZsmOUXmLkJdnFx4fjx4+Tm5qLT6VTDuteuXcuvf/1rvvCFL/DpT3+a3bt335KmU7iFrUNTN8Xe3t7ExcWRnZ3N3XffzebNm4mNjWVgYIC8vDzeeecdTp8+TXl5OWNjY1ekPvj7+yv8RD09PQghMJlM87o2KiqK9evXMzIywqFDh+jr61POObIC6XQ6JRL19OnTdHV1qd7/n/7pn3jooYd47733+M///M95v9PNhltaCDQaDZ6entOOazQa/P39ycjI4Pbbb2fr1q1ER0fT1tbGhQsXEEIwMDDgkOHaGTQaDf39/Wg0Gtzd3Tl16hR5eXkKY50aAgMD2bx5M5IkceTIEXp6ehwKgAzZiebi4sKJEyfm5Ct65plnWLFiBT/4wQ84duyKQ7ZuCtzSQuDp6amqAggh8PX1ZdmyZQqFus
lkorCwkN27d1NaWqo4u9QgW4PS09PJyckhMTGRmpoaDh06RH9//5zXe3l5sWXLFnQ6HUeOHOHw4cMOBUCGi4sLGzZsAOD48eOqvge9Xs/rr7+Ou7s79913H93d3U7b3qy4JYVA9hRfCcdQbW0tGo2GnJwctm7dSkBAAOXl5bz77ruUlpY6XRmGhoYoKirC39+f2NhYtFotaWlpbNiwgdHRUQ4ePKgwXajBzc2N5cuXY7VasVqtZGRkqO4r3N3dyc7OZnh4mNOnTzsNLYfJ/IyXX36ZlpYW/umf/mnuL+Mmw00jBHIOQXt7O52dnfT19TE8POyQBnFwcJCJiYl5C8HExAR1dXVEREQoDBXZ2dls376doKAgysvL2bNnD1VVVdMGm+wUA2ZZg4KCgsjJycHDw4NTp05RVlamGjbR29tLbm6u4vPIz8+f00Pt5+fHsmXLaGtr4/z586ptP/axj/H5z3+et99+m5dffnk+X8tNg5vGOiQz0B09enTWOYPBMI11Tp615ysEMuP0TLOol5cXWVlZdHd3c/78ec6dO0d1dTWZmZn4+/tTXV1NW1sby5Ytczhry17f/Px8ysrKGBgYYOXKlbNi/2fuAWAy8O/9999n69atqitCdHQ0fX19XLp0SQn8c4b/+q//4ujRozzyyCOsX7/+lvEf3DRRpOnp6UiSxIEDB5RSTePj44yNjc2qTCO/s16vV5JoAgMD8fLymmW7lySJffv2odVqycnJcdonSZJobGzk/PnzDA8PExERQVNTk0KvopYII0kSFRUVlJSUEBAQMM0v0NPTw/vvvz9rD9Df38/hw4cxGAxs2bJFNf3Sbrdz5MgR+vr6yMnJcegTkCSJuro63nrrLb75zW+yfv16Dh8+7PSeNxrUokhvGiGYbyi1zWbj8OHDjI2NERQURGdnp7I5NZlMBAcHEx4ejr+/PxqNhvb2do4ePcrKlStZsmTJnH2zWq2UlpZSWVkJwKpVq+adullbW0teXh6enp5s3LiR4eFhhwIgo6uri6NHj+Lp6cmmTZtUE22Gh4fZv38/JpOJbdu2TVttRkZGKCgooLm5GT8/P/72t7/x3HPP8fvf/57Pfe5z8+r7QseiEEyBJEm8/fbbhIWFKXkAIyMjCsFWa2srVqsVFxcXIiMj6evro7u7mzvvvHPOEk0yLBYLBQUFGI1GxsbGiI+PJzU1dV4pji0tLZw6dQqTycTY2Bh6vd6pFQigqamJkydPEh4ezpo1a1RXnJaWFo4fP058fDwZGRlIkkR9fT2FhYXYbDZSUlKIi4tjbGyMpKQkhoaGuHjx4hUHGC5EfOj5BEKInUKIi0KIKiHEtxyc3ySE6BNCFF3+fH++115rDA0NMT4+Pm0/YDKZiIqKIisri4997GOsXbsWX19fKisraW1tRafT0d7ePq94/6GhIc6fP09AQAC33347sbGxVFZWcuTIkXnlHQQHB5ORkaEQgq1du1ZV5w8NDSU1NZWGhgYuXLgw571jYmKorKykoaGBkydPkpubi7u7O9u3bychIQGNRoPJZOKFF16gq6uLf/u3f5uzzzc6rloIppBv3QYkA/cJIRxVnzsuSVLG5c8Pr/Daa4a5iLZ0Oh3h4eFkZ2crAW42m40TJ06wb98+6uvrnZobZ1qD9Ho9y5YtIysri4GBAQ4cOEB7e/uc/SspKcFoNCKEID8/f05fRGJiIhEREZSWlircqc6QlpaGi4sLp0+fVnwXmzdvnrVP2LFjB3fffTevvfbaTV8t56Mg37pW134gOPMUz4TdbqehoYHAwEDuuusuVq+epFI6c+YMBw4coLm5edbKIFuD0tLSpsUWhYWFsW3bNlxcXHj//fepqqpy+Mze3l5lD7B161bWrVvHwMAAx48fV/UuCyFYsWIFnp6e5ObmMjw87LDdyMgIubm5jI6OApOriDz7O8Ivf/lLjEYjDz/8sKqf4UbHtRACR+RbjvjR1wohzgsh3hNCLL3CaxFCPCiEyBdC5Hd0dHzgzsqe4rn08+bmZkZGRhTq9MjISHbs2MHatWuVleH9999X4nmmqkGOIkzd3d3ZsmULQUFBnDt3juLi4mlC5CgUIigoiDVr1tDd3c2ZM2dUB6JOpyMrKwu73T7LOSbr/vv27aO1tZW0tDTi4uJoaGhA7bsMDw/na1/7Gnl5efzhD39Q/b5uZFwLIZgP+dY5IFKSpHTgV8BbV3Dt5EFJ+o0kSSskSVrhiIRKkiSsViv5+fmcO3dOsdA0NDTQ1dXF2NgYdrt93p5ii8Wi0LErLyoE4eHh7Ny5k8zMTHp7e9m/fz/FxcWcPXsWmO0UmwqDwUB2djbR0dFUVFSQl5enMGI7iwUKCwsjMzOT5ubmOTf97u7urFixgq6uLmV/MDo6yqlTpzhz5oyi+ycmJpKamorZbCY/P1+1rtr3vvc9QkNDefzxx68oXupGwnUh35IkqX/K73uEEP8thPCbz7XzhVyzrLm5WUmomQmDwcD4+DjDw8OKOdBRnP7AwABtbW2kpKQ4VBU0Gg1xcXFERERw/vx5KioqgEndfK4Qa41Gw/LlyzGZTJSVlTEyMkJ3d7eqFSguLo6hoSEqKyvx9PRUzTiLiIigpaWF8vJJPuRLly5htVqVQh/y++h0OpYtW8bx48e5dOmS06o3RqOR7373uzz00EM888wzfPOb31R9vxsRV20iFULogEpgK5PkW3nAZyRJKpvSJghokyRJEkKsAv4GRDLJU6R6rSPMx0QqV6ORHWUDAwM0NzfT0dGBEAJJkhBC4OPjoxTlljeHhYWFWCwW7rzzzjk5QAcHB9m7d6/yzOTkZJKSkuYVm19UVERlZaXiiFNL87Tb7Zw4cYK2tjY2bdqkSsk4MDDA/v37lRpsq1evdnrvEydO0N7ezm233eY0xNtut5OUlER3dze1tbXzysBbaPhQTaSSJFmZJNjdB1wA/iKTb8kEXMAngVIhxHngl8C90iQcXnu1fYL/q0bj5eVFSEgICQkJ+Pr6IoTgYx/7GJs2bSIxMRG73U5JSQnvvfce+/fvp6KigpqaGkJDQ+cUANkapNFo2LZtm0KudezYMWXz6Qw9PT3U1tai1+ux2WyUl5er6vwajUZJyj9z5ozD+8u6/6FDh5R7qVXOgcnJw263U1xcrPrsJ554gs7OTn7+85+rvteNiFvKWfb+++8zNjY2K7l8eHiYxsZG6urqFBNqcHAwaWlpqlakqqoqzp07x/Lly4mJiUGSJGpqaigsLMRoNJKdne1w/zEzFKKhoYGSkhJiYmJYtmyZqsOrp6eHQ4cO4e/vz4YNG5S2o6OjnDt3jsbGRnx8fFi1ahUVFRXU1dWRk5Ojyn5RXFxMRUUF27dvd9rObreTmppKR0cHDQ0N867AuVCwSL6Fevi0q6sr8fHxbNu2DXd3dwwGA+3t7ezbt4+TJ086jNYcHBykuLiYwMBAoqOjgcnVJzo6mi1btgCT5FhNTU3TrptqBpX3AElJSSQkJGCxWJT9hTN4e3uTmZlJW1ubEprR0NDAvn37aG5uJjU1lS1btuDh4UF6ejoGg
4H8/HxVR19iYiJ6vX7O1eDb3/42HR0dPPvss6p9vNFwywjB8PDwLE/xTHR3dzMwMEBKSgp33nknycnJtLe3s3///mn2d0mSFAfSihUrZs3c3t7ebNu2DU9PT06dOoXFYgEcm0FlpKWlERERQUlJyZz5BdHR0YSEhFBSUsL777/P6dOnMZvN5OTkTNuPGI1G0tLS6O7upq6uzun9DAYDSUlJtLa2qjrzPvOZzxAbG8uzzz6ralG60XDLCMF8SjLJBLuRkZEYjUZSUlK44447SEhIoKGhgb1791JRUcGlS5fo6OggPT3d6SbRxcWFTZs2ERQUREFBAUVFRU4FACZXkZUrV+Lj48PZs2dVcwXkemd2u12xYm3ZssWh6hYVFYWPjw/FxcWqJs7Y2FhcXFwUq5IjyNUzm5ubefXVV522u9FwSwmBXAvMEcbGxmhoaCAyMnIa1YrBYCA9PZ2dO3cSEGOHIf8AACAASURBVBBAcXExRUVF+Pj4KGqQM+h0OrKzs5XifJIkqQbDycXI9Xo9p06dcpgWKdv98/PzFQGUazM4ghCCzMxMRkdHuXjxompfExISlKQkZ/jiF7+In58fv/zlL9Ve/YbCTSMEclGON954g3feeYf33nuPI0eOkJubS3l5OS0tLbi7uzv1FNfU1GC3250mkri5uZGVlaVYWnp7e7l06dKcQXX9/f1KTbSJiQmam9XdICaTiTVr1jA0NDRLl29sbFR0/5SUFHbu3EloaChlZWUMDg46vaevry/h4eFUVlaqWq1iYmIwGo2qq4HRaOSzn/0s586d48CBA6rvcqPgphECmYFO1pc9PT2RJIn29nZKS0vp7e2lv7+fXbt2kZubS319vTLTSpKExWLB399f1RpksVjo7+8nPT2doKAgioqKOHXqlFM1Q94D6PV6cnJyCA0NpaioSFU/h0maltTUVIUDaWxsjNOnT3Pq1ClcXV3JyckhOTkZrVarFAiZy5uckpKimGKdQafTERcXR2tr6yx1TJIkOjo6OH36NGlpaej1en71q1+pPvNGwU2TXikz0Dkqpt3X18e+ffsICwsDJuPq6+rqFNpGT09PhoaGSE1NdXp/2RoUFBREfHw88fHxSpWbQ4cOsW7dumlqjqNN8Jo1azh27Bh5eXmYTCYCAgKcPi8hIYHW1lYlzshqtSrlmqaqPq6uriQlJVFSUkJraytBQUEO7+fu7s6SJUuorq4mKSnJqWMsJiaGCxcucOnSJVauXKnkV1ssFvr6+tDr9WRkZLBlyxaFLeNKuVwXGm6alUANMvdOQkKCkjOwZcsWYmNj6enpUeJsBgYGHOrhsjVIo9Eo1iAhBAkJCWzcuJHR0VEOHTqkkF05swLJOr+bmxunTp1SVWHGx8fRarXY7XaliEhycrJD3T8+Ph43NzeKiopUHW6JiYlIkqS6NzAajURFRVFXV8fZs2fZtWsX586dUyJV77rrLjIzM3nooYcYHh7mxRdfdHqvGwW3hBDIm2JZ1dFoNPj5+ZGRkcHGjRuBSR7SsrIyhU9oqooz1Ro0kwoxICCArVu3otPpOHr0KFVVVapWIDmIDuDkyZMOQ6QbGxvZu3cvbW1thISEYLVaFeuWI2i1WlJTU+nv71dVtdzc3IiMjMRisTjcG9hsNhoaGuju7sZut1NXV0doaChbt24lJyeH6OhoJbvuzjvvJDQ0lFdeecXp824U3DJC4OHh4TA9sra2FiEEmzdvnkWhUlNTw8DAACUlJQQFBTnNMXZ3d2fr1q2YTCZl1lSzArm7u7N69Wr6+vooLCxUjo+NjXHmzBklvXLbtm1kZ2fj7+/P+fPnVTe1YWFh+Pj4UFZWpmrDT0xMVDhVZQwPD1NaWsq7777L6dOnGR8fx2w2YzKZWLVqlRJuMhUajYZPfvKTlJSUzOngW+i46YVAzVNss9moqakhJCQEV1dXhUJFpjHJy8vj4MGDiiqgFs4wMjLC2NgYQggmJiYYGRlR7VdwcDBJSUnU1NTQ0NBAU1MT+/bto7GxkaVLl7Jt2zaF/UIm3SotLXV6PyEEKSkpDA8PU1tb67Sdh4cHQUFBVFVV0dLSwsmTJ3n33XcpLy/H29ub9evXc/vttyv3UnOeyUU+fvvb36q+60LHTS8E8uB0JASNjY2MjY3NCk329fVly5YthIeHMzExgd1up7W11ak5VI4F0uv1bNmyBbPZzIkTJ6YR6DrC0qVL8fLyIjc3l5MnT+Li4sK2bdtYunTpNN3fw8ODuLg4qqurVZ1ogYGB+Pj4UFFR4XRvMD4+jqurK6Ojoxw/fpzOzk4SEhK4/fbbWb9+vVJwPCwsDIPBQE1NjdPnpaamkpiYyNtvv636ngsdN40QjI2N0dPTwyuvvMI777xDfn4+/f39qp7iqqoq3NzcCAwMnHVucHCQ5uZmAgIC8PX1JT8/n7y8vFmqxsxgOF9fXzZs2IBOp+P48eOqKkxLSwvDw8PY7XZFpXLmzEtOTsZgMKjG9wghSE5OZmhoaFodM7mfeXl57Nq1i+rqaoUcWC5GOFN102q1hIeH09TUpOpp/vjHP05VVdU0te5Gw00jBGazmfr6ev7xH/+Ru+++m5UrV+Lp6UlGRgY/+9nPeOKJJzhw4IAyiHt6eujq6iI2NtYh4ZZsDVq1ahUbN24kOTmZ2tpajh49qgxsR8Fwcl/WrVvH2NgYp06dmiU4su5/8uRJXF1diYuLY2BgQDVJ3mAwkJiYSGtrq2pKZHBwMB4eHlRWVmK1WqmtreXgwYMcOHCA+vp6IiMjycnJYenSpQwMDKgyYERERGCz2VQdfJ/97GRt9j//+c9O2yx03DSh1HJUpbwiNDc3U11dTWlpKRUVFco/Uq4QL4cX33333bOyyyorKykqKppFuNXY2Ehubi4mk4nMzEylGIazTXB9fT1nzpxReH5gMndZZpBISkoiOXmSXOPgwYOMj4+zc+dOp/xGVquVPXv2KORczlBeXk5paSk6nQ6r1Yq7uzsxMTFERUUp7zoyMsLu3buJj48nPT3d4X0kSWL37t14eXkprNyOsGTJEjw9Pa+4hvT1hFoo9TVxlgkhdgLPMpkp9ltJkp6ccf5+4NHLfw4CX5Yk6fzlc7XAAGADrM46OhcCAwMdqjUy6uvreeutt3j77bd5++23+etf/0psbCwdHR388z//sxIvJFuDgoODZzHHhYWF4eLiwrFjx5QSSWpWoIiICDo7O6msrMTHx0dx0nl6erJ+/fppKlpmZiZHjhyhoqKClJQUh/eT43vOnz9PV1cXvr6+yjl532KxWJQVRa/Xk52dTUBAwKzVzmQyERQURH19PWlpaQ43/XJOdVVVFRMTEw7LVwHk5OTwu9/9jubmZtXqOAsV14t3qAbYKElSGvAj4Dczzm++zEf0gQRgPoiIiOCrX/0qhw4doq6ujq9//euMjIzw8MMPExcXxx//+EfsdruiBi1fvtzhwJg6S8uhGmqQfQtnzpyhrq6O5ORktm3bNmuP4u/vP6/4nujoaAwGg+LwGh0d5cKFC+zZs4cTJ07Q09NDcnIyUVFRjI2NOeRX
nfqdjIyMqAbMTY1WdYZPfOIT2O123nzzTbWvYsHiuvAOSZJ0SpIk2dtzhsmE+o8MQUFB/PznP6empob/+q//YmJigs9+9rOsW7eOyspKMjMzZznF4P88wQaDgaysLCYmJjh+/LjTjeP4+Dj5+flKHoJc49hZEN984nv0ej3R0dE0NjZy4sQJdu/erRTrW7t2LXfeeScpKSnEx8djt9upr693eq+QkBC0Wq1qG5mMQG1fsHnzZlxdXTl48KDTNgsZ15N3SMYXgPem/C0B+4UQBUKIB51ddK14h6ZCr9fz8MMPU1VVxb//+79TUFDAN77xDfbt2zer7cxQiLCwMLKysujv73fICdTc3MzevXupr68nOTmZhIQEmpubVe3u7u7uREVFUV1d7dDPYLVasVgsyoBsbW0lOjqanTt3snnzZsLDw5WVycvLCy8vL9UBrtfrCQoKckgkJkOOr2ppaXHaxmAwsGzZMqUK6I2G68U7NNlQiM1MCsGjUw5nS5K0jEl16mEhxAZH187FO3Q1MJlM/OIXv+DIkSOEhobypS99iQceeECZ4Z3FAgUFBZGZmUlLS4sSfzQ+Ps7Zs2c5ceIERqORbdu2kZKSwtKlS3F1daWwsFA1vicpKQlJkpTUSZgMxy4sLGTXrl0UFBQoISByMJuzALbw8HC6urpUY5RCQ0MZGRlR9T8EBQUxNjam2mbDhg20traqrmILFddCCObFHSSESAN+C9wtSZJSVlGSpObLP9uBN5lUrz4SZGVlUVRUxN///d/zP//zP2zZsoXGxkbVWKCYmBgiIyMpKyvjwoUL7Nu3j7q6OpKSkqbp/jqdjvT0dPr6+uaM7wkLC6O6upq6ujqOHj3K3r17sVgsBAcHs2XLFrZv305qaipjY2Oqaopcy1gtXTMoKAghxKxc6KmQo13VVrGdO3cC3JA5BtdCCPKAOCHEEiGEAbgXeGdqAyFEBPAG8A+SJFVOOW4WQrjLvwPbAeexAdcBJpOJ//3f/+Wxxx7j5MmT7Ny5E6vV6tQKJIQgNTUVnU5HSUkJer2erVu3OqRin098z8jICDqdjomJCXJzcxkcHCQ1NZU777yTNWvW4OfnhxCCoKAgjEajqrpjNpvx9vZWFRQXFxe8vb1VN76urq64u7urtlm9erVC9Huj4apNpJIkWYUQMneQFnhJ5h26fP554PuAL/Dfly0Vsik0EHjz8jEd8CdJkvZebZ+uBX70ox9hNBr5/ve/z09/+lPuuOMOh+1aWlrIz89XokEDAgKc8vnL8T3Hjh2jtrZWCdeQE1aqqqpoampCkiR0Oh0Gg4HbbrvN4UZao9EQHh5OTU2NqvkyJCSEsrIyRkdHnfIoBQYGUlFRoXoff39/GhoaFNKymZCdeTei5/iaeIwlSdojSVK8JEkxkiT9+PKx5y8LAJIkfVGSJO8p1OwrLh+vliQp/fJnqXztQsFjjz3GT37yE/Lz8/nUpz41TZeXdf/jx4+j1+vZtm0b0dHRSvKJM8jxPRcvXmRsbIxLly6xd+9ejh49Snt7O/Hx8dx+++2kpaUxPDyseq/w8HBsNpuqp1nmUlWbxQMCAhRBdAZfX18mJiam1UW22Wx0dXVRVVVFXl4eMTExWCyWOQsKLjTcNJllHxa+9a1vUV9fz3PPPcf3v/99nnjiCWX2Hx0dVby+Wq0Ws9ms1C2T6wjPhBCC0NBQSkpK2LVrF3a7HR8fH1auXEl4eLjih4iIiKCoqIj6+nqnK4uvry9Go5Hm5mZF/58Jb29vDAYDbW1tREZGOmzj5+eHRqOhs7PTqbNLjmm6cOECGo2Gnp4e+vr6FIuRvBLYbDby8/PZtm2b8y91gWFRCOaBX/3qVxQXF/Pkk0+yZMkS3N3d8fDwIDs7e9oANRqNJCYmUlxcTEdHxzS+UJvNRmNjI1VVVUoGmsxS52iQGwwGgoKCaGhoID093aEKotFoCA4OpqmpCbvd7tBxJ4QgICBAqbTj6D5arRYvLy+lX1arld7eXnp6epSPvCLV1dVhNBrx9vYmODgYb29vvL29cXV1JSIigh//+McUFhYuCsHNBq1Wy1//+leWLl3K448/zptvvklmZqZDXT02NpaLFy9SXl7Oxo0bGRoawmKxKAnzbm5upKenMzw8TFVVlSrfaXh4OM3NzXR3d08LkZiKoKAgamtr6e3tdbpi+Pv709jYyMjIyCwnoDzgtVotnZ2d7N27l4GBAWWGlwd8SEgITU1NSkFzR8KUmpqKwWCYs2byQsOiEMwTwcHB/OQnP+HLX/4yr776qlL0byZkxobS0lKOHDmihCQEBwcTGxtLYGAgQggGBga4dOkSDQ0NJCQkOLyXbL5sbm52KgSy+bKtrU1VbYJJE6fZbJ42w08d8PB/bHTyDG8ymZQBPzExoZqwo9friYqKuuEyzRaF4ArwpS99iT/96U+88MILfO1rX5ulY4+NjVFTU6OkLnZ3d5OYmEhMTMysGdjd3R1vb29VITAajfj6+tLa2uqUCcPFxQV3d/dZ8T8TExOKStPd3Q2gFBKRr/P29iYsLAxvb2+0Wi3Hjh0jNjbW6f7C09MTq9XK0NCQ06DBqKioxZXgZsezzz7LypUrefTRR/nzn/+MJEl0d3dTVVVFQ0MDdrsdPz8/XF1d6e3tJSkpyWlodGhoKKWlpYyMjDilQAkICKC8vJzx8XGHBUVgcvPb0tJCRUWFMvCnWnFcXFzQ6/UYDAYyMzOVGX4qbDYbQgh6e3udCoHsmR4YGHAqBNHR0Rw4cED1nRYaFoXgCpGZmcltt93GG2+8wenTpxkcHKSnpwedTseSJUuIjY3F09OTtrY23n//fVpaWggPD3d4r5CQEEpLS2ltbXWaxC8LQVdXF8HBwYyPj8/atMoDvri4GJPJhLe3NxEREdNUmry8PJqampT0yZnQarW4u7srhc0dQR74amEYsbGxSJLEhQsXWLZsmdN2CwmLQvAB8J3vfIfdu3fz7LPP8o//+I8sW7ZsFoepv78/Li4uNDY2OhUCT09PjEYj7e3tDoVgfHxcccKdP3+ewsLCaQNQHvB+fn7U1NTMSgKaCi8vL2pqahgdHXU6Q7u5uakOcBcXF7RarWqb+Ph4ACoqKhaF4GbG2rVrSUtL4+jRo/zxj3906tENDg6msbFR1Xzp7+9PR0cH4+Pj02b3np6eaYNtcHBQSfSRZ3jZsmS1WqmpqXFauhVQSlENDg6qCkFbW5tTU6oQAjc3N9WUTFkI56KXX0hYFIIPiAceeIBvfOMb7N+/n9tuu81hm6CgIGpqauju7sbPz085LkdkyvUQhoeHeeutt5TzZrMZLy8vhVa9urqarq4uhbRrJnQ6HWazedo+YCZkBuvBwUGn9c7c3Nyw2Wyqq4XJZFKlk5GFYC7i4YWERSH4gHjggQd49NFH+ctf/uJUCOTBVlNTQ0dHhzLDT51J5dk8KipK0eNnlkLq6emhsbFRdXM8lypjNpsRQqi2kQe+2qbWxcVFNZTDbDbj5uamGsqx0LAoBB8QMnP08ePHlWNykn93d7cy4AGFu0eO6oyOjsbHx0d
JfXzrrbcUUixHkFWZoaEhp0Igh2w4g0ajwWg0qqZuygI5V5vR0VGnKhNMWquuVeLT9cCiEFwF1qxZwwsvvMB7772HzWabppO7ubnh6+uLXq9nbGyMnTt3Oh3ARqNRVZWZapVxVmnHZDIxPj6OzWZzmr45lyozdSVwBqPRiCRJTExMqK5Kau+z0LAoBFeBzMxMJEmioKCArVu3Ehsbq8zw8gApLS3lwoULTgcmMOdmcz6DU24zOjqqWkJKbZaX+6xGtiVbwNSEYC5T60LDohBcBeQqlWNjY6xdu9ZhGzc3NyRJYnh4WFFrZsJkMqnq2QaDAY1GozqA5X3E2NiYUyHQ6/WqM7RWq1W4VNX6AuqC4u7ufkNtjG8aBrqPAtHR0bi4uCjVKR1hPrP4XDO0EAK9Xu+wdoKMqTO0Whu18/N5juz9VruPq6vrnITECwnXRAiEEDuFEBeFEFVCiG85OC+EEL+8fL5YCLFsvtcuZGg0Gnx9fVUTVqbO0M6g1+uxWq2q9c/mGsCyEDiqdyBDTttUg1wYRO08oNpGfp8bBdeLfOs2IO7y50HguSu4dkHDw8NDVZWZzwyt0+mQJEm1roBWq1U9Lzvj1AanRqOZs9CgRqNRvYdsEZpLCG6kOsfXYk+gkG8BCCFk8q2p3Bt3A3+QJv8DZ4QQXkKIYCBqHtcuaAwODtLa2jqL3n0q5EHhbOBoNBq0Wi3f/va3nd5jLkERQqDT6eb1nG99y/mCO9/nqK1cal7phYjrRb7lrM28ibs+DPKta4GhoaE59V95w+kMctFBNajVKl5oz5mYmFCy1G4EXIuVYD7kW87azJu4S5Kk33CZw3TFihULhko7MTGR9vZ2Ll265PD8xMQEb775JmlpaSQmJjpsY7FYKCgo4K677nI6gx45cgSYpDx0hOHhYXbv3s3y5cudrkoVFRUUFxfzd3/3d07Du/fv34+rqyvr1q1zeL6/v5+9e/eyZs0apyHXX/7yl3n++edVfRYLCdeLfMtZm3kRdy1kyKqBM8gzppouPh89Wwgxp74/1z3kNnPtPebaXIP6HkdOylcj61pIuC7kW5f/fuCylWgN0CdJUss8r13QmMsSMp/BOR+Li6zvq50HdevQfJ1h8/ETqFm7QkMnNdq5ipYvFFy1EEiSZAVk8q0LwF9k8i2ZgAvYA1QDVcCLwENq115tn64n5rLxy5vR+Xph1dqonddqtWg0mnk9R80PMNf76HQ69Hq96j5ITheV+VkXOq6Jx1iSpD1MDvSpx56f8rsEPDzfa28keHh4qIY8wPxt/Ffr6DIYDKoz9JUEyDnLgYDJYL2Z7zw1403GyZMn+fznP+/0WQsFi2ETVwlPT09GRkZUKQznKwRqM7TRaJwzQG6uWXw+3muz2YwkSYyMjDgNvzCbzXR1dVFRUeEwAcjV1ZXg4OBbayW4leHp6QlMxvzL9CczYTAY5lRBYO4ZGiZ1cUcFRGDuKFEXFxc0Go3qyiXHNw0MDGA2m1Uz3oqLi3F1dcXb23tWxtvy5cs5evSo6uSwULAoBFcJeeA3NjY6FQKTyaQaVWk0GhFCzDvM2ZkQmM1mOjs7ncb6azQazGaz08SasbExRZ06f/48BQUF0wRGznjz8/OjtraWtWvXOs2f3r59O7t37+bw4cPs2LHD6XstBCwKwVVCtpXX19c7TSx3cXFRjS8SQuDi4jKnmgKTzjlnRFxubm5MTEwwPj4+KztNhhzmIScATf1MHfByTnN0dLQyw8v3nJiYoK6ujt7eXqdC8OlPf5qvfe1r/OlPf1oUgpsd8iCYWTx7KkwmExMTE1itVqdOKkebzZnnAdVQaFmV6e/vn5ZHPDXjrb+/n4GBgWlV6GdmvFVWVtLf309WVpbD5+j1enx8fFRJwQICAli9ejW7du1SpYVfCFgUgquEXOZVLbVx6iwu7yFmws3NjdbWVqf30Ol0mEwm1Rxh+d719fXTcpqnZrzJgzEmJkZhn5uZHNPb20tLSwvDw8NOVa/g4GBKS0sdtunr68NisZCdnc2pU6d4/vnneeSRR5z2+6PGohBcJfz9/XF1dVXl6JzK9KAmBKOjo6qrhaen57SI1dHR0Wk5zTLdopzf4O7ujq+v77SMN5vNxq5du3Bzc3Na93lqeaaZtZxlhIWFUVpaSmNjo1Ips6mpCYvFQnt7OxqNho997GO88sorvPjii3z1q1+dMybpo8KiEFwlNBoNISEhqkIwH+Y2meKwv7/fIbHuyMgIWq2Wvr4+jh8/Tm9v77Q9hLu7OwEBAXR3dyNJEtu3b3dqlXFzc6Ojo8MpB6qXlxcuLi40Nzc7FQIPDw+F0Gt8fJzq6mpGR0dxdXUlNTWVJUuW4OLiwkMPPcTjjz/OX/7yF+69916n7/9RYlEIrgEiIiJUmZiNRiMGg0FVn5dXiN7eXkwm0yzWiqnm076+Pvz9/fH29lZmeHnAyznNaggICFB4U52RgoWEhFBfX+9wZZIkSal30NfXR19fH8HBwcTExBAUFDTtnt/85jd57rnneOyxx/jEJz6xIM2li0JwDbBkyRLef/99Vb6emaoMoDil5AEvhKCwsJD8/HyljYeHB4GBgYr9/cyZMyQlJTmNFPXz80OSJLq6upxSuAQGBiqEXs6IuOQKmq2trYSFTdZeHx8fp7a2FovFwsDAAHq9XhEYZ8Rgrq6ufP/73+fhhx/mscce46c//anDdh8lFoXgGiAtLQ2bzca5c+ecDgZPT09qampobGycRqgrz/BCCDQaDTqdjtTUVHx8fJRaxTIkSaKwsJCuri5VIdBoNLS1tTkVAnm2bm5udioEAQEBuLi4UFtbi9lsxmKxUFdXh81mw8fHh1WrVhEeHk5xcTFVVVUMDQ059TB/8pOf5MUXX+TZZ5/l7//+71m+fLnT7/KjwKIQXAOsXLkSgIKCArKzsxV2iak2+M7OTmw2G6dOnUIIoZBtyTZ4Ly8vSktLqaqqIiYmxmFohBACX19fVWIrnU43Z96zXq9XqtekpaU5dKxJkqSUf21ubkar1RIREUFsbOw07qOEhAQsFgvl5eXK9yDDarVSVlZGZWUlX/nKV/jqV7/K/fffT2Fh4YLKPFsUgmuAZcuWodVqOXXqFBkZGfT09CieV3nABwQE0NzcTHJyMomJiQ4tQL6+vlRWVtLb26tamaa5uVl15g0ODqa4uFi1TUREBHl5ebNKQc0sLwWT6tPatWsd8gy5uroSExNDVVUV8fHxyt6mq6uLs2fPMjAwQHR0NOnp6dhsNv7lX/6Fz3zmM7z++usLxlq0KATXAEajkbCwMC5cuMDIyAghISF4eXkpKo1Op8Nut/PWW28xPj7u1AQqD8bOzk6nQiCbNdva2oiOjnbYJiQkhOLiYpqbm4mLi3PYJjQ0lIKCAurq6pRi3lVVVbS0tCh6fmxsLJWVlXR3d6sO2OTkZGprayksLGTdunXK7G8ymdiwYYOilj344IOcPXuW3/3ud3zjG9/gqaeecnrP64lFIbhGyMjI4MCBA2zbts0pVbuPj49q7u3UyvHOzJceHh6YTCZaWlqcCoGHhw
ceHh40NjY6FQKDwUBISAg1NTU0NzczPDyMi4uLsumWHWAajYYjR45QXV2t1B6YCaPRSEpKCoWFhezZs4fR0VFl9p9pDXrhhReor6/n6aefRpIknn76aaffx/XCwliPbgJkZWUxPDzMuXPnnLbx9fWlt7dXNaw6ICBA2T84gjxLt7a2qmaahYeH09HRMatmgWw5ys3NpampCZvNhkajYe3atdxxxx2kpqZO8wD7+/vj7+9PRUWF06y1qTyso6OjrF69mhUrVjg0h2o0Gv7jP/6DFStW8Mwzz/DII4+oZtRdD1yVEAghfIQQB4QQly7/nMUWK4QIF0IcEUJcEEKUCSH+bcq5x4UQTUKIosuf26+mPx8lZErGw4cPO20TGBg4Z+X4oKAgrFbrrEJ8UxESEoLNZlPd/MqBfXKKo9Vqpbq6moMHD3Lo0CGamppYsmQJbm5uaLVawsLCnOYppKamMjo6SmVl5axzXV1dHDhwgIsXLyr3qKmpcTiwJUlSyka98MIL7Nixg2effZZ77rlHtcDIh42rXQm+BRySJCkOOHT575mwAl+XJCkJWAM8PINg62lJkjIuf27YDLPMzEzc3NymUbXPhK+vL1qtVnXwBgQEKOZLtTYGg4H6+nqnbdzd3fH398disXDu3Dl27dpFfn4+drudZcuWcdddd7FixQoSExPp6+tT7ZOfnx8hISFUVFQoXmqbzUZxcTGHDx9mYmKCDRs2kJWVxbJly2hv7rlqBQAAIABJREFUb6esbHqWrCwAtbW1JCcnk5mZybvvvsuDDz7Irl27yMjI4MyZM0778GHiaoXgbuCVy7+/Atwzs4EkSS2SJJ27/PsAk7nEDrmFbmRotVpWrFhBbm6u0+Vdq9XOy3wZEBBAS0uLU4YKeeZubm52qKLY7XYaGxsZGxtjeHgYi8VCUFAQmzdvZvv27cTGxiqqSmRkJCaTifJydb6zjIwM7HY7RUVFdHd3c+DAASoqKoiKimLHjh3K5nfJkiUsWbKECxcuKKvQTAFYunSpwoH0wgsv8Lvf/Y6WlhY2btzIv//7v6umiH4YuFohCLzMGsHln46zSi5DCBEFZAK5Uw5/5TI/6UuO1Kkp1y5I8q2p2LRpE52dnap1fIODg+nv71eNIwoNDWVwcFCV3jEiIgKr1UpTU5NybGRkhLKyMt59911OnTrFxMQEWq1WMXH6+/vP8glotVoSExPp7OxUjWJ1c3MjMTGRhoYGDh48yMTEBOvXr2flypWzTKfLli3D39+fvLw8WltbHQrAVNx333089dRTpKWl8fTTTxMfH8/vf//767ZXmFMIhBAHhRClDj53X8mDhBBuwOvAI5IkyWlWzwExQAbQAvzC2fWSJP1GkqQVkiStcObl/Khx1113AbBr1y6nbWQ6EjV1JzQ0FCGEqrrj7++P2Wymurqa9vZ2Tp8+ze7duykrK8PDw4Ps7GzuuOMOEhISaG1tVY1bio6Oxmw2U1JS4nT16e7uVnImtFotmzZtIjg42GFbrVZLdna2oh6qCYAkSeTn5+Pj48PBgwd56aWXsFqtfP7znychIYEnnnjiQy/4MacQSJK0TZKkFAeft4G2y5yiXP7pkG1JCKFnUgD+KEnSG1Pu3SZJkk2SJDuTVCyrrsVLfVTIyMggMDCQffv2OW3j5uaGh4eHqhC4uLgQGBhIfX2900FptVrx8PCgo6ODo0eP0tbWRlxcHLfddhsbN24kNDQUjUZDTEwMGo1GNcBPq9WSkpJCT0/PLK4gm81GSUkJhw4dwmq1KoVJzp8/PyeLtpeXl5Lq6WgVgsmNe0tLCykpKXh6evL5z3+e6upqnnzySUZGRvje975HaGgon/rUp/jrX//6oVC+X62f4B3gc8CTl3++PbOBmHzz3wEXJEl6asa5YFmdAj4OlF5lfz5SaDQatmzZwuuvv05/f78SHj0ToaGhVFRUqGZcRUREcPbsWTo6OqblLvf29ipxPPJ+ICAggHXr1jl0wplMJqKjo6muriY5OVnVg1xVVUVxcTEhISEYDAa6u7s5e/Ys/f39REVFkZGRgcFgwG63c/78eSorKx36M+TZvb6+nvj4eNra2jh+/DgrV64kMjJSaTcyMkJhYSF+fn7T/BlGo5FHH32UlStXcuzYMU6cOME777zD3/72N4xGI0lJSSQlJbFq1SoSEhKIjIxU8jrkWstXgqvdEzwJ5AghLgE5l/9GCBEihJAtPdnAPwBbHJhCfyaEKBFCFAObga9dZX8+cnz84x9nfHyc119/3WmbiIgIJElSTckMCwtDr9dTU1ODzWajvr6ew4cPs3//fmprawkLC2Pbtm1ERkbS3d2tqj/LHKhqm18hBJmZmYyNjVFcXKzM/rLuv2rVKkX3j4+PJzQ0lOLi4v/f3pkHR3We+fr5utXaV7QhIZCEJLShFYEEQiBAmN1xxubWTGYymZtUYlMzcc3kZmInuCbxeFKh7Liul6qxY7sysZM4iWNPLgyLAAkEYhNoQQsS2neEdrX2rfvcP6RzrKW7JcASCM5TRVnuPqf7O9X9nv7e7ffO8iNkA6itrSUiIoLY2Fi2bduGu7s7OTk5FBYWYjQaleOMRiPr16+flZHu6+ujo6ODgwcPkpGRQXt7O7/+9a955pln0Ov1fPbZZ/zLv/wLe/fuJTIyEi8vLxwdHc0mEC0h5tKrfxRJSEiQppYbP0oMDQ3h4eHB9u3bLfoGp0+fRqfTKfkFU1y7do3GxkZleoz8IQcGBipN711dXWRkZFgU/AUoKCigqqqKXbt2mf2Fkt9T9kWm3v1nMjY2xrlz5xgcHGT79u24uLjMMoC1a9cqxxsMBgoKCqipqcHT0xNfX18KCwuJjY01mYm+efMmlZWV7N+/f1qxnSRJ/OUvf8HX1xetVkt5eTnNzc10d3fzm9/8Rql8nYkQIk+SpART16yWTXzF2NnZsXXrVuULYq5Hd+XKlZSUlNDX1zdtlpkkSUodj+w32NrakpiYyPLly2ftq5ctW4a3tzcVFRUEBwebrUsKDw+ntraWwsJCUlJSZj1vMBgoLS2loaFBUbOLi4uzKCi2efNmMjMzuXjxItu2baOsrMykAcCXIWQPDw/y8vJob2/H0dGR4ODgWa89Pj5ObW0tfn5+s6pN+/r6GB8fx9vbm8DAwGmVq+np6SbXOhdq2cQC8Ld/+7cMDg7yxz/+0ewxAQEBCCGUGccjIyOUl5dz6tQpLl68SGdnJ2FhYbi5uWEwGPD29jY7EyA8PJzh4WHltUxha2tLREQELS0tswZtd3d3k5GRQVlZGQEBAWzevJnR0VHy8/MtOr8ODg5s2bKF8fFxzpw5Y9YApuLv76+0j/b393PhwoVp0o0wIRQwNjZmsmdCPtbcKNv7QTWCBeC5557DycmJTz/91Owx9vb2LF++nJqaGnJycjh+/DiFhYXKXX///v1ER0cTHh7OwMCAxWiSp6cnHh4elJWVWVSlDgkJwcnJiYKCAgwGAwaDgZKSEjIyMhgZGWHz5s1s2LABHx8fIiIiqK+vt2hYMNEs5O7uzvj4ODqdzmxPskxdX
R3t7e3ExMSwbt069Ho9Z8+eJScnRwmFVldX4+zsbLLhp7u7G61Wa3FLd6+o26EFwMbGhl27dnHs2DHa2tpmKdONj4/T2NhIX18fo6OjNDY2EhAQQHBwsKLtL+Pr64uDgwO3b99W8gczEUIQHR3NuXPnqKioICLC9Ng3rVZLfHw8Fy5cID8/n66uLvR6Pf7+/sTFxU3b+4eHh9PR0UF+fr5SFj4TORN89+5dAgICaG5u5vz586SkpMy6DpgYJHLz5k08PDxYs2YNQghWrlxJWVkZVVVVNDQ04OnpSXd3N7GxsSavtbu7G1dX16+0F0H9JVggDh06xOjoKO+9957yWF9fHzdv3uT48ePcuHFD2Xu7ubmRkJBg8ouj0WgICwujq6vL4tALub6nvLzcoqaph4eH0uo5NDREcnIyiYmJs5xfjUZDYmIidnZ2XL582WQ16tRM8Pr165UpOufPn5+15ZoZDZK/4NbW1sTExCiJPblwsLq6mrKysmmZdUmS6O7u/kq3QqAawYKRmppKUFAQv/vd72hububixYucOnWKyspKvLy8SE1NZffu3YSHh9PZ2TlrXzyVgIAApb7H0h49Ojqa8fFxSkpMp1u6u7vJzMxEr9ej1WqxtrY2qz0EE35EcnIyY2NjZGdnKyXg5mqBXF1d2b59Ow4ODly6dImysjJlvXV1dYpinamh5ra2toSGhiJJEp6entjY2FBcXMzJkydJT0+noKCAyspKxsfHzWo33S/qdmiB0Gg0HDx4kCNHjvDb3/6WyMhIIiMjWb169bSIR2BgoNKJlZiYaPK15PqegoICiw30zs7OhISEUFFRoUgqwkTk5/bt25SWlmJjY0NycjJarZaLFy9SVFRkVkMVJjSINm3aRHZ2NpcvX2bz5s3k5+ebLYVwcHBg27Zt3Lhxg+LiYjo6Oli7di03b97E09PTbJMPTBiKJEnExcXh6urKwMAAzc3NtLS0UFNTo/RPFBUVUV9fj4ODA3Z2dlhbW6PT6TAajRYHCppDzRMsIE1NTQQHB5OamsrJkyfN7mPlmPiePXsUoa6ZGAwG0tPT0el07Ny50+yHPTo6Snp6Ora2tqSlpdHb28v169fp6elh1apVxMXFKTmGmzdvUlFRYVFdWqa+vp6cnBxF/t1cLZCMJElUVVVNKybcvXu32euTJIlTp05ha2trMndiMBi4fv06TU1NBAQE0N/fz8DAgDJQBODVV19Fo9FQXFw863xLeQJ1O7SA+Pn58cwzz5CZmWlxfldoaChCiDnreyIjI+np6bGYaZbj+z09PWRnZ3P27FmGh4dJTk4mKSlpmlq1LO1y48YNi9LxMJHldnd3Z2hoSKkonWtcbEhICOHh4RiNRqXUwpzo8N27d+nv7zeZN5Cvf3h4GDc3N8X/2L9/P88++yxf//rXOXDgAFZWVvflMKtGsMC89NJLjI+P88Ybb5g9xs7OjsDAQOrq6ix2WK1atQpXV1eKiooshkIdHR2xsrJStk67du1SqlenotVq2bhxI1qtlsuXL5sdJCI7tZ2dnfj4+NDf3092drbFwSMwEQ2qqKjAw8ODqKgo7t69S3p6OiUlJbNaTKurq7GxsTG5TnkNPT09s5xiIQQ6nQ47OzuEEPe1HVKNYIGJi4tj06ZN/P73v7fYHyCXPMzsyJqKRqMhLi6OwcFBk1KLRqORW7dukZmZqQh5DQ8PW5Q+dHBwYNOmTQwMDHD16tVZfcszSyFSUlJITEyko6OD8+fPmzXaqdGgDRs2EB4ezu7du/H19aW0tJSTJ09y+/ZtxsbGGBgYUIQDzBW/9ff3MzY29pVHhkA1gkXh8OHD9Pb2cuTIEbPHODg4EBQURF1dncWtiaenJ6tWraK8vHxanX1PTw8ZGRncunWLFStWsGfPHjZs2EB3d7dFw5Jfc926dbS2tpKbm6tEdMzVAvn7+5OSksLAwAAZGRkmFTRqa2u5e/cu0dHRih/g4ODAxo0bSUtLw83NjaKiIo4fP87Vq1eRJMli8dtCZIplVCNYBPbu3UtcXBy/+tWvLA7iCA8PR6vVmnTsphITE4NWqyU3N1ep+cnIyGBoaIhNmzaxceNGRQtJbnW01DUGE1GqyMhI6uvrlUpPc8VwMCEIsGPHDrRaLefPn6e6uloxnsHBQQoLC/H09DS5x1+2bBlbtmwhLS0Nb29vRVI+NzeXuro6k2oc3d3daDSarzw8CqoRLBqHDx+mu7ub119/3ewxtra2hIWF0dzcbDExZmdnR0xMDO3t7Zw6dYqSkhJWrFjB7t27FfFcmbi4OFxcXMjJyZlT0SEiIkIR3Dp79uyctUAuLi6kpaXh5eVFXl4eOTk5jIyMcOPGDSRJmpYUM8WyZcsUH8Df35++vj6uX7/O0aNHyc7Opqqqit7eXiVJ5uLisiCqdWqIdJEwGo3ExMTQ3NxMbW2t2Tva+Pj4tFCoqQ/daDRSVlambHPi4+PNRlVgYuZBRkYGTk5ObNu2zWylqfza6enp9Pf34+HhwbZt2+Z0NiVJUtZjZWXF2NjYnGuSOXfuHMPDw+zZsweYkHBpbGxUpCbhy/G1rq6uhIeH4+joqDj/U4mNjQUmQr8zWbBSaiHEMuBPQABQB/wvSZJmpT6FEHVAH2AAxuXFzPf8xwGNRsORI0fYv38/P/3pT3nrrbdMHmdlZUVsbCxXrlyhurp6VnJJr9dz/fp1uru78fHxob29ncbGRoKCgsx+WZ2dnUlKSuLSpUvk5uaSmJhoVoQ3NzeX/v5+XF1dFdGAmJiYOcOhERERuLi4cPnyZWBi+zI6OmqyF0Gmp6eHjo6Oaa/v4eGBh4cHsbGx9Pf309bWxt27d5WegStXrijn63Q6bGxs0Ol0WFlZMT4+/lCiQ/PRHZLZNqktNNUa7+X8Jc++fftITk7mww8/nLPR3tvbm5KSEqWn1mg0UlpaytmzZxkcHGTjxo2kpKQQFxdHe3s75eXlFt/b19eXqKgoGhoaTDrKM0sh0tLSlK3R9evXLardyedXVVWh1WpZvXo1dXV1nDx5kurqarNdb/LxpipPhRA4OTkRFBSkJPK2b99OWloaSUlJREVFKbOTzU3qnC8PWjbxNSB18u+PgSzgpUU8f8nxy1/+kk2bNvGDH/zAbL+BEIJ169Zx+vRp8vPzWbt2rXL39/PzIz4+XulNDggIoKWlheLiYuUuao6wsDD6+/spLS3Fzs5Oqdc3VwsUFxeHra2tYoyyw22KmpoaWltblW1QcHAwBQUF5OXlUVVVRVRUFD4+PsqdenR0lIaGBlauXDnnl1h2it3c3NBqtSYrWgGL2zxLLJbukAScEULkCSG+dx/nLwndofmQlJTEc889x5///Gdl62AKR0dHIiIiaG5u5syZM8rdf9OmTdOa84UQJCQkYG9vz9WrVy0KV8nG5ePjQ35+Po2NjWYNQD4+IiKCDRs20NHRoRTfzWRgYIDCwkK8vLwUw3J1dSU1NZWNGzdiMBi4dOkSGRkZNDc3I0mSIhQwH79BdorvtYF+viyW7lCyJEnxwB4mZBi33OtCl4Lu0Hx56623sLe3
5/vf/77ZrYJer1fGwgoh2Lp1q9n6HmtrazZt2sTIyAhXr1612HQvi++6u7tz7do1Lly4YFEXCCZ+bVJTUxkfHyczM3OaHpLsRwCzokFyv8Du3btJSEhgdHSUy5cvc+rUKcrKypSZa5ZYqPLpqSyK7pAkSXcm/9sG/IUv9YXmdf7jhq+vLz/84Q8pKCjgP//zP6c9J0d+5L2/7DRaEsYClJ6EtrY2iwp4MLFt2Lx5Mzqdjra2NlauXGmxGA4mHNa0tDRcXV25du0aubm5ishva2sr0dHRZuVcNBoNq1evZs+ePSQlJSGEYHh4WHHy29razBruwMDAgmWKlfU94Pmy7hCY1x1yEEI4yX8DT/GlvtCc5z+u/OQnP2HNmjW88sorSgNKb28v586do7i4GF9fX3bt2kVoaChRUVG0tLQo84nNERAQwJo1a6isrLR4rCyeNTo6iq2tLU1NTRaHkcvY29uTmppKaGgoNTU1nD59mps3b07bBllCo9GwatUqnJ2dsbKyYtWqVTQ1NZGVlcWJEyfIy8ubJTm/kJlimQd1jI8AnwkhvgM0AAdhQncI+EiSpL2AN/CXybuMFfCpJEnpls5/EtDpdHz00Uekpqbyve99jzfeeIOSkhKsrKxISkpi5cqVyp05JCSElpYWbt68ibu7u8UvRHR0NL29veTn52Nvbz9LKnFmKURoaCjZ2dlcu3aNkZGROffoGo2GmJgYli9fTnZ2NkajEUdHRwwGw7wc06GhIWWCTmxsLPHx8dy5c4fGxkZlMqZGo8HDwwNPT0/0ej1CiAXJFMuoybKHzLe//W3+67/+i8OHD7Nnzx7WrVtnUpVueHiYM2fOYGVlxc6dOy0WxY2NjZGVlUVvby/btm1T9t3maoHGx8e5evUqLS0thIeHs3bt2jnj7VVVVeTn5+Ph4UFHRwcODg7Exsbi6+tr8dxbt25x69Yt9uzZM6vDzGAw0NbWRmtrK21tbfT09CjP2dnZ4ezsjKOjo9JMY2dnh06nQ6fTKb6OEOKek2WqETxkenp6iIyMZGRkhJKSErNdY4CiO7pixQrlAzfH0NCQMjtg27ZtODs7W6wFMhqN5OfnU1NTw8qVK1m/fr3ZO/vAwACnT5/G3d2dLVu20N7eTn5+Pr29vXh7exMTE2OyX9poNHLixAlcXFzYsmXu2Mjo6CjHjx/H2dkZJycnRc3b3KSf+22qUdsrHzKurq58/PHH7N69m29961sWxXw9PT2JioqiqKiIsrIys6oS8KUI2Pnz58nKylJGtpqrBdJoNKxbtw5HR0eKioro7+9n06ZNs5xdOaQKkJCQgBACLy8vnnrqKaqqqrh16xZnzpzB39+fiIiIaXf7O3fuMDQ0ZLGdcyqjo6OMj48rShxTHx8eHmZ4eJixsTHGxsYwGAz3HUJVC+geAdLS0jh06BBnzpzh3XfftXisLEBbUlIypzPr6OjIli1bGBsbo6mpiaCgIIvCWEIIwsLC2Lx5M319fZw9e3aWakR1dTVtbW3ExMRMMxCNRsOaNWvYt28fYWFhNDU1kZ6eTk5OjpJbqKqqMumnmEN2imeGUa2trZWxuCtWrCAgIEBR31Y7y5Ywb775JhEREbz00ksUFBSYPU5Oji1btozr168rZcimkCSJ8vJyjEYjWq2W5uZmi409Mr6+vuzcuRN7e3uys7MpLCzEYDDQ399PUVER3t7eZmv/ra2tiY6OZu/evaxZs4ampiZOnz5NZmYmbW1tBAYGzvuL2t3dveBOMahG8MhgbW3NF198gVar5bnnnrM4mEIegmFtbU12drbJqTemaoGEEJw/f97iGFkZJycntm/fzurVqykvLyczM1OZKSZvgywhl3vv37+fqKgoxcmtrq5WNFjnYqEzxTKqETxChIWF8d5771FTU8M3v/lNi8fKg7IlSeLixYvTBLdMlUK4uLiwbds2dDodWVlZFgv4ZKysrEhISCA5OZn+/n66urrw8vIyO1PBFDY2NoSEhCCEwMPDA1dXV0pLSzl16pQy98xUJ91iZIplVCN4xPi7v/s7vvOd73D06FH+/d//3eKxzs7ObN68maGhIS5evMjo6KjFWiBHR0d27NiBs7Mzly9fpqqqal5rkmXXbWxsuHPnDhkZGRZHzM5ErhOKiopiy5Yt7N+/n5iYGGBCQyg9PZ2TJ0+Sl5dHU1MTw8PDDA4OMjo6uihGoIZIH0HkwRi5ubl8/vnnPPPMrKGg02hpaeHy5cu4urri6OhIQ0ODxVqgsbExrl27RktLC8HBwcTGxprdp0uSRFZWFt3d3ezatYuenh7y8/MZGhrC39+f6OjoWfLpM88/e/YskiTx1FNPzVqP3GTf0tJCe3u7oqJhY2PDyMgIQUFB+Pr64uTkhL29vUV/4n6balQjeERpb28nPj4evV7PlStXLEZ1YELoS244CQ0NJTo62uK+3Wg0UlxcTHl5OV5eXiQlJZnc5lRWVlJQUEBCQoLiDI+NjVFWVkZFRYUSFQoNDTWZwOvo6ODcuXPz6jQzGo10dXXR0dFBbW3tLL9BCIG9vT12dnbY2tpOa6iRFf+EEBQVFc16bTVPsATx9PTk2LFjbNmyhT179pCTk4Ovr6/JYyVJmhbKbG9vZ2xszGJXl1z+4OLiQl5eHmfPniUpKWmaHHp/fz/FxcUsX76cwMBA5XGdTkd0dDSBgYEUFxdTWlpKdXU1oaGhBAUFTTOG6upqrKysps0qs7QmuSeira1NmZKp1+vp6+ubpjqn1+sZHR1lbGxMKb4zGo33FSJVfwkecf7nf/6HZ599lpCQEK5duzar1GCmD+Dm5sbVq1dxdnZmy5Yt83Jie3p6uHLlCgMDA0RGRirqcllZWfT09LBr1y6zE3dgYmRUcXExra2tWFtbExISQnBwMJIkcfz4cVavXj3vBJl8TceOHcPX13faJBpzx8oKdwkJEzd6U1W0qgzjEubAgQO88847lJWVsW/fvmmqbzMNYO3ataxYsYLk5GT6+vo4d+6cxaHhMq6uruzcuRM/Pz9KSkrIysri1q1byjANSwYAE8msrVu3smPHDtzd3bl16xbHjx9XCuzmU2E6lcHBQUZGRublFAsh0Gq16HQ6VYHuceaFF17g8OHDZGdn87WvfQ2DwaAUw001ABkfHx+2bt3K6OgomZmZFhNqMjqdjqSkJBITE+np6aG0tBRnZ+c5J89Mxd3dnZSUFHbt2sWqVauUjO+NGzeorq6eU7ZRZjHKp6eiGsES4bXXXuPFF18kPT2d5557juvXr1vUBZInaFpZWXH+/Pl59QsIIZR6fyEEvb29SjXqveDi4qLoCQUEBDA+Pk5eXh7Hjh3j8uXLykwycyxWplhGdYyXEG+//TaDg4N89NFHDA0N8frrr1uMGjk7O7N9+3auXLnClStX5pRTh4loUFdXFwkJCWg0Gm7evMmZM2cICQkhIiLCYgn3VKqrq7G1tVWyy11dXTQ0NNDY2EhzczMajQZPT098fHzw9vZWDA8mjEBuvFkMVCNYYrz//vt0d3fzxRdfYGdnx2effWbxi2lnZ0dqaip5eXm
Ulpai1+tZv369ychRX1/ftGiQEILly5dTVFREeXk59fX1REVF4e/vbzEK09/fT0tLCxEREcpx7u7uuLu7ExMTQ2dnJ3fu3OHOnTtKTN/GxgZPT0+WLVumqF8vFg8UHZqPeJYQInTyGJnVwL9JkvSWEOJnwHcBWT7iJ5IknZzrfZ+k6JApDAYDhw4d4sMPP2T37t0cPXrUYjgUJpzoiooKioqKsLe3Z9OmTdP23JIkcf78efR6vcloUGdnJwUFBXR1deHs7ExUVJTZBprCwkIqKirYt2/fnE71wMCA0kTT2dk5TavVwcEBFxcXnJycpjXT2NraYm1tPeu9H4oCHV+KZx0RQrw8+f/TdIMkSSoHYicXogWamWi2l/m/kiT98gHX8USh1Wr54IMPsLOz45133mHHjh2cOHHC4lhTIQShoaG4u7tz9epVMjMziYmJITg4GCEElZWVdHR0sH79epNfXHd3d3bs2EFTUxPFxcVcvnyZZcuWERERMU1PyGAwUFtbi6+v75wGABNf9NWrVyuJuNraWm7cuEFgYCBjY2P09vZy9+7dWY348lwCnU6HVqtFo9HctwLdYotv7QCqJUkyP7ZFZd68/fbbuLq68tprr5GUlMTZs2fNDrmQ8fDw4KmnniInJ4eCggKlpbK4uBgfHx+L0SBZQmXFihXU1tZy+/ZtLl26hKurK6Ghofj5+dHY2Mjo6Oi89IRM0d/frwh/yT6BJEkMDQ0xMDDA0NAQw8PDjIyMKA014+PjFpU45uJBjWCaeJYQwqx41iR/DfxhxmP/JIT4eyAX+D/mtEgnRbu+BxMTW1QmePXVV/Hz8+Mf//EfSUxM5OTJk0RHR1s8x8bGhpSUFKqrqyksLKS1tRWNRkN8fPy87qQajYagoCACAwNpaGigrKyMnJwcpVzBwcFh1uzm+dLT0zPLKZbLJeb6ZVkwBbqvSHwLIYQ18DTw5ykPvwcEMbFdagHeNHf+4yS+9VXz3e9+ly+++AK9Xk9ycjL//d//PecmPQERAAALO0lEQVQ5QgiCg4MJCQlBkiQMBgP5+fkW5yfMRKPREBAQwO7du0lJScHe3l65Y8uN+5aEwGaymOXTU1kU8a1J9gD5kiS1TnntVkmSDJIkGYEP+VKUS+UeOXDgANnZ2bi5uXHw4EFeffXVOc/p6+ujsrKS5cuXExsbS3t7O6dPn1a60eaLEAIfHx+cnZ3RaDQEBwfT3t5OdnY2x48fJz8/n46Ojjm3LPJWZ7GN4EG3Q7J41hHmFs/6G2ZshYQQPvJ2Cvg6X4pyqdwHsbGxFBQU8PTTT/Ozn/2M3Nxcfv/735t0mI1GIzdu3ECj0bB+/Xrs7OxYsWIF+fn5FBYWUlNTQ1xcnEX1i6nIArv+/v7Ex8cTExNDS0sLDQ0N1NTUUFVVha2tLb6+vvj6+uLl5TVr+7LYmWKZxRDfQghhD+wEnp9x/utCiFgmBHvrTDyvco+4u7uTlZXF97//fT744ANiYmL4/PPPWbdu3bTj5GjQhg0blH4ABwcHUlJSlPj9xYsX8fHxISoqyqSEylTq6uowGAyKQ6zVavHz88PPz4+xsTGam5u5c+eOYhQajYZly5bh7e2Np6cnbm5uSqZ4rvf6qlGrSB9j/vCHP/DCCy8wOjrKz3/+c/75n/8ZjUZDX18fZ86cwdvbm+TkZJPOsMFgoLKykrKyMsbGxvD39ycyMtLkMG5JkkhPT8fa2podO3ZYXJPBYKC9vV3JDch3f7kQTgihtIO6uLhgY2Mz77Cn2lSjYpLKykqeffZZiouL2b59Ox9//LEy+XLXrl0Wu8JgYptz+/ZtKisrMRqNrFq1ivDw8GlbrNbWVi5cuEBiYuK8+gZmvn5nZyednZ3KMPOp/oiVlZWSKLO1tZ2WLJuqPqfRaEhJSQHuvZRaLZt4zAkJCSEvL4+XX36Zt99+m+joaJ5//nleeOGFOQ0AvpRQCQkJoby8nOrqaurr6/Hz82PNmjW4u7tTVVWlTMu8V6ytrfHx8VEa8GNjY1m1ahU9PT1KI01/fz99fX20t7dbrEQdHx+/r6Ya1QieAHQ6HW+++SZPP/003/zmNzly5AgFBQV8+OGHZmcezMTOzo7Y2FjCwsKoqKigurqapqYmXFxc0Ov1hISEPJA0ylShLVtbW5YvX27SKTcYDNOU5+TOMllbSe0nULHI1q1buXXrFs8//zwZGRlERkby5ptv3lM41NbWlujoaA4cOEB8fLwyU62uro6CgoJ5iXuZoqura15OsVarxcHBAVdXVzw9PfH19cXPz49Vq1ah0WhUI1CZGycnJ95//30uXbqEn58fP/zhD4mOjiYrK+ueXsfKykqpNF22bBnLly+nurqa06dPc+bMGW7fvn1Pibfu7m6cnJwWrXx6KqoRPKEkJSVRXFzML37xC5qamti+fTt/9Vd/RX39/Mu67ty5w8jICBEREWzcuJEDBw4o8i1FRUWcOHGCzMxMysrK0Ov1FpNlDyNTLKMawROMVqvl5ZdfpqKigm984xscPXqUsLAwDh06NC+pxqqqKhwcHJS9u42NDWvWrCEtLY29e/eydu1aJEmiuLiY06dPc/LkSW7cuEFDQ8M0xbyHlSmWUR1jFby8vPjd737HD37wA/71X/+V999/n08//ZRDhw7x4x//2GSbo16vp729naioKJMRGXn6ZkREhDKdpqWlhaamJmpra4GJrZm7u7viUC9WO+VM1DyByiwyMzN56aWXyMvLw9nZmW9/+9u88soruLu7K8fIAz0OHDhwT8O0jUYjPT09tLa2KvkBeeysPMBbbqRxcnLCwcFByRHM5fQ+rKYalceQHTt2kJuby4kTJ3jttdd46623+OCDDzh48CAvv/wyQUFB1NXVzWsQ90zkcompI6QuXLhAb28vgYGB6PV6urq6aGxsnHaeEAJbW9tZiTI5WSaEwGAwPJSmGpXHmH379rFv3z7OnTvHz3/+cz755BN++9vfkpyczI4dO9i8efMDv4cQgr6+Pry8vIiKilIeNxgMDAwMTFOdk32H0dFRBgcHlYYaOU9wvwp0SJK05P6tW7dOUll8SktLpW9961uSg4ODBEgrV66UfvSjH0kNDQ33/ZpDQ0PSn/70J+n27dsPvL6YmBgpJibG5HNArmTm+6RGh1TmTXh4OL/5zW+or6/nP/7jP3B2dub1118nMDCQzZs38+67795zsszcSKbFRDUClXvG3d2dw4cPU1JSwrVr1/jGN77B7du3efHFF/H29iYtLY13332XtjZLPVYTyOp4i10+PRXVCFQeiMTERD755BNaW1s5evQo+/fvJzc3lxdffBFfX1/i4+P50Y9+xIULF6ZNqpeRM8XzFfVaCFQjUPlK0Gq1PP3003z++ed0dnaSnp7OP/zDP9DV1cUbb7xBamoqbm5ubN++nVdeeYWMjAyGhoYeaqZY5kHFtw4CPwPCgQ2SJJkM3gshdgNvA1omOs6OTD4+p3iXKdQ8wdKiqqqKY8eOce7cOXJycpRRTzqdjsDAQMLCwli3bh0JCQkkJiZOy0fcCw+lqUYIEQ4YgV
8BPzRlBJOCWxVMtFc2ATeAv5EkqVQI8TrQJX0p3uUmSZIl3SJANYKlTmVlJZmZmWRnZ1NQUEB9fT2Dg4PK8y4uLvj5+eHv74+/vz++vr6sWLFC0TySJR1nlm4/1M4yIUQW5o1gI/AzSZJ2Tf7/jwEkSfqFEKIcSJUmNIt8gCxJkkLnej/VCB4vjEYj5eXl5OTkKE3+DQ0NNDU10dnZabbwzs7ODhsbG6ysrNBqtcrIV1MO+cPOGK8Apqb/moDEyb/nLd6lim89vmg0GsLDwwkPD5/13MjICA0NDdTX19PY2Ehrays9PT3o9Xr0ej1DQ0NK0qy4uPi+RL/mNAIhRAZgSnfjsDShPTTnS5h47J5/fiRJ+gD4ACZ+Ce71fJWliTwHOSQkZMHeY04jkCQp7QHfowmY2sPnB8iTpFtl7aF5iHepqCwIixEivQGECCECJ6UY/5oJ0S74UrwL5hbvUlFZEB7ICIQQXxdCNAEbgRNCiNOTj/sKIU4CSJI0DvwTcBooAz6TJOnW5EscAXYKISqZiB4deZD1qKjcD2o/gcoTgTrCVUXFAqoRqDzxqEag8sSjGoHKE8+SdIyFEO2AKYEcD6BjkZezEDwO1/GoXYO/JEkmRxwtSSMwhxAi11wEYCnxOFzHUroGdTuk8sSjGoHKE8/jZgQfPOwFfEU8DtexZK7hsfIJVFTuh8ftl0BF5Z5RjUDliWdJGoEQYrcQolwIUTXZmzzzeSGEeGfy+SIhRPzDWKcl5nENqUIIvRDi5uS/f3sY67SEEOLXQog2IYTJ+dNL4XMAlp4MIxOKFdXAasAaKAQiZhyzFzjFRFdbEpDzsNd9H9eQChx/2Gud4zq2APFAiZnnH+nPQf63FH8JNgBVkiTVSJI0CvwR+NqMY74GfCJNcA1wnexce1SYzzU88kiSdBHosnDIo/45AEtzO2SqcX/FfRzzMJnv+jYKIQqFEKeEEJGLs7SvlEf9cwCWpjT7fBr3v5Lm/gVkPuvLZ6LepV8IsRf4f8DCdZsvDI/65wAszV8CS43793LMw2TO9UmS1CtJUv/k3ycBnRDCY/GW+JXwqH8OwNI0AkuN+zLHgL+fjE4kAXppUt/oEWHOaxBCLBeTY1eEEBuY+Kzmnqb3aPGofw7AEtwOSZI0LoSQG/e1wK8lSbolhHhh8vn3gZNMRCaqgEHgfz+s9ZpintfwHHBICDEODAF/LU2GXB4VhBB/YCKK5TEpuPBTQAdL43OQUcsmVJ54luJ2SEXlK0U1ApUnHtUIVJ54VCNQeeJRjUDliUc1ApUnHtUIVJ54/j8+PX0NDc0oFAAAAABJRU5ErkJggg==",
- "text/plain": [
- ""
- ]
- },
- "metadata": {
- "needs_background": "light"
- },
- "output_type": "display_data"
- }
- ],
+ "outputs": [],
"source": [
"import numpy as np\n",
"\n",
@@ -78,7 +65,7 @@
},
{
"cell_type": "code",
- "execution_count": 2,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -137,7 +124,7 @@
},
{
"cell_type": "code",
- "execution_count": 4,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -187,21 +174,9 @@
},
{
"cell_type": "code",
- "execution_count": 42,
+ "execution_count": null,
"metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "> CG info :: {'niter': 109, 'success': True, 'res_norm': 5.704506834276634e-15}\n",
- "> L2 error :: 9.14e-02\n",
- "> H1 error :: 1.62e+00\n",
- "> Solution time :: 1.42e+01s\n",
- "> Evaluat. time :: 4.73e-01s \n"
- ]
- }
- ],
+ "outputs": [],
"source": [
"# Set the solver parameters\n",
"# 'cg' -> Conjugate gradient method\n",
@@ -235,7 +210,7 @@
},
{
"cell_type": "code",
- "execution_count": 6,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -270,7 +245,7 @@
},
{
"cell_type": "code",
- "execution_count": 7,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
From 33a5a6597bba86b9b680aee36d6e756c144a840c Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Tue, 25 Nov 2025 11:06:22 +0100
Subject: [PATCH 08/63] install psydac in doc ci
---
.github/workflows/documentation.yml | 109 +++++++++++++++++++++++++++-
1 file changed, 106 insertions(+), 3 deletions(-)
diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml
index 66c0e37c4..36567688a 100644
--- a/.github/workflows/documentation.yml
+++ b/.github/workflows/documentation.yml
@@ -25,31 +25,134 @@ jobs:
runs-on: ubuntu-latest
env:
GITHUB_PAT: ${{ secrets.GITHUB_TOKEN}}
+ PSYDAC_MESH_DIR: ${{ github.workspace }}/mesh
+ OMP_NUM_THREADS: 2
steps:
- name: Checkout
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
+
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: 3.9
+ cache: 'pip'
+ cache-dependency-path: |
+ pyproject.toml
+ requirements.txt
+ requirements_extra.txt
+
- name: Install non-Python dependencies on Ubuntu
+ uses: awalsh128/cache-apt-pkgs-action@latest
+ with:
+ packages: gfortran openmpi-bin libopenmpi-dev libhdf5-openmpi-dev
+ version: 1.0
+ execute_install_scripts: true
+
+ - name: Reconfigure non-Python dependencies on Ubuntu
run: |
- sudo apt update
+ sudo apt-get update
+ sudo apt-get install --reinstall openmpi-bin libhdf5-openmpi-dev liblapack-dev libblas-dev
sudo apt install graphviz pandoc
- - name: Install Python dependencies
+
+ - name: Print information on MPI and HDF5 libraries
+ run: |
+ ompi_info
+ h5pcc -showconfig -echo || true
+
+ - name: Upgrade pip, setuptools, and wheel
+ run: |
+ python -m pip install --upgrade pip setuptools wheel
+
+ - name: Determine directory of parallel HDF5 library
+ run: |
+ if [[ "${{ matrix.os }}" == "ubuntu-24.04" ]]; then
+ HDF5_DIR=$(dpkg -L libhdf5-openmpi-dev | grep libhdf5.so | xargs dirname)
+ elif [[ "${{ matrix.os }}" == "macos-14" ]]; then
+ HDF5_DIR=$(brew list hdf5-mpi | grep "libhdf5.dylib" | xargs dirname | xargs dirname)
+ fi
+ echo $HDF5_DIR
+ echo "HDF5_DIR=$HDF5_DIR" >> $GITHUB_ENV
+
+ - name: Cache PETSc
+ uses: actions/cache@v4
+ id: cache-petsc
+ env:
+ cache-name: cache-PETSc
+ with:
+ path: "./petsc"
+ key: petsc-${{ matrix.os }}-${{ matrix.python-version }}
+
+ - if: steps.cache-petsc.outputs.cache-hit != 'true'
+ name: Download a specific release of PETSc
+ run: |
+ git clone --depth 1 --branch v3.23.2 https://gitlab.com/petsc/petsc.git
+
+ - if: steps.cache-petsc.outputs.cache-hit != 'true'
+ name: Install PETSc with complex support
+ working-directory: ./petsc
+ run: |
+ export PETSC_DIR=$(pwd)
+ export PETSC_ARCH=petsc-cmplx
+ ./configure --with-scalar-type=complex --with-fortran-bindings=0 --have-numpy=1
+ make all
+ echo "PETSC_DIR=$PETSC_DIR" > petsc.env
+ echo "PETSC_ARCH=$PETSC_ARCH" >> petsc.env
+
+ # This step is not really necessary and could be combined with PETSc install
+ # step; however it's good to verify if the cached PETSc installation really works!
+ - name: Test PETSc installation
+ working-directory: ./petsc
+ run: |
+ source petsc.env
+ make check
+ echo "PETSC_DIR=$PETSC_DIR" >> $GITHUB_ENV
+ echo "PETSC_ARCH=$PETSC_ARCH" >> $GITHUB_ENV
+
+ - name: Install petsc4py
+ working-directory: ./petsc
+ run: |
+ python -m pip install wheel Cython numpy
+ python -m pip install src/binding/petsc4py
+
+ - name: Install h5py in parallel mode
+ run: |
+ export CC="mpicc"
+ export HDF5_MPI="ON"
+ python -m pip install h5py --no-cache-dir --no-binary h5py
+ python -m pip list
+
+ - name: Check parallel h5py installation
+ run: |
+ python -c "
+ from mpi4py import MPI
+ import h5py
+ # This particular instantiation of h5py.File will fail if parallel h5py isn't installed
+ f = h5py.File('parallel_test.hdf5', 'w', driver='mpio', comm=MPI.COMM_WORLD)
+ print(f)"
+
+ - name: Install project
+ run: |
+ python -m pip install .[test]
+ python -m pip freeze
+
+ - name: Install Python dependencies for Documentation
run: |
python -m pip install -r docs/requirements.txt
+
- name: Copy Notebooks
run: |
cp -r examples/notebooks/* docs/source/
+
- name: Make the sphinx doc
run: |
rm -rf docs/source/modules/STUBDIR
make -C docs clean
make -C docs html
python docs/update_links.py
+
- name: Setup Pages
uses: actions/configure-pages@v5
+
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
From cad20f813c8d4fb83f92dc6ba9c78f455e0731e6 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Tue, 25 Nov 2025 11:28:45 +0100
Subject: [PATCH 09/63] remove metadata using nb-clean
---
.../notebooks/Helmholtz_non_periodic.ipynb | 20 +--------------
examples/notebooks/Poisson_non_periodic.ipynb | 25 +------------------
2 files changed, 2 insertions(+), 43 deletions(-)
diff --git a/examples/notebooks/Helmholtz_non_periodic.ipynb b/examples/notebooks/Helmholtz_non_periodic.ipynb
index 1a02f2e01..8ff504869 100644
--- a/examples/notebooks/Helmholtz_non_periodic.ipynb
+++ b/examples/notebooks/Helmholtz_non_periodic.ipynb
@@ -278,25 +278,7 @@
"source": []
}
],
- "metadata": {
- "kernelspec": {
- "display_name": "venv3.13",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.13.7"
- }
- },
+ "metadata": {},
"nbformat": 4,
"nbformat_minor": 4
}
diff --git a/examples/notebooks/Poisson_non_periodic.ipynb b/examples/notebooks/Poisson_non_periodic.ipynb
index c042b68b8..764830d91 100644
--- a/examples/notebooks/Poisson_non_periodic.ipynb
+++ b/examples/notebooks/Poisson_non_periodic.ipynb
@@ -286,30 +286,7 @@
]
}
],
- "metadata": {
- "kernelspec": {
- "display_name": "v_psydac",
- "language": "python",
- "name": "v_psydac"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.10.6"
- },
- "vscode": {
- "interpreter": {
- "hash": "e7370f93d1d0cde622a1f8e1c04877d8463912d04d973331ad4851f04de6915a"
- }
- }
- },
+ "metadata": {},
"nbformat": 4,
"nbformat_minor": 4
}
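For reference, the metadata stripping applied above (the commit message mentions ``nb-clean``) can be reproduced with a short Python sketch; the snippet below is illustrative only and is not part of the patch:

```python
import json
import sys

# Blank out the top-level "metadata" block of each notebook passed on the
# command line, leaving cells and nbformat fields untouched (similar in
# spirit to running nb-clean on the notebook).
for path in sys.argv[1:]:
    with open(path) as f:
        nb = json.load(f)
    nb["metadata"] = {}
    with open(path, "w") as f:
        json.dump(nb, f, indent=1)
        f.write("\n")
```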
From 36d672af7d52dd276e870efed3ebf4851e7dd99d Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Tue, 25 Nov 2025 11:32:24 +0100
Subject: [PATCH 10/63] change cp folder
---
.github/workflows/documentation.yml | 5 +++--
docs/source/examples.rst | 4 ++--
2 files changed, 5 insertions(+), 4 deletions(-)
diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml
index 36567688a..98e669c69 100644
--- a/.github/workflows/documentation.yml
+++ b/.github/workflows/documentation.yml
@@ -141,7 +141,8 @@ jobs:
- name: Copy Notebooks
run: |
- cp -r examples/notebooks/* docs/source/
+ mkdir docs/source/examples
+ cp -r examples/notebooks/* docs/source/examples/
- name: Make the sphinx doc
run: |
@@ -152,7 +153,7 @@ jobs:
- name: Setup Pages
uses: actions/configure-pages@v5
-
+
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
diff --git a/docs/source/examples.rst b/docs/source/examples.rst
index 9a68a1869..e9e417ebd 100644
--- a/docs/source/examples.rst
+++ b/docs/source/examples.rst
@@ -11,5 +11,5 @@ Examples
:maxdepth: 1
:caption: Notebooks:
- Poisson_non_periodic
- Helmholtz_non_periodic
+ examples/Poisson_non_periodic
+ examples/Helmholtz_non_periodic
From ebd11950ee321c5b9e42eae7e494ff84dc763efa Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Tue, 25 Nov 2025 13:59:40 +0100
Subject: [PATCH 11/63] fix error in notebook and test ipytest
---
docs/requirements.txt | 3 +-
examples/notebooks/Poisson_non_periodic.ipynb | 50 +++++++++++++++++--
2 files changed, 49 insertions(+), 4 deletions(-)
diff --git a/docs/requirements.txt b/docs/requirements.txt
index b8a28b763..e28506761 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -4,4 +4,5 @@ numpydoc
tomli
sphinx-math-dollar
nbsphinx
-ipykernel
\ No newline at end of file
+ipykernel
+ipytest
\ No newline at end of file
diff --git a/examples/notebooks/Poisson_non_periodic.ipynb b/examples/notebooks/Poisson_non_periodic.ipynb
index 764830d91..7837cfb12 100644
--- a/examples/notebooks/Poisson_non_periodic.ipynb
+++ b/examples/notebooks/Poisson_non_periodic.ipynb
@@ -39,14 +39,14 @@
"\n",
"# Second quarter annulus\n",
"domain_log_2 = Square('A_2', bounds1=(0., 1.), bounds2=(np.pi, 3/2 * np.pi))\n",
- "F_2 = PolarMapping('F_1', dim=2, c1=rmin+rmax, c2=0., rmin=rmin, rmax=rmax)\n",
+ "F_2 = PolarMapping('F_2', dim=2, c1=rmin+rmax, c2=0., rmin=rmax, rmax=rmin)\n",
"Omega_2 = F_2(domain_log_2)\n",
"\n",
"# Join the patches\n",
"\n",
"from sympde import Domain\n",
- "connectivity = [((0,1,1),(1,1,-1))]\n",
- "patches = [Omega_1,Omega_2]\n",
+ "connectivity = [((0,1,-1),(1,1,-1), 1)]\n",
+ "patches = [Omega_1, Omega_2]\n",
"Omega = Domain.join(patches, connectivity, 'domain')\n",
"\n",
"# Example of a complex multi-patch domain\n",
@@ -272,6 +272,50 @@
"Pm.close()"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "__file__ = \"Poisson_non_periodic.ipynb\"\n",
+ "\n",
+ "import ipytest\n",
+ "import pytest\n",
+ "\n",
+ "ipytest.autoconfig(raise_on_error=True)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%%ipytest\n",
+ "\n",
+ "def test_pass():\n",
+ " assert True\n",
+ "\n",
+ "def test_fail():\n",
+ " assert False"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%%ipytest\n",
+ "\n",
+ "def test_l2error():\n",
+ " assert l2_error < 3e-05\n",
+ "\n",
+ "def test_h1error():\n",
+ " assert h1_error < 2e-03"
+ ]
+ },
{
"cell_type": "markdown",
"metadata": {},
From 986607024c032426ea0b9e2ecf74148aa94f00ce Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Tue, 25 Nov 2025 14:07:21 +0100
Subject: [PATCH 12/63] remove failing test from notebook
---
examples/notebooks/Poisson_non_periodic.ipynb | 15 ---------------
1 file changed, 15 deletions(-)
diff --git a/examples/notebooks/Poisson_non_periodic.ipynb b/examples/notebooks/Poisson_non_periodic.ipynb
index 7837cfb12..3e269d221 100644
--- a/examples/notebooks/Poisson_non_periodic.ipynb
+++ b/examples/notebooks/Poisson_non_periodic.ipynb
@@ -286,21 +286,6 @@
"ipytest.autoconfig(raise_on_error=True)"
]
},
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "%%ipytest\n",
- "\n",
- "def test_pass():\n",
- " assert True\n",
- "\n",
- "def test_fail():\n",
- " assert False"
- ]
- },
{
"cell_type": "code",
"execution_count": null,
From c4b08e223ceddd758b88bdb4ff6095a6ee3959d2 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Tue, 25 Nov 2025 14:32:30 +0100
Subject: [PATCH 13/63] make the notebook tests a little nicer
---
examples/notebooks/Poisson_non_periodic.ipynb | 37 ++++++++++---------
1 file changed, 20 insertions(+), 17 deletions(-)
diff --git a/examples/notebooks/Poisson_non_periodic.ipynb b/examples/notebooks/Poisson_non_periodic.ipynb
index 3e269d221..2c6134d71 100644
--- a/examples/notebooks/Poisson_non_periodic.ipynb
+++ b/examples/notebooks/Poisson_non_periodic.ipynb
@@ -272,17 +272,33 @@
"Pm.close()"
]
},
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Example: Results on the pretzel domain\n",
+ "\n",
+ "\n",
+    "<img ...>\n",
+    "<img ...>\n",
+    "<img ...>\n",
+    "<img ...>"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Testing the notebook"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
- "__file__ = \"Poisson_non_periodic.ipynb\"\n",
- "\n",
"import ipytest\n",
- "import pytest\n",
- "\n",
"ipytest.autoconfig(raise_on_error=True)"
]
},
@@ -300,19 +316,6 @@
"def test_h1error():\n",
" assert h1_error < 2e-03"
]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "### Example: Results on the pretzel domain\n",
- "\n",
- "\n",
-    "<img ...>\n",
-    "<img ...>\n",
-    "<img ...>\n",
-    "<img ...>"
- ]
}
],
"metadata": {},
From 44e676002ddd52e39a9f233d9bc6c7ef83c21edd Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Thu, 27 Nov 2025 14:49:49 +0100
Subject: [PATCH 14/63] remove performance folder from header script
---
scripts/add_header.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/scripts/add_header.py b/scripts/add_header.py
index 8708a9709..18e49dd29 100644
--- a/scripts/add_header.py
+++ b/scripts/add_header.py
@@ -45,4 +45,3 @@ def add_header_to_python_files(directory, string_to_add, *, dry_run=True):
add_header_to_python_files('../psydac' , header, dry_run=dry_run)
add_header_to_python_files('../examples' , header, dry_run=dry_run)
add_header_to_python_files('../mesh' , header, dry_run=dry_run)
- add_header_to_python_files('../performance', header, dry_run=dry_run)
From f00c89c8cf8c399fcd149da298ac41700c0813f7 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Thu, 27 Nov 2025 15:41:21 +0100
Subject: [PATCH 15/63] add test to Helmholtz notebook
---
.../notebooks/Helmholtz_non_periodic.ipynb | 29 ++++++++++++++++++-
1 file changed, 28 insertions(+), 1 deletion(-)
diff --git a/examples/notebooks/Helmholtz_non_periodic.ipynb b/examples/notebooks/Helmholtz_non_periodic.ipynb
index 8ff504869..6a0c19d97 100644
--- a/examples/notebooks/Helmholtz_non_periodic.ipynb
+++ b/examples/notebooks/Helmholtz_non_periodic.ipynb
@@ -275,7 +275,34 @@
{
"cell_type": "markdown",
"metadata": {},
- "source": []
+ "source": [
+ "### Testing the notebook"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import ipytest\n",
+ "ipytest.autoconfig(raise_on_error=True)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%%ipytest\n",
+ "\n",
+ "def test_l2error():\n",
+ " assert l2_error < 3e-07\n",
+ "\n",
+ "def test_h1error():\n",
+ " assert h1_error < 4e-05"
+ ]
}
],
"metadata": {},
From 97fe8bc1b4aa6463547236059dcc8e86a4af3555 Mon Sep 17 00:00:00 2001
From: Martin Campos Pinto
Date: Fri, 28 Nov 2025 19:06:09 +0100
Subject: [PATCH 16/63] add notebook with example of L2 fem projection
---
examples/notebooks/fem_L2_projection.ipynb | 195 +++++++++++++++++++++
1 file changed, 195 insertions(+)
create mode 100644 examples/notebooks/fem_L2_projection.ipynb
diff --git a/examples/notebooks/fem_L2_projection.ipynb b/examples/notebooks/fem_L2_projection.ipynb
new file mode 100644
index 000000000..93076a47a
--- /dev/null
+++ b/examples/notebooks/fem_L2_projection.ipynb
@@ -0,0 +1,195 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "2443e734",
+ "metadata": {},
+ "source": [
+ "# Projecting a function onto a finite element space\n",
+ "\n",
+    "In this notebook we show how to define a scalar FEM space over a given (discretized) domain, plot some basis functions, assemble a mass matrix, and compute the L2 projection of a given function.\n",
+ "\n",
+ "## Step 1 : define the domain and discretize it\n",
+ "\n",
+ "This is similar to what is done in other examples"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "3f2bc988",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "\n",
+ "from sympde.topology import Square, PolarMapping\n",
+ "from sympde.utilities.utils import plot_domain\n",
+ "\n",
+ "from psydac.api.tests.build_domain import build_pretzel\n",
+ "\n",
+ "# Define the topological geometry for each patch\n",
+ "rmin, rmax = 0.3, 1.\n",
+ "\n",
+ "# First quarter annulus\n",
+ "domain_log_1 = Square('A_1', bounds1=(0., 1.), bounds2=(0., np.pi/2))\n",
+ "F_1 = PolarMapping('F_1', dim=2, c1=0., c2=0., rmin=rmin, rmax=rmax)\n",
+ "Omega_1 = F_1(domain_log_1)\n",
+ "\n",
+ "# Second quarter annulus\n",
+ "domain_log_2 = Square('A_2', bounds1=(0., 1.), bounds2=(np.pi, 3/2 * np.pi))\n",
+ "F_2 = PolarMapping('F_2', dim=2, c1=rmin+rmax, c2=0., rmin=rmax, rmax=rmin)\n",
+ "Omega_2 = F_2(domain_log_2)\n",
+ "\n",
+ "# Join the patches\n",
+ "\n",
+ "# [remark] this could be an opportunity to describe how the interfaces of a domain are numbered\n",
+ "from sympde import Domain\n",
+ "connectivity = [((0,1,-1),(1,1,-1), 1)]\n",
+ "patches = [Omega_1, Omega_2]\n",
+ "Omega = Domain.join(patches, connectivity, 'domain')\n",
+ "\n",
+ "# Example of a complex multi-patch domain\n",
+ "# Omega = build_pretzel()\n",
+ "\n",
+ "# Simple visualization of the topological domain\n",
+ "plot_domain(Omega, draw=False, isolines=True)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "b1f65c13",
+ "metadata": {},
+ "source": [
+ "## Step 2 : define a FEM space and plot some basis functions\n",
+ "\n",
+ "[this could actually be a good opportunity to present how the indices work in psydac arrays]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "39c21151",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from sympde.topology import ScalarFunctionSpace\n",
+ "\n",
+ "from psydac.linalg.utilities import array_to_psydac\n",
+ "from psydac.api.discretization import discretize\n",
+ "from psydac.fem.basic import FemField\n",
+ "from psydac.fem.plotting_utilities import plot_field_2d as plot_field\n",
+ "\n",
+ "# the kind argument also specifies the pushforward defining the FEM space on the physical domain\n",
+ "# kind=None results in a (scalar, vector?) space defined by a simple change of variable (0-form pushforward)\n",
+ "V = ScalarFunctionSpace('V', Omega, kind=None)\n",
+ "\n",
+ "ncells = [10, 10]\n",
+ "degree = [2, 2]\n",
+ "\n",
+    "# [remark] for multipatch domains the periodic flag is a priori irrelevant, should raise an error / warning \n",
+ "# periodic = [ False, True]\n",
+ "periodic = [False, False] \n",
+ "\n",
+ "Omega_h = discretize(Omega, ncells=ncells, periodic=periodic)\n",
+ "Vh = discretize(V, Omega_h, degree=degree)\n",
+ "\n",
+ "npatches = len(Vh.patch_spaces)\n",
+ "for k in range(npatches):\n",
+ " knots = Vh.patch_spaces[k].knots[0]\n",
+ " print(f'Spline knots (logical coords) along axis 0 in patch {k}: \\n {knots}')\n",
+ "\n",
+ "# plot some basis function from the space \n",
+ "# [remark] here I'm using numpy arrays, but one could illustrate the psydac indexing\n",
+ "bf_c = np.zeros(Vh.nbasis)\n",
+ "bf_c[Vh.nbasis//4] = 1\n",
+ "bf_c[Vh.nbasis//4+6] = 1\n",
+ "bf = FemField(Vh, coeffs=array_to_psydac(bf_c, Vh.coeff_space))\n",
+ "\n",
+ "plot_field(fem_field=bf, Vh=Vh, space_kind=None, domain=Omega, title='two basis functions', hide_plot=False)\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "36a9632b",
+ "metadata": {},
+ "source": [
+ "## Step 3 : compute the mass matrix\n",
+ "\n",
+ "we assemble the mass matrix and visualize it"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "091d4560",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import matplotlib.pyplot as plt\n",
+ "from matplotlib import cm, colors\n",
+ "\n",
+ "from sympde.topology import elements_of\n",
+ "from sympde.expr.expr import BilinearForm\n",
+ "from sympde.expr.expr import integral \n",
+ "\n",
+ "from psydac.api.settings import PSYDAC_BACKENDS\n",
+ "\n",
+ "backend_language = 'python'\n",
+ "backend = PSYDAC_BACKENDS[backend_language]\n",
+ "\n",
+ "# domain = V.domain # = Omega\n",
+ "u, v = elements_of(V, names='u, v')\n",
+ "a = BilinearForm((u, v), integral(Omega, u * v))\n",
+ "ah = discretize(a, Omega_h, [Vh, Vh], backend=backend)\n",
+ "\n",
+ "M = ah.assemble() # Mass matrix in stencil format (linear operator)\n",
+ "\n",
+ "# visualize the mass matrix by plotting its numpy conversion\n",
+ "mat = M.toarray()\n",
+ "\n",
+ "#----------------\n",
+ "fig,ax = plt.subplots(1,1)\n",
+ "ax.set_title(f\"Mass matrix M on the domain with {npatches} patch(es)\")\n",
+ "im = ax.matshow(mat, norm=colors.LogNorm(), cmap='hot_r')\n",
+ "cb = fig.colorbar(im, ax=ax)\n",
+ "fig.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "dae32f1c",
+ "metadata": {},
+ "source": [
+ "## Step 4 : compute the moments of a given target function and its L2 projection\n",
+ "\n",
+ "we invert the mass matrix on the moments of a given function f"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "db7f74ca",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from psydac.linalg.solvers import inverse\n",
+ "from psydac.fem.projectors import get_dual_dofs\n",
+ "\n",
+ "x,y = Omega.coordinates\n",
+ "ref_f = (x-1/2)**2 + y**2\n",
+ "tilde_f = get_dual_dofs(Vh=Vh, f=ref_f, domain_h=Omega_h, backend_language=backend_language)\n",
+ "\n",
+ "inv_M = inverse(M, solver='cg')\n",
+ "\n",
+ "f_h = inv_M @ tilde_f\n",
+ "\n",
+ "plot_field(stencil_coeffs=f_h, Vh=Vh, space_kind='h1', domain=Omega, title='f_h: L2 projection of f', hide_plot=False)"
+ ]
+ }
+ ],
+ "metadata": {},
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
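In compact form, Steps 3 and 4 of the notebook above perform the standard discrete L2 projection. A schematic summary, using the notebook's names ``M``, ``tilde_f`` and ``f_h`` and basis functions spanning the discrete space:

```latex
% L2 projection of f onto V_h = span{phi_1, ..., phi_N}
M_{ij} = \int_\Omega \varphi_i \, \varphi_j \,\mathrm{d}x , \qquad
\tilde{f}_i = \int_\Omega f \, \varphi_i \,\mathrm{d}x , \qquad
M \, c = \tilde{f} , \qquad
f_h = \sum_{j=1}^{N} c_j \, \varphi_j .
```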
From e4fc59df1ae15ca2dd55a751ee57c8fbe7d7bc83 Mon Sep 17 00:00:00 2001
From: Martin Campos Pinto
Date: Fri, 28 Nov 2025 21:18:30 +0100
Subject: [PATCH 17/63] added the case of a vector-valued space
---
examples/notebooks/fem_L2_projection.ipynb | 135 ++++++++++++++++++++-
1 file changed, 129 insertions(+), 6 deletions(-)
diff --git a/examples/notebooks/fem_L2_projection.ipynb b/examples/notebooks/fem_L2_projection.ipynb
index 93076a47a..27d3d1d19 100644
--- a/examples/notebooks/fem_L2_projection.ipynb
+++ b/examples/notebooks/fem_L2_projection.ipynb
@@ -66,6 +66,14 @@
"[this could actually be a good opportunity to present how the indices work in psydac arrays]"
]
},
+ {
+ "cell_type": "markdown",
+ "id": "a8ea7ee6",
+ "metadata": {},
+ "source": [
+ "We first define a scalar-valued FEM space V"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
@@ -73,7 +81,7 @@
"metadata": {},
"outputs": [],
"source": [
- "from sympde.topology import ScalarFunctionSpace\n",
+ "from sympde.topology import ScalarFunctionSpace, VectorFunctionSpace\n",
"\n",
"from psydac.linalg.utilities import array_to_psydac\n",
"from psydac.api.discretization import discretize\n",
@@ -106,8 +114,39 @@
"bf_c[Vh.nbasis//4+6] = 1\n",
"bf = FemField(Vh, coeffs=array_to_psydac(bf_c, Vh.coeff_space))\n",
"\n",
- "plot_field(fem_field=bf, Vh=Vh, space_kind=None, domain=Omega, title='two basis functions', hide_plot=False)\n",
- "\n"
+ "plot_field(fem_field=bf, Vh=Vh, space_kind=None, domain=Omega, title='two basis functions', hide_plot=False)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1de55bc2",
+ "metadata": {},
+ "source": [
+ "Then we define a vector-valued FEM space W"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "d4b778e1",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "\n",
+ "W = VectorFunctionSpace('W', Omega, kind=None) # a priori 2d vectors\n",
+ "Wh = discretize(W, Omega_h, degree=degree)\n",
+ "\n",
+ "# plot some basis function from the space \n",
+ "# [remark] here I'm using numpy arrays, but one could illustrate the psydac indexing\n",
+ "dim_Wh = Wh.nbasis\n",
+ "bf2_c = np.zeros(dim_Wh)\n",
+ "bf2_c[dim_Wh//4+5] = 1\n",
+ "bf2_c[dim_Wh//4+42] = 1\n",
+ "bf2_c[dim_Wh//4+102] = 1\n",
+ "bf2 = FemField(Wh, coeffs=array_to_psydac(bf2_c, Wh.coeff_space))\n",
+ "\n",
+ "plot_field(fem_field=bf2, Vh=Wh, space_kind=None, domain=Omega, \n",
+ " plot_type='components', title='three basis vector functions', hide_plot=False)\n"
]
},
{
@@ -117,7 +156,7 @@
"source": [
"## Step 3 : compute the mass matrix\n",
"\n",
- "we assemble the mass matrix and visualize it"
+ "We assemble the mass matrix and visualize it. Again we start with the scalar-valued FEM space"
]
},
{
@@ -139,7 +178,6 @@
"backend_language = 'python'\n",
"backend = PSYDAC_BACKENDS[backend_language]\n",
"\n",
- "# domain = V.domain # = Omega\n",
"u, v = elements_of(V, names='u, v')\n",
"a = BilinearForm((u, v), integral(Omega, u * v))\n",
"ah = discretize(a, Omega_h, [Vh, Vh], backend=backend)\n",
@@ -154,6 +192,42 @@
"ax.set_title(f\"Mass matrix M on the domain with {npatches} patch(es)\")\n",
"im = ax.matshow(mat, norm=colors.LogNorm(), cmap='hot_r')\n",
"cb = fig.colorbar(im, ax=ax)\n",
+ "fig.show()\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "57f85041",
+ "metadata": {},
+ "source": [
+ "We next assemble the mass matrix of the vector-valued FEM space"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "aa017e79",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "## and we do the same for the vector-valued space W\n",
+ "\n",
+ "from sympde.calculus import dot\n",
+ "\n",
+ "u, v = elements_of(W, names='u, v')\n",
+ "a = BilinearForm((u, v), integral(Omega, dot(u,v)))\n",
+ "ah_W = discretize(a, Omega_h, [Wh, Wh], backend=backend)\n",
+ "\n",
+ "M_W = ah_W.assemble() # Mass matrix in stencil format (linear operator)\n",
+ "\n",
+ "# visualize the mass matrix by plotting its numpy conversion\n",
+ "mat_W = M_W.toarray()\n",
+ "\n",
+ "#----------------\n",
+ "fig,ax = plt.subplots(1,1)\n",
+ "ax.set_title(f\"Mass matrix M on the domain with {npatches} patch(es)\")\n",
+ "im = ax.matshow(mat_W, norm=colors.LogNorm(), cmap='hot_r')\n",
+ "cb = fig.colorbar(im, ax=ax)\n",
"fig.show()"
]
},
@@ -164,7 +238,7 @@
"source": [
"## Step 4 : compute the moments of a given target function and its L2 projection\n",
"\n",
- "we invert the mass matrix on the moments of a given function f"
+ "we invert the mass matrix on the moments of a given function f: first for the scalar-valued space"
]
},
{
@@ -187,6 +261,55 @@
"\n",
"plot_field(stencil_coeffs=f_h, Vh=Vh, space_kind='h1', domain=Omega, title='f_h: L2 projection of f', hide_plot=False)"
]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "8258971a",
+ "metadata": {},
+ "source": [
+ "next for the vector-valued space"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "1f0f103d",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from sympy import pi, cos, sin, Tuple, exp, atan\n",
+ "\n",
+ "envelope = exp(-(x-.6)**2 / (2 * 0.15**2))\n",
+ "ref_gx = envelope * cos(2*pi*y)\n",
+ "ref_gy = envelope * 1\n",
+ "\n",
+ "ref_g = Tuple(ref_gx, ref_gy)\n",
+ "\n",
+ "tilde_g = get_dual_dofs(Vh=Wh, f=ref_g, domain_h=Omega_h, backend_language=backend_language)\n",
+ "\n",
+ "inv_M_W = inverse(M_W, solver='cg')\n",
+ "\n",
+ "g_h = inv_M_W @ tilde_g\n",
+ "\n",
+ "# plot_field(fem_field=, Vh=Wh, space_kind=None, domain=Omega, \n",
+ "# plot_type='components', title='two basis vector functions', hide_plot=False)\n",
+ "\n",
+ "plot_field(stencil_coeffs=g_h, Vh=Wh, space_kind='h1', domain=Omega, \n",
+ " plot_type='vector_field', title='g_h: L2 projection of g', hide_plot=False)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "6c77d25a",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# note: specifying a different space_kind is possible but will a priori result in inconsistent pushforward\n",
+ "\n",
+ "plot_field(stencil_coeffs=g_h, Vh=Wh, space_kind='hcurl', domain=Omega, \n",
+ " plot_type='vector_field', title='g_h: L2 projection of g with inconsistent pushforward', hide_plot=False)"
+ ]
}
],
"metadata": {},
From cbc527a6e1a09d79b701f448fca730f6b7a9cb3c Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Mon, 1 Dec 2025 11:20:17 +0100
Subject: [PATCH 18/63] test transparency in docs logo
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 8c891942f..1c634b6c6 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
-
+
[](https://github.com/pyccel/psydac/actions/workflows/testing.yml) [](https://github.com/pyccel/psydac/actions/workflows/documentation.yml)
From ce6ce7b22e888169c83a8cc3659b25b73bb0ba3c Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Mon, 1 Dec 2025 11:37:16 +0100
Subject: [PATCH 19/63] enable github annotations in doc
---
docs/requirements.txt | 1 +
docs/source/conf.py | 2 ++
2 files changed, 3 insertions(+)
diff --git a/docs/requirements.txt b/docs/requirements.txt
index a0ea40da1..ff96a8e57 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -7,3 +7,4 @@ nbsphinx
ipykernel
ipytest
myst-parser
+click-extra[sphinx]
diff --git a/docs/source/conf.py b/docs/source/conf.py
index c3c0c9272..a537bf79e 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -54,6 +54,7 @@ def fixed_init(self, app):
'sphinx.ext.mathjax',
'nbsphinx',
'myst_parser',
+"click_extra.sphinx",
]
from docutils.nodes import FixedTextElement, literal,math
@@ -99,6 +100,7 @@ def fixed_init(self, app):
# -- Options for myst_parser -------------------------------------------------
myst_heading_anchors = 3
+myst_enable_extensions = ["colon_fence"]
# -- Options for autodoc extension -------------------------------------------
autodoc_member_order = 'bysource'
From 43160456c294e60ec2407b1302e88b458da8c33a Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Mon, 1 Dec 2025 11:56:31 +0100
Subject: [PATCH 20/63] don't specify parser
---
docs/source/index.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/source/index.rst b/docs/source/index.rst
index d3a66c26f..e4007bfa5 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -14,7 +14,7 @@ Welcome to PSYDAC's documentation!
For the time being, its purpose is to assist the developers.
.. include:: ../../README.md
- :parser: myst_parser.sphinx_
+ .. :parser: myst_parser.sphinx_
.. toctree::
From c7839760a2ef6415aa1fc9ca39c547c8c136e8bc Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Mon, 1 Dec 2025 13:14:30 +0100
Subject: [PATCH 21/63] add new notebook; change md to rst files
---
docs/requirements.txt | 1 -
docs/source/conf.py | 2 -
docs/source/examples.rst | 8 +-
docs/source/index.rst | 7 +-
docs/source/installation.rst | 245 +++++++++++++++++++++++++++++++++++
docs/source/output.rst | 184 ++++++++++++++++++++++++++
docs/source/psydac-mesh.rst | 11 ++
7 files changed, 450 insertions(+), 8 deletions(-)
create mode 100644 docs/source/installation.rst
create mode 100644 docs/source/output.rst
create mode 100644 docs/source/psydac-mesh.rst
diff --git a/docs/requirements.txt b/docs/requirements.txt
index ff96a8e57..a0ea40da1 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -7,4 +7,3 @@ nbsphinx
ipykernel
ipytest
myst-parser
-click-extra[sphinx]
diff --git a/docs/source/conf.py b/docs/source/conf.py
index a537bf79e..c3c0c9272 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -54,7 +54,6 @@ def fixed_init(self, app):
'sphinx.ext.mathjax',
'nbsphinx',
'myst_parser',
-"click_extra.sphinx",
]
from docutils.nodes import FixedTextElement, literal,math
@@ -100,7 +99,6 @@ def fixed_init(self, app):
# -- Options for myst_parser -------------------------------------------------
myst_heading_anchors = 3
-myst_enable_extensions = ["colon_fence"]
# -- Options for autodoc extension -------------------------------------------
autodoc_member_order = 'bysource'
diff --git a/docs/source/examples.rst b/docs/source/examples.rst
index e9e417ebd..1db3d852a 100644
--- a/docs/source/examples.rst
+++ b/docs/source/examples.rst
@@ -1,9 +1,9 @@
Examples
========
-+------------------------------------------------------------------------------------------------------------------------+
-| Here you will find examples of how to use PSYDAC and explanations thereof as well as links to notebooks in the future. |
-+------------------------------------------------------------------------------------------------------------------------+
+.. +------------------------------------------------------------------------------------------------------------------------+
+.. | Here you will find examples of how to use PSYDAC and explanations thereof as well as links to notebooks in the future. |
+.. +------------------------------------------------------------------------------------------------------------------------+
.. The notebooks get copied into the source directory by the continuous integration pipeline.
.. The notebooks should have all output cleared before being committed to the repository.
@@ -13,3 +13,5 @@ Examples
examples/Poisson_non_periodic
examples/Helmholtz_non_periodic
+   examples/fem_L2_projection
+
\ No newline at end of file
diff --git a/docs/source/index.rst b/docs/source/index.rst
index e4007bfa5..d34b1e063 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -14,13 +14,16 @@ Welcome to PSYDAC's documentation!
For the time being, its purpose is to assist the developers.
.. include:: ../../README.md
- .. :parser: myst_parser.sphinx_
+ :parser: myst_parser.sphinx_
.. toctree::
:maxdepth: 1
:hidden:
- modules
+ installation
+ output
examples
+ modules
+ psydac-mesh
maintenance
diff --git a/docs/source/installation.rst b/docs/source/installation.rst
new file mode 100644
index 000000000..de9c01e31
--- /dev/null
+++ b/docs/source/installation.rst
@@ -0,0 +1,245 @@
+
+Installation
+============
+
+
+* `Requirements <#requirements>`_
+* `Python setup and project download <#python-setup-and-project-download>`_
+* `Installing the library <#installing-the-library>`_
+* `Optional PETSc installation <#optional-petsc-installation>`_
+* `Uninstall <#uninstall>`_
+
+Requirements
+------------
+
+Psydac requires a certain number of components to be installed on the machine:
+
+
+* Fortran and C compilers with OpenMP support
+* OpenMP library
+* BLAS and LAPACK libraries
+* MPI library
+* HDF5 library with MPI support
+
+The installation instructions depend on the operating system and on the packaging manager used.
+
+Linux Debian-Ubuntu-Mint
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+To install all requirements on a Linux Ubuntu operating system, just use APT, the Advanced Packaging Tool:
+
+.. code-block:: sh
+
+ sudo apt update
+ sudo apt install python3 python3-dev python3-pip
+ sudo apt install gcc gfortran
+ sudo apt install libblas-dev liblapack-dev
+ sudo apt install libopenmpi-dev openmpi-bin
+ sudo apt install libomp-dev libomp5
+ sudo apt install libhdf5-openmpi-dev
+
+macOS
+^^^^^
+
+To install all the requirements on a macOS operating system we recommend using `Homebrew <https://brew.sh/>`_\ :
+
+.. code-block:: sh
+
+ brew update
+ brew install gcc
+ brew install openblas
+ brew install lapack
+ brew install open-mpi
+ brew install libomp
+ brew install hdf5-mpi
+
+Other operating systems
+^^^^^^^^^^^^^^^^^^^^^^^
+
+Please see the `instructions for the pyccel library <https://github.com/pyccel/pyccel#Requirements>`_ for further details.
+
+High-performance computers using Environment Modules
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Many high-performance computers use `Environment Modules <https://modules.sourceforge.net/>`_.
+On those systems one typically needs to load the correct versions (i.e. compatible with each other) of the modules ``gcc``\ , ``openmpi``\ , and ``hdf5-mpi``\ , e.g.
+
+.. code-block:: sh
+
+ module load gcc/15
+ module load openmpi/5.0
+ module load hdf5-mpi/1.14.1
+
+OpenMP instructions should work out of the box.
+For access to BLAS and LAPACK routines there are usually several options; we therefore refer to the documentation provided by the supercomputer's maintainers.
+
+Python setup and project download
+---------------------------------
+
+We recommend creating a clean Python virtual environment using `venv <https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/#creating-a-virtual-environment>`_\ :
+
+.. code-block:: sh
+
+   python3 -m venv <ENV-PATH>
+
+where ``<ENV-PATH>`` is the location to create the virtual environment.
+(A new directory will be created at the required location.)
+In order to activate the environment just run the command
+
+.. code-block:: sh
+
+   source <ENV-PATH>/bin/activate
+
+At this point the commands ``python`` and `\ ``pip`` <https://pip.pypa.io/en/stable/>`_ will refer to the Python 3 interpreter and package manager of the virtual environment, respectively.
+Additionally, the command ``deactivate`` closes the environment.
+It is good practice to keep ``pip`` up to date with
+
+.. code-block:: sh
+
+ pip install --upgrade pip
+
+One can clone the Psydac repository at any location ``<DOWNLOAD-PATH>`` in the filesystem which does not require administrator privileges, using either
+
+.. code-block:: sh
+
+ git clone https://github.com/pyccel/psydac.git
+
+or
+
+.. code-block:: sh
+
+ git clone git@github.com:pyccel/psydac.git
+
+The latter command requires a GitHub account.
+
+Installing the library
+----------------------
+
+Psydac depends on several Python packages, which should be installed in the newly created virtual environment.
+Almost all of these dependencies will be automatically installed by ``pip`` at the time of installing the ``psydac`` package later on.
+
+The single exception is the ``h5py`` package, which needs to be installed in parallel mode.
+This means that a wheel will be built from sources and linked to the local parallel HDF5 library.
+
+To this end, we first set the environment variable ``HDF5_DIR`` such that the path ``$HDF5_DIR/lib/`` corresponds to the folder containing the dynamic library ``libhdf5.so`` (on Ubuntu/Debian) or ``libhdf5.dylib`` (on macOS).
+This path can be obtained with a command which depends on your system.
+
+
+*
+ **Ubuntu/Debian**\ :
+
+ .. code-block:: sh
+
+ export HDF5_DIR=$(dpkg -L libhdf5-openmpi-dev | grep "libhdf5.so" | xargs dirname)
+
+*
+ **macOS**\ :
+
+ .. code-block:: sh
+
+ export HDF5_DIR=$(brew list hdf5-mpi | grep "libhdf5.dylib" | xargs dirname | xargs dirname)
+
+*
+  **High-performance computers using `Environment Modules <https://modules.sourceforge.net/>`_\ **\ :
+
+ The correct location of the HDF5 library can be found using the ``module show`` command, which reveals any environment variables after the ``setenv`` keyword.
+ For example, on this system both ``HDF5_HOME`` and ``HDF5_ROOT`` contain the information we need:
+
+ .. code-block:: sh
+
+ > module show hdf5-mpi/1.14.1
+
+ -------------------------------------------------------------------
+ /mpcdf/soft/SLE_15/sub/gcc_15/sub/openmpi_5_0/modules/libs/hdf5-mpi/1.14.1:
+
+ module-whatis {HDF5 library 1.14.1 with MPI support, built for openmpi_5_0_7_gcc_15_1}
+ conflict hdf5-serial
+ conflict hdf5-mpi
+ setenv HDF5_HOME /mpcdf/soft/SLE_15/packages/skylake/hdf5/gcc_15-15.1.0-openmpi_5.0-5.0.7/1.14.1
+ setenv HDF5_ROOT /mpcdf/soft/SLE_15/packages/skylake/hdf5/gcc_15-15.1.0-openmpi_5.0-5.0.7/1.14.1
+ prepend-path PATH /mpcdf/soft/SLE_15/packages/skylake/hdf5/gcc_15-15.1.0-openmpi_5.0-5.0.7/1.14.1/bin
+ -------------------------------------------------------------------
+
+ Therefore it is sufficient to set
+
+ .. code-block:: sh
+
+ export HDF5_DIR=$HDF5_HOME
+
+Next, install ``h5py`` in parallel mode using ``pip``\ :
+
+.. code-block:: sh
+
+ export CC="mpicc"
+ export HDF5_MPI="ON"
+
+ pip install h5py --no-cache-dir --no-binary h5py
+
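+A quick way to check that the resulting build is really parallel (a minimal sketch, assuming ``mpi4py`` is already available in the environment) is to open a file with the MPI driver:
+
+.. code-block:: python
+
+   from mpi4py import MPI
+   import h5py
+
+   # This call fails if h5py was built without MPI support
+   f = h5py.File('parallel_test.hdf5', 'w', driver='mpio', comm=MPI.COMM_WORLD)
+   print(f)
+   f.close()
+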
+At this point the Psydac library may be installed from the cloned directory ``<DOWNLOAD-PATH>/psydac`` in **standard mode**\ , which copies the relevant files to the correct locations of the virtual environment, or in **development mode**\ , which only installs symbolic links to the Psydac directory. The latter mode allows one to affect the behavior of Psydac by modifying the source files.
+
+
+*
+ **Standard mode**\ :
+
+ .. code-block:: bash
+
+ pip install .
+
+*
+ **Development mode**\ :
+
+ .. code-block:: bash
+
+ pip install --editable .
+
+Optional PETSc installation
+---------------------------
+
+Although Psydac provides several iterative linear solvers which work with our native matrices and vectors, it is often useful to access a dedicated library like `PETSc <https://petsc.org>`_. To this end, our matrices and vectors have the method ``topetsc()``\ , which converts them to the corresponding ``petsc4py`` objects.
+(\ ``petsc4py`` is a Python package which provides Python bindings to PETSc.) After solving the linear system with a PETSc solver, the function ``petsc_to_psydac`` allows converting the solution vector back to the Psydac format.
+
+In order to use these additional features, PETSc and petsc4py must be installed as follows.
+First, we download a specific release of PETSc from its `official Git repository <https://gitlab.com/petsc/petsc>`_\ :
+
+.. code-block:: sh
+
+ git clone --depth 1 --branch v3.21.4 https://gitlab.com/petsc/petsc.git
+
+Next, we specify a configuration for complex numbers, and install PETSc in a local directory:
+
+.. code-block:: sh
+
+ cd petsc
+
+ export PETSC_DIR=$(pwd)
+ export PETSC_ARCH=petsc-cmplx
+
+ ./configure --with-scalar-type=complex --with-fortran-bindings=0 --have-numpy=1
+
+ make all check
+
+ cd -
+
+Finally, we install the Python package ``petsc4py`` which is included in the ``PETSc`` source distribution:
+
+.. code-block:: sh
+
+ pip install wheel Cython numpy
+ pip install petsc/src/binding/petsc4py
+
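+To confirm that ``petsc4py`` was built against the complex-scalar configuration above, a minimal check (illustrative, not part of the official instructions) is:
+
+.. code-block:: python
+
+   from petsc4py import PETSc
+
+   # Should report a complex dtype (e.g. complex128) for the configuration above
+   print(PETSc.ScalarType)
+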
+Uninstall
+---------
+
+
+* **Whichever install mode was used**\ :
+ .. code-block:: bash
+
+ pip uninstall psydac
+
+* **If PETSc was installed**\ :
+ .. code-block:: bash
+
+ pip uninstall petsc4py
+
+The non-Python dependencies can be uninstalled manually using the package manager.
+In the case of PETSc, it is sufficient to remove the cloned source directory given that the installation has been performed locally.
diff --git a/docs/source/output.rst b/docs/source/output.rst
new file mode 100644
index 000000000..20ccc54e0
--- /dev/null
+++ b/docs/source/output.rst
@@ -0,0 +1,184 @@
+
+Psydac's outputs
+================
+
+Structure
+---------
+
+Psydac has a class that takes care of outputting simulation results. This class, named ``OutputManager``, is located in ``psydac/api/postprocessing.py``.
+It writes ``FemSpace``-related information in YAML syntax. The file looks like this:
+
+.. code-block:: yaml
+
+ ndim: 2
+ fields: file.h5 # Name of the fields file
+ patches:
+ - name: patch_0
+ breakpoints:
+ - [0.0, 0.25, 0.5, 0.75, 1.0]
+ - [0.0, 0.25, 0.5, 0.75, 1.0]
+ scalar_spaces:
+ - name: Scalar_space
+ ldim: 2
+ kind: l2
+ dtype:
+ rational: false
+ periodic: [false, false]
+ degree: [1, 1]
+ multiplicity: [1, 1]
+ basis: [B, B]
+ knots:
+ - [0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.0]
+ - [0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.0]
+ - &id001
+ name: Vector_space[0]
+ ldim: 2
+ kind: undefined
+ dtype:
+ rational: false
+ periodic: [false, false]
+ degree: [1, 2]
+ multiplicity: [1, 1]
+ basis: [B, B]
+ knots:
+ - [0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.0]
+ - [0.0, 0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.0, 1.0]
+ - &id002
+ name: Vector_space[1]
+ ldim: 2
+ kind: undefined
+ dtype:
+ rational: false
+ periodic: [false, false]
+ degree: [2, 1]
+ multiplicity: [1, 1]
+ basis: [B, B]
+ knots:
+ - [0.0, 0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.0, 1.0]
+ - [0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.0]
+ vector_spaces:
+ - name: Vector_space
+ kind: hcurl
+ components:
+ - *id001
+ - *id002
+ - name: patch_1
+ breakpoints:
+ - [1.0, 1.25, 1.5, 1.75, 2.0]
+ - [0.0, 0.25, 0.5, 0.75, 1.0]
+ scalar_spaces:
+ - name: Scalar_space
+ ldim: 2
+ kind: l2
+ dtype:
+ rational: false
+ periodic: [false, false]
+ degree: [1, 1]
+ multiplicity: [1, 1]
+ basis: [B, B]
+ knots:
+ - [1.0, 1.0, 1.25, 1.5, 1.75, 2.0, 2.0]
+ - [0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.0]
+
+The field coefficients are saved in ``HDF5`` format in the following manner:
+
+.. code-block:: bash
+
+ file.h5
+ attribute: spaces # name of the aforementioned Yaml file
+ static/
+ scalar_space_1/
+ field_s1_1
+ field_s1_2
+ ....
+ field_s1_n
+ vector_space_1_[0]/
+ attribute: parent_space # 'vector_space_1'
+ field_v1_1_[0]
+ attribute: parent_field # 'field_v1_1'
+ vector_space_1_[1]/
+ attribute: parent_space # 'vector_space_1'
+ field_v1_1_[1]
+ attribute: parent_field # 'field_v1_1'
+ ...
+ snapshot_1/
+ attribute: t
+ attribute: ts
+ space_1/
+ ...
+ space_n/
+ ...
+ snapshot_n/
+
+In addition to that, Psydac also features the ``PostProcessManager`` class to read those files, recreate all the ``FemSpace`` and ``FemField`` objects and export them to ``VTK``.
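+
+The layout above can be inspected directly with ``h5py``; a minimal sketch (assuming a file produced as described) is:
+
+.. code-block:: python
+
+   import h5py
+
+   with h5py.File('file.h5', 'r') as f:
+       print(f.attrs['spaces'])   # name of the Yaml file describing the spaces
+       f.visit(print)             # lists static/..., snapshot_1/..., etc.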
+
+Usage of class ``OutputManager``
+------------------------------------
+
+An instance of the ``OutputManager`` class is created at the beginning of the simulation, by specifying the following:
+
+
+#. The name of the YAML file (e.g. ``spaces.yaml``\ ) where the information about all FEM spaces will be written, and
+#. The name of the HDF5 file (e.g. ``fields.h5``\ ) where the coefficients of all FEM fields will be written.
+
+References to the available FEM spaces are given to the OutputManager object through the ``add_spaces(**kwargs)`` method, and the corresponding YAML file is created upon calling the method ``export_space_info()``. In order to inform the OutputManager object that the next fields to be exported are time-independent, the user should call the ``set_static()`` method. In the case of time-dependent fields, the user should prepare a time snapshot (which is defined for a specific integer time step ``ts`` and time value ``t``\ ) by calling the method ``add_snapshot(t, ts)``. In both cases the fields are exported to the HDF5 file through a call to the method ``export_fields(**kwargs)``. Here is a usage example:
+
+.. code-block:: python
+
+ # SymPDE Layer
+ # Discretization
+ # V0h and V1h are discretized SymPDE Space
+ # u0 and u1 are FemFields belonging to either of those spaces
+ output_m = OutputManager('spaces.yml', 'fields.h5')
+
+ output_m.add_spaces(V0=V0h, V1=V1h)
+ output_m.export_space_info() # Writes the space information to Yaml
+
+ output_m.set_static() # Tells the object to save in /static/
+ output_m.export_fields(u0_static=u0, u1_static=u1) # Actually does the saving
+
+ output_m.add_snapshot(t=0., ts=0)
+ # The line above tells the object to:
+ # 1. create the group snapshot_x with attribute t and ts
+ # 2. save in this snapshot
+ output_m.export_fields(u0=u0, u1=u1)
+
+Usage of class ``PostProcessManager``
+-----------------------------------------
+
+Typically the ``PostProcessManager`` class is used in a separate post-processing script, which is run after the simulation has finished. In essence it evaluates the FEM fields over a uniform grid (applying the appropriate push-forward operations) and exports the values to a VTK file (or a sequence of files in the case of a time series). An instance of the ``PostProcessManager`` class is created by specifying the following:
+
+
+#. The name of the geometry file (in HDF5 format) which defines the geometry or the topological domain from which the geometry is derived.
+#. The name of the YAML file that contains the information about the FEM spaces
+#. The name of the HDF5 file that contains the coefficients of all the FEM fields
+
+In order to export the fields to a VTK file, the user needs to call the method ``export_to_vtk(base_name, grid, npts_per_cell, snapshots, fields)``\ , where:
+
+
+#. ``base_name`` is the base name for the VTK output files.
+#. ``grid`` is either a user specified evaluation grid or ``None``.
+#. ``npts_per_cell`` specifies the refinement in the case of a uniform grid.
+#. ``snapshots`` specifies which time snapshots should be extracted from the HDF5 file (\ ``none`` in the case of static fields)
+#. ``fields`` is a tuple of ``h5_field_name``.
+
+Here is a usage example:
+
+.. code-block:: python
+
+ # geometry.h5 is where the domain comes from. See PostProcessManager's docstring for more information
+ post = PostProcessManager(geometry_file='geometry.h5', space_file='spaces.yml', fields_file='fields.h5')
+
+ # See PostProcessManager.export_to_vtk's and TensorFemSpace.eval_fields' docstrings for more information
+ post.export_to_vtk('filename_vtk', grid=grid, npts_per_cell=npts_per_cell, snapshots='all', fields = ('u0', 'u1'))
+
+Further Examples
+----------------
+
+Further examples are present in the following files:
+
+
+* ``examples/old_examples/poisson_3d_target_torus.py``
+* ``examples/old_examples/sample_multi_patch_parallel.py``
+* ``examples/notebooks/Poisson_non_periodic.ipynb``
+* ``psydac/api/tests/test_postprocessing.py``
diff --git a/docs/source/psydac-mesh.rst b/docs/source/psydac-mesh.rst
new file mode 100644
index 000000000..1987994ef
--- /dev/null
+++ b/docs/source/psydac-mesh.rst
@@ -0,0 +1,11 @@
+Psydac mesh
+===============
+
+After installation, the command ``psydac-mesh`` will be available.
+
+Example of usage
+----------------
+
+.. code-block:: bash
+
+ psydac-mesh -n='16,16' -d='3,3' square mesh.h5
From b835af66bcf98df7486420c6aa5951700c8a0337 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Mon, 1 Dec 2025 15:24:35 +0100
Subject: [PATCH 22/63] rm md files
---
docs/installation.md | 202 -------------------------------------------
docs/output.md | 167 -----------------------------------
docs/psydac-mesh.md | 9 --
3 files changed, 378 deletions(-)
delete mode 100644 docs/installation.md
delete mode 100644 docs/output.md
delete mode 100644 docs/psydac-mesh.md
diff --git a/docs/installation.md b/docs/installation.md
deleted file mode 100644
index e71e4ddef..000000000
--- a/docs/installation.md
+++ /dev/null
@@ -1,202 +0,0 @@
-# Installation
-
-- [Requirements](#requirements)
-- [Python setup and project download](#python-setup-and-project-download)
-- [Installing the library](#installing-the-library)
-- [Optional PETSc installation](#optional-petsc-installation)
-- [Uninstall](#uninstall)
-
-## Requirements
-
-Psydac requires a certain number of components to be installed on the machine:
-
-- Fortran and C compilers with OpenMP support
-- OpenMP library
-- BLAS and LAPACK libraries
-- MPI library
-- HDF5 library with MPI support
-
-The installation instructions depend on the operating system and on the packaging manager used.
-
-### Linux Debian-Ubuntu-Mint
-
-To install all requirements on a Linux Ubuntu operating system, just use APT, the Advanced Packaging Tool:
-```sh
-sudo apt update
-sudo apt install python3 python3-dev python3-pip
-sudo apt install gcc gfortran
-sudo apt install libblas-dev liblapack-dev
-sudo apt install libopenmpi-dev openmpi-bin
-sudo apt install libomp-dev libomp5
-sudo apt install libhdf5-openmpi-dev
-```
-
-### macOS
-
-To install all the requirements on a macOS operating system we recommend using [Homebrew](https://brew.sh/):
-
-```eh
-brew update
-brew install gcc
-brew install openblas
-brew install lapack
-brew install open-mpi
-brew install libomp
-brew install hdf5-mpi
-```
-
-### Other operating systems
-
-Please see the [instructions for the pyccel library](https://github.com/pyccel/pyccel#Requirements) for further details.
-
-### High-performance computers using Environment Modules
-
-Many high-performance computers use [Environment Modules](https://modules.sourceforge.net/).
-On those systems one typically needs to load the correct versions (i.e. compatible with each other) of the modules `gcc`, `openmpi`, and `hdf5-mpi`, e.g.
-
-```sh
-module load gcc/15
-module load openmpi/5.0
-module load hdf5-mpi/1.14.1
-```
-OpenMP instructions should work out of the box.
-For access to BLAS and LAPACK routines there are usually different options, we refer therefore to any documentation provided by the supercomputer's maintainers.
-
-## Python setup and project download
-
-We recommend creating a clean Python virtual environment using [venv](https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/#creating-a-virtual-environment):
-```sh
-python3 -m venv
-```
-where `` is the location to create the virtual environment.
-(A new directory will be created at the required location.)
-In order to activate the environment just run the command
-```sh
-source /bin/activate
-```
-At this point the commands `python` and [`pip`](https://pip.pypa.io/en/stable/) will refer to the Python 3 interpreter and package manager of the virtual environment, respectively.
-Additionally, the command `deactivate` closes the environment.
-It is good practice to keep `pip` up to date with
-```sh
-pip install --upgrade pip
-```
-
-One can clone the Psydac repository at any location `` in the filesystem which does not require administrator privileges, using either
-```sh
-git clone https://github.com/pyccel/psydac.git
-```
-or
-```sh
-git clone git@github.com:pyccel/psydac.git
-```
-The latter command requires a GitHub account.
-
-## Installing the library
-
-Psydac depends on several Python packages, which should be installed in the newly created virtual environment.
-Almost all of these dependencies will be automatically installed by `pip` at the time of installing the `psydac` package later on.
-
-The single exception is the `h5py` package, which needs to be installed in parallel mode.
-This means that a wheel will be built from sources and linked to the local parallel HDF5 library.
-
-To this end, we first set the environment variable `HDF5_DIR` s.t. the path `$HDF5_DIR/lib/` will correspond to the folder containing the dynamic library `libhdf5.so` (on Ubuntu/Debian) or `libhdf5.dylib` (on macOS).
-This path can be obtained with a command which depends on your system.
-
-- **Ubuntu/Debian**:
- ```sh
- export HDF5_DIR=$(dpkg -L libhdf5-openmpi-dev | grep "libhdf5.so" | xargs dirname)
- ```
-
-- **macOS**:
- ```sh
- export HDF5_DIR=$(brew list hdf5-mpi | grep "libhdf5.dylib" | xargs dirname | xargs dirname)
- ```
-
-- **High-performance computers using [Environment Modules](https://modules.sourceforge.net/)**:
-
- The correct location of the HDF5 library can be found using the `module show` command, which reveals any environment variables after the `setenv` keyword.
- For example, on this system both `HDF5_HOME` and `HDF5_ROOT` contain the information we need:
-
- ```sh
- > module show hdf5-mpi/1.14.1
-
- -------------------------------------------------------------------
- /mpcdf/soft/SLE_15/sub/gcc_15/sub/openmpi_5_0/modules/libs/hdf5-mpi/1.14.1:
-
- module-whatis {HDF5 library 1.14.1 with MPI support, built for openmpi_5_0_7_gcc_15_1}
- conflict hdf5-serial
- conflict hdf5-mpi
- setenv HDF5_HOME /mpcdf/soft/SLE_15/packages/skylake/hdf5/gcc_15-15.1.0-openmpi_5.0-5.0.7/1.14.1
- setenv HDF5_ROOT /mpcdf/soft/SLE_15/packages/skylake/hdf5/gcc_15-15.1.0-openmpi_5.0-5.0.7/1.14.1
- prepend-path PATH /mpcdf/soft/SLE_15/packages/skylake/hdf5/gcc_15-15.1.0-openmpi_5.0-5.0.7/1.14.1/bin
- -------------------------------------------------------------------
- ```
-
- Therefore it is sufficient to set
-
- ```sh
- export HDF5_DIR=$HDF5_HOME
- ```
-
-Next, install `h5py` in parallel mode using `pip`:
-```sh
-export CC="mpicc"
-export HDF5_MPI="ON"
-
-pip install h5py --no-cache-dir --no-binary h5py
-```
-
-At this point the Psydac library may be installed from the cloned directory `/psydac` in **standard mode**, which copies the relevant files to the correct locations of the virtual environment, or in **development mode**, which only installs symbolic links to the Psydac directory. The latter mode allows one to affect the behavior of Psydac by modifying the source files.
-
-- **Standard mode**:
- ```bash
- pip install .
- ```
-
-- **Development mode**:
- ```bash
- pip install --editable .
- ```
-
-## Optional PETSc installation
-
-Although Psydac provides several iterative linear solvers which work with our native matrices and vectors, it is often useful to access a dedicated library like [PETSc](https://petsc.org). To this end, our matrices and vectors have the method `topetsc()`, which converts them to the corresponding `petsc4py` objects.
-(`petsc4py` is a Python package which provides Python bindings to PETSc.) After solving the linear system with a PETSc solver, the function `petsc_to_psydac` allows converting the solution vector back to the Psydac format.
-
-In order to use these additional feature, PETSc and petsc4py must be installed as follows.
-First, we download the latest release of PETSc from its [official Git repository](https://gitlab.com/petsc/petsc):
-```sh
-git clone --depth 1 --branch v3.21.4 https://gitlab.com/petsc/petsc.git
-```
-Next, we specify a configuration for complex numbers, and install PETSc in a local directory:
-```sh
-cd petsc
-
-export PETSC_DIR=$(pwd)
-export PETSC_ARCH=petsc-cmplx
-
-./configure --with-scalar-type=complex --with-fortran-bindings=0 --have-numpy=1
-
-make all check
-
-cd -
-```
-Finally, we install the Python package `petsc4py` which is included in the `PETSc` source distribution:
-```sh
-pip install wheel Cython numpy
-pip install petsc/src/binding/petsc4py
-```
-
-## Uninstall
-
-- **Whichever the install mode**:
- ```bash
- pip uninstall psydac
- ```
-- **If PETSc was installed**:
- ```bash
- pip uninstall petsc4py
- ```
-
-The non-Python dependencies can be uninstalled manually using the package manager.
-In the case of PETSc, it is sufficient to remove the cloned source directory given that the installation has been performed locally.
diff --git a/docs/output.md b/docs/output.md
deleted file mode 100644
index 3ce4bb540..000000000
--- a/docs/output.md
+++ /dev/null
@@ -1,167 +0,0 @@
-# Psydac's outputs
-## Structure
-Psydac has a class meant to take care of outputing simulation results. This class, named `OuputManager` is located in `psydac/api/postprocessing.py`.
-It writes `FemSpace` related information in the Yaml syntax. The file looks like this:
-```yaml
-ndim: 2
-fields: file.h5 # Name of the fields file
-patches:
-- name: patch_0
- breakpoints:
- - [0.0, 0.25, 0.5, 0.75, 1.0]
- - [0.0, 0.25, 0.5, 0.75, 1.0]
- scalar_spaces:
- - name: Scalar_space
- ldim: 2
- kind: l2
- dtype:
- rational: false
- periodic: [false, false]
- degree: [1, 1]
- multiplicity: [1, 1]
- basis: [B, B]
- knots:
- - [0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.0]
- - [0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.0]
- - &id001
- name: Vector_space[0]
- ldim: 2
- kind: undefined
- dtype:
- rational: false
- periodic: [false, false]
- degree: [1, 2]
- multiplicity: [1, 1]
- basis: [B, B]
- knots:
- - [0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.0]
- - [0.0, 0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.0, 1.0]
- - &id002
- name: Vector_space[1]
- ldim: 2
- kind: undefined
- dtype:
- rational: false
- periodic: [false, false]
- degree: [2, 1]
- multiplicity: [1, 1]
- basis: [B, B]
- knots:
- - [0.0, 0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.0, 1.0]
- - [0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.0]
- vector_spaces:
- - name: Vector_space
- kind: hcurl
- components:
- - *id001
- - *id002
-- name: patch_1
- breakpoints:
- - [1.0, 1.25, 1.5, 1.75, 2.0]
- - [0.0, 0.25, 0.5, 0.75, 1.0]
- scalar_spaces:
- - name: Scalar_space
- ldim: 2
- kind: l2
- dtype:
- rational: false
- periodic: [false, false]
- degree: [1, 1]
- multiplicity: [1, 1]
- basis: [B, B]
- knots:
- - [1.0, 1.0, 1.25, 1.5, 1.75, 2.0, 2.0]
- - [0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.0]
-
-```
-The field coefficients are saved to the `HDF5` format in the following manner :
-```bash
-file.h5
- attribute: spaces # name of the aforementioned Yaml file
- static/
- scalar_space_1/
- field_s1_1
- field_s1_2
- ....
- field_s1_n
- vector_space_1_[0]/
- attribute: parent_space # 'vector_space_1'
- field_v1_1_[0]
- attribute: parent_field # 'field_v1_1'
- vector_space_1_[1]/
- attribute: parent_space # 'vector_space_1'
- field_v1_1_[1]
- attribute: parent_field # 'field_v1_1'
- ...
- snapshot_1/
- attribute: t
- attribute: ts
- space_1/
- ...
- space_n/
- ...
- snapshot_n/
-```
-In addition to that, Psydac also features the `PostProcessManager` class to read those files, recreate all the `FemSpace` and `FemField` objects and export them to `VTK`.
-
-## Usage of class `OutputManager`
-
-An instance of the `OutputManager` class is created at the beginning of the simulation, by specifying the following:
-
-1. The name of the YAML file (e.g. `spaces.yaml`) where the information about all FEM spaces will be written, and
-2. The name of the HDF5 file (e.g. `fields.h5`) where the coefficients of all FEM fields will be written.
-
-References to the available FEM spaces are given to the OutputManager object through the `add_spaces(**kwargs)` method, and the corresponding YAML file is created upon calling the method `export_space_info()`. In order to inform the OutputManager object that the next fields to be exported are time-independent, the user should call the `set_static()` method. In the case of time-dependent fields, the user should prepare a time snapshot (which is defined for a specific integer time step `ts` and time value `t`) by calling the method `add_snapshot(t, ts)`. In both cases the fields are exported to the HDF5 file through a call to the method `export_fields(**kwargs)`. Here is a usage example:
-
-```python
-# SymPDE Layer
-# Discretization
-# V0h and V1h are discretized SymPDE Space
-# u0 and u1 are FemFields belonging to either of those spaces
-output_m = OutputManager('spaces.yml', 'fields.h5')
-
-output_m.add_spaces(V0=V0h, V1=V1h)
-output_m.export_space_info() # Writes the space information to Yaml
-
-output_m.set_static() # Tells the object to save in /static/
-output_m.export_fields(u0_static=u0, u1_static=u1) # Actually does the saving
-
-output_m.add_snapshot(t=0., ts=0)
-# The line above tells the object to:
-# 1. create the group snapshot_x with attribute t and ts
-# 2. save in this snapshot
-output_m.export_fields(u0=u0, u1=u1)
-```
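-In a time-dependent simulation, the last two calls are typically repeated inside the time loop, creating one snapshot per exported time step. Below is a minimal sketch; the time step `dt`, the number of steps `n_steps` and the update of `u0`, `u1` are placeholders and not part of the `OutputManager` API:
-
-```python
-dt, n_steps = 0.01, 100                      # placeholder time-stepping parameters
-for ts in range(1, n_steps + 1):
-    # ... advance the fields u0 and u1 by one time step ...
-    output_m.add_snapshot(t=ts * dt, ts=ts)  # creates a new snapshot group with attributes t and ts
-    output_m.export_fields(u0=u0, u1=u1)     # saves the current coefficients in this snapshot
-```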
-
-## Usage of class `PostProcessManager`
-
-Typically the `PostProcessManager` class is used in a separate post-processing script, which is run after the simulation has finished. In essence it evaluates the FEM fields over a uniform grid (applying the appropriate push-forward operations) and exports the values to a VTK file (or a sequence of files in the case of a time series). An instance of the `PostProcessManager` class is created by specifying the following:
-
-1. The name of the geometry file (in HDF5 format) which defines the geometry or the topological domain from which the geometry is derived.
-2. The name of the YAML file that contains the information about the FEM spaces.
-3. The name of the HDF5 file that contains the coefficients of all the FEM fields.
-
-In order to export the fields to a VTK file, the user needs to call the method `export_to_vtk(base_name, grid, npts_per_cell, snapshots, fields)`, where:
-1. `base_name` is the base name for the VTK output files.
-2. `grid` is either a user-specified evaluation grid or `None`.
-3. `npts_per_cell` specifies the refinement in the case of a uniform grid.
-4. `snapshots` specifies which time snapshots should be extracted from the HDF5 file (`none` in the case of static fields).
-5. `fields` is a tuple of field names, i.e. the names under which the fields were saved to the HDF5 file with `export_fields`.
-
-Here is a usage example:
-
-```python
-# geometry.h5 is where the domain comes from. See PostProcessManager's docstring for more information
-post = PostProcessManager(geometry_file='geometry.h5', space_file='spaces.yml', fields_file='fields.h5')
-
-# 'grid' and 'npts_per_cell' must be defined by the user; see the docstrings of
-# PostProcessManager.export_to_vtk and TensorFemSpace.eval_fields for more information
-post.export_to_vtk('filename_vtk', grid=grid, npts_per_cell=npts_per_cell, snapshots='all', fields=('u0', 'u1'))
-```
-
-## Further Examples
-Further examples are present in the following files:
-
-* `examples/poisson_3d_target_torus.py`
-* `examples/sample_multi_patch_parallel.py`
-* `examples/notebooks/Poisson_non_periodic.ipynb`
-* `psydac/api/tests/test_postprocessing.py`
\ No newline at end of file
diff --git a/docs/psydac-mesh.md b/docs/psydac-mesh.md
deleted file mode 100644
index 20a543c34..000000000
--- a/docs/psydac-mesh.md
+++ /dev/null
@@ -1,9 +0,0 @@
-# Mesh Generation
-
-After installation, the command `psydac-mesh` will be available.
-
-## Example of usage
-
-```bash
-psydac-mesh -n='16,16' -d='3,3' square mesh.h5
-```
From f4ab560bb4c0f5bd338da3b048aa357207202b16 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Mon, 1 Dec 2025 15:49:11 +0100
Subject: [PATCH 23/63] update links in README to documentation
---
README.md | 15 ++++++++-------
docs/source/examples.rst | 2 +-
2 files changed, 9 insertions(+), 8 deletions(-)
diff --git a/README.md b/README.md
index 1c634b6c6..09a6a07e7 100644
--- a/README.md
+++ b/README.md
@@ -38,7 +38,7 @@ PSYDAC requires a certain number of components to be installed on the machine:
The installation instructions depend on the operating system and on the packaging manager used.
It is particularly important to determine the **HDF5 root folder**, as this will be needed to install the [`h5py`](https://docs.h5py.org/en/latest/build.html#source-installation) package in parallel mode.
-Detailed instructions can be found in the [documentation](https://github.com/pyccel/psydac/blob/devel/docs/installation.md).
+Detailed instructions can be found in the [documentation](https://pyccel.github.io/psydac/installation.html).
Once those components are installed, we recommend using [`venv`](https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/#creating-a-virtual-environment) to set up a fresh Python virtual environment at a location ``:
```bash
@@ -61,12 +61,12 @@ pip install ./psydac
Here `` is the path to the HDF5 root folder, such that `/lib/` contains the HDF5 dynamic libraries with MPI support.
For an editable install, the `-e/--editable` flag should be provided to the last command above.
-Again, for more details we refer to our [documentation](https://github.com/pyccel/psydac/blob/devel/docs/installation.md).
+Again, for more details we refer to our [documentation](https://pyccel.github.io/psydac/installation.html).
> [!TIP]
> PSYDAC provides the functionality to convert its MPI-parallel matrices and vectors to their [PETSc](https://petsc.org) equivalent, and back.
> This gives the user access to a wide variety of linear solvers and other algorithms.
-> Instructions for installing [PETSc](https://petsc.org) and `petsc4py` can be found in our [documentation](https://github.com/pyccel/psydac/blob/devel/docs/installation.md#optional-petsc-installation).
+> Instructions for installing [PETSc](https://petsc.org) and `petsc4py` can be found in our [documentation](https://pyccel.github.io/psydac/installation.html#id9).
## Running Tests
@@ -103,14 +103,15 @@ The classical installation translates all kernel files to Fortran without user i
## Examples and Tutorials
-A [tutorial](https://pyccel.github.io/IGA-Python/intro.html) on isogeometric analysis, with many example notebooks where various PDEs are solved with PSYDAC, is under construction in the [IGA-Python](https://github.com/pyccel/IGA-Python) repository.
+Our [documentation](https://pyccel.github.io/psydac/examples.html) provides Jupyter notebooks that present many aspects of this library.
+Additional [tutorials](https://pyccel.github.io/IGA-Python/intro.html) on isogeometric analysis, with many example notebooks where various PDEs are solved with PSYDAC, are under construction in the [IGA-Python](https://github.com/pyccel/IGA-Python) repository.
Some other examples can be found [here](https://github.com/pyccel/psydac/blob/devel/examples).
## Library Documentation
-- [Output formats](https://github.com/pyccel/psydac/blob/devel/docs/output.md)
-- [Mesh generation](https://github.com/pyccel/psydac/blob/devel/docs/psydac-mesh.md)
-- [Library reference](https://pyccel.github.io/psydac/)
+- [Output formats](https://pyccel.github.io/psydac/output.html)
+- [Mesh generation](https://pyccel.github.io/psydac/psydac-mesh.html)
+- [Modules](https://pyccel.github.io/psydac/modules.html)
## Contributing
diff --git a/docs/source/examples.rst b/docs/source/examples.rst
index 1db3d852a..02c23698f 100644
--- a/docs/source/examples.rst
+++ b/docs/source/examples.rst
@@ -13,5 +13,5 @@ Examples
examples/Poisson_non_periodic
examples/Helmholtz_non_periodic
- examples/fem_l2_projection
+ examples/fem_L2_projection
\ No newline at end of file
From 89dd5d8d2cea892b1ba51431db4ac6bb6c3f1036 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Tue, 2 Dec 2025 14:36:05 +0100
Subject: [PATCH 24/63] move files out of psydac/feec/multipatch
---
.../examples => examples/feec}/h1_source_pbms_conga_2d.py | 0
.../examples => examples/feec}/hcurl_eigen_pbms_conga_2d.py | 0
.../examples => examples/feec}/hcurl_eigen_pbms_dg_2d.py | 0
.../examples => examples/feec}/hcurl_eigen_testcases.py | 0
.../examples => examples/feec}/hcurl_source_pbms_conga_2d.py | 0
.../examples => examples/feec}/hcurl_source_testcase.py | 0
.../examples => examples/feec}/mixed_source_pbms_conga_2d.py | 0
.../multipatch/examples => examples/feec}/ppc_test_cases.py | 0
.../multipatch/examples => examples/feec/tests}/__init__.py | 0
.../feec}/tests/test_feec_maxwell_multipatch_2d.py | 0
.../feec}/tests/test_feec_poisson_multipatch_2d.py | 0
.../examples => examples/feec}/timedomain_maxwell.py | 0
.../feec}/timedomain_maxwell_testcase.py | 0
psydac/feec/multipatch/tests/__init__.py | 5 -----
psydac/feec/{multipatch => }/multipatch_domain_utilities.py | 0
15 files changed, 5 deletions(-)
rename {psydac/feec/multipatch/examples => examples/feec}/h1_source_pbms_conga_2d.py (100%)
rename {psydac/feec/multipatch/examples => examples/feec}/hcurl_eigen_pbms_conga_2d.py (100%)
rename {psydac/feec/multipatch/examples => examples/feec}/hcurl_eigen_pbms_dg_2d.py (100%)
rename {psydac/feec/multipatch/examples => examples/feec}/hcurl_eigen_testcases.py (100%)
rename {psydac/feec/multipatch/examples => examples/feec}/hcurl_source_pbms_conga_2d.py (100%)
rename {psydac/feec/multipatch/examples => examples/feec}/hcurl_source_testcase.py (100%)
rename {psydac/feec/multipatch/examples => examples/feec}/mixed_source_pbms_conga_2d.py (100%)
rename {psydac/feec/multipatch/examples => examples/feec}/ppc_test_cases.py (100%)
rename {psydac/feec/multipatch/examples => examples/feec/tests}/__init__.py (100%)
rename {psydac/feec/multipatch => examples/feec}/tests/test_feec_maxwell_multipatch_2d.py (100%)
rename {psydac/feec/multipatch => examples/feec}/tests/test_feec_poisson_multipatch_2d.py (100%)
rename {psydac/feec/multipatch/examples => examples/feec}/timedomain_maxwell.py (100%)
rename {psydac/feec/multipatch/examples => examples/feec}/timedomain_maxwell_testcase.py (100%)
delete mode 100644 psydac/feec/multipatch/tests/__init__.py
rename psydac/feec/{multipatch => }/multipatch_domain_utilities.py (100%)
diff --git a/psydac/feec/multipatch/examples/h1_source_pbms_conga_2d.py b/examples/feec/h1_source_pbms_conga_2d.py
similarity index 100%
rename from psydac/feec/multipatch/examples/h1_source_pbms_conga_2d.py
rename to examples/feec/h1_source_pbms_conga_2d.py
diff --git a/psydac/feec/multipatch/examples/hcurl_eigen_pbms_conga_2d.py b/examples/feec/hcurl_eigen_pbms_conga_2d.py
similarity index 100%
rename from psydac/feec/multipatch/examples/hcurl_eigen_pbms_conga_2d.py
rename to examples/feec/hcurl_eigen_pbms_conga_2d.py
diff --git a/psydac/feec/multipatch/examples/hcurl_eigen_pbms_dg_2d.py b/examples/feec/hcurl_eigen_pbms_dg_2d.py
similarity index 100%
rename from psydac/feec/multipatch/examples/hcurl_eigen_pbms_dg_2d.py
rename to examples/feec/hcurl_eigen_pbms_dg_2d.py
diff --git a/psydac/feec/multipatch/examples/hcurl_eigen_testcases.py b/examples/feec/hcurl_eigen_testcases.py
similarity index 100%
rename from psydac/feec/multipatch/examples/hcurl_eigen_testcases.py
rename to examples/feec/hcurl_eigen_testcases.py
diff --git a/psydac/feec/multipatch/examples/hcurl_source_pbms_conga_2d.py b/examples/feec/hcurl_source_pbms_conga_2d.py
similarity index 100%
rename from psydac/feec/multipatch/examples/hcurl_source_pbms_conga_2d.py
rename to examples/feec/hcurl_source_pbms_conga_2d.py
diff --git a/psydac/feec/multipatch/examples/hcurl_source_testcase.py b/examples/feec/hcurl_source_testcase.py
similarity index 100%
rename from psydac/feec/multipatch/examples/hcurl_source_testcase.py
rename to examples/feec/hcurl_source_testcase.py
diff --git a/psydac/feec/multipatch/examples/mixed_source_pbms_conga_2d.py b/examples/feec/mixed_source_pbms_conga_2d.py
similarity index 100%
rename from psydac/feec/multipatch/examples/mixed_source_pbms_conga_2d.py
rename to examples/feec/mixed_source_pbms_conga_2d.py
diff --git a/psydac/feec/multipatch/examples/ppc_test_cases.py b/examples/feec/ppc_test_cases.py
similarity index 100%
rename from psydac/feec/multipatch/examples/ppc_test_cases.py
rename to examples/feec/ppc_test_cases.py
diff --git a/psydac/feec/multipatch/examples/__init__.py b/examples/feec/tests/__init__.py
similarity index 100%
rename from psydac/feec/multipatch/examples/__init__.py
rename to examples/feec/tests/__init__.py
diff --git a/psydac/feec/multipatch/tests/test_feec_maxwell_multipatch_2d.py b/examples/feec/tests/test_feec_maxwell_multipatch_2d.py
similarity index 100%
rename from psydac/feec/multipatch/tests/test_feec_maxwell_multipatch_2d.py
rename to examples/feec/tests/test_feec_maxwell_multipatch_2d.py
diff --git a/psydac/feec/multipatch/tests/test_feec_poisson_multipatch_2d.py b/examples/feec/tests/test_feec_poisson_multipatch_2d.py
similarity index 100%
rename from psydac/feec/multipatch/tests/test_feec_poisson_multipatch_2d.py
rename to examples/feec/tests/test_feec_poisson_multipatch_2d.py
diff --git a/psydac/feec/multipatch/examples/timedomain_maxwell.py b/examples/feec/timedomain_maxwell.py
similarity index 100%
rename from psydac/feec/multipatch/examples/timedomain_maxwell.py
rename to examples/feec/timedomain_maxwell.py
diff --git a/psydac/feec/multipatch/examples/timedomain_maxwell_testcase.py b/examples/feec/timedomain_maxwell_testcase.py
similarity index 100%
rename from psydac/feec/multipatch/examples/timedomain_maxwell_testcase.py
rename to examples/feec/timedomain_maxwell_testcase.py
diff --git a/psydac/feec/multipatch/tests/__init__.py b/psydac/feec/multipatch/tests/__init__.py
deleted file mode 100644
index 419109b64..000000000
--- a/psydac/feec/multipatch/tests/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#---------------------------------------------------------------------------#
-# This file is part of PSYDAC which is released under MIT License. See the #
-# LICENSE file or go to https://github.com/pyccel/psydac/blob/devel/LICENSE #
-# for full license details. #
-#---------------------------------------------------------------------------#
diff --git a/psydac/feec/multipatch/multipatch_domain_utilities.py b/psydac/feec/multipatch_domain_utilities.py
similarity index 100%
rename from psydac/feec/multipatch/multipatch_domain_utilities.py
rename to psydac/feec/multipatch_domain_utilities.py
From a053c7bf9d2aef7f5edfd56b9a65b1148df44836 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Tue, 2 Dec 2025 14:50:46 +0100
Subject: [PATCH 25/63] add Poisson and curl-curl EV example
---
examples/feec/h1_source_pbms_conga_2d.py | 119 +++++++--
examples/feec/hcurl_eigen_pbms_conga_2d.py | 137 +++++++---
examples/feec/hcurl_eigen_pbms_dg_2d.py | 197 ++++++--------
examples/feec/hcurl_eigen_testcases.py | 295 ---------------------
4 files changed, 279 insertions(+), 469 deletions(-)
delete mode 100644 examples/feec/hcurl_eigen_testcases.py
diff --git a/examples/feec/h1_source_pbms_conga_2d.py b/examples/feec/h1_source_pbms_conga_2d.py
index e7e0088f7..1a6e43996 100644
--- a/examples/feec/h1_source_pbms_conga_2d.py
+++ b/examples/feec/h1_source_pbms_conga_2d.py
@@ -23,18 +23,19 @@
from sympde.topology import Derham
from psydac.api.discretization import discretize
+from psydac.api.postprocessing import OutputManager, PostProcessManager
+
from psydac.linalg.basic import IdentityOperator
from psydac.linalg.solvers import inverse
-from psydac.feec.multipatch.multipatch_domain_utilities import build_multipatch_domain
-from psydac.feec.multipatch.examples.ppc_test_cases import get_source_and_solution_h1
-
-from psydac.fem.projectors import get_dual_dofs
-
-from psydac.fem.basic import FemField
-from psydac.api.postprocessing import OutputManager, PostProcessManager
+from psydac.feec.multipatch_domain_utilities import build_multipatch_domain
+from psydac.fem.projectors import get_dual_dofs
+from psydac.fem.basic import FemField
+#==============================================================================
+# Solver for H1 source problems
+#==============================================================================
def solve_h1_source_pbm(
nc=4, deg=4, domain_name='pretzel_f', backend_language=None, source_type='manu_poisson_elliptic',
eta=-10., mu=1., gamma_h=10., plot_dir=None,
@@ -132,9 +133,10 @@ def solve_h1_source_pbm(
# useful for the boundary condition (if present)
pre_A = cP0.T @ (eta * H0 - mu * DG)
+ # System matrix
A = pre_A @ cP0 + gamma_h * JP0
-
+ # source and exact solution
f_scal, u_bc, u_ex = get_source_and_solution_h1(source_type=source_type, eta=eta, mu=mu, domain=domain, domain_name=domain_name,)
df = get_dual_dofs(Vh=V0h, f=f_scal, domain_h=domain_h, backend_language=backend_language)
@@ -234,19 +236,103 @@ def lift_u_bc(u_bc):
if u_ex:
err = u - u_ex
rel_err = np.sqrt(H0.dot_inner(err, err) / H0.dot_inner(u_ex, u_ex))
+ print('relative L2 error = {:.6e}'.format(rel_err))
return rel_err
+#==============================================================================
+# Test sources and exact solutions
+#==============================================================================
+def get_source_and_solution_h1(source_type=None, eta=0, mu=0,
+ domain=None, domain_name=None):
+ """
+ Provide the source term, and the exact solution when available, for:
-if __name__ == '__main__':
+ Find u in H^1, such that
- omega = np.sqrt(170) # source
- eta = -omega**2
- mu=0
- gamma_h = 10
+ A u = f on \\Omega
+ u = u_bc on \\partial \\Omega
+
+ with
+
+ A u := eta * u - mu * div grad u
- source_type = 'manu_poisson_elliptic'
+ see solve_h1_source_pbm()
+ """
+ from sympy import pi, cos, sin, Tuple, exp
+
+ # exact solutions (if available)
+ u_ex = None
+
+ # bc solution: describe the bc on boundary. Inside domain, values should
+ # not matter. Homogeneous bc will be used if None
+ u_bc = None
+
+ # source terms
+ f_scal = None
+
+ # auxiliary term (for more diagnostics)
+ grad_phi = None
+ phi = None
+
+ x, y = domain.coordinates
+
+ if source_type in ['manu_poisson_elliptic']:
+ x0 = 1.5
+ y0 = 1.5
+ s = (x - x0) - (y - y0)
+ t = (x - x0) + (y - y0)
+ a = (1 / 1.9)**2
+ b = (1 / 1.2)**2
+ sigma2 = 0.0121
+ tau = a * s**2 + b * t**2 - 1
+ phi = exp(-tau**2 / (2 * sigma2))
+ dx_tau = 2 * (a * s + b * t)
+ dy_tau = 2 * (-a * s + b * t)
+ dxx_tau = 2 * (a + b)
+ dyy_tau = 2 * (a + b)
+
+ dx_phi = (-tau * dx_tau / sigma2) * phi
+ dy_phi = (-tau * dy_tau / sigma2) * phi
+ grad_phi = Tuple(dx_phi, dy_phi)
+
+ f_scal = -((tau * dx_tau / sigma2)**2 - (tau * dxx_tau + dx_tau**2) / sigma2
+ + (tau * dy_tau / sigma2)**2 - (tau * dyy_tau + dy_tau**2) / sigma2) * phi
+
+ # exact solution of -p'' = f with hom. bc's on pretzel domain
+ if mu == 1 and eta == 0:
+ u_ex = phi
+ else:
+ print('WARNING (54375385643): exact solution not available in this case!')
+
+ if domain_name not in ['pretzel', 'pretzel_f']:
+ # we may have non-hom bc's
+ u_bc = u_ex
+
+ elif source_type == 'manu_poisson_2':
+ f_scal = -4
+ if mu == 1 and eta == 0:
+ u_ex = x**2 + y**2
+ else:
+ raise NotImplementedError
+ u_bc = u_ex
+
+ elif source_type == 'manu_poisson_sincos':
+ u_ex = sin(pi * x) * cos(pi * y)
+ f_scal = (eta + 2 * mu * pi**2) * u_ex
+ u_bc = u_ex
+
+ else:
+ raise ValueError(source_type)
+
+ return f_scal, u_bc, u_ex
+
+if __name__ == '__main__':
+ eta = 0
+ mu = 1
+ gamma_h = 10
+ source_type = 'manu_poisson_2'
domain_name = 'pretzel_f'
nc = 4
@@ -256,9 +342,8 @@ def lift_u_bc(u_bc):
solve_h1_source_pbm(
nc=nc, deg=deg,
eta=eta,
- mu=mu, # 1,
+ mu=mu,
domain_name=domain_name,
source_type=source_type,
backend_language='pyccel-gcc',
- plot_dir='./plots/h1_source_pbms_conga_2d/' + run_dir,
- )
\ No newline at end of file
+ )
diff --git a/examples/feec/hcurl_eigen_pbms_conga_2d.py b/examples/feec/hcurl_eigen_pbms_conga_2d.py
index 7de33cb9a..aa38f1737 100644
--- a/examples/feec/hcurl_eigen_pbms_conga_2d.py
+++ b/examples/feec/hcurl_eigen_pbms_conga_2d.py
@@ -11,25 +11,20 @@
from sympde.topology import Derham
-from psydac.api.discretization import discretize
-from psydac.api.settings import PSYDAC_BACKENDS
-from psydac.feec.multipatch.multipatch_domain_utilities import build_multipatch_domain
-from psydac.feec.multipatch.utilities import time_count
+from psydac.api.discretization import discretize
+from psydac.api.postprocessing import OutputManager, PostProcessManager
+from psydac.linalg.basic import IdentityOperator
+from psydac.linalg.utilities import array_to_psydac
-from scipy.sparse.linalg import spilu, lgmres
-from scipy.sparse.linalg import LinearOperator, eigsh, minres
-from scipy.linalg import norm
-from psydac.linalg.basic import IdentityOperator
-
-from psydac.linalg.utilities import array_to_psydac
from psydac.fem.basic import FemField
-from psydac.feec.multipatch.multipatch_domain_utilities import build_cartesian_multipatch_domain
-
-from psydac.api.postprocessing import OutputManager, PostProcessManager
+from psydac.feec.multipatch_domain_utilities import build_cartesian_multipatch_domain, build_multipatch_domain
+#==============================================================================
+# Solver for curl-curl eigenvalue problems
+#==============================================================================
def hcurl_solve_eigen_pbm(ncells=np.array([[8, 4], [4, 4]]), degree=(3, 3), domain=([0, np.pi], [0, np.pi]), domain_name='refined_square', backend_language='pyccel-gcc', mu=1, nu=0, gamma_h=0,
generalized_pbm=False, sigma=5, nb_eigs_solve=8, nb_eigs_plot=5, skip_eigs_threshold=1e-7,
plot_dir=None):
@@ -68,8 +63,6 @@ def hcurl_solve_eigen_pbm(ncells=np.array([[8, 4], [4, 4]]), degree=(3, 3), doma
Directory for the plots
"""
- diags = {}
-
if sigma is None:
raise ValueError('please specify a value for sigma')
@@ -80,9 +73,8 @@ def hcurl_solve_eigen_pbm(ncells=np.array([[8, 4], [4, 4]]), degree=(3, 3), doma
print(' domain_name = {}'.format(domain_name))
print(' backend_language = {}'.format(backend_language))
print('---------------------------------------------------------------------------------------------------------')
- t_stamp = time_count()
- print('building symbolic and discrete domain...')
+ print('building symbolic and discrete domain...')
int_x, int_y = domain
if isinstance(ncells, int):
domain = build_multipatch_domain(domain_name=domain_name)
@@ -105,16 +97,13 @@ def hcurl_solve_eigen_pbm(ncells=np.array([[8, 4], [4, 4]]), degree=(3, 3), doma
ncells = {patch.name: [ncells[int(patch.name[2])][int(patch.name[4])],
ncells[int(patch.name[2])][int(patch.name[4])]] for patch in domain.interior}
- t_stamp = time_count(t_stamp)
print(' .. discrete domain...')
domain_h = discretize(domain, ncells=ncells) # Vh space
print('building symbolic and discrete derham sequences...')
- t_stamp = time_count()
print(' .. derham sequence...')
derham = Derham(domain, ["H1", "Hcurl", "L2"])
- t_stamp = time_count(t_stamp)
print(' .. discrete derham sequence...')
derham_h = discretize(derham, domain_h, degree=degree)
@@ -122,30 +111,22 @@ def hcurl_solve_eigen_pbm(ncells=np.array([[8, 4], [4, 4]]), degree=(3, 3), doma
print('dim(V0h) = {}'.format(V0h.nbasis))
print('dim(V1h) = {}'.format(V1h.nbasis))
print('dim(V2h) = {}'.format(V2h.nbasis))
- diags['ndofs_V0'] = V0h.nbasis
- diags['ndofs_V1'] = V1h.nbasis
- diags['ndofs_V2'] = V2h.nbasis
- t_stamp = time_count(t_stamp)
print('building the discrete operators:')
print('commuting projection operators...')
I1 = IdentityOperator(V1h.coeff_space)
- t_stamp = time_count(t_stamp)
print('Hodge operators...')
# multi-patch (broken) linear operators / matrices
H0, H1, H2 = derham_h.hodge_operators(kind='linop', backend_language=backend_language)
dH0, dH1, dH2 = derham_h.hodge_operators(kind='linop', dual=True, backend_language=backend_language)
- t_stamp = time_count(t_stamp)
print('conforming projection operators...')
# conforming Projections (should take into account the boundary conditions
# of the continuous deRham sequence)
cP0, cP1, cP2 = derham_h.conforming_projectors(kind='linop', hom_bc = True)
-
- t_stamp = time_count(t_stamp)
print('broken differential operators...')
bD0, bD1 = derham_h.derivatives(kind='linop')
@@ -155,7 +136,6 @@ def hcurl_solve_eigen_pbm(ncells=np.array([[8, 4], [4, 4]]), degree=(3, 3), doma
# Conga (projection-based) stiffness matrices
if mu != 0:
# curl curl:
- t_stamp = time_count(t_stamp)
print('mu = {}'.format(mu))
print('curl-curl stiffness matrix...')
@@ -168,7 +148,6 @@ def hcurl_solve_eigen_pbm(ncells=np.array([[8, 4], [4, 4]]), degree=(3, 3), doma
# jump stabilization in V1h:
if gamma_h != 0 or generalized_pbm:
- t_stamp = time_count(t_stamp)
print('jump stabilization matrix...')
JS = (I1 - cP1).T @ H1 @ (I1 - cP1)
A += gamma_h * JS
@@ -179,11 +158,9 @@ def hcurl_solve_eigen_pbm(ncells=np.array([[8, 4], [4, 4]]), degree=(3, 3), doma
else:
B = H1
- t_stamp = time_count(t_stamp)
print('solving matrix eigenproblem...')
all_eigenvalues, all_eigenvectors_transp = get_eigenvalues(nb_eigs_solve, sigma, A.tosparse(), B.tosparse())
# Eigenvalue processing
- t_stamp = time_count(t_stamp)
print('sorting out eigenvalues...')
zero_eigenvalues = []
if skip_eigs_threshold is not None:
@@ -200,15 +177,7 @@ def hcurl_solve_eigen_pbm(ncells=np.array([[8, 4], [4, 4]]), degree=(3, 3), doma
eigenvalues = all_eigenvalues
eigenvectors = all_eigenvectors_transp.T
- for k, val in enumerate(eigenvalues):
- diags['eigenvalue_{}'.format(k)] = val # eigenvalues[k]
-
- for k, val in enumerate(zero_eigenvalues):
- diags['skipped eigenvalue_{}'.format(k)] = val
-
- t_stamp = time_count(t_stamp)
print('plotting the eigenmodes...')
-
if plot_dir:
if not os.path.exists(plot_dir):
@@ -249,11 +218,11 @@ def hcurl_solve_eigen_pbm(ncells=np.array([[8, 4], [4, 4]]), degree=(3, 3), doma
fields='vh')
PM.close()
- t_stamp = time_count(t_stamp)
-
- return diags, eigenvalues
-
+ return eigenvalues
+#==============================================================================
+# Eigenvalue solver
+#==============================================================================
def get_eigenvalues(nb_eigs, sigma, A_m, M_m):
"""
Compute the eigenvalues of the generalized eigenvalue problem A x = lambda M x that are closest to sigma.
@@ -270,6 +239,10 @@ def get_eigenvalues(nb_eigs, sigma, A_m, M_m):
Matrix M
"""
+ from scipy.sparse.linalg import spilu, lgmres
+ from scipy.sparse.linalg import LinearOperator, eigsh, minres
+ from scipy.linalg import norm
+
print('----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ')
print(
'computing {0} eigenvalues (and eigenvectors) close to sigma={1} with scipy.sparse.eigsh...'.format(
@@ -327,3 +300,79 @@ def get_eigenvalues(nb_eigs, sigma, A_m, M_m):
print("done: eigenvalues found: " + repr(eigenvalues))
return eigenvalues, eigenvectors
+
+if __name__ == '__main__':
+ # Degree
+ degree = [3, 3]
+
+ # Refined square domain
+ domain_name = 'refined_square'
+ domain = [[0, np.pi], [0, np.pi]]
+ ncells = np.array([[10, 5, 10],
+ [5, 10, 5],
+ [10, 5, 10]])
+
+ # Curved L-shape domain
+ # domain_name = 'curved_L_shape'
+ # domain = [[1, 3], [0, np.pi / 4]] # interval in x- and y-direction
+ # ncells = np.array([[None, 5],
+ # [5, 10]])
+
+ # Jump stabilization parameter
+ gamma_h = 0
+ # solves generalized eigenvalue problem with B(v,w) = <Pv,Pw> + <(I-P)v,(I-P)w> in the rhs
+ generalized_pbm = True
+
+ # curl-curl operator
+ nu = 0
+ mu = 1
+
+ # reference eigenvalues for validation
+ if domain_name == 'refined_square':
+ assert domain == [[0, np.pi], [0, np.pi]]
+ ref_sigmas = [
+ 1, 1,
+ 2,
+ 4, 4,
+ 5, 5,
+ 8,
+ 9, 9,
+ ]
+ sigma = 5
+ nb_eigs_solve = 10
+ nb_eigs_plot = 10
+ skip_eigs_threshold = 1e-7
+
+ elif domain_name == 'curved_L_shape':
+ # ref eigenvalues from Monique Dauge benchmark page
+ assert domain == [[1, 3], [0, np.pi / 4]]
+ ref_sigmas = [
+ 0.181857115231E+01,
+ 0.349057623279E+01,
+ 0.100656015004E+02,
+ 0.101118862307E+02,
+ 0.124355372484E+02,
+ ]
+ sigma = 7
+ nb_eigs_solve = 5
+ nb_eigs_plot = 5
+ skip_eigs_threshold = 1e-7
+
+ eigenvalues = hcurl_solve_eigen_pbm(
+ ncells=ncells, degree=degree,
+ gamma_h=gamma_h,
+ generalized_pbm=generalized_pbm,
+ nu=nu,
+ mu=mu,
+ sigma=sigma,
+ skip_eigs_threshold=skip_eigs_threshold,
+ nb_eigs_solve=nb_eigs_solve,
+ nb_eigs_plot=nb_eigs_plot,
+ domain_name=domain_name, domain=domain,
+ )
+
+ if ref_sigmas is not None:
+ n_errs = min(len(ref_sigmas), len(eigenvalues))
+ for k in range(n_errs):
+ print('error_{}: '.format(k), abs(eigenvalues[k] - ref_sigmas[k]))
diff --git a/examples/feec/hcurl_eigen_pbms_dg_2d.py b/examples/feec/hcurl_eigen_pbms_dg_2d.py
index 00dd63d87..0566a5804 100644
--- a/examples/feec/hcurl_eigen_pbms_dg_2d.py
+++ b/examples/feec/hcurl_eigen_pbms_dg_2d.py
@@ -9,37 +9,30 @@
SIAM Journal on Numerical Analysis 44 (2006)
"""
import os
-from mpi4py import MPI
-from collections import OrderedDict
-
import numpy as np
-import matplotlib.pyplot
-
-from scipy.sparse.linalg import LinearOperator, eigsh, minres
-from sympde.calculus import grad, dot, curl, cross
+from sympde.calculus import dot, curl, cross
from sympde.calculus import minus, plus
from sympde.topology import VectorFunctionSpace
from sympde.topology import elements_of
from sympde.topology import NormalVector
-from sympde.topology import Square
-from sympde.topology import IdentityMapping, PolarMapping
from sympde.expr.expr import LinearForm, BilinearForm
from sympde.expr.expr import integral
-from sympde.expr.expr import Norm
-from sympde.expr.equation import find, EssentialBC
from psydac.linalg.utilities import array_to_psydac
+
from psydac.fem.basic import FemField
-from psydac.feec.pull_push import pull_2d_hcurl
-from psydac.feec.multipatch.multipatch_domain_utilities import build_multipatch_domain
-from psydac.feec.multipatch.utilities import time_count
+from psydac.feec.multipatch_domain_utilities import build_multipatch_domain, build_cartesian_multipatch_domain
+
from psydac.api.discretization import discretize
-from psydac.feec.multipatch.multipatch_domain_utilities import build_cartesian_multipatch_domain
from psydac.api.postprocessing import OutputManager, PostProcessManager
+from hcurl_eigen_pbms_conga_2d import get_eigenvalues
+#==============================================================================
+# Solver for curl-curl eigenvalue problems
+#==============================================================================
def hcurl_solve_eigen_pbm_dg(ncells=np.array([[8, 4], [4, 4]]), degree=(3, 3), domain=([0, np.pi], [0, np.pi]), domain_name='refined_square', backend_language='pyccel-gcc', mu=1, nu=0,
sigma=5, nb_eigs_solve=8, nb_eigs_plot=5, skip_eigs_threshold=1e-7,
plot_dir=None,):
@@ -74,8 +67,6 @@ def hcurl_solve_eigen_pbm_dg(ncells=np.array([[8, 4], [4, 4]]), degree=(3, 3), d
Directory for the plots
"""
- diags = {}
-
if sigma is None:
raise ValueError('please specify a value for sigma')
@@ -86,7 +77,6 @@ def hcurl_solve_eigen_pbm_dg(ncells=np.array([[8, 4], [4, 4]]), degree=(3, 3), d
print(' domain_name = {}'.format(domain_name))
print(' backend_language = {}'.format(backend_language))
print('---------------------------------------------------------------------------------------------------------')
- t_stamp = time_count()
print('building symbolic and discrete domain...')
int_x, int_y = domain
@@ -111,13 +101,8 @@ def hcurl_solve_eigen_pbm_dg(ncells=np.array([[8, 4], [4, 4]]), degree=(3, 3), d
ncells = {patch.name: [ncells[int(patch.name[2])][int(patch.name[4])],
ncells[int(patch.name[2])][int(patch.name[4])]] for patch in domain.interior}
- mappings = OrderedDict([(P.logical_domain, P.mapping)
- for P in domain.interior])
- mappings_list = list(mappings.values())
- t_stamp = time_count(t_stamp)
print(' .. discrete domain...')
-
V = VectorFunctionSpace('V', domain, kind='hcurl')
u, v, F = elements_of(V, names='u, v, F')
@@ -166,17 +151,16 @@ def avr(w): return 0.5 * plus(w) + 0.5 * minus(w)
bh = discretize(b, domain_h, [Vh, Vh])
Bh_m = bh.assemble().tosparse()
- all_eigenvalues_2, all_eigenvectors_transp_2 = get_eigenvalues(
+ all_eigenvalues, all_eigenvectors_transp = get_eigenvalues(
nb_eigs_solve, sigma, Ah_m, Bh_m)
# Eigenvalue processing
- t_stamp = time_count(t_stamp)
print('sorting out eigenvalues...')
zero_eigenvalues = []
if skip_eigs_threshold is not None:
eigenvalues = []
eigenvectors = []
- for val, vect in zip(all_eigenvalues_2, all_eigenvectors_transp_2.T):
+ for val, vect in zip(all_eigenvalues, all_eigenvectors_transp.T):
if abs(val) < skip_eigs_threshold:
zero_eigenvalues.append(val)
# we skip the eigenvector
@@ -184,18 +168,10 @@ def avr(w): return 0.5 * plus(w) + 0.5 * minus(w)
eigenvalues.append(val)
eigenvectors.append(vect)
else:
- eigenvalues = all_eigenvalues_2
- eigenvectors = all_eigenvectors_transp_2.T
- diags['DG'] = True
- for k, val in enumerate(eigenvalues):
- diags['eigenvalue2_{}'.format(k)] = val # eigenvalues[k]
-
- for k, val in enumerate(zero_eigenvalues):
- diags['skipped eigenvalue2_{}'.format(k)] = val
+ eigenvalues = all_eigenvalues
+ eigenvectors = all_eigenvectors_transp.T
- t_stamp = time_count(t_stamp)
print('plotting the eigenmodes...')
-
if plot_dir:
if not os.path.exists(plot_dir):
os.makedirs(plot_dir)
@@ -232,81 +208,76 @@ def avr(w): return 0.5 * plus(w) + 0.5 * minus(w)
fields='vh')
PM.close()
- t_stamp = time_count(t_stamp)
-
- return diags, eigenvalues
-
+ return eigenvalues
+
+if __name__ == '__main__':
+ # Degree
+ degree = [3, 3]
+
+ # Refined square domain
+ domain_name = 'refined_square'
+ domain = [[0, np.pi], [0, np.pi]]
+ ncells = np.array([[10, 5, 10],
+ [5, 10, 5],
+ [10, 5, 10]])
+
+ # Curved L-shape domain
+ # domain_name = 'curved_L_shape'
+ # domain = [[1, 3], [0, np.pi / 4]] # interval in x- and y-direction
+ # ncells = np.array([[None, 5],
+ # [5, 10]])
+
+ # solves generalized eigenvalue problem with B(v,w) = <Pv,Pw> + <(I-P)v,(I-P)w> in the rhs
+ generalized_pbm = True
+
+ # curl-curl operator
+ nu = 0
+ mu = 1
+
+ # reference eigenvalues for validation
+ if domain_name == 'refined_square':
+ assert domain == [[0, np.pi], [0, np.pi]]
+ ref_sigmas = [
+ 1, 1,
+ 2,
+ 4, 4,
+ 5, 5,
+ 8,
+ 9, 9,
+ ]
+ sigma = 5
+ nb_eigs_solve = 10
+ nb_eigs_plot = 10
+ skip_eigs_threshold = 1e-7
-def get_eigenvalues(nb_eigs, sigma, A_m, M_m):
- """
- Compute the eigenvalues of the matrix A close to sigma and right-hand-side M
-
- Parameters
- ----------
- nb_eigs : int
- Number of eigenvalues to compute
- sigma : float
- Value close to which the eigenvalues are computed
- A_m : sparse matrix
- Matrix A
- M_m : sparse matrix
- Matrix M
- """
-
- print('----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ')
- print(
- 'computing {0} eigenvalues (and eigenvectors) close to sigma={1} with scipy.sparse.eigsh...'.format(
- nb_eigs,
- sigma))
- mode = 'normal'
- which = 'LM'
- # from eigsh docstring:
- # ncv = number of Lanczos vectors generated ncv must be greater than k and smaller than n;
- # it is recommended that ncv > 2*k. Default: min(n, max(2*k + 1, 20))
- ncv = 4 * nb_eigs
- print('A_m.shape = ', A_m.shape)
- try_lgmres = True
- max_shape_splu = 24000 # OK for nc=20, deg=6 on pretzel_f
- if A_m.shape[0] < max_shape_splu:
- print('(via sparse LU decomposition)')
- OPinv = None
- tol_eigsh = 0
- else:
-
- OP_m = A_m - sigma * M_m
- tol_eigsh = 1e-7
- if try_lgmres:
- print(
- '(via SPILU-preconditioned LGMRES iterative solver for A_m - sigma*M1_m)')
- OP_spilu = spilu(OP_m, fill_factor=15, drop_tol=5e-5)
- preconditioner = LinearOperator(
- OP_m.shape, lambda x: OP_spilu.solve(x))
- tol = tol_eigsh
- OPinv = LinearOperator(
- matvec=lambda v: lgmres(OP_m, v, x0=None, tol=tol, atol=tol, M=preconditioner,
- callback=lambda x: print(
- 'cg -- residual = ', norm(OP_m.dot(x) - v))
- )[0],
- shape=M_m.shape,
- dtype=M_m.dtype
- )
-
- else:
- # from https://docs.scipy.org/doc/scipy/reference/generated/scipy.sparse.linalg.eigsh.html:
- # the user can supply the matrix or operator OPinv, which gives x = OPinv @ b = [A - sigma * M]^-1 @ b.
- # > here, minres: MINimum RESidual iteration to solve Ax=b
- # suggested in https://github.com/scipy/scipy/issues/4170
- print('(with minres iterative solver for A_m - sigma*M1_m)')
- OPinv = LinearOperator(
- matvec=lambda v: minres(
- OP_m,
- v,
- tol=1e-10)[0],
- shape=M_m.shape,
- dtype=M_m.dtype)
-
- eigenvalues, eigenvectors = eigsh(
- A_m, k=nb_eigs, M=M_m, sigma=sigma, mode=mode, which=which, ncv=ncv, tol=tol_eigsh, OPinv=OPinv)
-
- print("done: eigenvalues found: " + repr(eigenvalues))
- return eigenvalues, eigenvectors
+ elif domain_name == 'curved_L_shape':
+ # ref eigenvalues from Monique Dauge benchmark page
+ assert domain == [[1, 3], [0, np.pi / 4]]
+ ref_sigmas = [
+ 0.181857115231E+01,
+ 0.349057623279E+01,
+ 0.100656015004E+02,
+ 0.101118862307E+02,
+ 0.124355372484E+02,
+ ]
+ sigma = 7
+ nb_eigs_solve = 5
+ nb_eigs_plot = 5
+ skip_eigs_threshold = 1e-7
+
+ eigenvalues = hcurl_solve_eigen_pbm_dg(
+ ncells=ncells, degree=degree,
+ nu=nu,
+ mu=mu,
+ sigma=sigma,
+ skip_eigs_threshold=skip_eigs_threshold,
+ nb_eigs_solve=nb_eigs_solve,
+ nb_eigs_plot=nb_eigs_plot,
+ domain_name=domain_name, domain=domain,
+ )
+
+ if ref_sigmas is not None:
+ n_errs = min(len(ref_sigmas), len(eigenvalues))
+ for k in range(n_errs):
+ print('error_{}: '.format(k), abs(eigenvalues[k] - ref_sigmas[k]))
diff --git a/examples/feec/hcurl_eigen_testcases.py b/examples/feec/hcurl_eigen_testcases.py
deleted file mode 100644
index 1cebbe9bc..000000000
--- a/examples/feec/hcurl_eigen_testcases.py
+++ /dev/null
@@ -1,295 +0,0 @@
-#---------------------------------------------------------------------------#
-# This file is part of PSYDAC which is released under MIT License. See the #
-# LICENSE file or go to https://github.com/pyccel/psydac/blob/devel/LICENSE #
-# for full license details. #
-#---------------------------------------------------------------------------#
-"""
- Runner script for solving the eigenvalue problem for the H(curl) operator for different discretizations.
-"""
-
-import os
-import numpy as np
-
-from psydac.feec.multipatch.examples.hcurl_eigen_pbms_conga_2d import hcurl_solve_eigen_pbm
-from psydac.feec.multipatch.examples.hcurl_eigen_pbms_dg_2d import hcurl_solve_eigen_pbm_dg
-from psydac.feec.multipatch.utilities import time_count, get_run_dir, get_plot_dir, get_mat_dir, get_sol_dir, diag_fn
-from psydac.feec.multipatch.utils_conga_2d import write_diags_to_file
-from psydac.api.postprocessing import OutputManager, PostProcessManager
-
-t_stamp_full = time_count()
-
-# ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
-#
-# test-case and numerical parameters:
-method = 'feec'
-# method = 'dg'
-
-operator = 'curl-curl'
-degree = [3, 3] # shared across all patches
-
-# pretzel_f (18 patches)
-# domain_name = 'pretzel_f'
-# ncells = np.array([8, 8, 16, 16, 8, 4, 4, 4, 4, 4, 2, 2, 4, 16, 16, 8, 2, 2, 2])
-# ncells = np.array([4 for _ in range(18)])
-
-# domain only needed for square-like domains
-# domain = [[0, np.pi], [0, np.pi]] # interval in x- and y-direction
-
-# refined square domain
-# domain_name = 'refined_square'
-# the shape of ncells gives the shape of the domain,
-# while the entries describe the isometric number of cells in each patch
-# 2x2 = 4 patches
-# ncells = np.array([[8, 4],
-# [4, 4]])
-# 3x3= 9 patches
-# ncells = np.array([[4, 2, 4],
-# [2, 4, 2],
-# [4, 2, 4]])
-
-# L-shaped domain
-# domain_name = 'square_L_shape'
-# domain=[[-1, 1],[-1, 1]] # interval in x- and y-direction
-
-# The None indicates the patches to leave out
-# 2x2 = 4 patches
-# ncells = np.array([[None, 2],
-# [2, 2]])
-# 4x4 = 16 patches
-# ncells = np.array([[None, None, 4, 2],
-# [None, None, 8, 4],
-# [4, 8, 8, 4],
-# [2, 4, 4, 2]])
-# 8x8 = 64 patches
-# ncells = np.array([[None, None, None, None, 2, 2, 2, 2],
-# [None, None, None, None, 2, 2, 2, 2],
-# [None, None, None, None, 2, 2, 2, 2],
-# [None, None, None, None, 4, 4, 2, 2],
-# [2, 2, 2, 4, 8, 4, 2, 2],
-# [2, 2, 2, 4, 4, 4, 2, 2],
-# [2, 2, 2, 2, 2, 2, 2, 2],
-# [2, 2, 2, 2, 2, 2, 2, 2]])
-
-# Curved L-shape domain
-domain_name = 'curved_L_shape'
-domain = [[1, 3], [0, np.pi / 4]] # interval in x- and y-direction
-
-
-ncells = np.array([[None, 5],
- [5, 10]])
-# ncells = 5
-
-# ncells = np.array([[None, None, 2, 2],
-# [None, None, 4, 2],
-# [ 2, 4, 8, 4],
-# [ 2, 2, 4, 4]])
-
-# ncells = np.array([[None, None, None, 2, 2, 2],
-# [None, None, None, 4, 4, 2],
-# [None, None, None, 8, 4, 2],
-# [2, 4, 8, 8, 4, 2],
-# [2, 4, 4, 4, 4, 2],
-# [2, 2, 2, 2, 2, 2]])
-
-# ncells = np.array([[None, None, None, None, 2, 2, 2, 2],
-# [None, None, None, None, 4, 4, 4, 2],
-# [None, None, None, None, 8, 8, 4, 2],
-# [None, None, None, None, 16, 8, 4, 2],
-# [2, 4, 8, 16, 16, 8, 4, 2],
-# [2, 4, 8, 8, 8, 8, 4, 2],
-# [2, 4, 4, 4, 4, 4, 4, 2],
-# [2, 2, 2, 2, 2, 2, 2, 2]])
-
-# all kinds of different square refinements and constructions are possible, eg
-# doubly connected domains
-# ncells = np.array([[4, 2, 2, 4],
-# [2, None, None, 2],
-# [2, None, None, 2],
-# [4, 2, 2, 4]])
-
-gamma_h = 0
-# solves generalized eigenvalue problem with B(v,w) = <Pv,Pw> + <(I-P)v,(I-P)w> in the rhs
-generalized_pbm = True
-
-if operator == 'curl-curl':
- nu = 0
- mu = 1
-else:
- raise ValueError(operator)
-
-case_dir = 'eigenpbm_' + operator + '_' + method
-ref_case_dir = case_dir
-
-ref_sigmas = None
-sigma = None
-nb_eigs_solve = None
-nb_eigs_plot = None
-skip_eigs_threshold = None
-diags = None
-eigenvalues = None
-
-if domain_name == 'refined_square':
- assert domain == [[0, np.pi], [0, np.pi]]
- ref_sigmas = [
- 1, 1,
- 2,
- 4, 4,
- 5, 5,
- 8,
- 9, 9,
- ]
- sigma = 5
- nb_eigs_solve = 10
- nb_eigs_plot = 10
- skip_eigs_threshold = 1e-7
-
-elif domain_name == 'square_L_shape':
- assert domain == [[-1, 1], [-1, 1]]
- ref_sigmas = [
- 1.47562182408,
- 3.53403136678,
- 9.86960440109,
- 9.86960440109,
- 11.3894793979,
- ]
- sigma = 6
- nb_eigs_solve = 5
- nb_eigs_plot = 5
- skip_eigs_threshold = 1e-7
-
-elif domain_name == 'curved_L_shape':
- # ref eigenvalues from Monique Dauge benchmark page
- assert domain == [[1, 3], [0, np.pi / 4]]
- ref_sigmas = [
- 0.181857115231E+01,
- 0.349057623279E+01,
- 0.100656015004E+02,
- 0.101118862307E+02,
- 0.124355372484E+02,
- ]
- sigma = 7
- nb_eigs_solve = 7
- nb_eigs_plot = 7
- skip_eigs_threshold = 1e-7
-
-elif domain_name in ['pretzel_f']:
- if operator == 'curl-curl':
- # ref sigmas computed with nc=20 and deg=6 and gamma = 0 (and
- # generalized ev-pbm)
- ref_sigmas = [
- 0.1795339843,
- 0.1992261261,
- 0.6992717244,
- 0.8709410438,
- 1.1945106937,
- 1.2546992683,
- ]
-
- sigma = .8
- nb_eigs_solve = 10
- nb_eigs_plot = 5
- skip_eigs_threshold = 1e-7
-
-#
-# ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
-
-common_diag_filename = './' + case_dir + '_diags.txt'
-
-
-params = {
- 'domain_name': domain_name,
- 'domain': domain,
- 'operator': operator,
- 'mu': mu,
- 'nu': nu,
- 'ncells': ncells,
- 'degree': degree,
- 'gamma_h': gamma_h,
- 'generalized_pbm': generalized_pbm,
- 'nb_eigs_solve': nb_eigs_solve,
- 'skip_eigs_threshold': skip_eigs_threshold
-}
-
-print(params)
-
-# backend_language = 'numba'
-backend_language = 'pyccel-gcc'
-
-dims = 1 if isinstance(ncells, int) else ncells.shape
-sz = 1 if isinstance(ncells, int) else ncells[ncells != None].sum()
-
-# get_run_dir(domain_name, nc, deg)
-run_dir = domain_name + str(dims) + 'patches_' + 'size_{}'.format(sz)
-plot_dir = get_plot_dir(case_dir, run_dir)
-diag_filename = plot_dir + '/' + diag_fn()
-common_diag_filename = './' + case_dir + '_diags.txt'
-
-
-print('\n --- --- --- --- --- --- --- --- --- --- --- --- --- --- \n')
-print(' Calling hcurl_solve_eigen_pbm() with params = {}'.format(params))
-print('\n --- --- --- --- --- --- --- --- --- --- --- --- --- --- \n')
-
-# ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
-# calling eigenpbm solver for:
-#
-# find lambda in R and u in H0(curl), such that
-# A u = lambda * u on \Omega
-# with
-#
-# A u := mu * curl curl u - nu * grad div u
-#
-# note:
-# - we look for nb_eigs_solve eigenvalues close to sigma (skip zero eigenvalues if skip_zero_eigs==True)
-# - we plot nb_eigs_plot eigenvectors
-if method == 'feec':
- diags, eigenvalues = hcurl_solve_eigen_pbm(
- ncells=ncells, degree=degree,
- gamma_h=gamma_h,
- generalized_pbm=generalized_pbm,
- nu=nu,
- mu=mu,
- sigma=sigma,
- skip_eigs_threshold=skip_eigs_threshold,
- nb_eigs_solve=nb_eigs_solve,
- nb_eigs_plot=nb_eigs_plot,
- domain_name=domain_name, domain=domain,
- backend_language=backend_language,
- plot_dir=plot_dir,
- )
-
-elif method == 'dg':
- diags, eigenvalues = hcurl_solve_eigen_pbm_dg(
- ncells=ncells, degree=degree,
- nu=nu,
- mu=mu,
- sigma=sigma,
- skip_eigs_threshold=skip_eigs_threshold,
- nb_eigs_solve=nb_eigs_solve,
- nb_eigs_plot=nb_eigs_plot,
- domain_name=domain_name, domain=domain,
- backend_language=backend_language,
- plot_dir=plot_dir,
- )
-
-if ref_sigmas is not None:
- errors = []
- n_errs = min(len(ref_sigmas), len(eigenvalues))
- for k in range(n_errs):
- diags['error_{}'.format(k)] = abs(eigenvalues[k] - ref_sigmas[k])
-#
-# ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
-
-write_diags_to_file(
- diags,
- script_filename=__file__,
- diag_filename=diag_filename,
- params=params)
-write_diags_to_file(
- diags,
- script_filename=__file__,
- diag_filename=common_diag_filename,
- params=params)
-
-# PM = PostProcessManager(geometry_file=, )
-time_count(t_stamp_full, msg='full program')
From 15fd15688fedb68bb319cdcb1388aaa7984bfb2d Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Tue, 2 Dec 2025 16:43:00 +0100
Subject: [PATCH 26/63] rename md so it shows up on github
---
examples/performance/{matrix_assembly_speed_log.md => README.md} | 0
1 file changed, 0 insertions(+), 0 deletions(-)
rename examples/performance/{matrix_assembly_speed_log.md => README.md} (100%)
diff --git a/examples/performance/matrix_assembly_speed_log.md b/examples/performance/README.md
similarity index 100%
rename from examples/performance/matrix_assembly_speed_log.md
rename to examples/performance/README.md
From ae18af624cc668dff92194a0ab9d28f3f1ae59e0 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Tue, 2 Dec 2025 16:45:21 +0100
Subject: [PATCH 27/63] add time-harmonic Maxwell
---
examples/feec/hcurl_eigen_pbms_conga_2d.py | 1 +
examples/feec/hcurl_source_pbms_conga_2d.py | 184 +++++---
examples/feec/hcurl_source_testcase.py | 145 -------
examples/feec/mixed_source_pbms_conga_2d.py | 455 --------------------
4 files changed, 120 insertions(+), 665 deletions(-)
delete mode 100644 examples/feec/hcurl_source_testcase.py
delete mode 100644 examples/feec/mixed_source_pbms_conga_2d.py
diff --git a/examples/feec/hcurl_eigen_pbms_conga_2d.py b/examples/feec/hcurl_eigen_pbms_conga_2d.py
index aa38f1737..e790ba852 100644
--- a/examples/feec/hcurl_eigen_pbms_conga_2d.py
+++ b/examples/feec/hcurl_eigen_pbms_conga_2d.py
@@ -160,6 +160,7 @@ def hcurl_solve_eigen_pbm(ncells=np.array([[8, 4], [4, 4]]), degree=(3, 3), doma
print('solving matrix eigenproblem...')
all_eigenvalues, all_eigenvectors_transp = get_eigenvalues(nb_eigs_solve, sigma, A.tosparse(), B.tosparse())
+
# Eigenvalue processing
print('sorting out eigenvalues...')
zero_eigenvalues = []
diff --git a/examples/feec/hcurl_source_pbms_conga_2d.py b/examples/feec/hcurl_source_pbms_conga_2d.py
index 41a4160a5..cfc31e331 100644
--- a/examples/feec/hcurl_source_pbms_conga_2d.py
+++ b/examples/feec/hcurl_source_pbms_conga_2d.py
@@ -23,24 +23,23 @@
from sympde.topology import Derham
-
from psydac.api.discretization import discretize
-from psydac.feec.multipatch.multipatch_domain_utilities import build_multipatch_domain
-from psydac.feec.multipatch.examples.ppc_test_cases import get_source_and_solution_hcurl
-from psydac.feec.multipatch.utils_conga_2d import P1_phys
-from psydac.feec.multipatch.utilities import time_count
-# from psydac.linalg.utilities import array_to_psydac
-from psydac.fem.basic import FemField
from psydac.api.postprocessing import OutputManager, PostProcessManager
+from psydac.feec.multipatch_domain_utilities import build_multipatch_domain
+
+from psydac.fem.basic import FemField
+from psydac.fem.projectors import get_dual_dofs
+
from psydac.linalg.basic import IdentityOperator
-from psydac.fem.projectors import get_dual_dofs
from psydac.linalg.solvers import inverse
-
+#==============================================================================
+# Solver for H(curl) source problems
+#==============================================================================
def solve_hcurl_source_pbm(
- nc=4, deg=4, domain_name='pretzel_f', backend_language=None, source_proj='tilde_Pi', source_type='manu_J',
- eta=-10., mu=1., nu=1., gamma_h=10.,
+ nc=4, deg=4, domain_name='pretzel_f', backend_language=None, source_type='manu_maxwell_inhom',
+ eta=-10., mu=1., nu=0., gamma_h=10.,
project_sol=True, plot_dir=None):
"""
solver for the problem: find u in H(curl), such that
@@ -71,12 +70,8 @@ def solve_hcurl_source_pbm(
:param nc: nb of cells per dimension, in each patch
:param deg: coordinate degree in each patch
:param gamma_h: jump penalization parameter
- :param source_proj: approximation operator (in V1h) for the source, possible values are
- - 'tilde_Pi': dual commuting projection, an L2 projection filtered by the adjoint conforming projection)
:param source_type: must be implemented in get_source_and_solution()
"""
- diags = {}
-
degree = [deg, deg]
@@ -85,45 +80,34 @@ def solve_hcurl_source_pbm(
print(' ncells = {}'.format(nc))
print(' degree = {}'.format(degree))
print(' domain_name = {}'.format(domain_name))
- print(' source_proj = {}'.format(source_proj))
print(' backend_language = {}'.format(backend_language))
print('---------------------------------------------------------------------------------------------------------')
print()
print(' -- building discrete spaces and operators --')
- t_stamp = time_count()
print(' .. multi-patch domain...')
domain = build_multipatch_domain(domain_name=domain_name)
- # mappings = OrderedDict([(P.logical_domain, P.mapping)
- # for P in domain.interior])
- # mappings_list = list(mappings.values())
-
+
if isinstance(nc, int):
ncells = [nc, nc]
else:
ncells = {patch.name: [nc[i], nc[i]]
for (i, patch) in enumerate(domain.interior)}
-
- t_stamp = time_count(t_stamp)
print(' .. derham sequence...')
derham = Derham(domain, ["H1", "Hcurl", "L2"])
- t_stamp = time_count(t_stamp)
print(' .. discrete domain...')
domain_h = discretize(domain, ncells=ncells)
- t_stamp = time_count(t_stamp)
print(' .. discrete derham sequence...')
derham_h = discretize(derham, domain_h, degree=degree)
- t_stamp = time_count(t_stamp)
print(' .. commuting projection operators...')
nquads = [10 * (d + 1) for d in degree]
P0, P1, P2 = derham_h.projectors(nquads=nquads)
- t_stamp = time_count(t_stamp)
print(' .. multi-patch spaces...')
V0h, V1h, V2h = derham_h.spaces
mappings = derham_h.callable_mapping
@@ -131,52 +115,39 @@ def solve_hcurl_source_pbm(
print('dim(V0h) = {}'.format(V0h.nbasis))
print('dim(V1h) = {}'.format(V1h.nbasis))
print('dim(V2h) = {}'.format(V2h.nbasis))
- diags['ndofs_V0'] = V0h.nbasis
- diags['ndofs_V1'] = V1h.nbasis
- diags['ndofs_V2'] = V2h.nbasis
- t_stamp = time_count(t_stamp)
+
print(' .. Id operator and matrix...')
I1 = IdentityOperator(V1h.coeff_space)
- t_stamp = time_count(t_stamp)
print(' .. Hodge operators...')
# multi-patch (broken) linear operators / matrices
# other option: define as Hodge Operators:
H0, H1, H2 = derham_h.hodge_operators(kind='linop', backend_language=backend_language)
dH0, dH1, dH2 = derham_h.hodge_operators(kind='linop', dual=True, backend_language=backend_language)
-
- t_stamp = time_count(t_stamp)
print(' .. conforming Projection operators...')
# conforming Projections (should take into account the boundary conditions
# of the continuous deRham sequence)
cP0, cP1, cP2 = derham_h.conforming_projectors(kind='linop', hom_bc = True)
-
- t_stamp = time_count(t_stamp)
print(' .. broken differential operators...')
# broken (patch-wise) differential operators
bD0, bD1 = derham_h.derivatives(kind='linop')
# Conga (projection-based) stiffness matrices
# curl curl:
- t_stamp = time_count(t_stamp)
print(' .. curl-curl stiffness matrix...')
pre_CC = bD1.T @ H2 @ bD1
# grad div:
- t_stamp = time_count(t_stamp)
print(' .. grad-div stiffness matrix...')
pre_GD = - H1 @ bD0 @ cP0 @ dH0 @ cP0.T @ bD0.T @ H1
# jump stabilization:
- t_stamp = time_count(t_stamp)
print(' .. jump stabilization matrix...')
JS = (I1 - cP1).T @ H1 @ (I1 - cP1)
-
- t_stamp = time_count(t_stamp)
print(' .. full operator matrix...')
print('eta = {}'.format(eta))
print('mu = {}'.format(mu))
@@ -191,27 +162,21 @@ def solve_hcurl_source_pbm(
A = pre_A @ cP1 + gamma_h * JS
- t_stamp = time_count(t_stamp)
print()
print(' -- getting source --')
- f_vect, u_bc, u_ex, curl_u_ex, div_u_ex = get_source_and_solution_hcurl(source_type=source_type, eta=eta, mu=mu, domain=domain, domain_name=domain_name,)
+ f_vect, u_bc, u_ex = get_source_and_solution_hcurl(source_type=source_type, eta=eta, mu=mu, domain=domain, domain_name=domain_name,)
# compute approximate source f_h
- t_stamp = time_count(t_stamp)
-
# f_h = L2 projection of f_vect, with filtering if tilde_Pi
- print(' .. projecting the source with ' + source_proj +' projection...')
-
tilde_f = get_dual_dofs(Vh=V1h, f=f_vect, domain_h=domain_h, backend_language=backend_language)
- if source_proj == 'tilde_Pi':
- print(' .. filtering the discrete source with P1.T ...')
- tilde_f = cP1.T @ tilde_f
+ print(' .. filtering the discrete source with P1.T ...')
+ tilde_f = cP1.T @ tilde_f
def lift_u_bc(u_bc):
if u_bc is not None:
- ubc = P1_phys(u_bc, P1, domain).coeffs
+ ubc = P1(u_bc).coeffs
ubc -= cP1.dot(ubc)
else:
@@ -219,40 +184,32 @@ def lift_u_bc(u_bc):
return ubc
-
ubc = lift_u_bc(u_bc)
if ubc is not None:
# modified source for the homogeneous pbm
- t_stamp = time_count(t_stamp)
print(' .. modifying the source with lifted bc solution...')
tilde_f -= pre_A.dot(ubc)
# direct solve with scipy spsolve
- t_stamp = time_count(t_stamp)
print('solving source problem with conjugate gradient...')
solver = inverse(A, solver='cg', tol=1e-8)
u = solver.solve(tilde_f)
# project the homogeneous solution on the conforming problem space
- t_stamp = time_count(t_stamp)
if project_sol:
print(' .. projecting the homogeneous solution on the conforming problem space...')
u = cP1.dot(u)
if ubc is not None:
# adding the lifted boundary condition
- t_stamp = time_count(t_stamp)
print(' .. adding the lifted boundary condition...')
u += ubc
uh = FemField(V1h, coeffs=u)
- #need cp1 here?
f = dH1.dot(tilde_f)
jh = FemField(V1h, coeffs=f)
- t_stamp = time_count(t_stamp)
-
print(' -- plots and diagnostics --')
if plot_dir:
if not os.path.exists(plot_dir):
@@ -287,15 +244,112 @@ def lift_u_bc(u_bc):
PM.close()
- time_count(t_stamp)
if u_ex:
- u_ex_p = P1_phys(u_ex, P1, domain).coeffs
+ u_ex_p = P1(u_ex).coeffs
err = u_ex_p - u
- print(err.inner(H1.dot(err)))
l2_error = np.sqrt( H1.dot_inner(err, err) / H1.dot_inner(u_ex_p, u_ex_p))
- print(l2_error)
- diags['err'] = l2_error
+ print("L2 error: ", l2_error)
+
+ return l2_error
+
+#==============================================================================
+# Test sources and exact solutions
+#==============================================================================
+def get_source_and_solution_hcurl(
+ source_type=None, eta=0, mu=0, nu=0,
+ domain=None, domain_name=None):
+ """
+ Provide the source term, and the exact solution when available, for:
+
+ Find u in H(curl) such that
+
+ A u = f on \\Omega
+ n x u = n x u_bc on \\partial \\Omega
+
+ with
+
+ A u := eta * u + mu * curl curl u - nu * grad div u
+
+ see solve_hcurl_source_pbm()
+ """
+ from sympy import pi, cos, sin, Tuple, exp
+
+ # exact solutions (if available)
+ u_ex = None
+
+ # bc solution: describe the bc on boundary. Inside domain, values should
+ # not matter. Homogeneous bc will be used if None
+ u_bc = None
+
+ # source terms
+ f_vect = None
+
+ # auxiliary term (for more diagnostics)
+ grad_phi = None
+ phi = None
+
+ x, y = domain.coordinates
+
+ if source_type == 'manu_maxwell_inhom':
+ # used for Maxwell equation with manufactured solution
+ f_vect = Tuple(eta * sin(pi * y) - pi**2 * sin(pi * y) * cos(pi * x) + pi**2 * sin(pi * y),
+ eta * sin(pi * x) * cos(pi * y) + pi**2 * sin(pi * x) * cos(pi * y))
+ if nu == 0:
+ u_ex = Tuple(sin(pi * y), sin(pi * x) * cos(pi * y))
+ curl_u_ex = pi * (cos(pi * x) * cos(pi * y) - cos(pi * y))
+ div_u_ex = -pi * sin(pi * x) * sin(pi * y)
+ else:
+ raise NotImplementedError
+ u_bc = u_ex
+
+ elif source_type == 'elliptic_J':
+ # no manufactured solution for Maxwell pbm
+ x0 = 1.5
+ y0 = 1.5
+ s = (x - x0) - (y - y0)
+ t = (x - x0) + (y - y0)
+ a = (1 / 1.9)**2
+ b = (1 / 1.2)**2
+ sigma2 = 0.0121
+ tau = a * s**2 + b * t**2 - 1
+ phi = exp(-tau**2 / (2 * sigma2))
+ dx_tau = 2 * (a * s + b * t)
+ dy_tau = 2 * (-a * s + b * t)
+
+ f_x = dy_tau * phi
+ f_y = - dx_tau * phi
+ f_vect = Tuple(f_x, f_y)
+
+ else:
+ raise ValueError(source_type)
+
+    from sympy import lambdify
+
+    # lambdify the boundary data and the exact solution only when they are available
+    # (e.g. for 'elliptic_J' there is no manufactured solution, so both remain None)
+    if u_bc is not None:
+        u_bc = [lambdify(domain.coordinates, u_bc[0]),
+                lambdify(domain.coordinates, u_bc[1])]
+    if u_ex is not None:
+        u_ex = [lambdify(domain.coordinates, u_ex[0]),
+                lambdify(domain.coordinates, u_ex[1])]
+
+    return f_vect, u_bc, u_ex
+
+if __name__ == '__main__':
+ nc = 5
+ deg = 3
+
+ source_type = 'manu_maxwell_inhom'
+ domain_name = 'pretzel_f'
+
+ omega = np.pi
+ eta = -omega**2 # source
- return diags
+ err = solve_hcurl_source_pbm(
+ nc=nc, deg=deg,
+ eta=eta,
+ nu=0,
+ mu=1,
+ domain_name=domain_name,
+ source_type=source_type,
+ backend_language='pyccel-gcc')
+
\ No newline at end of file
diff --git a/examples/feec/hcurl_source_testcase.py b/examples/feec/hcurl_source_testcase.py
deleted file mode 100644
index dc60f2102..000000000
--- a/examples/feec/hcurl_source_testcase.py
+++ /dev/null
@@ -1,145 +0,0 @@
-#---------------------------------------------------------------------------#
-# This file is part of PSYDAC which is released under MIT License. See the #
-# LICENSE file or go to https://github.com/pyccel/psydac/blob/devel/LICENSE #
-# for full license details. #
-#---------------------------------------------------------------------------#
-"""
- Runner script for solving the H(curl) source problem.
-"""
-
-import os
-import numpy as np
-from psydac.feec.multipatch.examples.hcurl_source_pbms_conga_2d import solve_hcurl_source_pbm
-
-from psydac.feec.multipatch.utilities import time_count, FEM_sol_fn, get_run_dir, get_plot_dir, get_mat_dir, get_sol_dir, diag_fn
-from psydac.feec.multipatch.utils_conga_2d import write_diags_to_file
-
-t_stamp_full = time_count()
-
-# ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
-#
-# main test-cases used for the ppc paper:
-
-# test_case = 'maxwell_hom_eta=50' # used in paper
-#test_case = 'maxwell_hom_eta=170' # used in paper
-test_case = 'maxwell_inhom' # used in paper
-
-# ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
-
-# numerical parameters:
-domain_name = 'pretzel_f'
-# domain_name = 'curved_L_shape'
-
-# currently only 'tilde_Pi' is implemented
-source_proj = 'tilde_Pi'
-
-# nc_s = [np.array([16 for _ in range(18)])]
-
-# corners in pretzel [2, 2, 2*,2*, 2, 1, 1, 1, 1, 1, 0, 0, 1, 2*, 2*, 2, 0, 0 ]
-nc_s = [np.array([16, 16, 16, 16, 16, 8, 8, 8, 8,
- 8, 8, 8, 8, 16, 16, 16, 8, 8])]
-# nc_s = [10]
-# refine handles only
-# nc_s = [np.array([16, 16, 16, 16, 16, 8, 8, 8, 8, 4, 2, 2, 4, 16, 16, 16, 2, 2])]
-
-# refine source
-# nc_s = [np.array([32, 8, 8, 32, 32, 32, 32, 8, 8, 8, 8, 8, 8, 32, 8, 8, 8, 8])]
-
-deg_s = [3]
-
-if test_case == 'maxwell_hom_eta=50':
- homogeneous = True
- source_type = 'elliptic_J'
- omega = np.sqrt(50) # source time pulsation
-
-elif test_case == 'maxwell_hom_eta=170':
- homogeneous = True
- source_type = 'elliptic_J'
- omega = np.sqrt(170) # source time pulsation
-
-elif test_case == 'maxwell_inhom':
- homogeneous = False
- source_type = 'manu_maxwell_inhom'
- omega = np.pi
-
-else:
- raise ValueError(test_case)
-
-case_dir = test_case
-
-eta = int(-omega**2 * roundoff) / roundoff
-
-project_sol = True # True # (use conf proj of solution for visualization)
-gamma_h = 10
-
-#
-# ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
-
-common_diag_filename = './' + case_dir + '_diags.txt'
-
-for nc in nc_s:
- for deg in deg_s:
-
- params = {
- 'domain_name': domain_name,
- 'nc': nc,
- 'deg': deg,
- 'homogeneous': homogeneous,
- 'source_type': source_type,
- 'source_proj': source_proj,
- 'project_sol': project_sol,
- 'omega': omega,
- 'gamma_h': gamma_h,
- }
- # backend_language = 'numba'
- backend_language = 'pyccel-gcc'
-
- run_dir = get_run_dir(domain_name, nc, deg, source_type=source_type)
- plot_dir = get_plot_dir(case_dir, run_dir)
- diag_filename = plot_dir + '/' + \
- diag_fn(source_type=source_type, source_proj=source_proj)
-
-
-
- print('\n --- --- --- --- --- --- --- --- --- --- --- --- --- --- \n')
- print(' Calling solve_hcurl_source_pbm() with params = {}'.format(params))
- print('\n --- --- --- --- --- --- --- --- --- --- --- --- --- --- \n')
-
- # ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
- # calling solver for:
- #
- # find u in H(curl), s.t.
- # A u = f on \Omega
- # n x u = n x u_bc on \partial \Omega
- # with
- # A u := eta * u + mu * curl curl u - nu * grad div u
-
- diags = solve_hcurl_source_pbm(
- nc=nc, deg=deg,
- eta=eta,
- nu=0,
- mu=1,
- domain_name=domain_name,
- source_type=source_type,
- source_proj=source_proj,
- backend_language=backend_language,
- project_sol=project_sol,
- gamma_h=gamma_h,
- plot_dir=plot_dir,
- )
-
- #
- # ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
-
- write_diags_to_file(
- diags,
- script_filename=__file__,
- diag_filename=diag_filename,
- params=params)
- write_diags_to_file(
- diags,
- script_filename=__file__,
- diag_filename=common_diag_filename,
- params=params)
-
-time_count(t_stamp_full, msg='full program')
diff --git a/examples/feec/mixed_source_pbms_conga_2d.py b/examples/feec/mixed_source_pbms_conga_2d.py
deleted file mode 100644
index 7bc382bf4..000000000
--- a/examples/feec/mixed_source_pbms_conga_2d.py
+++ /dev/null
@@ -1,455 +0,0 @@
-#---------------------------------------------------------------------------#
-# This file is part of PSYDAC which is released under MIT License. See the #
-# LICENSE file or go to https://github.com/pyccel/psydac/blob/devel/LICENSE #
-# for full license details. #
-#---------------------------------------------------------------------------#
-from mpi4py import MPI
-
-import os
-import numpy as np
-from collections import OrderedDict
-
-from sympy import lambdify
-
-from scipy.sparse import bmat
-from scipy.sparse.linalg import spsolve
-
-from sympde.calculus import dot
-from sympde.topology import element_of
-from sympde.expr.expr import LinearForm
-from sympde.expr.expr import integral
-from sympde.topology import Derham
-
-from psydac.api.settings import PSYDAC_BACKENDS
-
-from psydac.feec.pull_push import pull_2d_h1, pull_2d_hcurl, pull_2d_l2
-
-from psydac.feec.multipatch.api import discretize
-from psydac.feec.multipatch.fem_linear_operators import IdLinearOperator
-from psydac.feec.multipatch.operators import HodgeOperator
-from psydac.fem.plotting_utilities import plot_field_2d as plot_field
-from psydac.feec.multipatch.multipatch_domain_utilities import build_multipatch_domain
-from psydac.feec.multipatch.examples.ppc_test_cases import get_source_and_sol_for_magnetostatic_pbm
-from psydac.feec.multipatch.examples.hcurl_eigen_pbms_conga_2d import get_eigenvalues
-from psydac.feec.multipatch.utilities import time_count
-
-from psydac.feec.multipatch.non_matching_operators import construct_h1_conforming_projection, construct_hcurl_conforming_projection
-
-
-def solve_magnetostatic_pbm(
- nc=4, deg=4, domain_name='pretzel_f', backend_language=None, source_proj='P_L2_wcurl_J',
- source_type='dipole_J', bc_type='metallic',
- gamma0_h=10., gamma1_h=10.,
- dim_harmonic_space=0,
- project_solution=False,
- plot_source=False, plot_dir=None, hide_plots=True,
- m_load_dir="",
-):
- """
- solver for a magnetostatic problem
-
- div B = 0
- curl B = j
-
- written in the form of a mixed problem: find p in H1, u in H(curl), such that
-
- G^* u = f_scal on \\Omega
- G p + A u = f_vect on \\Omega
-
- with operators
-
- G: p -> grad p
- G^*: u -> -div u
- A: u -> curl curl u
-
- and sources
-
- f_scal = 0
- f_vect = curl j
-
- -- then the solution u = (Bx, By) satisfies the original magnetostatic equation, see e.g.
- Beirão da Veiga, Brezzi, Dassi, Marini and Russo, Virtual Element approx of 2D magnetostatic pbms, CMAME 327 (2017)
-
- Here the operators G and A are discretized with
-
- Gh: V0h -> V1h and Ah: V1h -> V1h
-
- in a broken-FEEC approach involving a discrete sequence on a 2D multipatch domain \\Omega,
-
- V0h --grad-> V1h -—curl-> V2h
-
- and boundary conditions to be specified (see the multi-patch paper for details).
-
- Harmonic constraint: if dim_harmonic_space > 0, a constraint is added, of the form
-
- u in H^\\perp
-
- where H = ker(L) is the kernel of the Hodge-Laplace operator L = curl curl u - grad div
-
- Note: if source_proj == 'P_L2_wcurl_J' then a scalar J is given and we define the V1h part of the discrete source as
- l(v) :=
-
- :param nc: nb of cells per dimension, in each patch
- :param deg: coordinate degree in each patch
- :param gamma0_h: jump penalization parameter in V0h
- :param gamma1_h: jump penalization parameter in V1h
- :param source_proj: approximation operator for the source, possible values are 'P_geom' or 'P_L2'
- :param source_type: must be implemented as a test-case
- :param bc_type: 'metallic' or 'pseudo-vacuum' -- see details in multi-patch paper
- :param m_load_dir: directory for matrix storage
- """
-
- ncells = [nc, nc]
- degree = [deg, deg]
-
- # if backend_language is None:
- # backend_language='python'
- # print('[note: using '+backend_language+ ' backends in discretize functions]')
- assert bc_type in ['metallic', 'pseudo-vacuum']
-
- print('---------------------------------------------------------------------------------------------------------')
- print('Starting solve_mixed_source_pbm function with: ')
- print(' ncells = {}'.format(ncells))
- print(' degree = {}'.format(degree))
- print(' domain_name = {}'.format(domain_name))
- print(' source_proj = {}'.format(source_proj))
- print(' bc_type = {}'.format(bc_type))
- print(' backend_language = {}'.format(backend_language))
- print('---------------------------------------------------------------------------------------------------------')
-
- print('building symbolic and discrete domain...')
- domain = build_multipatch_domain(domain_name=domain_name)
- mappings = OrderedDict([(P.logical_domain, P.mapping)
- for P in domain.interior])
- mappings_list = list(mappings.values())
- domain_h = discretize(domain, ncells=ncells)
-
- print('building symbolic and discrete derham sequences...')
- derham = Derham(domain, ["H1", "Hcurl", "L2"])
- derham_h = discretize(derham, domain_h, degree=degree)
-
- V0h = derham_h.V0
- V1h = derham_h.V1
- V2h = derham_h.V2
- print('dim(V0h) = {}'.format(V0h.nbasis))
- print('dim(V1h) = {}'.format(V1h.nbasis))
- print('dim(V2h) = {}'.format(V2h.nbasis))
-
- print('building the discrete operators:')
- print('commuting projection operators...')
- nquads = [4 * (d + 1) for d in degree]
- P0, P1, P2 = derham_h.projectors(nquads=nquads)
-
- # these physical projection operators should probably be in the
- # interface...
- def P0_phys(f_phys):
- f = lambdify(domain.coordinates, f_phys)
- f_log = [pull_2d_h1(f, m.get_callable_mapping())
- for m in mappings_list]
- return P0(f_log)
-
- def P1_phys(f_phys):
- f_x = lambdify(domain.coordinates, f_phys[0])
- f_y = lambdify(domain.coordinates, f_phys[1])
- f_log = [pull_2d_hcurl([f_x, f_y], m.get_callable_mapping())
- for m in mappings_list]
- return P1(f_log)
-
- def P2_phys(f_phys):
- f = lambdify(domain.coordinates, f_phys)
- f_log = [pull_2d_l2(f, m.get_callable_mapping())
- for m in mappings_list]
- return P2(f_log)
-
- I0_m = IdLinearOperator(V0h).to_sparse_matrix()
- I1_m = IdLinearOperator(V1h).to_sparse_matrix()
-
- print('Hodge operators...')
- # multi-patch (broken) linear operators / matrices
- H0 = HodgeOperator(
- V0h,
- domain_h,
- backend_language=backend_language,
- load_dir=m_load_dir,
- load_space_index=0)
- H1 = HodgeOperator(
- V1h,
- domain_h,
- backend_language=backend_language,
- load_dir=m_load_dir,
- load_space_index=1)
- H2 = HodgeOperator(
- V2h,
- domain_h,
- backend_language=backend_language,
- load_dir=m_load_dir,
- load_space_index=2)
-
- H0_m = H0.to_sparse_matrix() # = mass matrix of V0
- dH0_m = H0.get_dual_Hodge_sparse_matrix() # = inverse mass matrix of V0
- H1_m = H1.to_sparse_matrix() # = mass matrix of V1
- dH1_m = H1.get_dual_Hodge_sparse_matrix() # = inverse mass matrix of V1
- H2_m = H2.to_sparse_matrix() # = mass matrix of V2
- dH2_m = H2.get_dual_Hodge_sparse_matrix() # = inverse mass matrix of V2
-
- M0_m = H0_m
- M1_m = H1_m # usual notation
-
- hom_bc = (bc_type == 'pseudo-vacuum') # /!\ here u = B is in H(curl), not E /!\
- print('with hom_bc = {}'.format(hom_bc))
-
- print('conforming projection operators...')
- # conforming Projections (should take into account the boundary conditions
- # of the continuous deRham sequence)
- cP0_m = construct_h1_conforming_projection(V0h, hom_bc=True)
- cP1_m = construct_hcurl_conforming_projection(V1h, hom_bc=True)
-
- print('broken differential operators...')
- bD0, bD1 = derham_h.broken_derivatives_as_operators
- bD0_m = bD0.to_sparse_matrix()
- bD1_m = bD1.to_sparse_matrix()
-
- if not os.path.exists(plot_dir):
- os.makedirs(plot_dir)
-
- # Conga (projection-based) operator matrices
- print('grad matrix...')
- G_m = bD0_m @ cP0_m
- tG_m = H1_m @ G_m # grad: V0h -> tV1h
-
- print('curl-curl stiffness matrix...')
- C_m = bD1_m @ cP1_m
- CC_m = C_m.transpose() @ H2_m @ C_m
-
- # jump penalization and stabilization operators:
- JP0_m = I0_m - cP0_m
- S0_m = JP0_m.transpose() @ H0_m @ JP0_m
-
- JP1_m = I1_m - cP1_m
- S1_m = JP1_m.transpose() @ H1_m @ JP1_m
-
- if not hom_bc:
- # very small regularization to avoid constant p=1 in the kernel
- reg_S0_m = 1e-16 * M0_m + gamma0_h * S0_m
- else:
- reg_S0_m = gamma0_h * S0_m
-
- hf_cs = []
- if dim_harmonic_space > 0:
-
- print('computing the harmonic fields...')
- gamma_Lh = 10 # penalization value should not change the kernel
-
- GD_m = - tG_m @ dH0_m @ G_m.transpose() @ H1_m # todo: check with paper
- L_m = CC_m - GD_m + gamma_Lh * S1_m
- eigenvalues, eigenvectors = get_eigenvalues(
- dim_harmonic_space + 1, 1e-6, L_m, H1_m)
-
- for i in range(dim_harmonic_space):
- lambda_i = eigenvalues[i]
- print(
- ".. storing eigenmode #{}, with eigenvalue = {}".format(
- i, lambda_i))
- # check:
- if abs(lambda_i) > 1e-8:
- print(" ****** WARNING! this eigenvalue should be 0! ****** ")
- hf_cs.append(eigenvectors[:, i])
-
- # matrix of the coefs of the harmonic fields (Lambda^H_i) in the basis (Lambda_i), in the form:
- # hf_m = (c^H_{i,j})_{i < dim_harmonic_space, j < dim_V1} such that
- # Lambda^H_i = sum_j c^H_{i,j} Lambda^1_j
- hf_m = np.array(hf_cs).transpose()
- MH_m = M1_m @ hf_m
-
- # check:
- # should be the first positive eigenvalue of L_h
- lambda_i = eigenvalues[dim_harmonic_space]
- if abs(lambda_i) < 1e-4:
- print(" ****** Warning -- something is probably wrong: ")
- print(
- " ****** eigenmode #{} should have positive eigenvalue: {}".format(
- dim_harmonic_space, lambda_i))
-
- print('computing the full operator matrix with harmonic constraint...')
- A_m = bmat([[reg_S0_m, tG_m.transpose(), None],
- [tG_m, CC_m + gamma1_h * S1_m, MH_m],
- [None, MH_m.transpose(), None]])
-
- else:
- print('computing the full operator matrix without harmonic constraint...')
-
- A_m = bmat([[reg_S0_m, tG_m.transpose()],
- [tG_m, CC_m + gamma1_h * S1_m]])
-
- # get exact source, bc's, ref solution...
- # (not all the returned functions are useful here)
- print('getting the source and ref solution...')
- N_diag = 200
- method = 'conga'
- f_scal, f_vect, j_scal, uh_ref = get_source_and_sol_for_magnetostatic_pbm(
- source_type=source_type, domain=domain, domain_name=domain_name)
-
- # compute approximate source:
- # ff_h = (f0_h, f1_h) = (P0_h f_scal, P1_h f_vect) with projection operators specified by source_proj
- # and dual-basis coefficients in column array bb_c = (b0_c, b1_c)
- # note: f1_h may also be defined through the special option 'P_L2_wcurl_J'
- # for magnetostatic problems
- f0_c = f1_c = j2_c = None
- assert source_proj in ['P_geom', 'P_L2', 'P_L2_wcurl_J']
-
- if f_scal is None:
- tilde_f0_c = np.zeros(V0h.nbasis)
- else:
- print('approximating the V0 source with ' + source_proj)
- if source_proj == 'P_geom':
- f0_h = P0_phys(f_scal)
- f0_c = f0_h.coeffs.toarray()
- tilde_f0_c = H0_m.dot(f0_c)
- else:
- # L2 proj
- tilde_f0_c = derham_h.get_dual_dofs(
- space='V0',
- f=f_scal,
- backend_language=backend_language,
- return_format='numpy_array')
-
- if source_proj == 'P_L2_wcurl_J':
- if j_scal is None:
- tilde_j2_c = np.zeros(V2h.nbasis)
- tilde_f1_c = np.zeros(V1h.nbasis)
- else:
- print('approximating the V1 source as a weak curl of j_scal')
- tilde_j2_c = derham_h.get_dual_dofs(
- space='V2',
- f=j_scal,
- backend_language=backend_language,
- return_format='numpy_array')
- tilde_f1_c = C_m.transpose().dot(tilde_j2_c)
- elif f_vect is None:
- tilde_f1_c = np.zeros(V1h.nbasis)
- else:
- print('approximating the V1 source with ' + source_proj)
- if source_proj == 'P_geom':
- f1_h = P1_phys(f_vect)
- f1_c = f1_h.coeffs.toarray()
- tilde_f1_c = H1_m.dot(f1_c)
- else:
- assert source_proj == 'P_L2'
- tilde_f1_c = derham_h.get_dual_dofs(
- space='V1',
- f=f_vect,
- backend_language=backend_language,
- return_format='numpy_array')
-
- if plot_source:
- if f0_c is None:
- f0_c = dH0_m.dot(tilde_f0_c)
- plot_field(numpy_coeffs=f0_c, Vh=V0h, space_kind='h1', domain=domain, title='f0_h with P = ' + source_proj,
- filename=plot_dir + 'f0h_' + source_proj + '.png', hide_plot=hide_plots)
- if f1_c is None:
- f1_c = dH1_m.dot(tilde_f1_c)
- plot_field(numpy_coeffs=f1_c, Vh=V1h, space_kind='hcurl', domain=domain, title='f1_h with P = ' + source_proj,
- filename=plot_dir + 'f1h_' + source_proj + '.png', hide_plot=hide_plots)
- if source_proj == 'P_L2_wcurl_J':
- if j2_c is None:
- j2_c = dH2_m.dot(tilde_j2_c)
- plot_field(numpy_coeffs=j2_c, Vh=V2h, space_kind='l2', domain=domain, title='P_L2 jh in V2h',
- filename=plot_dir + 'j2h.png', hide_plot=hide_plots)
-
- print("building block RHS")
- if dim_harmonic_space > 0:
- tilde_h_c = np.zeros(dim_harmonic_space) # harmonic part of the rhs
- b_c = np.block([tilde_f0_c, tilde_f1_c, tilde_h_c])
- else:
- b_c = np.block([tilde_f0_c, tilde_f1_c])
-
- # direct solve with scipy spsolve ------------------------------
- print('solving source problem with scipy.spsolve...')
- sol_c = spsolve(A_m.asformat('csr'), b_c)
- # ------------------------------------------------------------
- ph_c = sol_c[:V0h.nbasis]
- uh_c = sol_c[V0h.nbasis:V0h.nbasis + V1h.nbasis]
- hh_c = np.zeros(V1h.nbasis)
- if dim_harmonic_space > 0:
- # compute the harmonic part (h) of the solution
- # coefs of the harmonic part, in the basis of the harmonic fields
- hh_hbcoefs = sol_c[V0h.nbasis + V1h.nbasis:]
- assert len(hh_hbcoefs) == dim_harmonic_space
- for i in range(dim_harmonic_space):
- # coefs the of the i-th harmonic field, in the B/M spline basis of
- # V1h
- hi_c = hf_cs[i]
- hh_c += hh_hbcoefs[i] * hi_c
-
- if project_solution:
- print('projecting the homogeneous solution on the conforming problem space...')
- uh_c = cP1_m.dot(uh_c)
- u_name = r'$P^1_h B_h$'
- ph_c = cP0_m.dot(ph_c)
- p_name = r'$P^0_h p_h$'
- else:
- u_name = r'$B_h$'
- p_name = r'$p_h$'
-
- print('getting and plotting the FEM solution from numpy coefs array...')
- params_str = 'gamma0_h={}_gamma1_h={}'.format(gamma0_h, gamma1_h)
- title = r'solution {} (amplitude)'.format(p_name)
- plot_field(numpy_coeffs=ph_c, Vh=V0h, space_kind='h1',
- domain=domain, title=title, filename=plot_dir + params_str + '_ph.png', hide_plot=hide_plots)
- title = r'solution $h_h$ (amplitude)'
- plot_field(numpy_coeffs=hh_c, Vh=V1h, space_kind='hcurl',
- domain=domain, title=title, filename=plot_dir + params_str + '_hh.png', hide_plot=hide_plots)
- title = r'solution {} (amplitude)'.format(u_name)
- plot_field(numpy_coeffs=uh_c, Vh=V1h, space_kind='hcurl', plot_type='amplitude',
- domain=domain, title=title, filename=plot_dir + params_str + '_uh.png', hide_plot=hide_plots)
- title = r'solution {} (vector field)'.format(u_name)
- plot_field(numpy_coeffs=uh_c, Vh=V1h, space_kind='hcurl', plot_type='vector_field',
- domain=domain, title=title, filename=plot_dir + params_str + '_uh_vf.png', hide_plot=hide_plots)
- title = r'solution {} (components)'.format(u_name)
- plot_field(numpy_coeffs=uh_c, Vh=V1h, space_kind='hcurl', plot_type='components',
- domain=domain, title=title, filename=plot_dir + params_str + '_uh_xy.png', hide_plot=hide_plots)
-
-
-if __name__ == '__main__':
-
- t_stamp_full = time_count()
-
- bc_type = 'metallic'
- # bc_type = 'pseudo-vacuum'
- source_type = 'dipole_J'
-
- # source_proj = 'P_L2_wcurl_J'
- source_proj = 'P_geom'
-
- domain_name = 'pretzel_f'
- dim_harmonic_space = 3
-
- # nc = 20
- # deg = 4
- nc = 10
- deg = 2
-
- # domain_name = 'curved_L_shape'
- # dim_harmonic_space = 0
-
- # nc = 2
- # deg = 2
-
- run_dir = '{}_{}_bc={}_nc={}_deg={}/'.format(
- domain_name, source_type, bc_type, nc, deg)
- m_load_dir = 'matrices_{}_nc={}_deg={}/'.format(domain_name, nc, deg)
- solve_magnetostatic_pbm(
- nc=nc, deg=deg,
- domain_name=domain_name,
- source_type=source_type,
- source_proj=source_proj,
- bc_type=bc_type,
- backend_language='pyccel-gcc',
- dim_harmonic_space=dim_harmonic_space,
- plot_source=True,
- plot_dir='./plots/magnetostatic_runs/' + run_dir,
- hide_plots=True,
- m_load_dir=m_load_dir
- )
-
- time_count(t_stamp_full, msg='full program')
From de759208c8cfaf02339a7cda69ba5cc5ec614653 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Wed, 3 Dec 2025 13:35:41 +0100
Subject: [PATCH 28/63] fix td example
---
examples/feec/ppc_test_cases.py | 412 ------------------------
examples/feec/timedomain_maxwell.py | 483 ++++++++++------------------
2 files changed, 171 insertions(+), 724 deletions(-)
delete mode 100644 examples/feec/ppc_test_cases.py
diff --git a/examples/feec/ppc_test_cases.py b/examples/feec/ppc_test_cases.py
deleted file mode 100644
index de3d487b2..000000000
--- a/examples/feec/ppc_test_cases.py
+++ /dev/null
@@ -1,412 +0,0 @@
-#---------------------------------------------------------------------------#
-# This file is part of PSYDAC which is released under MIT License. See the #
-# LICENSE file or go to https://github.com/pyccel/psydac/blob/devel/LICENSE #
-# for full license details. #
-#---------------------------------------------------------------------------#
-import os
-import numpy as np
-
-from sympy import pi, cos, sin, Tuple, exp, atan, atan2
-from sympy.functions.special.error_functions import erf
-# todo [MCP, 12/02/2022]: add an 'equation' argument to be able to return
-# 'exact solution'
-
-def get_phi_pulse(x_0, y_0, domain=None):
- x, y = domain.coordinates
- ds2_0 = (0.02)**2
- sigma_0 = (x - x_0)**2 + (y - y_0)**2
- phi_0 = exp(-sigma_0**2 / (2 * ds2_0))
-
- return phi_0
-
-
-def get_div_free_pulse(x_0, y_0, domain=None):
- x, y = domain.coordinates
- ds2_0 = (0.02)**2
- sigma_0 = (x - x_0)**2 + (y - y_0)**2
- phi_0 = exp(-sigma_0**2 / (2 * ds2_0))
- dx_sig_0 = 2 * (x - x_0)
- dy_sig_0 = 2 * (y - y_0)
- dx_phi_0 = - dx_sig_0 * sigma_0 / ds2_0 * phi_0
- dy_phi_0 = - dy_sig_0 * sigma_0 / ds2_0 * phi_0
- f_x = dy_phi_0
- f_y = - dx_phi_0
- f_vect = Tuple(f_x, f_y)
-
- return f_vect
-
-
-def get_curl_free_pulse(x_0, y_0, domain=None, pp=False):
- # return -grad phi_0
- x, y = domain.coordinates
- if pp:
- # psi=phi
- ds2_0 = (0.02)**2
- else:
- ds2_0 = (0.1)**2
- sigma_0 = (x - x_0)**2 + (y - y_0)**2
- phi_0 = exp(-sigma_0**2 / (2 * ds2_0))
- dx_sig_0 = 2 * (x - x_0)
- dy_sig_0 = 2 * (y - y_0)
- dx_phi_0 = - dx_sig_0 * sigma_0 / ds2_0 * phi_0
- dy_phi_0 = - dy_sig_0 * sigma_0 / ds2_0 * phi_0
- f_x = -dx_phi_0
- f_y = -dy_phi_0
- f_vect = Tuple(f_x, f_y)
-
- return f_vect
-
-
-def get_Delta_phi_pulse(x_0, y_0, domain=None, pp=False):
- # return -Delta phi_0, with same phi_0 as in get_curl_free_pulse()
- x, y = domain.coordinates
- if pp:
- # psi=phi
- ds2_0 = (0.02)**2
- else:
- ds2_0 = (0.1)**2
- sigma_0 = (x - x_0)**2 + (y - y_0)**2
- phi_0 = exp(-sigma_0**2 / (2 * ds2_0))
- dx_sig_0 = 2 * (x - x_0)
- dy_sig_0 = 2 * (y - y_0)
- dxx_sig_0 = 2
- dyy_sig_0 = 2
- dxx_phi_0 = ((dx_sig_0 * sigma_0 / ds2_0)**2 -
- ((dx_sig_0)**2 + dxx_sig_0 * sigma_0) / ds2_0) * phi_0
- dyy_phi_0 = ((dy_sig_0 * sigma_0 / ds2_0)**2 -
- ((dy_sig_0)**2 + dyy_sig_0 * sigma_0) / ds2_0) * phi_0
- f = - dxx_phi_0 - dyy_phi_0
-
- return f
-
-
-def get_Gaussian_beam_old(x_0, y_0, domain=None):
- # return E = cos(k*x) exp( - x^2 + y^2 / 2 sigma^2) v
- x, y = domain.coordinates
- x = x - x_0
- y = y - y_0
-
- k = (10, 0)
- nk = np.sqrt(k[0]**2 + k[1]**2)
-
- v = (k[0] / nk, k[1] / nk)
-
- sigma = 0.05
-
- xy = x**2 + y**2
- ef = exp(- xy / (2 * sigma**2))
-
- E = cos(k[1] * x + k[0] * y) * ef
- B = (-v[1] * x + v[0] * y) / (sigma**2) * E
-
- return Tuple(v[0] * E, v[1] * E), B
-
-
-def get_Gaussian_beam(x_0, y_0, domain=None):
- # return E = cos(k*x) exp( - x^2 + y^2 / 2 sigma^2) v
- x, y = domain.coordinates
-
- x = x - x_0
- y = y - y_0
-
- sigma = 0.1
-
- xy = x**2 + y**2
- ef = 1 / (sigma**2) * exp(- xy / (2 * sigma**2))
-
- # E = curl exp
- E = Tuple(y * ef, -x * ef)
-
- # B = curl E
- B = (xy / (sigma**2) - 2) * ef
-
- return E, B
-
-
-def get_diag_Gaussian_beam(x_0, y_0, domain=None):
- # return E = cos(k*x) exp( - x^2 + y^2 / 2 sigma^2) v
- x, y = domain.coordinates
- x = x - x_0
- y = y - y_0
-
- k = (np.pi, np.pi)
- nk = np.sqrt(k[0]**2 + k[1]**2)
-
- v = (k[0] / nk, k[1] / nk)
-
- sigma = 0.25
-
- xy = x**2 + y**2
- ef = exp(- xy / (2 * sigma**2))
-
- E = cos(k[1] * x + k[0] * y) * ef
- B = (-v[1] * x + v[0] * y) / (sigma**2) * E
-
- return Tuple(v[0] * E, v[1] * E), B
-
-
-def get_easy_Gaussian_beam(x_0, y_0, domain=None):
- # return E = cos(k*x) exp( - x^2 + y^2 / 2 sigma^2) v
- x, y = domain.coordinates
- x = x - x_0
- y = y - y_0
-
- k = pi
- sigma = 0.5
-
- xy = x**2 + y**2
- ef = exp(- xy / (2 * sigma**2))
-
- E = cos(k * y) * ef
- B = -y / (sigma**2) * E
-
- return Tuple(E, 0), B
-
-
-def get_Gaussian_beam2(x_0, y_0, domain=None):
- """
- Gaussian beam
- Beam inciding from the left, centered and normal to wall:
- x: axial normalized distance to the beam's focus
- y: radial normalized distance to the center axis of the beam
- """
- x, y = domain.coordinates
-
- x0 = x_0
- y0 = y_0
- theta = pi / 2
- w0 = 1
-
- t = [(x - x0) * cos(theta) - (y - y0) * sin(theta),
- (x - x0) * sin(theta) + (y - y0) * cos(theta)]
-
- EW0 = 1.0 # amplitude at the waist
- k0 = 2 * pi # free-space wavenumber
-
- x_ray = pi * w0 ** 2 # Rayleigh range
-
- w = w0 * (1 + t[0]**2 / x_ray**2)**0.5 # width
- curv = t[0] / (t[0]**2 + x_ray**2) # curvature
-
- # corresponds to atan(x / x_ray), which is the Gouy phase
- gouy_psi = -0.5 * atan2(t[0] / x_ray, 1.)
-
- EW_mod = EW0 * (w0 / w)**0.5 * exp(-(t[1] ** 2) / (w ** 2)) # Amplitude
- phase = k0 * t[0] + 0.5 * k0 * curv * t[1] ** 2 + gouy_psi # Phase
-
- EW_r = EW_mod * cos(phase) # Real part
- EW_i = EW_mod * sin(phase) # Imaginary part
-
- B = 0 # t[1]/(w**2) * EW_r
-
- return Tuple(0, EW_r), B
-
-
-def get_source_and_sol_for_magnetostatic_pbm(
- source_type=None,
- domain=None, domain_name=None,
- refsol_params=None
-):
- """
- provide source, and exact solutions when available, for:
-
- Find u=B in H(curl) such that
-
- div B = 0
- curl B = j
-
- written as a mixed problem, see solve_magnetostatic_pbm()
- """
- u_ex = None # exact solution
- x, y = domain.coordinates
- if source_type == 'dipole_J':
- # we compute two possible source terms:
- # . a dipole current j_scal = phi_0 - phi_1 (two blobs)
- # . and f_vect = curl j_scal
- x_0 = 1.0
- y_0 = 1.0
- ds2_0 = (0.02)**2
- sigma_0 = (x - x_0)**2 + (y - y_0)**2
- phi_0 = exp(-sigma_0**2 / (2 * ds2_0))
- dx_sig_0 = 2 * (x - x_0)
- dy_sig_0 = 2 * (y - y_0)
- dx_phi_0 = - dx_sig_0 * sigma_0 / ds2_0 * phi_0
- dy_phi_0 = - dy_sig_0 * sigma_0 / ds2_0 * phi_0
-
- x_1 = 2.0
- y_1 = 2.0
- ds2_1 = (0.02)**2
- sigma_1 = (x - x_1)**2 + (y - y_1)**2
- phi_1 = exp(-sigma_1**2 / (2 * ds2_1))
- dx_sig_1 = 2 * (x - x_1)
- dy_sig_1 = 2 * (y - y_1)
- dx_phi_1 = - dx_sig_1 * sigma_1 / ds2_1 * phi_1
- dy_phi_1 = - dy_sig_1 * sigma_1 / ds2_1 * phi_1
-
- f_scal = None
- j_scal = phi_0 - phi_1
- f_x = dy_phi_0 - dy_phi_1
- f_y = - dx_phi_0 + dx_phi_1
- f_vect = Tuple(f_x, f_y)
-
- else:
- raise ValueError(source_type)
-
- return f_scal, f_vect, j_scal, u_ex
-
-
-def get_source_and_solution_hcurl(
- source_type=None, eta=0, mu=0, nu=0,
- domain=None, domain_name=None):
- """
- provide source, and exact solutions when available, for:
-
- Find u in H(curl) such that
-
- A u = f on \\Omega
- n x u = n x u_bc on \\partial \\Omega
-
- with
-
- A u := eta * u + mu * curl curl u - nu * grad div u
-
- see solve_hcurl_source_pbm()
- """
-
- # exact solutions (if available)
- u_ex = None
- curl_u_ex = None
- div_u_ex = None
-
- # bc solution: describe the bc on boundary. Inside domain, values should
- # not matter. Homogeneous bc will be used if None
- u_bc = None
-
- # source terms
- f_vect = None
-
- # auxiliary term (for more diagnostics)
- grad_phi = None
- phi = None
-
- x, y = domain.coordinates
-
- if source_type == 'manu_maxwell_inhom':
- # used for Maxwell equation with manufactured solution
- f_vect = Tuple(eta * sin(pi * y) - pi**2 * sin(pi * y) * cos(pi * x) + pi**2 * sin(pi * y),
- eta * sin(pi * x) * cos(pi * y) + pi**2 * sin(pi * x) * cos(pi * y))
- if nu == 0:
- u_ex = Tuple(sin(pi * y), sin(pi * x) * cos(pi * y))
- curl_u_ex = pi * (cos(pi * x) * cos(pi * y) - cos(pi * y))
- div_u_ex = -pi * sin(pi * x) * sin(pi * y)
- else:
- raise NotImplementedError
- u_bc = u_ex
-
- elif source_type == 'elliptic_J':
- # no manufactured solution for Maxwell pbm
- x0 = 1.5
- y0 = 1.5
- s = (x - x0) - (y - y0)
- t = (x - x0) + (y - y0)
- a = (1 / 1.9)**2
- b = (1 / 1.2)**2
- sigma2 = 0.0121
- tau = a * s**2 + b * t**2 - 1
- phi = exp(-tau**2 / (2 * sigma2))
- dx_tau = 2 * (a * s + b * t)
- dy_tau = 2 * (-a * s + b * t)
-
- f_x = dy_tau * phi
- f_y = - dx_tau * phi
- f_vect = Tuple(f_x, f_y)
-
- else:
- raise ValueError(source_type)
-
- # u_ex = Tuple(0, 1) # DEBUG
- return f_vect, u_bc, u_ex, curl_u_ex, div_u_ex # , phi, grad_phi
-
-
-def get_source_and_solution_h1(source_type=None, eta=0, mu=0,
- domain=None, domain_name=None):
- """
- provide source, and exact solutions when available, for:
-
- Find u in H^1, such that
-
- A u = f on \\Omega
- u = u_bc on \\partial \\Omega
-
- with
-
- A u := eta * u - mu * div grad u
-
- see solve_h1_source_pbm()
- """
-
- # exact solutions (if available)
- u_ex = None
-
- # bc solution: describe the bc on boundary. Inside domain, values should
- # not matter. Homogeneous bc will be used if None
- u_bc = None
-
- # source terms
- f_scal = None
-
- # auxiliary term (for more diagnostics)
- grad_phi = None
- phi = None
-
- x, y = domain.coordinates
-
- if source_type in ['manu_poisson_elliptic']:
- x0 = 1.5
- y0 = 1.5
- s = (x - x0) - (y - y0)
- t = (x - x0) + (y - y0)
- a = (1 / 1.9)**2
- b = (1 / 1.2)**2
- sigma2 = 0.0121
- tau = a * s**2 + b * t**2 - 1
- phi = exp(-tau**2 / (2 * sigma2))
- dx_tau = 2 * (a * s + b * t)
- dy_tau = 2 * (-a * s + b * t)
- dxx_tau = 2 * (a + b)
- dyy_tau = 2 * (a + b)
-
- dx_phi = (-tau * dx_tau / sigma2) * phi
- dy_phi = (-tau * dy_tau / sigma2) * phi
- grad_phi = Tuple(dx_phi, dy_phi)
-
- f_scal = -((tau * dx_tau / sigma2)**2 - (tau * dxx_tau + dx_tau**2) / sigma2
- + (tau * dy_tau / sigma2)**2 - (tau * dyy_tau + dy_tau**2) / sigma2) * phi
-
- # exact solution of -p'' = f with hom. bc's on pretzel domain
- if mu == 1 and eta == 0:
- u_ex = phi
- else:
- print('WARNING (54375385643): exact solution not available in this case!')
-
- if not domain_name in ['pretzel', 'pretzel_f']:
- # we may have non-hom bc's
- u_bc = u_ex
-
- elif source_type == 'manu_poisson_2':
- f_scal = -4
- if mu == 1 and eta == 0:
- u_ex = x**2 + y**2
- else:
- raise NotImplementedError
- u_bc = u_ex
-
- elif source_type == 'manu_poisson_sincos':
- u_ex = sin(pi * x) * cos(pi * y)
- f_scal = (eta + 2 * mu * pi**2) * u_ex
- u_bc = u_ex
-
- else:
- raise ValueError(source_type)
-
- return f_scal, u_bc, u_ex
diff --git a/examples/feec/timedomain_maxwell.py b/examples/feec/timedomain_maxwell.py
index 4525b2f04..4b0b24495 100644
--- a/examples/feec/timedomain_maxwell.py
+++ b/examples/feec/timedomain_maxwell.py
@@ -15,44 +15,30 @@
     V0h --grad-> V1h --curl-> V2h
(Eh) (Bh)
"""
-
-from pytest import param
-from mpi4py import MPI
-
import os
import numpy as np
-import scipy as sp
-from collections import OrderedDict
-import matplotlib.pyplot as plt
-from sympy import lambdify, Matrix
+from sympde.calculus import grad, dot, curl, cross
+from sympde.topology import NormalVector
+from sympde.topology import elements_of
+from sympde.topology import Derham
+from sympde.expr.expr import integral
+from sympde.expr.expr import BilinearForm
-from scipy.sparse.linalg import spsolve
-from scipy import special
+from psydac.linalg.basic import IdentityOperator
-from sympde.calculus import dot
-from sympde.topology import element_of
-from sympde.expr.expr import LinearForm
-from sympde.expr.expr import integral, Norm
-from sympde.topology import Derham
-from psydac.linalg.basic import IdentityOperator
-
-from psydac.api.settings import PSYDAC_BACKENDS
+from psydac.api.settings import PSYDAC_BACKENDS
from psydac.api.discretization import discretize
-
-from psydac.fem.plotting_utilities import plot_field_2d as plot_field
-from psydac.feec.multipatch.multipatch_domain_utilities import build_multipatch_domain
-
-from psydac.feec.multipatch.examples.ppc_test_cases import get_source_and_solution_hcurl, get_div_free_pulse, get_curl_free_pulse, get_Delta_phi_pulse, get_Gaussian_beam
-from psydac.feec.multipatch.utils_conga_2d import DiagGrid, P0_phys, P1_phys, P2_phys, get_Vh_diags_for
-from psydac.feec.multipatch.utilities import time_count
-from psydac.fem.basic import FemField
-from psydac.feec.multipatch.multipatch_domain_utilities import build_cartesian_multipatch_domain
-
from psydac.api.postprocessing import OutputManager, PostProcessManager
-from psydac.fem.projectors import get_dual_dofs
+from psydac.feec.multipatch_domain_utilities import build_multipatch_domain, build_cartesian_multipatch_domain
+
+from psydac.fem.basic import FemField
+from psydac.fem.projectors import get_dual_dofs
+#==============================================================================
+# Solver for the TD Maxwell problem
+#==============================================================================
def solve_td_maxwell_pbm(*,
nc=4,
deg=4,
@@ -62,15 +48,10 @@ def solve_td_maxwell_pbm(*,
domain_name='pretzel_f',
backend='pyccel-gcc',
source_type='zero',
- source_omega=None,
- source_proj='P_L2',
- project_sol=False,
- filter_source=True,
E0_type='pulse_2',
- E0_proj='P_L2',
plot_dir=None,
- plot_time_ranges=None,
- domain_lims=None
+ domain_lims=None,
+ p_moments=4,
):
"""
solver for the TD Maxwell problem: find E(t) in H(curl), B in L2, such that
@@ -113,77 +94,37 @@ def solve_td_maxwell_pbm(*,
Name of the backend used for acceleration of the computational kernels,
to be chosen among the available keys of the PSYDAC_BACKENDS dict.
- source_type : str {'zero' | 'pulse' | 'cf_pulse' | 'Il_pulse'}
+ source_type : str {'zero' | 'pulse' | 'cf_pulse'}
Name that identifies the space-time profile of the current source, to be
chosen among those available in the function get_source_and_solution().
Available options:
- 'zero' : no current source
- 'pulse' : div-free current source, time-harmonic
- 'cf_pulse': curl-free current source, time-harmonic
- - 'Il_pulse': Issautier-like pulse, with both a div-free and a
- curl-free component, not time-harmonic.
-
- source_omega : float
- Pulsation of the time-harmonic component (if any) of a time-dependent
- current source.
-
- source_proj : str {'P_geom' | 'P_L2'}
- Name of the approximation operator for the current source: 'P_geom' is
- a geometric projector (based on inter/histopolation) which yields the
- primal degrees of freedom; 'P_L2' is an L2 projector which yields the
- dual degrees of freedom. Change of basis from primal to dual (and vice
- versa) is obtained through multiplication with the proper Hodge matrix.
-
- project_sol : bool
- Whether the solution fields should be projected onto the corresponding
- conforming spaces before plotting them.
-
- filter_source : bool
- If True, the current source will be filtered with the conforming
- projector operator (or its dual, depending on which basis is used).
E0_type : str {'zero', 'pulse'}
Initial conditions for the electric field. Choose 'zero' for E0=0
and 'pulse' for a non-zero field localized in a small region.
- E0_proj : str {'P_geom' | 'P_L2'}
- Name of the approximation operator for the initial electric field E0
- (see source_proj for details). Only relevant if E0 is not zero.
-
plot_dir : str
Path to the directory where the figures will be saved.
- plot_time_ranges : list
- List of lists, of the form `[[start, end], dtp]`, where `[start, end]`
- is a time interval and `dtp` is the time between two successive plots.
-
domain_lims : list
If the domain_name is 'refined_square' or 'square_L_shape', this
parameter must be set to the list of the two intervals defining the
rectangular domain, i.e. `[[x_min, x_max], [y_min, y_max]]`.
-
+
+ p_moments : int
+ Degree of the polynomial moments used in the conforming projection.
"""
degree = [deg, deg]
- if source_omega is not None:
- period_time = 2 * np.pi / source_omega
- Nt_pp = period_time // dt_max
-
- if plot_time_ranges is None:
- plot_time_ranges = [
- [[0, final_time], final_time]
- ]
-
print('---------------------------------------------------------------------------------------------------------')
print('Starting solve_td_maxwell_pbm function with: ')
print(' ncells = {}'.format(nc))
print(' degree = {}'.format(degree))
print(' domain_name = {}'.format(domain_name))
- print(' E0_type = {}'.format(E0_type))
- print(' E0_proj = {}'.format(E0_proj))
- print(' source_type = {}'.format(source_type))
- print(' source_proj = {}'.format(source_proj))
print(' backend = {}'.format(backend))
print('---------------------------------------------------------------------------------------------------------')
@@ -191,12 +132,10 @@ def solve_td_maxwell_pbm(*,
print()
print(' -- building discrete spaces and operators --')
- t_stamp = time_count()
print(' .. multi-patch domain...')
if domain_name == 'refined_square' or domain_name == 'square_L_shape':
int_x, int_y = domain_lims
domain = build_cartesian_multipatch_domain(nc, int_x, int_y, mapping='identity')
-
else:
domain = build_multipatch_domain(domain_name=domain_name)
@@ -209,67 +148,43 @@ def solve_td_maxwell_pbm(*,
ncells = {patch.name: [nc[int(patch.name[2])][int(patch.name[4])],
nc[int(patch.name[2])][int(patch.name[4])]] for patch in domain.interior}
- mappings = OrderedDict([(P.logical_domain, P.mapping)
- for P in domain.interior])
- mappings_list = list(mappings.values())
-
- t_stamp = time_count(t_stamp)
print(' .. derham sequence...')
derham = Derham(domain, ["H1", "Hcurl", "L2"])
- t_stamp = time_count(t_stamp)
print(' .. discrete domain...')
domain_h = discretize(domain, ncells=ncells)
- t_stamp = time_count(t_stamp)
print(' .. discrete derham sequence...')
-
derham_h = discretize(derham, domain_h, degree=degree)
- t_stamp = time_count(t_stamp)
print(' .. commuting projection operators...')
nquads = [4 * (d + 1) for d in degree]
P0, P1, P2 = derham_h.projectors(nquads=nquads)
- t_stamp = time_count(t_stamp)
print(' .. multi-patch spaces...')
V0h, V1h, V2h = derham_h.spaces
- t_stamp = time_count(t_stamp)
print(' .. Id operator and matrix...')
I1 = IdentityOperator(V1h.coeff_space)
- t_stamp = time_count(t_stamp)
print(' .. Hodge operators...')
H0, H1, H2 = derham_h.hodge_operators(kind='linop')
dH0, dH1, dH2 = derham_h.hodge_operators(kind='linop', dual=True)
-
- t_stamp = time_count(t_stamp)
print(' .. conforming Projection operators...')
- cP0, cP1, cP2 = derham_h.conforming_projectors(kind='linop', p_moments = degree[0]+2, hom_bc = False)
+ cP0, cP1, cP2 = derham_h.conforming_projectors(kind='linop', p_moments=p_moments, hom_bc=False)
- t_stamp = time_count(t_stamp)
print(' .. broken differential operators...')
bD0, bD1 = derham_h.derivatives(kind='linop')
-
- if plot_dir is not None and not os.path.exists(plot_dir):
- os.makedirs(plot_dir)
-
print(' .. matrix of the primal curl (in primal bases)...')
C = bD1 @ cP1
+
print(' .. matrix of the dual curl (also in primal bases)...')
dC = dH1 @ C.T @ H2
-
### Silvermueller ABC
- from sympde.calculus import grad, dot, curl, cross
- from sympde.topology import NormalVector
- from sympde.expr.expr import BilinearForm
- from sympde.topology import elements_of
-
u, v = elements_of(derham.V1, names='u, v')
nn = NormalVector('nn')
boundary = domain.boundary
@@ -278,16 +193,15 @@ def solve_td_maxwell_pbm(*,
a = BilinearForm((u, v), integral(boundary, expr_b))
ah = discretize(a, domain_h, [V1h, V1h], backend=PSYDAC_BACKENDS[backend],)
A_eps = ah.assemble()
- # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Compute stable time step size based on max CFL and max dt
dt = compute_stable_dt(C=C, dC=dC, cfl_max=cfl_max, dt_max=dt_max)
- # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ print(" Reduce time step to match the simulation final time:")
+ Nt = int(np.ceil(final_time / dt))
+ dt = final_time / Nt
+ print(f" . Time step size : dt = {dt}")
+ print(' total nb of time steps: Nt = {}, final time: T = {:5.4f}'.format(Nt, final_time))
- # Absorbing dC
- CH2 = C.T @ H2
H1A = H1 + dt * A_eps
# alternative inverse
@@ -303,48 +217,15 @@ def solve_td_maxwell_pbm(*,
H1A_inv = SparseMatrixLinearOperator(M.codomain, M.domain, M_inv)
####
- dC = H1A_inv @ CH2
+ # Absorbing dC
+ dC = H1A_inv @ C.T @ H2
dCH1 = H1A_inv @ H1
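+ # note: for a zero current source, one Ampere step below amounts to solving
+ # (H1 + dt * A_eps) E^{n+1} = H1 E^n + dt * C.T @ H2 @ B^{n+1/2},
+ # which is why dC and dCH1 are pre-multiplied by H1A_inv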
- print(' .. matrix of the dual div (still in primal bases)...')
- D = dH0 @ cP0.T @ bD0.T @ H1
-
-
- print(" Reduce time step to match the simulation final time:")
- Nt = int(np.ceil(final_time / dt))
- dt = final_time / Nt
- print(f" . Time step size : dt = {dt}")
- print(f" . Nb of time steps: Nt = {Nt}")
-
- # ...
- def is_plotting_time(nt, *, dt=dt, Nt=Nt, plot_time_ranges=plot_time_ranges):
- if nt in [0, Nt]:
- return True
- for [start, end], dt_plots in plot_time_ranges:
- # number of time steps between two successive plots
- ds = max(dt_plots // dt, 1)
- if (start <= nt * dt <= end) and (nt % ds == 0):
- return True
- return False
- # ...
-
-
- print(' ------ ------ ------ ------ ------ ------ ------ ------ ')
- print(' ------ ------ ------ ------ ------ ------ ------ ------ ')
- print(' total nb of time steps: Nt = {}, final time: T = {:5.4f}'.format(Nt, final_time))
- print(' ------ ------ ------ ------ ------ ------ ------ ------ ')
- print(' ------ ------ ------ ------ ------ ------ ------ ------ ')
- print(' ------ ------ ------ ------ ------ ------ ------ ------ ')
# ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----
# source
-
- t_stamp = time_count(t_stamp)
print()
print(' -- getting source --')
- f0_h = None
- f0_harmonic_h = None
- rho0_h = None
if source_type == 'zero':
@@ -359,113 +240,28 @@ def is_plotting_time(nt, *, dt=dt, Nt=Nt, plot_time_ranges=plot_time_ranges):
f0 = get_curl_free_pulse(x_0=np.pi/2, y_0=np.pi/2, domain=domain)
- elif source_type == 'Il_pulse': # Issautier-like pulse
- # source will be
- # J = curl A + cos(om*t) * grad phi
- # so that
- # dt rho = - div J = - cos(om*t) Delta phi
- # for instance, with rho(t=0) = 0 this gives
- # rho = - sin(om*t)/om * Delta phi
- # and Gauss' law reads
- # div E = rho = - sin(om*t)/om * Delta phi
- f0 = get_div_free_pulse(x_0=np.pi/2, y_0=np.pi/2, domain=domain) # this is curl A
- f0_harmonic = get_curl_free_pulse( x_0=np.pi/2, y_0=np.pi/2, domain=domain) # this is grad phi
-
- rho0 = get_Delta_phi_pulse(x_0=np.pi/2, y_0=np.pi/2, domain=domain) # this is Delta phi
- tilde_rho0_h = get_dual_dofs(Vh=V0h, f=rho0, domain_h=domain_h, backend_language=backend)
- tilde_rho0_h = cP0.T @ tilde_rho0_h
- rho0_h = dH0.dot(tilde_rho0_h)
else:
- f0, u_bc, u_ex, curl_u_ex, div_u_ex = get_source_and_solution_hcurl(source_type=source_type, domain=domain, domain_name=domain_name)
- assert u_bc is None # only homogeneous BC's for now
-
-
- if source_omega is not None:
- f0_harmonic = f0
- f0 = None
-
- def source_enveloppe(tau):
- return 1
-
- t_stamp = time_count(t_stamp)
- tilde_f0_h = f0_h = None
- tilde_f0_harmonic_h = f0_harmonic_h = None
-
- if source_proj == 'P_geom':
- print(' .. projecting the source with commuting projection...')
-
- if f0 is not None:
- f0_h = P1_phys(f0, P1, domain).coeffs
- tilde_f0_h = H1.dot(f0_h)
-
- if f0_harmonic is not None:
- f0_harmonic_h = P1_phys(f0_harmonic, P1, domain).coeffs
- tilde_f0_harmonic_h = H1.dot(f0_harmonic_h)
-
- elif source_proj == 'P_L2':
-
- if f0 is not None:
- if source_type == 'Il_pulse':
- source_name = 'Il_pulse_f0'
- else:
- source_name = source_type
+ raise ValueError(source_type)
- print(' .. projecting the source f0 with L2 projection...')
- tilde_f0_h = get_dual_dofs(Vh=V1h, f=f0, domain_h=domain_h, backend_language=backend)
- if f0_harmonic is not None:
- if source_type == 'Il_pulse':
- source_name = 'Il_pulse_f0_harmonic'
- else:
- source_name = source_type
+ if f0 is not None:
+ print(' .. projecting the source f0 with L2 projection...')
+ tilde_f0_h = get_dual_dofs(Vh=V1h, f=f0, domain_h=domain_h, backend_language=backend)
- print(' .. projecting the source f0_harmonic with L2 projection...')
- tilde_f0_harmonic_h = get_dual_dofs(Vh=V1h, f=f0_harmonic, domain_h=domain_h, backend_language=backend)
-
- else:
- raise ValueError(source_proj)
-
- t_stamp = time_count(t_stamp)
- if filter_source:
print(' .. filtering the source...')
- if tilde_f0_h is not None:
- tilde_f0_h = cP1.T @ tilde_f0_h
-
- if tilde_f0_harmonic_h is not None:
- tilde_f0_harmonic_h = cP1.T @ tilde_f0_harmonic_h
+ tilde_f0_h = cP1.T @ tilde_f0_h
- if tilde_f0_h is not None:
f0_h = dH1.dot(tilde_f0_h)
- if tilde_f0_harmonic_h is not None:
- f0_harmonic_h = dH1.dot(tilde_f0_harmonic_h)
-
+ else:
- if f0_h is None:
f0_h = V1h.coeff_space.zeros()
- t_stamp = time_count(t_stamp)
-
- # diags arrays
- E_norm2_diag = np.zeros(Nt + 1)
- B_norm2_diag = np.zeros(Nt + 1)
- divE_norm2_diag = np.zeros(Nt + 1)
- time_diag = np.zeros(Nt + 1)
- PE_norm2_diag = np.zeros(Nt + 1)
- I_PE_norm2_diag = np.zeros(Nt + 1)
- J_norm2_diag = np.zeros(Nt + 1)
- if source_type == 'Il_pulse':
- GaussErr_norm2_diag = np.zeros(Nt + 1)
- GaussErrP_norm2_diag = np.zeros(Nt + 1)
- else:
- GaussErr_norm2_diag = None
- GaussErrP_norm2_diag = None
-
# ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----
# initial solution
- print(' .. initial solution ..')
+ print(' -- initial solution --')
# initial B sol
B_h = V2h.coeff_space.zeros()
@@ -479,38 +275,31 @@ def source_enveloppe(tau):
E0 = get_div_free_pulse(x_0=np.pi/2, y_0=np.pi/2, domain=domain)
- if E0_proj == 'P_geom':
- print(' .. projecting E0 with commuting projection...')
- E0_h = P1_phys(E0, P1, domain)
- E_h = E0_h.coeffs
-
- elif E0_proj == 'P_L2':
-
- print(' .. projecting E0 with L2 projection...')
- tilde_E0_h = get_dual_dofs(Vh=V1h, f=E0, domain_h=domain_h, backend_language=backend)
- E_h = dH1.dot(tilde_E0_h)
+ print(' .. projecting E0 with L2 projection...')
+ tilde_E0_h = get_dual_dofs(Vh=V1h, f=E0, domain_h=domain_h, backend_language=backend)
+ E_h = dH1.dot(tilde_E0_h)
elif E0_type == 'pulse_2':
E0, B0 = get_Gaussian_beam(y_0=np.pi/2, x_0=np.pi/2, domain=domain)
- if E0_proj == 'P_geom':
- print(' .. projecting E0 with commuting projection...')
+ print(' .. projecting E0 with L2 projection...')
+ tilde_E0_h = get_dual_dofs(Vh=V1h, f=E0, domain_h=domain_h, backend_language=backend)
+ E_h = dH1.dot(tilde_E0_h)
- E0_h = P1_phys(E0, P1, domain)
- E_h = E0_h.coeffs
+ tilde_B0_h = get_dual_dofs(Vh=V2h, f=B0, domain_h=domain_h, backend_language=backend)
+ B_h = dH2.dot(tilde_B0_h)
- B0_h = P2_phys(B0, P2, domain)
- B_h = B0_h.coeffs
+ elif E0_type == 'Gaussian':
+
+ E0, B0 = get_Gaussian_beam(y_0=np.pi/2, x_0=np.pi/2, domain=domain)
- elif E0_proj == 'P_L2':
-
- print(' .. projecting E0 with L2 projection...')
- tilde_E0_h = get_dual_dofs(Vh=V1h, f=E0, domain_h=domain_h, backend_language=backend)
- E_h = dH1.dot(tilde_E0_h)
+ print(' .. projecting E0 with L2 projection...')
+ tilde_E0_h = get_dual_dofs(Vh=V1h, f=E0, domain_h=domain_h, backend_language=backend)
+ E_h = dH1.dot(tilde_E0_h)
- tilde_B0_h = get_dual_dofs(Vh=V2h, f=B0, domain_h=domain_h, backend_language=backend)
- B_h = dH2.dot(tilde_B0_h)
+ tilde_B0_h = get_dual_dofs(Vh=V2h, f=B0, domain_h=domain_h, backend_language=backend)
+ B_h = dH2.dot(tilde_B0_h)
else:
raise ValueError(E0_type)
@@ -518,23 +307,9 @@ def source_enveloppe(tau):
# ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----
# time loop
- def compute_diags(E_h, B_h, J_h, nt):
- time_diag[nt] = (nt) * dt
- PE_h = cP1.dot(E_h)
- I_PE_h = E_h - PE_h
- E_norm2_diag[nt] = E_h.inner(H1.dot(E_h))
- PE_norm2_diag[nt] = PE_h.inner(H1.dot(PE_h))
- I_PE_norm2_diag[nt] = I_PE_h.inner(H1.dot(I_PE_h))
- J_norm2_diag[nt] = J_h.inner(H1.dot(J_h))
- B_norm2_diag[nt] = B_h.inner(H2.dot(B_h))
- divE_h = D @ E_h
- divE_norm2_diag[nt] = divE_h.inner(H0.dot(divE_h))
- if source_type == 'Il_pulse' and source_omega is not None:
- rho_h = rho0_h * np.sin(source_omega * nt * dt) / omega
- GaussErr = rho_h - divE_h
- GaussErrP = rho_h - D @ PE_h
- GaussErr_norm2_diag[nt] = GaussErr.inner(H0.dot(GaussErr))
- GaussErrP_norm2_diag[nt] = GaussErrP.inner(H0.dot(GaussErrP))
+
+ if plot_dir is not None and not os.path.exists(plot_dir):
+ os.makedirs(plot_dir)
if plot_dir:
OM1 = OutputManager(plot_dir + '/spaces1.yml', plot_dir + '/fields1.h5')
@@ -555,38 +330,35 @@ def compute_diags(E_h, B_h, J_h, nt):
f_h = f0_h.copy()
+ Btemp_h = B_h.copy()
+ Etemp_h = E_h.copy()
+
+ print(" -- time loop --")
for nt in range(Nt):
- print(' .. nt+1 = {}/{}'.format(nt + 1, Nt))
+ print(' .. nt+1 = {}/{}'.format(nt+1, Nt))
# 1/2 faraday: Bn -> Bn+1/2
- B_h -= (dt / 2) * C @ E_h
-
- # ampere: En -> En+1
- if f0_harmonic_h is not None and source_omega is not None:
- f_harmonic_h = f0_harmonic_h * (np.sin(source_omega * (nt + 1) * dt) - np.sin(source_omega * (nt) * dt)) / (dt * source_omega) # * source_enveloppe(omega*(nt+1/2)*dt)
- f_h = f0_h + f_harmonic_h
-
- E_h = dCH1 @ E_h + dt * (dC @ B_h - f_h)
-
- # 1/2 faraday: Bn+1/2 -> Bn+1
- B_h -= (dt / 2) * C @ E_h
-
- # diags:
- compute_diags(E_h, B_h, f_h, nt=nt + 1)
-
-
+ # in-place version of the leapfrog update:
+ # B_h -= (dt/2) * C @ E_h
+ # E_h = dCH1 @ E_h + dt * (dC @ B_h - f_h)
+ # B_h -= (dt/2) * C @ E_h
+
+ C.dot(E_h, out=Btemp_h)
+ B_h -= (dt/2) * Btemp_h
+
+ dCH1.dot(E_h, out=E_h)
+ dC.dot(B_h, out=Etemp_h)
+ E_h += dt * (Etemp_h - f_h)
+
+ C.dot(E_h, out=Btemp_h)
+ B_h -= (dt/2) * Btemp_h
- if is_plotting_time(nt + 1) and plot_dir:
- print("Plot fields")
-
- Eh = FemField(V1h, coeffs=cP1 @ E_h)
- OM1.add_snapshot(t=nt * dt, ts=nt)
- OM1.export_fields(Eh=Eh)
-
- Bh = FemField(V2h, coeffs=B_h)
- OM2.add_snapshot(t=nt * dt, ts=nt)
- OM2.export_fields(Bh=Bh)
+ Eh = FemField(V1h, coeffs=cP1 @ E_h)
+ OM1.add_snapshot(t=nt*dt, ts=nt)
+ OM1.export_fields(Eh = Eh)
+ Bh = FemField(V2h, coeffs=B_h)
+ OM2.add_snapshot(t=nt*dt, ts=nt)
+ OM2.export_fields(Bh=Bh)
if plot_dir:
OM1.close()
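The time loop above implements a leapfrog update (half Faraday / Ampere / half Faraday) with in-place dot products. A minimal dense-matrix sketch of the same update order, with random arrays standing in for the PSYDAC operators (illustrative only, source kept at zero):

import numpy as np

rng = np.random.default_rng(0)
n1, n2 = 8, 6                            # toy dim(V1h), dim(V2h)
C    = rng.standard_normal((n2, n1))     # stands in for the primal curl  V1h -> V2h
dC   = rng.standard_normal((n1, n2))     # stands in for the dual curl    V2h -> V1h
dCH1 = np.eye(n1)                        # stands in for H1A_inv @ H1 (identity if A_eps = 0)
E, B, f = rng.standard_normal(n1), rng.standard_normal(n2), np.zeros(n1)

dt, Nt = 1e-3, 10
for nt in range(Nt):
    B -= (dt / 2) * (C @ E)              # half Faraday step: B^n       -> B^{n+1/2}
    E = dCH1 @ E + dt * (dC @ B - f)     # Ampere step:       E^n       -> E^{n+1}
    B -= (dt / 2) * (C @ E)              # half Faraday step: B^{n+1/2} -> B^{n+1}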
@@ -616,8 +388,9 @@ def compute_diags(E_h, B_h, J_h, nt):
fields='Bh')
PM.close()
-
-
+# ==============================================================================
+# Compute stable time step size
+# ==============================================================================
def compute_stable_dt(*, C, dC, cfl_max, dt_max=None):
"""
Compute a stable time step size based on the maximum CFL parameter in the
@@ -666,7 +439,6 @@ def compute_stable_dt(*, C, dC, cfl_max, dt_max=None):
print(' WARNING !!! cfl = {} '.format(cfl))
print(' ****** ****** ****** ****** ****** ****** ')
- t_stamp = time_count()
V = C.domain
from psydac.linalg.utilities import array_to_psydac
vv = array_to_psydac(np.random.rand(V.dimension), V)
@@ -690,7 +462,6 @@ def compute_stable_dt(*, C, dC, cfl_max, dt_max=None):
spectral_rho = norm_vv # approximation
conv = abs((spectral_rho - old_spectral_rho) / spectral_rho) < 0.001
print(" ... spectral radius iteration: spectral_rho( dC @ C ) ~= {}".format(spectral_rho))
- t_stamp = time_count(t_stamp)
norm_op = np.sqrt(spectral_rho)
c_dt_max = 2. / norm_op
@@ -707,3 +478,91 @@ def compute_stable_dt(*, C, dC, cfl_max, dt_max=None):
print(f" -- and spectral_radius((c*dt)**2* dC @ C ) = {(light_c * dt * norm_op)**2} (should be < 4).")
return dt
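The spectral-radius estimate inside compute_stable_dt() is a plain power iteration on dC @ C, and the leapfrog stability condition then requires (c*dt)**2 * rho(dC @ C) < 4. A self-contained numpy sketch of that estimate (toy symmetric matrix standing in for dC @ C; the names below are illustrative, not the PSYDAC API):

import numpy as np

def estimate_spectral_radius(M, tol=1e-3, maxiter=1000, seed=0):
    # power iteration: v <- M v / ||M v||; the growth factor tends to rho(M)
    rng = np.random.default_rng(seed)
    v = rng.standard_normal(M.shape[1])
    v /= np.linalg.norm(v)
    rho_old = 0.0
    for _ in range(maxiter):
        v = M @ v
        rho = np.linalg.norm(v)
        v /= rho
        if abs(rho - rho_old) < tol * rho:
            break
        rho_old = rho
    return rho

# toy usage: M stands in for dC @ C, light_c for the speed of light
rng = np.random.default_rng(1)
A = rng.standard_normal((50, 50))
M = A.T @ A
rho = estimate_spectral_radius(M)
light_c, cfl_max, dt_max = 1.0, 0.8, 0.05
dt = min(dt_max, cfl_max * 2.0 / (light_c * np.sqrt(rho)))   # keeps (c*dt)**2 * rho(M) below 4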
+
+# ==============================================================================
+# Test Sources
+# ==============================================================================
+def get_div_free_pulse(x_0, y_0, domain=None):
+
+ from sympy import pi, cos, sin, Tuple, exp
+
+ x, y = domain.coordinates
+ ds2_0 = (0.02)**2
+ sigma_0 = (x - x_0)**2 + (y - y_0)**2
+ phi_0 = exp(-sigma_0**2 / (2 * ds2_0))
+ dx_sig_0 = 2 * (x - x_0)
+ dy_sig_0 = 2 * (y - y_0)
+ dx_phi_0 = - dx_sig_0 * sigma_0 / ds2_0 * phi_0
+ dy_phi_0 = - dy_sig_0 * sigma_0 / ds2_0 * phi_0
+ f_x = dy_phi_0
+ f_y = - dx_phi_0
+ f_vect = Tuple(f_x, f_y)
+
+ return f_vect
+
+
+def get_curl_free_pulse(x_0, y_0, domain=None, pp=False):
+
+ from sympy import pi, cos, sin, Tuple, exp
+
+ # return -grad phi_0
+ x, y = domain.coordinates
+ if pp:
+ # psi=phi
+ ds2_0 = (0.02)**2
+ else:
+ ds2_0 = (0.1)**2
+ sigma_0 = (x - x_0)**2 + (y - y_0)**2
+ phi_0 = exp(-sigma_0**2 / (2 * ds2_0))
+ dx_sig_0 = 2 * (x - x_0)
+ dy_sig_0 = 2 * (y - y_0)
+ dx_phi_0 = - dx_sig_0 * sigma_0 / ds2_0 * phi_0
+ dy_phi_0 = - dy_sig_0 * sigma_0 / ds2_0 * phi_0
+ f_x = -dx_phi_0
+ f_y = -dy_phi_0
+ f_vect = Tuple(f_x, f_y)
+
+ return f_vect
+
+def get_Gaussian_beam(x_0, y_0, domain=None):
+
+ from sympy import pi, cos, sin, Tuple, exp
+
+ # return E = curl of a Gaussian bump (a divergence-free field), and B = curl E
+ x, y = domain.coordinates
+
+ x = x - x_0
+ y = y - y_0
+
+ sigma = 0.1
+
+ xy = x**2 + y**2
+ ef = 1 / (sigma**2) * exp(- xy / (2 * sigma**2))
+
+ # E = curl exp
+ E = Tuple(y * ef, -x * ef)
+
+ # B = curl E
+ B = (xy / (sigma**2) - 2) * ef
+
+ return E, B
+
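As a quick symbolic sanity check of the pulse sources above: the div-free pulse should have zero divergence, and the Gaussian beam should satisfy B = curl E. A small sympy sketch, with exact constants in place of the floats 0.02 and 0.1 so that simplify() can return 0:

import sympy as sym

x, y = sym.symbols('x y', real=True)
x0 = y0 = sym.pi / 2

# div-free pulse, written out as in get_div_free_pulse()
ds2 = sym.Rational(2, 100)**2
sigma = (x - x0)**2 + (y - y0)**2
phi = sym.exp(-sigma**2 / (2 * ds2))
fx, fy = sym.diff(phi, y), -sym.diff(phi, x)
assert sym.simplify(sym.diff(fx, x) + sym.diff(fy, y)) == 0      # div f = 0

# Gaussian beam, written out as in get_Gaussian_beam()
sig = sym.Rational(1, 10)
xs, ys = x - x0, y - y0
ef = sym.exp(-(xs**2 + ys**2) / (2 * sig**2)) / sig**2
Ex, Ey = ys * ef, -xs * ef
B = ((xs**2 + ys**2) / sig**2 - 2) * ef
assert sym.simplify(sym.diff(Ey, x) - sym.diff(Ex, y) - B) == 0  # B = curl E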
+if __name__ == '__main__':
+ domain_name = 'refined_square'
+ domain_lims = [[0, np.pi], [0, np.pi]]
+
+ nc = 20
+ ncells = np.array([[nc, nc, nc],
+ [nc, 2*nc, nc],
+ [nc, nc, nc]])
+
+ deg = 3
+ p_moments = deg+1
+
+ final_time = 2
+
+ plot_dir = './td_maxwell_pulse/'
+
+ solve_td_maxwell_pbm(nc=ncells, deg=deg, p_moments=p_moments, final_time=final_time,
+ domain_name=domain_name, domain_lims=domain_lims,
+ source_type='zero', E0_type='pulse', plot_dir=plot_dir)
From 31a47030cf3fc3a3ac0b9595064f9df90df11aa2 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Wed, 3 Dec 2025 15:06:45 +0100
Subject: [PATCH 29/63] finish multipatch examples
---
examples/feec/hcurl_eigen_pbms_dg_2d.py | 2 +-
.../tests/test_feec_maxwell_multipatch_2d.py | 27 ++-
.../tests/test_feec_poisson_multipatch_2d.py | 2 +-
examples/feec/timedomain_maxwell.py | 15 +-
examples/feec/timedomain_maxwell_testcase.py | 159 ------------------
5 files changed, 21 insertions(+), 184 deletions(-)
delete mode 100644 examples/feec/timedomain_maxwell_testcase.py
diff --git a/examples/feec/hcurl_eigen_pbms_dg_2d.py b/examples/feec/hcurl_eigen_pbms_dg_2d.py
index 0566a5804..8faa09a29 100644
--- a/examples/feec/hcurl_eigen_pbms_dg_2d.py
+++ b/examples/feec/hcurl_eigen_pbms_dg_2d.py
@@ -28,7 +28,7 @@
from psydac.api.discretization import discretize
from psydac.api.postprocessing import OutputManager, PostProcessManager
-from hcurl_eigen_pbms_conga_2d import get_eigenvalues
+from examples.feec.hcurl_eigen_pbms_conga_2d import get_eigenvalues
#==============================================================================
# Solver for curl-curl eigenvalue problems
diff --git a/examples/feec/tests/test_feec_maxwell_multipatch_2d.py b/examples/feec/tests/test_feec_maxwell_multipatch_2d.py
index bfb8bc210..9a624e3cf 100644
--- a/examples/feec/tests/test_feec_maxwell_multipatch_2d.py
+++ b/examples/feec/tests/test_feec_maxwell_multipatch_2d.py
@@ -4,12 +4,11 @@
# for full license details. #
#---------------------------------------------------------------------------#
import numpy as np
-import pytest
-from psydac.feec.multipatch.examples.hcurl_source_pbms_conga_2d import solve_hcurl_source_pbm
-from psydac.feec.multipatch.examples.hcurl_eigen_pbms_conga_2d import hcurl_solve_eigen_pbm
-from psydac.feec.multipatch.examples.hcurl_eigen_pbms_dg_2d import hcurl_solve_eigen_pbm_dg
-from psydac.feec.multipatch.examples.timedomain_maxwell import solve_td_maxwell_pbm
+from examples.feec.hcurl_source_pbms_conga_2d import solve_hcurl_source_pbm
+from examples.feec.hcurl_eigen_pbms_conga_2d import hcurl_solve_eigen_pbm
+from examples.feec.hcurl_eigen_pbms_dg_2d import hcurl_solve_eigen_pbm_dg
+from examples.feec.timedomain_maxwell import solve_td_maxwell_pbm
def test_time_harmonic_maxwell_pretzel_f():
nc = 4
@@ -17,22 +16,20 @@ def test_time_harmonic_maxwell_pretzel_f():
source_type = 'manu_maxwell_inhom'
domain_name = 'pretzel_f'
- source_proj = 'tilde_Pi'
omega = np.pi
eta = -omega**2 # source
- diags = solve_hcurl_source_pbm(
+ err = solve_hcurl_source_pbm(
nc=nc, deg=deg,
eta=eta,
nu=0,
mu=1,
domain_name=domain_name,
source_type=source_type,
- source_proj=source_proj,
backend_language='pyccel-gcc')
- assert abs(diags["err"] - 0.0072015081402929445) < 1e-10
+ assert abs(err - 0.0072015081402929445) < 1e-10
def test_time_harmonic_maxwell_pretzel_f_nc():
deg = 2
@@ -41,22 +38,20 @@ def test_time_harmonic_maxwell_pretzel_f_nc():
source_type = 'manu_maxwell_inhom'
domain_name = 'pretzel_f'
- source_proj = 'tilde_Pi'
omega = np.pi
eta = -omega**2 # source
- diags = solve_hcurl_source_pbm(
+ err = solve_hcurl_source_pbm(
nc=nc, deg=deg,
eta=eta,
nu=0,
mu=1,
domain_name=domain_name,
source_type=source_type,
- source_proj=source_proj,
backend_language='pyccel-gcc')
- assert abs(diags["err"] - 0.004849225522124346) < 5e-7
+ assert abs(err - 0.004849225522124346) < 5e-7
def test_maxwell_eigen_curved_L_shape():
domain_name = 'curved_L_shape'
@@ -77,7 +72,7 @@ def test_maxwell_eigen_curved_L_shape():
nb_eigs_plot = 7
skip_eigs_threshold = 1e-7
- diags, eigenvalues = hcurl_solve_eigen_pbm(
+ eigenvalues = hcurl_solve_eigen_pbm(
ncells=ncells, degree=degree,
gamma_h=0,
generalized_pbm=True,
@@ -120,7 +115,7 @@ def test_maxwell_eigen_curved_L_shape_nc():
nb_eigs_plot = 7
skip_eigs_threshold = 1e-7
- diags, eigenvalues = hcurl_solve_eigen_pbm(
+ eigenvalues = hcurl_solve_eigen_pbm(
ncells=ncells, degree=degree,
gamma_h=0,
generalized_pbm=True,
@@ -163,7 +158,7 @@ def test_maxwell_eigen_curved_L_shape_dg():
nb_eigs_plot = 7
skip_eigs_threshold = 1e-7
- diags, eigenvalues = hcurl_solve_eigen_pbm_dg(
+ eigenvalues = hcurl_solve_eigen_pbm_dg(
ncells=ncells, degree=degree,
nu=0,
mu=1,
diff --git a/examples/feec/tests/test_feec_poisson_multipatch_2d.py b/examples/feec/tests/test_feec_poisson_multipatch_2d.py
index 29cab81d1..142ff5ccc 100644
--- a/examples/feec/tests/test_feec_poisson_multipatch_2d.py
+++ b/examples/feec/tests/test_feec_poisson_multipatch_2d.py
@@ -5,7 +5,7 @@
#---------------------------------------------------------------------------#
import numpy as np
-from psydac.feec.multipatch.examples.h1_source_pbms_conga_2d import solve_h1_source_pbm
+from examples.feec.h1_source_pbms_conga_2d import solve_h1_source_pbm
def test_poisson_pretzel_f():
diff --git a/examples/feec/timedomain_maxwell.py b/examples/feec/timedomain_maxwell.py
index 4b0b24495..658dc8bfe 100644
--- a/examples/feec/timedomain_maxwell.py
+++ b/examples/feec/timedomain_maxwell.py
@@ -51,7 +51,7 @@ def solve_td_maxwell_pbm(*,
E0_type='pulse_2',
plot_dir=None,
domain_lims=None,
- p_moments=4,
+ p_moments=-1,
):
"""
solver for the TD Maxwell problem: find E(t) in H(curl), B in L2, such that
@@ -352,13 +352,14 @@ def solve_td_maxwell_pbm(*,
C.dot(E_h, out=Btemp_h)
B_h -= (dt/2) * Btemp_h
- Eh = FemField(V1h, coeffs=cP1 @ E_h)
- OM1.add_snapshot(t=nt*dt, ts=nt)
- OM1.export_fields(Eh = Eh)
+ if plot_dir:
+ Eh = FemField(V1h, coeffs=cP1 @ E_h)
+ OM1.add_snapshot(t=nt*dt, ts=nt)
+ OM1.export_fields(Eh = Eh)
- Bh = FemField(V2h, coeffs=B_h)
- OM2.add_snapshot(t=nt*dt, ts=nt)
- OM2.export_fields(Bh=Bh)
+ Bh = FemField(V2h, coeffs=B_h)
+ OM2.add_snapshot(t=nt*dt, ts=nt)
+ OM2.export_fields(Bh=Bh)
if plot_dir:
OM1.close()
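For reference, the hunk above makes the per-time-step field export conditional on plot_dir. The following is a minimal sketch of that guard, assembled only from the calls that appear in the hunk (FemField, add_snapshot, export_fields); the helper name and its argument list are hypothetical, and OM1/OM2 are assumed to be OutputManager instances created earlier in the solver, as in the original script.

    from psydac.fem.basic import FemField

    def export_snapshot(plot_dir, nt, dt, V1h, V2h, cP1, E_h, B_h, OM1, OM2):
        # Skip the (costly) export entirely when no plot directory was requested.
        if not plot_dir:
            return
        # Project the E coefficients with cP1 and store both fields for this time step.
        Eh = FemField(V1h, coeffs=cP1 @ E_h)
        OM1.add_snapshot(t=nt*dt, ts=nt)
        OM1.export_fields(Eh=Eh)
        Bh = FemField(V2h, coeffs=B_h)
        OM2.add_snapshot(t=nt*dt, ts=nt)
        OM2.export_fields(Bh=Bh)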
diff --git a/examples/feec/timedomain_maxwell_testcase.py b/examples/feec/timedomain_maxwell_testcase.py
deleted file mode 100644
index 4a1b5a58e..000000000
--- a/examples/feec/timedomain_maxwell_testcase.py
+++ /dev/null
@@ -1,159 +0,0 @@
-#---------------------------------------------------------------------------#
-# This file is part of PSYDAC which is released under MIT License. See the #
-# LICENSE file or go to https://github.com/pyccel/psydac/blob/devel/LICENSE #
-# for full license details. #
-#---------------------------------------------------------------------------#
-"""
- Runner script for solving the time-domain Maxwell problem.
-"""
-
-import numpy as np
-
-from psydac.feec.multipatch.examples.timedomain_maxwell import solve_td_maxwell_pbm
-from psydac.feec.multipatch.utilities import get_run_dir, get_plot_dir
-
-# ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
-#
-
-test_case = 'E0_pulse_no_source'
-# test_case = 'Issautier_like_source'
-# J_proj_case = 'P_geom'
-J_proj_case = 'P_L2'
-
-#
-# ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
-
-# Parameters to be changed in the batch run
-deg = 3
-
-# Common simulation parameters
-# domain_name = 'square_6'
-# ncells = [4,4,4,4,4,4]
-# domain_name = 'pretzel_f'
-
-# non-conf domains
-domain = [[0, np.pi], [0, np.pi]] # interval in x- and y-direction
-domain_name = 'refined_square'
-# use isotropic meshes (probably with a square domain)
-# 4x8= 64 patches
-# care for the transpose
-ncells = np.array([[10, 10, 10],
- [10, 20, 10],
- [10, 10, 10]])
-
-cfl_max = 0.8
-
-# 'P_geom' # projection used for initial E0 (B0 = 0 in all cases)
-E0_proj = 'P_L2'
-backend = 'pyccel-gcc'
-project_sol = True # whether cP1 E_h is plotted instead of E_h
-
-# Parameters that depend on test case
-if test_case == 'E0_pulse_no_source':
-
- E0_type = 'pulse_2' # non-zero initial conditions
- source_type = 'zero' # no current source
- source_omega = None
- final_time = 2 # wave transit time in domain is > 4
- dt_max = None
-
- plot_a_lot = True
- if plot_a_lot:
- plot_time_ranges = [[[0, final_time], 0.1]]
- else:
- plot_time_ranges = [
- [[0, 2], 0.1],
- [[final_time - 1, final_time], 0.1],
- ]
-
-# TODO: check
-elif test_case == 'Issautier_like_source':
-
- E0_type = 'zero' # zero initial conditions
- source_type = 'Il_pulse'
- source_omega = None
- final_time = 20
- dt_max = None
-
- if deg == 3 and final_time == 20:
-
- plot_time_ranges = [
- [[1.9, 2], 0.1],
- [[4.9, 5], 0.1],
- [[9.9, 10], 0.1],
- [[19.9, 20], 0.1],
- ]
-
-else:
- raise ValueError(test_case)
-
-
-# projection used for the source J
-if J_proj_case == 'P_geom':
- source_proj = 'P_geom'
- filter_source = False
-
-elif J_proj_case == 'P_L2':
- source_proj = 'P_L2'
- filter_source = False
-
-elif J_proj_case == 'tilde Pi_1':
- source_proj = 'P_L2'
- filter_source = True
-
-else:
- raise ValueError(J_proj_case)
-
-case_dir = 'tdmaxwell_' + test_case + '_J_proj=' + J_proj_case
-
-if filter_source:
- case_dir += '_Jfilter'
-else:
- case_dir += '_Jnofilter'
-if not project_sol:
- case_dir += '_E_noproj'
-
-if source_omega is not None:
- case_dir += f'_omega={source_omega}'
-
-case_dir += f'_tend={final_time}'
-
-#
-# ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
-
-run_dir = get_run_dir(
- domain_name,
- sum(ncells),
- deg,
- source_type=source_type,
- conf_proj="")
-
-plot_dir = get_plot_dir(case_dir, run_dir)
-
-
-#
-params = {
- 'nc': ncells,
- 'deg': deg,
- 'final_time': final_time,
- 'cfl_max': cfl_max,
- 'dt_max': dt_max,
- 'domain_name': domain_name,
- 'backend': backend,
- 'source_type': source_type,
- 'source_omega': source_omega,
- 'source_proj': source_proj,
- 'project_sol': project_sol,
- 'filter_source': filter_source,
- 'E0_type': E0_type,
- 'E0_proj': E0_proj,
- 'plot_dir': plot_dir,
- 'plot_time_ranges': plot_time_ranges,
- 'domain_lims': domain
-}
-
-print('\n --- --- --- --- --- --- --- --- --- --- --- --- --- --- \n')
-print(' Calling solve_td_maxwell_pbm() with params = {}'.format(params))
-print('\n --- --- --- --- --- --- --- --- --- --- --- --- --- --- \n')
-
-solve_td_maxwell_pbm(**params)
From ffaf555c5d590110dc9550814f8c49fca767dae0 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Wed, 3 Dec 2025 15:16:55 +0100
Subject: [PATCH 30/63] add examples to test workflow
---
.github/workflows/testing.yml | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml
index c3c2da8a4..22b9eb157 100644
--- a/.github/workflows/testing.yml
+++ b/.github/workflows/testing.yml
@@ -236,6 +236,11 @@ jobs:
run: |
python mpi_tester.py --mpirun="mpiexec -n 4 ${MPI_OPTS}" --pyargs psydac -m "parallel and petsc"
+ - name: Run tests in examples/feec
+ working-directory: ./pytest
+ run: |
+ python -m pytest examples/feec
+
- name: Remove test directory
if: always()
run: |
From 580c5244d993b66bae61c7331575ad1245de18af Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Wed, 3 Dec 2025 15:23:00 +0100
Subject: [PATCH 31/63] purge feec/multipatch
---
docs/source/modules/feec.multipatch.rst | 12 -
docs/source/modules/feec.rst | 7 -
psydac/feec/multipatch/__init__.py | 5 -
psydac/feec/multipatch/utilities.py | 160 ---------
psydac/feec/multipatch/utils_conga_2d.py | 392 -----------------------
5 files changed, 576 deletions(-)
delete mode 100644 docs/source/modules/feec.multipatch.rst
delete mode 100644 psydac/feec/multipatch/__init__.py
delete mode 100644 psydac/feec/multipatch/utilities.py
delete mode 100644 psydac/feec/multipatch/utils_conga_2d.py
diff --git a/docs/source/modules/feec.multipatch.rst b/docs/source/modules/feec.multipatch.rst
deleted file mode 100644
index 85bf08af1..000000000
--- a/docs/source/modules/feec.multipatch.rst
+++ /dev/null
@@ -1,12 +0,0 @@
-feec.multipatch
-===============
-
-.. currentmodule:: psydac.feec
-.. autosummary::
- :nosignatures:
- :toctree: STUBDIR
- :template: autosummary/module.rst
-
- multipatch.multipatch_domain_utilities
- multipatch.utilities
- multipatch.utils_conga_2d
diff --git a/docs/source/modules/feec.rst b/docs/source/modules/feec.rst
index f75dfb8cc..c3f50aaba 100644
--- a/docs/source/modules/feec.rst
+++ b/docs/source/modules/feec.rst
@@ -13,10 +13,3 @@ feec
feec.hodge
feec.pull_push
feec.pushforward
-
-feec submodules
----------------
-.. toctree::
- :maxdepth: 1
-
- feec.multipatch
diff --git a/psydac/feec/multipatch/__init__.py b/psydac/feec/multipatch/__init__.py
deleted file mode 100644
index 419109b64..000000000
--- a/psydac/feec/multipatch/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#---------------------------------------------------------------------------#
-# This file is part of PSYDAC which is released under MIT License. See the #
-# LICENSE file or go to https://github.com/pyccel/psydac/blob/devel/LICENSE #
-# for full license details. #
-#---------------------------------------------------------------------------#
diff --git a/psydac/feec/multipatch/utilities.py b/psydac/feec/multipatch/utilities.py
deleted file mode 100644
index 22182c2c7..000000000
--- a/psydac/feec/multipatch/utilities.py
+++ /dev/null
@@ -1,160 +0,0 @@
-#---------------------------------------------------------------------------#
-# This file is part of PSYDAC which is released under MIT License. See the #
-# LICENSE file or go to https://github.com/pyccel/psydac/blob/devel/LICENSE #
-# for full license details. #
-#---------------------------------------------------------------------------#
-import time
-
-
-def time_count(t_stamp=None, msg=None):
- new_t_stamp = time.time()
- if msg is None:
- msg = ''
- else:
- msg = '[' + msg + ']'
- if t_stamp:
- print('time elapsed ' + msg + ': ' + repr(new_t_stamp - t_stamp))
- elif len(msg) > 0:
- print('time stamp set for ' + msg)
- return new_t_stamp
-
-# ---------------------------------------------------------------------------------------------------------------
-# small/temporary utility for saving/loading sparse matrices, plots...
-# (should be cleaned !)
-
-
-def source_name(source_type=None, source_proj=None):
- """ Get the source term name"""
- assert source_type and source_proj
- return source_type + '_' + source_proj
-
-
-def sol_ref_fn(source_type, N_diag, source_proj=None):
- """ Get the reference solution filename based on the source term type"""
- fn = 'u_ref_' + source_name(source_type,
- source_proj) + '_N' + repr(N_diag) + '.npz'
- return fn
-
-
-def error_fn(
- source_type=None,
- method=None,
- conf_proj=None,
- k=None,
- domain_name=None,
- deg=None):
- """ Get the error filename based on the method used to solve the multpatch problem"""
- return 'errors/error_' + domain_name + '_' + source_type + '_' + '_deg' + \
- repr(deg) + '_' + get_method_name(method, k, conf_proj=conf_proj) + '.txt'
-
-
-def get_method_name(method=None, k=None, conf_proj=None, penal_regime=None):
- """ Get method name used to solve the multpatch problem"""
- if method == 'nitsche':
- method_name = method
- if k == 1:
- method_name += '_SIP'
- elif k == -1:
- method_name += '_NIP'
- elif k == 0:
- method_name += '_IIP'
- else:
- assert k is None
- elif method == 'conga':
- method_name = method
- if conf_proj is not None:
- method_name += '_' + conf_proj
- else:
- raise ValueError(method)
- if penal_regime is not None:
- method_name += '_pr' + repr(penal_regime)
-
- return method_name
-
-
-def get_fem_name(
- method=None,
- k=None,
- DG_full=False,
- conf_proj=None,
- domain_name=None,
- nc=None,
- deg=None,
- hom_seq=True):
- """ Get Fem name used to solve the multipatch problem"""
- assert domain_name
- fn = domain_name + (('_nc' + repr(nc)) if nc else '') + \
- (('_deg' + repr(deg)) if deg else '')
- if DG_full:
- fn += '_fDG'
- if method is not None:
- fn += '_' + get_method_name(method, k, conf_proj)
- if not hom_seq:
- fn += '_inhom'
- return fn
-
-
-def FEM_sol_fn(source_type=None, source_proj=None):
- """ Get the filename for FEM solution coeffs in numpy array format """
- fn = 'sol_' + source_name(source_type, source_proj) + '.npy'
- return fn
-
-
-def get_load_dir(
- method=None,
- DG_full=False,
- domain_name=None,
- nc=None,
- deg=None,
- data='matrices'):
- """ get load directory name based on the fem name"""
- assert data in ['matrices', 'solutions', 'rhs']
- if method is None:
- assert data == 'rhs'
- fem_name = get_fem_name(
- domain_name=domain_name,
- method=method,
- nc=nc,
- deg=deg,
- DG_full=DG_full)
- return './saved_' + data + '/' + fem_name + '/'
-
-
-def get_run_dir(domain_name, nc, deg, source_type=None, conf_proj=None):
- """ Get the run directory name"""
- rdir = domain_name
- if source_type:
- rdir += '_' + source_type
- if conf_proj:
- rdir += '_P=' + conf_proj
- rdir += '_nc={}_deg={}'.format(nc, deg)
- return rdir
-
-
-def get_plot_dir(case_dir, run_dir):
- """ Get the plot directory name"""
- return './plots/' + case_dir + '/' + run_dir
-
-
-def get_mat_dir(domain_name, nc, deg, quad_param=None):
- """ Get the directory name where matrices are stored"""
- mat_dir = './saved_matrices/matrices_{}_nc={}_deg={}'.format(
- domain_name, nc, deg)
- if quad_param is not None:
- mat_dir += '_qp={}'.format(quad_param)
- return mat_dir
-
-
-def get_sol_dir(case_dir, domain_name, nc, deg):
- """ Get the directory name where solutions are stored"""
- return './saved_solutions/' + case_dir + \
- '/solutions_{}_nc={}_deg={}'.format(domain_name, nc, deg)
-
-
-def diag_fn(source_type=None, source_proj=None):
- """ Get the diagnostics filename"""
- if source_type is not None:
- fn = 'diag_' + source_name(source_type, source_proj) + '.txt'
- else:
- fn = 'diag.txt'
- return fn
diff --git a/psydac/feec/multipatch/utils_conga_2d.py b/psydac/feec/multipatch/utils_conga_2d.py
deleted file mode 100644
index 7907dea57..000000000
--- a/psydac/feec/multipatch/utils_conga_2d.py
+++ /dev/null
@@ -1,392 +0,0 @@
-#---------------------------------------------------------------------------#
-# This file is part of PSYDAC which is released under MIT License. See the #
-# LICENSE file or go to https://github.com/pyccel/psydac/blob/devel/LICENSE #
-# for full license details. #
-#---------------------------------------------------------------------------#
-import os
-import datetime
-
-import numpy as np
-
-from sympy import lambdify
-from sympde.topology import Derham
-from sympde.topology.callable_mapping import BasicCallableMapping
-
-from psydac.api.settings import PSYDAC_BACKENDS
-from psydac.feec.pull_push import pull_2d_h1, pull_2d_hcurl, pull_2d_l2
-
-from psydac.api.discretization import discretize
-from psydac.feec.multipatch.utilities import time_count
-from psydac.linalg.utilities import array_to_psydac
-from psydac.fem.basic import FemField
-from psydac.fem.plotting_utilities import get_plotting_grid, get_grid_quad_weights, get_grid_vals
-
-from scipy.sparse import kron, block_diag
-from psydac.core.bsplines import collocation_matrix, histopolation_matrix
-from psydac.linalg.solvers import inverse
-
-
-# commuting projections on the physical domain (should probably be in the
-# interface)
-def P0_phys(f_phys, P0, domain):
- f = lambdify(domain.coordinates, f_phys)
-
- return P0(f)
-
-
-def P1_phys(f_phys, P1, domain):
- f_x = lambdify(domain.coordinates, f_phys[0])
- f_y = lambdify(domain.coordinates, f_phys[1])
-
- return P1([f_x, f_y])
-
-
-def P2_phys(f_phys, P2, domain):
- f = lambdify(domain.coordinates, f_phys)
-
- return P2(f)
-
-
-def get_kind(space='V*'):
- # temp helper
- if space == 'V0':
- kind = 'h1'
- elif space == 'V1':
- kind = 'hcurl'
- elif space == 'V2':
- kind = 'l2'
- else:
- raise ValueError(space)
- return kind
-
-# ===============================================================================
-def get_K0_and_K0_inv(V0h, uniform_patches=False):
- """
- Compute the change of basis matrices K0 and K0^{-1} in V0h.
-
- With
- K0_ij = sigma^0_i(B_j) = B_jx(n_ix) * B_jy(n_iy)
- where sigma_i is the geometric (interpolation) dof
- and B_j is the tensor-product B-spline
- """
- if uniform_patches:
- print(' [[WARNING -- hack in get_K0_and_K0_inv: using copies of 1st-patch matrices in every patch ]] ')
-
- V0 = V0h.symbolic_space # VOh is FemSpace
- domain = V0.domain
- K0_blocks = []
- K0_inv_blocks = []
- for k, D in enumerate(domain.interior):
- if uniform_patches and k > 0:
- K0_k = K0_blocks[0].copy()
- K0_inv_k = K0_inv_blocks[0].copy()
-
- else:
- V0_k = V0h.spaces[k] # fem space on patch k: (TensorFemSpace)
- K0_k_factors = [None, None]
- for d in [0, 1]:
- # 1d fem space alond dim d (SplineSpace)
- V0_kd = V0_k.spaces[d]
- K0_k_factors[d] = collocation_matrix(
- knots=V0_kd.knots,
- degree=V0_kd.degree,
- periodic=V0_kd.periodic,
- normalization=V0_kd.basis,
- xgrid=V0_kd.greville
- )
- K0_k = kron(*K0_k_factors)
- K0_k.eliminate_zeros()
- K0_inv_k = inv(K0_k.tocsc())
- K0_inv_k.eliminate_zeros()
-
- K0_blocks.append(K0_k)
- K0_inv_blocks.append(K0_inv_k)
- K0 = block_diag(K0_blocks)
- K0_inv = block_diag(K0_inv_blocks)
- return K0, K0_inv
-
-
-# ===============================================================================
-def get_K1_and_K1_inv(V1h, uniform_patches=False):
- """
- Compute the change of basis matrices K1 and K1^{-1} in Hcurl space V1h.
-
- With
- K1_ij = sigma^1_i(B_j) = int_{e_ix}(M_jx) * B_jy(n_iy)
- if i = horizontal edge [e_ix, n_iy] and j = (M_jx o B_jy) x-oriented MoB spline
- or
- = B_jx(n_ix) * int_{e_iy}(M_jy)
- if i = vertical edge [n_ix, e_iy] and j = (B_jx o M_jy) y-oriented BoM spline
- (above, 'o' denotes tensor-product for functions)
- """
- if uniform_patches:
- print(' [[WARNING -- hack in get_K1_and_K1_inv: using copies of 1st-patch matrices in every patch ]] ')
-
- V1 = V1h.symbolic_space # V1h is FemSpace
- domain = V1.domain
- K1_blocks = []
- K1_inv_blocks = []
- for k, D in enumerate(domain.interior):
- if uniform_patches and k > 0:
- K1_k = K1_blocks[0].copy()
- K1_inv_k = K1_inv_blocks[0].copy()
-
- else:
- # fem space on patch k:
- V1_k = V1h.spaces[k]
- K1_k_blocks = []
- for c in [0, 1]: # dim of component
- # fem space for comp. dc (TensorFemSpace)
- V1_kc = V1_k.spaces[c]
- K1_kc_factors = [None, None]
- for d in [0, 1]: # dim of variable
- # 1d fem space for comp c alond dim d (SplineSpace)
- V1_kcd = V1_kc.spaces[d]
- if c == d:
- K1_kc_factors[d] = histopolation_matrix(
- knots=V1_kcd.knots,
- degree=V1_kcd.degree,
- periodic=V1_kcd.periodic,
- normalization=V1_kcd.basis,
- xgrid=V1_kcd.ext_greville
- )
- else:
- K1_kc_factors[d] = collocation_matrix(
- knots=V1_kcd.knots,
- degree=V1_kcd.degree,
- periodic=V1_kcd.periodic,
- normalization=V1_kcd.basis,
- xgrid=V1_kcd.greville
- )
- K1_kc = kron(*K1_kc_factors)
- K1_kc.eliminate_zeros()
- K1_k_blocks.append(K1_kc)
- K1_k = block_diag(K1_k_blocks)
- K1_k.eliminate_zeros()
- K1_inv_k = inv(K1_k.tocsc())
- K1_inv_k.eliminate_zeros()
-
- K1_blocks.append(K1_k)
- K1_inv_blocks.append(K1_inv_k)
-
- K1 = block_diag(K1_blocks)
- K1_inv = block_diag(K1_inv_blocks)
- return K1, K1_inv
-
-# ===============================================================================
-
-
-def ortho_proj_Hcurl(EE, V1h, domain_h, M1, backend_language='python'):
- """
- return orthogonal projection of E on V1h, given M1 the mass matrix
- """
- assert isinstance(EE, Tuple)
- V1 = V1h.symbolic_space
- v = element_of(V1, name='v')
- l = LinearForm(v, integral(V1.domain, dot(v, EE)))
- lh = discretize(
- l,
- domain_h,
- V1h,
- backend=PSYDAC_BACKENDS[backend_language])
- b = lh.assemble()
- M1_inv = inverse(M1.mat(), 'pcg', pc='jacobi', tol=1e-10)
- sol_coeffs = M1_inv @ b
-
- return FemField(V1h, coeffs=sol_coeffs)
-
-
-# ===============================================================================
-class DiagGrid():
- """
- Class storing:
- - a diagnostic cell-centered grid
- - writing / quadrature utilities
- - a ref solution
-
- to compare solutions from different FEM spaces on same domain
- """
-
- def __init__(self, mappings=None, N_diag=None):
-
- mappings_list = list(mappings.values())
- etas, xx, yy, patch_logvols = get_plotting_grid(
- mappings, N=N_diag, centered_nodes=True, return_patch_logvols=True)
- quad_weights = get_grid_quad_weights(
- etas, patch_logvols, mappings_list)
-
- self.etas = etas
- self.xx = xx
- self.yy = yy
- self.patch_logvols = patch_logvols
- self.quad_weights = quad_weights
- self.mappings_list = mappings_list
-
- self.sol_ref = {} # Fem fields
- self.sol_vals = {} # values on diag grid
- self.sol_ref_vals = {} # values on diag grid
-
- def grid_vals_h1(self, v):
- return get_grid_vals(v, self.etas, self.mappings_list, space_kind='h1')
-
- def grid_vals_hcurl(self, v):
- return get_grid_vals(
- v,
- self.etas,
- self.mappings_list,
- space_kind='hcurl')
-
- def create_ref_fem_spaces(self, domain=None, ref_nc=None, ref_deg=None):
- print('[DiagGrid] Discretizing the ref FEM space...')
- degree = [ref_deg, ref_deg]
- derham = Derham(domain, ["H1", "Hcurl", "L2"])
- ref_nc = {patch.name: [ref_nc, ref_nc] for patch in domain.interior}
-
- domain_h = discretize(domain, ncells=ref_nc)
- # , backend=PSYDAC_BACKENDS[backend_language])
- derham_h = discretize(derham, domain_h, degree=degree)
- self.V0h = derham_h.V0
- self.V1h = derham_h.V1
-
- def import_ref_sol_from_coeffs(self, sol_ref_filename=None, space='V*'):
- print('[DiagGrid] loading coeffs of ref_sol from {}...'.format(
- sol_ref_filename))
- if space == 'V0':
- Vh = self.V0h
- elif space == 'V1':
- Vh = self.V1h
- else:
- raise ValueError(space)
- try:
- coeffs = np.load(sol_ref_filename)
- except OSError:
- print("-- WARNING: file not found, setting sol_ref = 0")
- coeffs = np.zeros(Vh.nbasis)
- if space in self.sol_ref:
- print(
- 'WARNING !! sol_ref[{}] exists -- will be overwritten !! '.format(space))
- print('use refined labels if several solutions are needed in the same space')
- self.sol_ref[space] = FemField(
- Vh, coeffs=array_to_psydac(
- coeffs, Vh.coeff_space))
-
- def write_sol_values(self, v, space='V*'):
- """
- v: FEM field
- """
- if space in self.sol_vals:
- print(
- 'WARNING !! sol_vals[{}] exists -- will be overwritten !! '.format(space))
- print('use refined labels if several solutions are needed in the same space')
- self.sol_vals[space] = get_grid_vals(
- v, self.etas, self.mappings_list, space_kind=get_kind(space))
-
- def write_sol_ref_values(self, v=None, space='V*'):
- """
- if no FemField v is provided, then use the self.sol_ref (must have been imported)
- """
- if space in self.sol_vals:
- print(
- 'WARNING !! sol_ref_vals[{}] exists -- will be overwritten !! '.format(space))
- print('use refined labels if several solutions are needed in the same space')
- if v is None:
- # then sol_ref must have been imported
- v = self.sol_ref[space]
- self.sol_ref_vals[space] = get_grid_vals(
- v, self.etas, self.mappings_list, space_kind=get_kind(space))
-
- def compute_l2_error(self, space='V*'):
- if space in ['V0', 'V2']:
- u = self.sol_ref_vals[space]
- uh = self.sol_vals[space]
- abs_u = [np.abs(p) for p in u]
- abs_uh = [np.abs(p) for p in uh]
- errors = [np.abs(p - q) for p, q in zip(u, uh)]
- elif space == 'V1':
- u_x, u_y = self.sol_ref_vals[space]
- uh_x, uh_y = self.sol_vals[space]
- abs_u = [np.sqrt((u1)**2 + (u2)**2) for u1, u2 in zip(u_x, u_y)]
- abs_uh = [np.sqrt((u1)**2 + (u2)**2) for u1, u2 in zip(uh_x, uh_y)]
- errors = [np.sqrt((u1 - v1)**2 + (u2 - v2)**2)
- for u1, v1, u2, v2 in zip(u_x, uh_x, u_y, uh_y)]
- else:
- raise ValueError(space)
-
- l2_norm_uh = (
- np.sum([J_F * v**2 for v, J_F in zip(abs_uh, self.quad_weights)]))**0.5
- l2_norm_u = (
- np.sum([J_F * v**2 for v, J_F in zip(abs_u, self.quad_weights)]))**0.5
- l2_error = (
- np.sum([J_F * v**2 for v, J_F in zip(errors, self.quad_weights)]))**0.5
-
- return l2_norm_uh, l2_norm_u, l2_error
-
- def get_diags_for(self, v, space='V*', print_diags=True):
- self.write_sol_values(v, space)
- sol_norm, sol_ref_norm, l2_error = self.compute_l2_error(space)
- rel_l2_error = l2_error / (max(sol_norm, sol_ref_norm))
- diags = {
- 'sol_norm': sol_norm,
- 'sol_ref_norm': sol_ref_norm,
- 'rel_l2_error': rel_l2_error,
- }
- if print_diags:
- print(' .. l2 norms (computed via quadratures on diag_grid): ')
- print(diags)
-
- return diags
-
-
-def get_Vh_diags_for(
- v=None,
- v_ref=None,
- M_m=None,
- print_diags=True,
- msg='error between ?? and ?? in Vh'):
- """
- v, v_ref: FemField
- M_m: mass matrix in scipy format
- """
- uh_c = v.coeffs.toarray()
- uh_ref_c = v_ref.coeffs.toarray()
- err_c = uh_c - uh_ref_c
- l2_error = np.dot(err_c, M_m.dot(err_c))**0.5
- sol_norm = np.dot(uh_c, M_m.dot(uh_c))**0.5
- sol_ref_norm = np.dot(uh_ref_c, M_m.dot(uh_ref_c))**0.5
- rel_l2_error = l2_error / (max(sol_norm, sol_ref_norm))
- diags = {
- 'sol_norm': sol_norm,
- 'sol_ref_norm': sol_ref_norm,
- 'rel_l2_error': rel_l2_error,
- }
- if print_diags:
- print(' .. l2 norms ({}): '.format(msg))
- print(diags)
-
- return diags
-
-
-def write_diags_to_file(diags, script_filename, diag_filename, params=None):
- """ write diagnostics to file """
- print(' -- writing diags to file {} --'.format(diag_filename))
- if not os.path.exists(diag_filename):
- open(diag_filename, 'w')
-
- with open(diag_filename, 'a') as a_writer:
- a_writer.write('\n')
- a_writer.write(
- ' ---------- ---------- ---------- ---------- ---------- ---------- \n')
- a_writer.write(' run script: \n {}\n'.format(script_filename))
- a_writer.write(
- ' executed on: \n {}\n\n'.format(
- datetime.datetime.now()))
- a_writer.write(' params: \n')
- for key, value in params.items():
- a_writer.write(' {}: {} \n'.format(key, value))
- a_writer.write('\n')
- a_writer.write(' diags: \n')
- for key, value in diags.items():
- a_writer.write(' {}: {} \n'.format(key, value))
- a_writer.write(
- ' ---------- ---------- ---------- ---------- ---------- ---------- \n')
- a_writer.write('\n')
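The removed utils_conga_2d.py computed its diagnostics as relative L2 errors in the norm induced by a mass matrix (see get_Vh_diags_for above). The following is a minimal NumPy sketch of that computation, with a hypothetical helper name, kept here for readers who relied on the deleted module.

    import numpy as np

    def rel_l2_error(uh_c, uh_ref_c, M_m):
        # ||v||_M = sqrt(v^T M v): L2 norm of a FEM field from its coefficient vector.
        err_c = uh_c - uh_ref_c
        l2_error     = np.dot(err_c, M_m.dot(err_c))**0.5
        sol_norm     = np.dot(uh_c, M_m.dot(uh_c))**0.5
        sol_ref_norm = np.dot(uh_ref_c, M_m.dot(uh_ref_c))**0.5
        return l2_error / max(sol_norm, sol_ref_norm)

    # Example with an identity mass matrix, where this reduces to the usual
    # Euclidean relative error:
    # rel_l2_error(np.ones(4), np.zeros(4), np.eye(4))  ->  1.0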
From afbd14cc21bcfbe3495626e4a5680a4f137cdef1 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Wed, 3 Dec 2025 15:23:56 +0100
Subject: [PATCH 32/63] update feec doc
---
docs/source/modules/feec.rst | 1 +
1 file changed, 1 insertion(+)
diff --git a/docs/source/modules/feec.rst b/docs/source/modules/feec.rst
index c3f50aaba..42313895c 100644
--- a/docs/source/modules/feec.rst
+++ b/docs/source/modules/feec.rst
@@ -11,5 +11,6 @@ feec
feec.derivatives
feec.global_geometric_projectors
feec.hodge
+ feec.multipatch_domain_utilities
feec.pull_push
feec.pushforward
From 6035e9aa8471e9328ac53b20b4db287071a728dc Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Wed, 3 Dec 2025 15:28:39 +0100
Subject: [PATCH 33/63] fix broken imports
---
psydac/api/tests/build_domain.py | 2 +-
psydac/fem/tests/test_dirichlet_projectors.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/psydac/api/tests/build_domain.py b/psydac/api/tests/build_domain.py
index 7a77f7c7f..3301294c1 100644
--- a/psydac/api/tests/build_domain.py
+++ b/psydac/api/tests/build_domain.py
@@ -12,7 +12,7 @@
from sympde.topology import IdentityMapping, PolarMapping, AffineMapping, Mapping
# remove after sympde PR #155 is merged and call Domain.join instead
-from psydac.feec.multipatch.multipatch_domain_utilities import sympde_Domain_join
+from psydac.feec.multipatch_domain_utilities import sympde_Domain_join
#==============================================================================
# small extension to SymPDE:
diff --git a/psydac/fem/tests/test_dirichlet_projectors.py b/psydac/fem/tests/test_dirichlet_projectors.py
index bea727244..e2fc41fa8 100644
--- a/psydac/fem/tests/test_dirichlet_projectors.py
+++ b/psydac/fem/tests/test_dirichlet_projectors.py
@@ -445,7 +445,7 @@ def test_discrete_derham_boundary_projector_multipatch():
comm = None
backend = PSYDAC_BACKEND_GPYCCEL
- from psydac.feec.multipatch.multipatch_domain_utilities import build_multipatch_domain
+ from psydac.feec.multipatch_domain_utilities import build_multipatch_domain
domain = build_multipatch_domain(domain_name='annulus_3')
rng = np.random.default_rng(42)
From c002659e3971622a04f0539c7fa75c997d982fb2 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Wed, 3 Dec 2025 15:37:50 +0100
Subject: [PATCH 34/63] remove feec.multipatch from script
---
docs/update_links.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/update_links.py b/docs/update_links.py
index 3ff71a899..cdea472ec 100644
--- a/docs/update_links.py
+++ b/docs/update_links.py
@@ -5,7 +5,7 @@
'cmd',
'core',
'ddm',
- 'feec', 'feec.multipatch',
+ 'feec',
'fem',
'linalg', 'linalg.kernels',
'mapping',
From 799c757cbcbbbe5ec6fd9108f2819f33f87f0f89 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Wed, 3 Dec 2025 16:45:20 +0100
Subject: [PATCH 35/63] remove failing example testing
---
.github/workflows/testing.yml | 5 -----
1 file changed, 5 deletions(-)
diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml
index 22b9eb157..c3c2da8a4 100644
--- a/.github/workflows/testing.yml
+++ b/.github/workflows/testing.yml
@@ -236,11 +236,6 @@ jobs:
run: |
python mpi_tester.py --mpirun="mpiexec -n 4 ${MPI_OPTS}" --pyargs psydac -m "parallel and petsc"
- - name: Run tests in examples/feec
- working-directory: ./pytest
- run: |
- python -m pytest examples/feec
-
- name: Remove test directory
if: always()
run: |
From 34620f8cf2e115c093c1fa5505bb7cc2b88a16b6 Mon Sep 17 00:00:00 2001
From: Frederik Schnack
Date: Thu, 4 Dec 2025 11:19:49 +0100
Subject: [PATCH 36/63] move script and add examples test
---
.github/workflows/testing.yml | 7 ++++++-
mpi_tester.py => scripts/mpi_tester.py | 0
2 files changed, 6 insertions(+), 1 deletion(-)
rename mpi_tester.py => scripts/mpi_tester.py (100%)
diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml
index c3c2da8a4..86e729f1b 100644
--- a/.github/workflows/testing.yml
+++ b/.github/workflows/testing.yml
@@ -190,7 +190,7 @@ jobs:
- name: Initialize test directory
run: |
mkdir pytest
- cp mpi_tester.py pytest
+ cp scripts/mpi_tester.py pytest
- name: Run coverage tests on macOS
if: matrix.os == 'macos-14'
@@ -236,6 +236,11 @@ jobs:
run: |
python mpi_tester.py --mpirun="mpiexec -n 4 ${MPI_OPTS}" --pyargs psydac -m "parallel and petsc"
+ - name: Run single-process example tests with Pytest on Ubuntu
+ if: matrix.os == 'ubuntu-24.04'
+ run: |
+ python -m pytest examples/feec
+
- name: Remove test directory
if: always()
run: |
diff --git a/mpi_tester.py b/scripts/mpi_tester.py
similarity index 100%
rename from mpi_tester.py
rename to scripts/mpi_tester.py
From 1131dc1384676c6867437e99a053cc6a3db52f5e Mon Sep 17 00:00:00 2001
From: elmosa
Date: Thu, 4 Dec 2025 19:52:21 +0100
Subject: [PATCH 37/63] add example 3D VTK positive Helmholtz
---
.../positive_Helmholtz_3D_fx.png | Bin 0 -> 1973988 bytes
.../positive_Helmholtz_3D_fy.png | Bin 0 -> 1973988 bytes
.../positive_Helmholtz_3D_fz.png | Bin 0 -> 1973988 bytes
.../positive_Helmholtz_3D_ux.png | Bin 0 -> 1973988 bytes
.../positive_Helmholtz_3D_uy.png | Bin 0 -> 1973988 bytes
.../positive_Helmholtz_3D_uz.png | Bin 0 -> 1973988 bytes
.../notebooks/positive_Helmholtz_3D_VTK.ipynb | 295 ++++++++++++++++++
7 files changed, 295 insertions(+)
create mode 100644 examples/notebooks/paraview_images/positive_Helmholtz_3D_fx.png
create mode 100644 examples/notebooks/paraview_images/positive_Helmholtz_3D_fy.png
create mode 100644 examples/notebooks/paraview_images/positive_Helmholtz_3D_fz.png
create mode 100644 examples/notebooks/paraview_images/positive_Helmholtz_3D_ux.png
create mode 100644 examples/notebooks/paraview_images/positive_Helmholtz_3D_uy.png
create mode 100644 examples/notebooks/paraview_images/positive_Helmholtz_3D_uz.png
create mode 100644 examples/notebooks/positive_Helmholtz_3D_VTK.ipynb
diff --git a/examples/notebooks/paraview_images/positive_Helmholtz_3D_fx.png b/examples/notebooks/paraview_images/positive_Helmholtz_3D_fx.png
new file mode 100644
index 0000000000000000000000000000000000000000..00d948219f177a26418846e1a6c0854d6a4025cb
GIT binary patch
literal 1973988