diff --git a/ImageD11/nbGui/S3DXRD/0_segment_and_label.ipynb b/ImageD11/nbGui/S3DXRD/0_segment_and_label.ipynb index 692f39e0..5f091bd9 100755 --- a/ImageD11/nbGui/S3DXRD/0_segment_and_label.ipynb +++ b/ImageD11/nbGui/S3DXRD/0_segment_and_label.ipynb @@ -20,18 +20,6 @@ "Outside ESRF: download [install_ImageD11_from_git.py](https://github.com/FABLE-3DXRD/ImageD11/tree/master/ImageD11/nbGui/install_ImageD11_from_git.py), and update the path in the next cell:" ] }, - { - "cell_type": "code", - "execution_count": null, - "id": "fbd76936-7fac-4b03-99eb-2e1651ac4dd5", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())" - ] - }, { "cell_type": "code", "execution_count": null, @@ -51,13 +39,17 @@ "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", "# python environment stuff\n", - "CHECKOUT_PATH = None\n", - "IMAGED11_PATH = \"ImageD11\"\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", + "PYTHONPATH = None\n", "\n", "# Experts : update these files for your detector if you need to\n", - "maskfile = \"/data/id11/nanoscope/Eiger/eiger_mask_E-08-0144_20240205.edf\"\n", - "# e2dxfile = \"/data/id11/nanoscope/Eiger/e2dx_E-08-0144_20240205.edf\"\n", - "# e2dyfile = \"/data/id11/nanoscope/Eiger/e2dy_E-08-0144_20240205.edf\"\n", + "maskfile = \"/data/id11/nanoscope/Eiger/eiger_mask_E-08-0144_20250819.edf\"\n", + "# old papermill tests still use e2dx + e2dy\n", + "# If you give e2dxfile, it is used. Otherwise the detector h5 below is used.\n", + "e2dxfile = None # \"/data/id11/nanoscope/Eiger/e2dx_E-08-0144_20240205.edf\"\n", + "e2dyfile = None # \"/data/id11/nanoscope/Eiger/e2dy_E-08-0144_20240205.edf\"\n", + "# In future it is easier to have one detectorh5 (so you can't mix up x/y).\n", "detectorh5 = \"/data/id11/nanoscope/Eiger/spatial_20250826_LJ/newSpatial_20250819.h5\"\n", "detector = 'eiger'\n", "omegamotor = 'rot_center'\n", @@ -88,7 +80,9 @@ }, "outputs": [], "source": [ - "PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" ] }, { "cell_type": "code", "execution_count": null, @@ -217,8 +211,11 @@ " )\n", "ds.import_all() # Can use scans = [f'{scan}.1' for scan in range(1,102)] )\n", "ds.maskfile = maskfile\n", - "ds.e2dxfile = e2dxfile\n", - "ds.e2dyfile = e2dyfile\n", + "if e2dxfile is not None:\n", + " ds.e2dxfile = e2dxfile\n", + " ds.e2dyfile = e2dyfile\n", + "else:\n", + " ds.detectorh5 = detectorh5\n", "ds.save()" ] }, diff --git a/ImageD11/nbGui/S3DXRD/0_segment_frelon.ipynb b/ImageD11/nbGui/S3DXRD/0_segment_frelon.ipynb index 433d4345..99d716c2 100755 --- a/ImageD11/nbGui/S3DXRD/0_segment_frelon.ipynb +++ b/ImageD11/nbGui/S3DXRD/0_segment_frelon.ipynb @@ -32,18 +32,6 @@ "It will automatically download and install ImageD11 to your home directory" ] }, - { - "cell_type": "code", - "execution_count": null, - "id": "2402147c-5513-4907-8ca9-76e3e252df0c", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())" - ] - }, { "cell_type": "code", "execution_count": null, @@ -63,8 +51,8 @@ "# to view the tag, select the cell, then
find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", "# python environment stuff\n", - "CHECKOUT_PATH = None\n", - "IMAGED11_PATH = \"ImageD11\"\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "# Experts : update these files for your detector if you need to\n", "\n", @@ -110,17 +98,15 @@ { "cell_type": "code", "execution_count": null, - "id": "ac4e7e05-93ac-4ffc-849f-9bc581889911", + "id": "2402147c-5513-4907-8ca9-76e3e252df0c", "metadata": { - "editable": true, - "slideshow": { - "slide_type": "" - }, "tags": [] }, "outputs": [], "source": [ - "PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" ] }, { diff --git a/ImageD11/nbGui/S3DXRD/4_visualise.ipynb b/ImageD11/nbGui/S3DXRD/4_visualise.ipynb index b6fb76a3..efbb7b39 100755 --- a/ImageD11/nbGui/S3DXRD/4_visualise.ipynb +++ b/ImageD11/nbGui/S3DXRD/4_visualise.ipynb @@ -41,15 +41,6 @@ "os.environ['MKL_NUM_THREADS'] = '1'" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())" - ] - }, { "cell_type": "code", "execution_count": null, @@ -68,8 +59,8 @@ "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", "# python environment stuff\n", - "CHECKOUT_PATH = None\n", - "IMAGED11_PATH = \"ImageD11\"\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "# dataset file to import\n", "dset_path = 'si_cube_test/processed/Si_cube/Si_cube_S3DXRD_nt_moves_dty/Si_cube_S3DXRD_nt_moves_dty_dataset.h5'\n", @@ -84,16 +75,12 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "editable": true, - "slideshow": { - "slide_type": "" - }, - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ - "PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" ] }, { diff --git a/ImageD11/nbGui/S3DXRD/5_combine_phases.ipynb b/ImageD11/nbGui/S3DXRD/5_combine_phases.ipynb index 90da2515..e284b9ea 100755 --- a/ImageD11/nbGui/S3DXRD/5_combine_phases.ipynb +++ b/ImageD11/nbGui/S3DXRD/5_combine_phases.ipynb @@ -16,16 +16,6 @@ "__Date: 21/02/2025__" ] }, - { - "cell_type": "code", - "execution_count": null, - "id": "636849a2-54fd-44ce-aca3-cb8e7e945e59", - "metadata": {}, - "outputs": [], - "source": [ - "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())" - ] - }, { "cell_type": "code", "execution_count": null, @@ -45,8 +35,8 @@ "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", "# python environment stuff\n", - "CHECKOUT_PATH = None\n", - "IMAGED11_PATH = \"ImageD11\"\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a 
folder for the checkout\n", "\n", "# dataset file to import\n", "dset_path = 'si_cube_test/processed/Si_cube/Si_cube_S3DXRD_nt_moves_dty/Si_cube_S3DXRD_nt_moves_dty_dataset.h5'\n", @@ -70,7 +60,9 @@ }, "outputs": [], "source": [ - "PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" ] }, { diff --git a/ImageD11/nbGui/S3DXRD/6_run_papermill.ipynb b/ImageD11/nbGui/S3DXRD/6_run_papermill.ipynb index 01db93bb..e63e744e 100644 --- a/ImageD11/nbGui/S3DXRD/6_run_papermill.ipynb +++ b/ImageD11/nbGui/S3DXRD/6_run_papermill.ipynb @@ -58,8 +58,8 @@ "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", "# python environment stuff\n", - "CHECKOUT_PATH = None\n", - "IMAGED11_PATH = \"ImageD11\"\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "# give it a path to an existing dataset to determine the required paths\n", "dset_path = \"path/to/dataset.h5\"\n", @@ -91,7 +91,9 @@ }, "outputs": [], "source": [ - "PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" ] }, { diff --git a/ImageD11/nbGui/S3DXRD/7_stack_layers.ipynb b/ImageD11/nbGui/S3DXRD/7_stack_layers.ipynb index af6b0b02..73447d88 100644 --- a/ImageD11/nbGui/S3DXRD/7_stack_layers.ipynb +++ b/ImageD11/nbGui/S3DXRD/7_stack_layers.ipynb @@ -49,8 +49,8 @@ "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", "# python environment stuff\n", - "CHECKOUT_PATH = None\n", - "IMAGED11_PATH = \"ImageD11\"\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "# supply the path to one dataset file - we'll find the rest automatically\n", "dset_path = 'si_cube_test/processed/Si_cube/Si_cube_S3DXRD_nt_moves_dty/Si_cube_S3DXRD_nt_moves_dty_dataset.h5'\n", @@ -81,7 +81,9 @@ }, "outputs": [], "source": [ - "PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" ] }, { diff --git a/ImageD11/nbGui/S3DXRD/fit_y0.ipynb b/ImageD11/nbGui/S3DXRD/fit_y0.ipynb index cd5f272e..8a27ab9e 100644 --- a/ImageD11/nbGui/S3DXRD/fit_y0.ipynb +++ b/ImageD11/nbGui/S3DXRD/fit_y0.ipynb @@ -44,8 +44,8 @@ "outputs": [], "source": [ "# python environment stuff\n", - "CHECKOUT_PATH = None\n", - "IMAGED11_PATH = \"ImageD11\"\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "dset_path = 'si_cube_test/processed/Si_cube/Si_cube_S3DXRD_nt_moves_dty/Si_cube_S3DXRD_nt_moves_dty_dataset.h5'\n", "\n", @@ -65,7 +65,9 @@ }, "outputs": [], "source": [ - "PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + "if 
IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" ] }, { diff --git a/ImageD11/nbGui/S3DXRD/friedel_pair_map.ipynb b/ImageD11/nbGui/S3DXRD/friedel_pair_map.ipynb index b0b29a69..6c21d58a 100644 --- a/ImageD11/nbGui/S3DXRD/friedel_pair_map.ipynb +++ b/ImageD11/nbGui/S3DXRD/friedel_pair_map.ipynb @@ -22,21 +22,6 @@ "Jon Wright. March 2025.\n" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "editable": true, - "slideshow": { - "slide_type": "" - }, - "tags": [] - }, - "outputs": [], - "source": [ - "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())" - ] - }, { "cell_type": "code", "execution_count": null, @@ -50,8 +35,8 @@ "outputs": [], "source": [ "# python environment stuff\n", - "CHECKOUT_PATH = None\n", - "IMAGED11_PATH = \"ImageD11\"\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "\n", "dset_path = \"/path/to/dataset\"\n", @@ -78,6 +63,23 @@ " ytol = 2." ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + ] + }, { "cell_type": "code", "execution_count": null, diff --git a/ImageD11/nbGui/S3DXRD/import_test_data.ipynb b/ImageD11/nbGui/S3DXRD/import_test_data.ipynb index 74497f49..f04e1e9a 100644 --- a/ImageD11/nbGui/S3DXRD/import_test_data.ipynb +++ b/ImageD11/nbGui/S3DXRD/import_test_data.ipynb @@ -31,16 +31,6 @@ "It will automatically download and install ImageD11 to your home directory\n" ] }, - { - "cell_type": "code", - "execution_count": null, - "id": "6f943f39-15ab-4bda-b0d9-38ed46f06b5a", - "metadata": {}, - "outputs": [], - "source": [ - "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())" - ] - }, { "cell_type": "code", "execution_count": null, @@ -60,8 +50,20 @@ "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "download_dir = 'si_cube_test'\n", "# python environment stuff\n", - "CHECKOUT_PATH = None\n", - "IMAGED11_PATH = \"ImageD11\" # Name of the git checkout folder" + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6f943f39-15ab-4bda-b0d9-38ed46f06b5a", + "metadata": {}, + "outputs": [], + "source": [ + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" ] }, { @@ -77,8 +79,6 @@ }, "outputs": [], "source": [ - "PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)\n", - "\n", "import os\n", "\n", "import ImageD11.sinograms.dataset\n", diff --git a/ImageD11/nbGui/S3DXRD/pbp_1_indexing.ipynb b/ImageD11/nbGui/S3DXRD/pbp_1_indexing.ipynb index 7b649275..02a361ca 100755 --- a/ImageD11/nbGui/S3DXRD/pbp_1_indexing.ipynb +++ b/ImageD11/nbGui/S3DXRD/pbp_1_indexing.ipynb @@ -43,15 +43,6 @@ 
"os.environ['MKL_NUM_THREADS'] = '1'" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())" - ] - }, { "cell_type": "code", "execution_count": null, @@ -70,8 +61,8 @@ "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", "# python environment stuff\n", - "CHECKOUT_PATH = None\n", - "IMAGED11_PATH = \"ImageD11\"\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "# dataset file to import\n", "dset_path = 'si_cube_test/processed/Si_cube/Si_cube_S3DXRD_nt_moves_dty/Si_cube_S3DXRD_nt_moves_dty_dataset.h5'\n", @@ -125,7 +116,11 @@ }, "outputs": [], "source": [ - "PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)\n", + "else:\n", + " PYTHONPATH = None" ] }, { @@ -139,7 +134,10 @@ "import numpy as np\n", "from matplotlib import pyplot as plt\n", "\n", - "from ImageD11.nbGui.S3DXRD.run_pbp_recon_chunk import merge_chunk_outputs\n", + "try:\n", + " from ImageD11.nbGui.S3DXRD.run_pbp_recon_chunk import merge_chunk_outputs\n", + "except:\n", + " merge_chunk_outputs = None # Old version\n", "import ImageD11.sinograms.point_by_point\n", "import ImageD11.sinograms.dataset\n", "import ImageD11.columnfile\n", @@ -407,7 +405,7 @@ }, "outputs": [], "source": [ - "if use_cluster:\n", + "if merge_chunk_outputs is not None and use_cluster:\n", " grains_prefix = ds.pbpfile.replace(ds.analysispath, os.path.join(ds.analysispath, 'slurm_pbp')).replace('.txt', f'_{phase_str}_')\n", " bash_script_path, grains_files = pbp_object.submit_slurm_chunks(grains_prefix,\n", " PYTHONPATH,\n", diff --git a/ImageD11/nbGui/S3DXRD/pbp_2_visualise.ipynb b/ImageD11/nbGui/S3DXRD/pbp_2_visualise.ipynb index 36a48a3c..6e21124f 100755 --- a/ImageD11/nbGui/S3DXRD/pbp_2_visualise.ipynb +++ b/ImageD11/nbGui/S3DXRD/pbp_2_visualise.ipynb @@ -24,17 +24,6 @@ "Then run 4_visualise to convert the refinement results to an accurate single-valued map with good strains." 
] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())" - ] - }, { "cell_type": "code", "execution_count": null, @@ -53,8 +42,8 @@ "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", "# python environment stuff\n", - "CHECKOUT_PATH = None\n", - "IMAGED11_PATH = \"ImageD11\"\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "# dataset file to import\n", "dset_path = 'si_cube_test/processed/Si_cube/Si_cube_S3DXRD_nt_moves_dty/Si_cube_S3DXRD_nt_moves_dty_dataset.h5'\n", @@ -78,7 +67,9 @@ }, "outputs": [], "source": [ - "PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" ] }, { @@ -89,6 +80,7 @@ }, "outputs": [], "source": [ + "import os\n", "import numpy as np\n", "from matplotlib import pyplot as plt\n", "import matplotlib.cm as cm\n", diff --git a/ImageD11/nbGui/S3DXRD/pbp_3_refinement.ipynb b/ImageD11/nbGui/S3DXRD/pbp_3_refinement.ipynb index 7caf9249..bad5a60a 100755 --- a/ImageD11/nbGui/S3DXRD/pbp_3_refinement.ipynb +++ b/ImageD11/nbGui/S3DXRD/pbp_3_refinement.ipynb @@ -45,15 +45,6 @@ "os.environ['MKL_NUM_THREADS'] = '1'" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())" - ] - }, { "cell_type": "code", "execution_count": null, @@ -72,8 +63,8 @@ "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", "# python environment stuff\n", - "CHECKOUT_PATH = None\n", - "IMAGED11_PATH = \"ImageD11\"\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "# dataset file to import\n", "dset_path = 'si_cube_test/processed/Si_cube/Si_cube_S3DXRD_nt_moves_dty/Si_cube_S3DXRD_nt_moves_dty_dataset.h5'\n", @@ -111,7 +102,11 @@ }, "outputs": [], "source": [ - "PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " PYTHONPATH=setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)\n", + "else:\n", + " PYTHONPATH=None # use the system python" ] }, { diff --git a/ImageD11/nbGui/S3DXRD/select_for_index_unknown.ipynb b/ImageD11/nbGui/S3DXRD/select_for_index_unknown.ipynb index 16d98e39..75d8d073 100644 --- a/ImageD11/nbGui/S3DXRD/select_for_index_unknown.ipynb +++ b/ImageD11/nbGui/S3DXRD/select_for_index_unknown.ipynb @@ -42,13 +42,13 @@ }, "outputs": [], "source": [ - "import os, sys\n", - "# USER: You can change this location if you want\n", - "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", "# python environment stuff\n", - "CHECKOUT_PATH = None\n", - "IMAGED11_PATH = \"ImageD11\"\n", - "PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or 
\"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", + "\n", + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" ] }, { @@ -60,6 +60,7 @@ "outputs": [], "source": [ "%matplotlib ipympl\n", + "import os, sys\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "import ImageD11.sinograms.dataset\n", diff --git a/ImageD11/nbGui/S3DXRD/tomo_1_index.ipynb b/ImageD11/nbGui/S3DXRD/tomo_1_index.ipynb index f99cefb3..e92b31a1 100755 --- a/ImageD11/nbGui/S3DXRD/tomo_1_index.ipynb +++ b/ImageD11/nbGui/S3DXRD/tomo_1_index.ipynb @@ -25,17 +25,6 @@ "If it doesn't seem to work well, try the point-by-point route instead!" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())" - ] - }, { "cell_type": "code", "execution_count": null, @@ -54,8 +43,8 @@ "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", "# python environment stuff\n", - "CHECKOUT_PATH = None\n", - "IMAGED11_PATH = \"ImageD11\"\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "# dataset file to import\n", "dset_path = 'si_cube_test/processed/Si_cube/Si_cube_S3DXRD_nt_moves_dty/Si_cube_S3DXRD_nt_moves_dty_dataset.h5'\n", @@ -102,7 +91,9 @@ }, "outputs": [], "source": [ - "PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" ] }, { @@ -113,6 +104,7 @@ }, "outputs": [], "source": [ + "import os\n", "import numpy as np\n", "from matplotlib import pyplot as plt\n", "\n", diff --git a/ImageD11/nbGui/S3DXRD/tomo_1_index_minor_phase.ipynb b/ImageD11/nbGui/S3DXRD/tomo_1_index_minor_phase.ipynb index 23ccb43c..e4bb1be1 100755 --- a/ImageD11/nbGui/S3DXRD/tomo_1_index_minor_phase.ipynb +++ b/ImageD11/nbGui/S3DXRD/tomo_1_index_minor_phase.ipynb @@ -26,17 +26,6 @@ "If it doesn't seem to work well, try the point-by-point route instead!" 
] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())" - ] - }, { "cell_type": "code", "execution_count": null, @@ -55,8 +44,8 @@ "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", "# python environment stuff\n", - "CHECKOUT_PATH = None\n", - "IMAGED11_PATH = \"ImageD11\"\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "# dataset file to import\n", "dset_path = 'si_cube_test/processed/Si_cube/Si_cube_S3DXRD_nt_moves_dty/Si_cube_S3DXRD_nt_moves_dty_dataset.h5'\n", @@ -103,7 +92,9 @@ }, "outputs": [], "source": [ - "PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" ] }, { diff --git a/ImageD11/nbGui/S3DXRD/tomo_2_map.ipynb b/ImageD11/nbGui/S3DXRD/tomo_2_map.ipynb index 1e120bdd..654c74f5 100644 --- a/ImageD11/nbGui/S3DXRD/tomo_2_map.ipynb +++ b/ImageD11/nbGui/S3DXRD/tomo_2_map.ipynb @@ -37,15 +37,6 @@ "os.environ['MKL_NUM_THREADS'] = '1'" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())" - ] - }, { "cell_type": "code", "execution_count": 1, @@ -64,8 +55,8 @@ "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", "# python environment stuff\n", - "CHECKOUT_PATH = None\n", - "IMAGED11_PATH = \"ImageD11\"\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "# dataset file to import\n", "dset_path = 'si_cube_test/processed/Si_cube/Si_cube_S3DXRD_nt_moves_dty/Si_cube_S3DXRD_nt_moves_dty_dataset.h5'\n", @@ -118,6 +109,8 @@ }, "outputs": [], "source": [ + "# We have to run this at ESRF because the astra slurm script is using it\n", + "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", "PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" ] }, diff --git a/ImageD11/nbGui/S3DXRD/tomo_2_map_minor_phase.ipynb b/ImageD11/nbGui/S3DXRD/tomo_2_map_minor_phase.ipynb index 1c30dbfb..e111b77a 100755 --- a/ImageD11/nbGui/S3DXRD/tomo_2_map_minor_phase.ipynb +++ b/ImageD11/nbGui/S3DXRD/tomo_2_map_minor_phase.ipynb @@ -40,15 +40,6 @@ "os.environ['MKL_NUM_THREADS'] = '1'" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())" - ] - }, { "cell_type": "code", "execution_count": null, @@ -67,8 +58,8 @@ "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", "# python environment stuff\n", - "CHECKOUT_PATH = None\n", - "IMAGED11_PATH = \"ImageD11\"\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "# dataset 
file to import\n", "dset_path = 'si_cube_test/processed/Si_cube/Si_cube_S3DXRD_nt_moves_dty/Si_cube_S3DXRD_nt_moves_dty_dataset.h5'\n", @@ -124,6 +115,8 @@ }, "outputs": [], "source": [ + "# We have to run this at ESRF because the astra slurm script is using it\n", + "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", "PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" ] }, diff --git a/ImageD11/nbGui/S3DXRD/tomo_3_refinement.ipynb b/ImageD11/nbGui/S3DXRD/tomo_3_refinement.ipynb index 5cc65f5a..a0d2c2f1 100755 --- a/ImageD11/nbGui/S3DXRD/tomo_3_refinement.ipynb +++ b/ImageD11/nbGui/S3DXRD/tomo_3_refinement.ipynb @@ -42,16 +42,6 @@ "os.environ['MKL_NUM_THREADS'] = '1'" ] }, - { - "cell_type": "code", - "execution_count": null, - "id": "324210ec-acd1-49de-aed0-0ec90b119249", - "metadata": {}, - "outputs": [], - "source": [ - "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())" - ] - }, { "cell_type": "code", "execution_count": null, @@ -71,8 +61,8 @@ "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", "# python environment stuff\n", - "CHECKOUT_PATH = None\n", - "IMAGED11_PATH = \"ImageD11\"\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "# dataset file to import\n", "dset_path = 'si_cube_test/processed/Si_cube/Si_cube_S3DXRD_nt_moves_dty/Si_cube_S3DXRD_nt_moves_dty_dataset.h5'\n", @@ -109,7 +99,11 @@ }, "outputs": [], "source": [ - "PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " PYTHONPATH = setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)\n", + "else:\n", + " PYTHONPATH = None # use the system python" ] }, { diff --git a/ImageD11/nbGui/TDXRD/0_segment_frelon.ipynb b/ImageD11/nbGui/TDXRD/0_segment_frelon.ipynb index adf46b1f..607f0a92 100755 --- a/ImageD11/nbGui/TDXRD/0_segment_frelon.ipynb +++ b/ImageD11/nbGui/TDXRD/0_segment_frelon.ipynb @@ -26,18 +26,6 @@ "It will automatically download and install ImageD11 to your home directory" ] }, - { - "cell_type": "code", - "execution_count": null, - "id": "2402147c-5513-4907-8ca9-76e3e252df0c", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())" - ] - }, { "cell_type": "code", "execution_count": null, @@ -52,7 +40,9 @@ "# this cell is tagged with 'parameters'\n", "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", - "PYTHONPATH = setup_ImageD11_from_git( ) # ( os.path.join( os.environ['HOME'],'Code'), 'ImageD11_git' )\n", + "# python environment stuff\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "# Experts : update these files for your detector if you need to\n", "\n", @@ -95,6 +85,18 @@ "scans = [\"1.1\",]" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "05630edf-0454-442c-a058-b1b775479b72", + "metadata": {}, + "outputs": [], + "source": [ + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " 
setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)\n" + ] + }, { "cell_type": "code", "execution_count": null, @@ -410,7 +412,14 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.6" + "version": "3.12.9" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } } }, "nbformat": 4, diff --git a/ImageD11/nbGui/TDXRD/1_index_default.ipynb b/ImageD11/nbGui/TDXRD/1_index_default.ipynb index e23db5cc..6cd1a992 100755 --- a/ImageD11/nbGui/TDXRD/1_index_default.ipynb +++ b/ImageD11/nbGui/TDXRD/1_index_default.ipynb @@ -9,17 +9,6 @@ "__Date: 21/02/2025__" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())" - ] - }, { "cell_type": "code", "execution_count": null, @@ -33,7 +22,9 @@ "# this cell is tagged with 'parameters'\n", "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", - "PYTHONPATH = setup_ImageD11_from_git( ) # ( os.path.join( os.environ['HOME'],'Code'), 'ImageD11_git' )\n", + "# python environment stuff\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "# desination of H5 files\n", "# replace below with e.g.:\n", @@ -77,6 +68,19 @@ "dset_prefix = 'ff'" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + ] + }, { "cell_type": "code", "execution_count": null, @@ -639,7 +643,14 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.6" + "version": "3.12.9" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } } }, "nbformat": 4, diff --git a/ImageD11/nbGui/TDXRD/1_index_friedel.ipynb b/ImageD11/nbGui/TDXRD/1_index_friedel.ipynb index 79e316df..62fb1a1b 100755 --- a/ImageD11/nbGui/TDXRD/1_index_friedel.ipynb +++ b/ImageD11/nbGui/TDXRD/1_index_friedel.ipynb @@ -39,7 +39,9 @@ "# this cell is tagged with 'parameters'\n", "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", - "PYTHONPATH = setup_ImageD11_from_git( ) # ( os.path.join( os.environ['HOME'],'Code'), 'ImageD11_git' )\n", + "# python environment stuff\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "dset_path = ''\n", "\n", @@ -94,6 +96,17 @@ "dset_prefix = 'ff'" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + ] + }, { "cell_type": "code", "execution_count": null, @@ -755,7 +768,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python 3 (main)", "language": 
"python", "name": "python3" }, @@ -769,7 +782,14 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.12.9" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } } }, "nbformat": 4, diff --git a/ImageD11/nbGui/TDXRD/1_index_grid.ipynb b/ImageD11/nbGui/TDXRD/1_index_grid.ipynb index e81bd3a0..d633b047 100755 --- a/ImageD11/nbGui/TDXRD/1_index_grid.ipynb +++ b/ImageD11/nbGui/TDXRD/1_index_grid.ipynb @@ -44,7 +44,9 @@ "# this cell is tagged with 'parameters'\n", "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", - "PYTHONPATH = setup_ImageD11_from_git( ) # ( os.path.join( os.environ['HOME'],'Code'), 'ImageD11_git' )\n", + "# python environment stuff\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "# desination of H5 files\n", "# replace below with e.g.:\n", @@ -94,6 +96,18 @@ "dset_prefix = 'ff'" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "696bdb1e-9526-4c3b-b54f-490c1be2fd52", + "metadata": {}, + "outputs": [], + "source": [ + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + ] + }, { "cell_type": "code", "execution_count": null, @@ -797,7 +811,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python 3 (main)", "language": "python", "name": "python3" }, @@ -811,7 +825,14 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.12.9" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } } }, "nbformat": 4, diff --git a/ImageD11/nbGui/TDXRD/2_run_papermill.ipynb b/ImageD11/nbGui/TDXRD/2_run_papermill.ipynb index e6b54e5e..862fa408 100644 --- a/ImageD11/nbGui/TDXRD/2_run_papermill.ipynb +++ b/ImageD11/nbGui/TDXRD/2_run_papermill.ipynb @@ -49,7 +49,9 @@ "# this cell is tagged with 'parameters'\n", "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", - "PYTHONPATH = setup_ImageD11_from_git( ) # ( os.path.join( os.environ['HOME'],'Code'), 'ImageD11_git' )\n", + "# python environment stuff\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "# give it a path to an existing dataset to determine the required paths\n", "dset_path = \"path/to/dataset.h5\"\n", @@ -68,6 +70,18 @@ "notebooks_to_run = None" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "5fb01b3e-d1e8-42a7-ba93-cc28a4627730", + "metadata": {}, + "outputs": [], + "source": [ + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + ] + }, { "cell_type": "code", "execution_count": null, @@ -217,7 +231,14 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.6" + "version": "3.12.9" + }, + "widgets": { + 
"application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } } }, "nbformat": 4, diff --git a/ImageD11/nbGui/TDXRD/3_merge_slices.ipynb b/ImageD11/nbGui/TDXRD/3_merge_slices.ipynb index 92df041d..e8e83bfc 100755 --- a/ImageD11/nbGui/TDXRD/3_merge_slices.ipynb +++ b/ImageD11/nbGui/TDXRD/3_merge_slices.ipynb @@ -19,25 +19,6 @@ "__Date: 21/02/2025__" ] }, - { - "cell_type": "code", - "execution_count": null, - "id": "ee3ddc28", - "metadata": { - "papermill": { - "duration": 0.017851, - "end_time": "2025-02-17T13:42:50.853074", - "exception": false, - "start_time": "2025-02-17T13:42:50.835223", - "status": "completed" - }, - "tags": [] - }, - "outputs": [], - "source": [ - "exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())" - ] - }, { "cell_type": "code", "execution_count": null, @@ -59,7 +40,9 @@ "# this cell is tagged with 'parameters'\n", "# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n", "\n", - "PYTHONPATH = setup_ImageD11_from_git( ) # ( os.path.join( os.environ['HOME'],'Code'), 'ImageD11_git' )\n", + "# python environment stuff\n", + "IMAGED11_PATH = None # means do not use git, otherwise \"ImageD11\" or \"ImageD11_version_xx\", etc\n", + "CHECKOUT_PATH = None # None means guess, or you can specify a folder for the checkout\n", "\n", "dset_path = ''\n", "\n", @@ -70,6 +53,27 @@ "dset_prefix = \"ff\"" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "ee3ddc28", + "metadata": { + "papermill": { + "duration": 0.017851, + "end_time": "2025-02-17T13:42:50.853074", + "exception": false, + "start_time": "2025-02-17T13:42:50.835223", + "status": "completed" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "if IMAGED11_PATH is not None:\n", + " exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n", + " setup_ImageD11_from_git(CHECKOUT_PATH, IMAGED11_PATH)" + ] + }, { "cell_type": "code", "execution_count": null, @@ -684,7 +688,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.6" + "version": "3.12.9" }, "papermill": { "default_parameters": {}, diff --git a/ImageD11/nbGui/install_ImageD11_from_git.py b/ImageD11/nbGui/install_ImageD11_from_git.py index a1b3abb3..796891b6 100644 --- a/ImageD11/nbGui/install_ImageD11_from_git.py +++ b/ImageD11/nbGui/install_ImageD11_from_git.py @@ -1,4 +1,3 @@ - import os, sys """ @@ -15,42 +14,68 @@ """ -def run_ImageD11_from_git( path, checkout ): - if not os.path.exists( path ): +def run_ImageD11_from_git(path, checkout): + """ + path = the folder to use for the git checkout + checkout = the name of the git checkout folder within path + + if checkout is None then use the system installed python + + returns what to put in the PYTHONPATH environment to get the + checked out ImageD11. 
None if there is no checkout + """ + if checkout is None: + return None + if not os.path.exists(path): print("Creating") - os.makedirs( path ) - assert os.path.isdir( path ) + os.makedirs(path) + assert os.path.isdir(path) code_path = os.path.join(path, checkout) if not os.path.exists(code_path): - os.system('git clone https://github.com/FABLE-3DXRD/ImageD11 ' + code_path) - assert os.path.exists(code_path),'failed to checkout from git' + os.system("git clone https://github.com/FABLE-3DXRD/ImageD11 " + code_path) + assert os.path.exists(code_path), "failed to checkout from git" bld = os.path.join(code_path, "build") if not os.path.exists(bld): - os.system('cd '+code_path+' && python setup.py build_ext --inplace') - assert os.path.exists(bld), 'failed to compile' + os.system("cd " + code_path + " && python setup.py build_ext --inplace") + assert os.path.exists(bld), "failed to compile" print("# Setting path via: ") sys.path.insert(0, code_path) - print("sys.path.insert(0,",code_path,")") + print("sys.path.insert(0,", code_path, ")") import ImageD11, ImageD11.cImageD11 + print("# Running from:", ImageD11.__file__) return code_path - - -def guess_ImageD11_code_path(): # This should be in silx somewhere? - """ Locates: + + +def guess_ImageD11_code_path(): + """Locates the SCRIPTS folder on the filesystem for ESRF users. scripts can hold a local installation of ImageD11 if you need one + + Otherwise returns your $HOME/Code folder (assumes you have one). """ - path_items = os.getcwd().split('/') - if 'visitor' in path_items: - vi = path_items.index( 'visitor' ) - experiment, session = path_items[ vi + 1], path_items[ vi + 3 ] - return os.path.join( "/data", "visitor", experiment, "id11", session, "SCRIPTS" ) - return os.environ['HOME']+'/Code' + path_items = os.getcwd().split("/") + if "visitor" in path_items: + vi = path_items.index("visitor") + experiment, session = path_items[vi + 1], path_items[vi + 3] + return os.path.join("/data", "visitor", experiment, "id11", session, "SCRIPTS") + return os.environ["HOME"] + "/Code" -def setup_ImageD11_from_git( path=None, checkout="ImageD11" ): +def setup_ImageD11_from_git(path=None, checkout="ImageD11"): + """ + Called from notebooks, installs a git checkout of ImageD11 to override + the one from the system (in case you need some latest features).
+ """ + if checkout is None and path is None: + # we assume you want the system python + import ImageD11 + folder = os.path.split(ImageD11.__file__)[0] + pythonpath = os.path.split(folder)[0] + # probably cvmfs ..., but if it holds /data/ + if pythonpath.find("/data/") > 0: + pythonpath = pythonpath[pythonpath.find("/data/") :] + return pythonpath if path is None: - return run_ImageD11_from_git( guess_ImageD11_code_path(), checkout ) + return run_ImageD11_from_git(guess_ImageD11_code_path(), checkout) else: - return run_ImageD11_from_git( path, checkout ) - + return run_ImageD11_from_git(path, checkout) diff --git a/ImageD11/nbGui/nb_utils.py b/ImageD11/nbGui/nb_utils.py index 1a4be8f9..d2a6e586 100644 --- a/ImageD11/nbGui/nb_utils.py +++ b/ImageD11/nbGui/nb_utils.py @@ -25,6 +25,19 @@ ### General utilities (for all notebooks) +def clean_esrf_path( + fname, + fakeroots=( + "/mnt/storage", + "/gpfs/easy", + ), +): + for item in fakeroots: + if fname.startswith(item): + return fname.replace(item, "") + return fname + + def is_notebook_executed(nb_path): import nbformat @@ -94,7 +107,7 @@ def notebook_exec_pmill( papermill.execute_notebook(nb_input_path, nb_output_path, parameters=params_dict) -def prepare_notebooks_for_datasets( +def prepare_notebooks( samples_dict, notebooks, dataroot, @@ -105,6 +118,58 @@ def prepare_notebooks_for_datasets( ): """ Prepare, but not execute, a series of notebooks for each dataset in samples_dict. + + Calls the old prepare_notebooks_for_datasets + + Places the prepared notebooks for each dataset like PROCESSED_DATA/sample/sample_dataset/foo.ipynb + Returns a list of absolute paths of notebooks to execute. + + samples_dict: dict of {sample1: [ds1, ds2, ds3], sample2: [ds1, ds2, ds3]} etc. + notebooks: list of tuples of [(notebook_filename.ipynb, {params_for_notebook_1.ipynb})] etc. + Param dicts should not contain dataroot, analysisroot, sample, dataset or dsfile information. + Those are instead prepared by this function in the "0...ipynb" notebook that creates the dataset + + dataroot: path to raw data folder + analysisroot: path to root of analysis folder (usually PROCESSED_DATA) + CHECKOUT_PATH: place to check out a git version of ImageD11. + IMAGED11_PATH: name of the checkout folder. None means use the system ImageD11. + notebook_parent_dir: path to parent directory of input notebooks. Default: current working directory + """ + PYTHONPATH = None # do nothing + if IMAGED11_PATH is not None and CHECKOUT_PATH is not None: + PYTHONPATH = os.path.join(IMAGED11_PATH, CHECKOUT_PATH) + # Insert IMAGED11_PATH and CHECKOUT_PATH into nbparams only if they are requested + # Some notebooks will never need git (e.g. standalone doc style) + import papermill + + for nb_name, nb_params in notebooks: + # Check what the notebook is expecting + nb_path = os.path.join(notebook_parent_dir, nb_name) + expected_pars = papermill.inspect_notebook(nb_path) + if "CHECKOUT_PATH" in expected_pars: + nb_params["CHECKOUT_PATH"] = CHECKOUT_PATH + if "IMAGED11_PATH" in expected_pars: + nb_params["IMAGED11_PATH"] = IMAGED11_PATH + return prepare_notebooks_for_datasets( + samples_dict, + notebooks, + dataroot, + analysisroot, + PYTHONPATH=PYTHONPATH, + notebook_parent_dir=notebook_parent_dir, + ) + + +def prepare_notebooks_for_datasets( + samples_dict, + notebooks, + dataroot, + analysisroot, + PYTHONPATH=None, + notebook_parent_dir=None, +): + """ + Prepare, but not execute, a series of notebooks for each dataset in samples_dict. 
Places the prepared notebooks for each dataset like PROCESSED_DATA/sample/sample_dataset/foo.ipynb Returns a list of absolute paths of notebooks to execute. @@ -116,7 +181,7 @@ def prepare_notebooks_for_datasets( notebook_parent_dir: path to parent directory of input notebooks. Default: current working directory """ if notebook_parent_dir is None: - notebook_parent_dir = os.path.abspath("./") + notebook_parent_dir = clean_esrf_path(os.path.abspath(".")) notebooks_to_execute = [] for sample, datasets in samples_dict.items(): @@ -139,10 +204,8 @@ def prepare_notebooks_for_datasets( ) # use the notebook from the current folder nb_out = os.path.join(ds.analysispath, nb_name) # prepare parameters for this notebook - if CHECKOUT_PATH is not None: - nb_params["CHECKOUT_PATH"] = CHECKOUT_PATH - if IMAGED11_PATH is not None: - nb_params["IMAGED11_PATH"] = IMAGED11_PATH + if PYTHONPATH is not None: + nb_params["PYTHONPATH"] = PYTHONPATH if nb_name.startswith("0"): # the first notebook, segmentation, so we don't have a dataset name yet nb_params["dataroot"] = ds.dataroot @@ -352,7 +415,7 @@ def prepare_mlem_bash( cores_per_task=cores_per_task, python_script_path=python_script_path, id11_code_path=id11_code_path, - grainsfile=os.path.abspath(ds.grainsfile).replace("/mnt/storage", ""), + grainsfile=clean_esrf_path(os.path.abspath(ds.grainsfile)), reconfile=reconfile, dsfile=ds.dsfile, log_path=log_path, @@ -407,8 +470,8 @@ def prepare_astra_bash(ds, grainsfile, id11_code_path, group_name="grains", memo errfile_path=errfile_path, python_script_path=python_script_path, id11_code_path=id11_code_path, - grainsfile=os.path.abspath(grainsfile).replace("/mnt/storage", ""), - dsfile=os.path.abspath(ds.dsfile).replace("/mnt/storage", ""), + grainsfile=clean_esrf_path(os.path.abspath(grainsfile)), + dsfile=clean_esrf_path(os.path.abspath(ds.dsfile)), group_name=group_name, memory=memory, log_path=log_path, @@ -459,7 +522,7 @@ def prepare_pbp_bash(pbp_object, id11_code_path, minpkint): errfile_path=errfile_path, python_script_path=python_script_path, id11_code_path=id11_code_path, - dsfile=os.path.abspath(ds.dsfile).replace("/mnt/storage", ""), + dsfile=clean_esrf_path(os.path.abspath(ds.dsfile)), hkltol=pbp_object.hkl_tol, fpks=pbp_object.fpks, dstol=pbp_object.ds_tol, @@ -573,7 +636,7 @@ def assign_peaks_to_grains(grains, cf, tol): labels = np.zeros(cf.nrows, "i") # get all g-vectors from columnfile (updateGeometry) # should we instead calculate considering grain translations? (probably!) - # gv = np.transpose((cf.gx, cf.gy, cf.gz)).astype(float) # not C_CONTIGUOUS ! 
+ # gv = np.transpose((cf.gx, cf.gy, cf.gz)).astype(float) # not C_CONTIGUOUS gv = np.empty((cf.nrows, 3), float) gv[:, 0] = cf.gx gv[:, 1] = cf.gy diff --git a/ImageD11/sparseframe.py b/ImageD11/sparseframe.py index e3b5028c..8f24450d 100644 --- a/ImageD11/sparseframe.py +++ b/ImageD11/sparseframe.py @@ -1,8 +1,7 @@ - from __future__ import print_function, division import time, sys -import h5py, scipy.sparse, numpy as np #, pylab as pl +import h5py, scipy.sparse, numpy as np # , pylab as pl from ImageD11 import cImageD11 SAFE = True @@ -10,23 +9,23 @@ # see also sandbox/harvest_pixels.py NAMES = { - "filename" : "original filename used to create a sparse frame", - "intensity" : "corrected pixel values", + "filename": "original filename used to create a sparse frame", + "intensity": "corrected pixel values", "nlabel": "Number of unique labels for an image labelling", - "threshold" : "Cut off used for thresholding", - } + "threshold": "Cut off used for thresholding", +} -class sparse_frame( object ): +class sparse_frame(object): """ Indices / shape mapping This was developed for a single 2D frame See SparseScan below for something aiming towards many frames """ - def __init__(self, row, col, shape, itype=np.uint16, pixels=None, - SAFE=SAFE ): - """ row = slow direction + + def __init__(self, row, col, shape, itype=np.uint16, pixels=None, SAFE=SAFE): + """row = slow direction col = fast direction shape = size of full image itype = the integer type to store the indices @@ -36,10 +35,10 @@ def __init__(self, row, col, shape, itype=np.uint16, pixels=None, throw in a ary.attrs if you want to save some """ if SAFE: - self.check( row, col, shape, itype, SAFE ) + self.check(row, col, shape, itype, SAFE) self.shape = shape - self.row = np.asarray(row, dtype = itype ) - self.col = np.asarray(col, dtype = itype ) + self.row = np.asarray(row, dtype=itype) + self.col = np.asarray(col, dtype=itype) self.nnz = len(self.row) # Things we could have using those indices: # raw pixel intensities @@ -54,7 +53,11 @@ def __init__(self, row, col, shape, itype=np.uint16, pixels=None, self.pixels[name] = val def __repr__(self): - h = "Sparse Frame ( %d , %d ) nnz = %d, data: "%( self.shape[0], self.shape[1], self.nnz ) + h = "Sparse Frame ( %d , %d ) nnz = %d, data: " % ( + self.shape[0], + self.shape[1], + self.nnz, + ) h += " ".join(list(self.pixels.keys())) return h @@ -73,15 +76,14 @@ def __eq__(self, other): return False for k in self.pixels.keys(): if not (self.pixels[k] == other.pixels[k]).all(): - print("pixels mismatch",k) + print("pixels mismatch", k) print(self.pixels[k]) print(other.pixels[k]) return False return True - def check(self, row, col, shape, itype, SAFE=SAFE): - """ Ensure the index data makes sense and fits """ + """Ensure the index data makes sense and fits""" if SAFE: lo = np.iinfo(itype).min hi = np.iinfo(itype).max @@ -93,29 +95,31 @@ def check(self, row, col, shape, itype, SAFE=SAFE): assert len(row) == len(col) def is_sorted(self): - """ Tests whether the data are sorted into slow/fast order + """Tests whether the data are sorted into slow/fast order rows are slow direction - columns are fast """ + columns are fast""" # TODO: non uint16 cases - assert self.row.dtype == np.uint16 and \ - cImageD11.sparse_is_sorted( self.row, self.col ) == 0 + assert ( + self.row.dtype == np.uint16 + and cImageD11.sparse_is_sorted(self.row, self.col) == 0 + ) def to_dense(self, data=None, out=None): - """ returns the full 2D image + """returns the full 2D image data = name in self.pixels or 1D array 
matching self.nnz Does not handle repeated indices e.g. obj.to_dense( obj.pixels['raw_intensity'] ) """ if data in self.pixels: - data = self.pixels[data] # give back this array + data = self.pixels[data] # give back this array else: - ks = list( self.pixels.keys() ) - if len(ks)==1: - data = self.pixels[ks[0]] # default for only one + ks = list(self.pixels.keys()) + if len(ks) == 1: + data = self.pixels[ks[0]] # default for only one else: - data = np.ones( self.nnz, bool ) # give a mask + data = np.ones(self.nnz, bool) # give a mask if out is None: - out = np.zeros( self.shape, data.dtype ) + out = np.zeros(self.shape, data.dtype) else: assert out.shape == self.shape assert len(data) == self.nnz @@ -123,97 +127,104 @@ # does not handle duplicate indices if they were present: # adr = self.row.astype(np.intp) * self.shape[1] + self.col # out.flat[adr] = data - out[ self.row, self.col ] = data + out[self.row, self.col] = data return out - def mask( self, msk ): - """ returns a subset of itself """ - spf = sparse_frame( self.row[msk], - self.col[msk], - self.shape, self.row.dtype ) + def mask(self, msk): + """returns a subset of itself""" + spf = sparse_frame(self.row[msk], self.col[msk], self.shape, self.row.dtype) for name, px in self.pixels.items(): if name in self.meta: m = self.meta[name].copy() else: m = None - spf.set_pixels( name, px[msk], meta = m ) + spf.set_pixels(name, px[msk], meta=m) return spf - def set_pixels( self, name, values, meta=None ): - """ Named arrays sharing these labels """ - if SAFE: assert len(values) == self.nnz + def set_pixels(self, name, values, meta=None): + """Named arrays sharing these labels""" + if SAFE: + assert len(values) == self.nnz self.pixels[name] = values if meta is not None: self.meta[name] = meta + def sort_by(self, name): + """Not sure when you would do this. For sorting + by a peak labelling to get pixels per peak""" + order = np.argsort(self.pixels[name]) + self.reorder(order) - def sort_by( self, name ): - """ Not sure when you would do this.
For sorting - by a peak labelling to get pixels per peak """ - order = np.argsort( self.pixels[name] ) - self.reorder( self, order ) + def sort(self): + """Puts you into slow / fast looping order""" + order = np.lexsort((self.col, self.row)) + self.reorder(order) - def sort( self ): - """ Puts you into slow / fast looping order """ - order = np.lexsort( ( self.col, self.row ) ) - self.reorder( self, order ) - - def reorder( self, order ): - """ Put the pixels into a different order (in place) """ - if SAFE: assert len(order) == self.nnz + def reorder(self, order): + """Put the pixels into a different order (in place)""" + if SAFE: + assert len(order) == self.nnz self.row[:] = self.row[order] self.col[:] = self.col[order] for name, px in self.pixels.items(): px[:] = px[order] - def threshold(self, threshold, name='intensity'): + def threshold(self, threshold, name="intensity"): """ returns a new sparse frame with pixels > threshold """ - return self.mask( self.pixels[name] > threshold ) + return self.mask(self.pixels[name] > threshold) - def to_hdf_group( frame, group ): - """ Save a 2D sparse frame to a hdf group + def to_hdf_group(frame, group): + """Save a 2D sparse frame to a hdf group Makes 1 single frame per group """ - itype = np.dtype( frame.row.dtype ) - meta = { "itype" : itype.name, - "shape0" : frame.shape[0], - "shape1" : frame.shape[1] } + itype = np.dtype(frame.row.dtype) + meta = {"itype": itype.name, "shape0": frame.shape[0], "shape1": frame.shape[1]} for name, value in meta.items(): group.attrs[name] = value - opts = { "compression": "lzf", - "shuffle" : True, - } - #opts = {} - group.require_dataset( "row", shape=(frame.nnz,), - dtype=itype, **opts ) - group.require_dataset( "col", shape=(frame.nnz,), - dtype=itype, **opts ) - group['row'][:] = frame.row - group['col'][:] = frame.col + opts = { + "compression": "lzf", + "shuffle": True, + } + # opts = {} + group.require_dataset("row", shape=(frame.nnz,), dtype=itype, **opts) + group.require_dataset("col", shape=(frame.nnz,), dtype=itype, **opts) + group["row"][:] = frame.row + group["col"][:] = frame.col for pxname, px in frame.pixels.items(): - group.require_dataset( pxname, shape=(frame.nnz,), - dtype=px.dtype, - **opts ) + group.require_dataset(pxname, shape=(frame.nnz,), dtype=px.dtype, **opts) group[pxname][:] = px if pxname in frame.meta: - group[pxname].attrs = dict( frame.meta[pxname] ) - - -omeganames = ['measurement/rot_center', 'measurement/rot', - 'measurement/diffrz_center', 'measurement/diffrz'] -dtynames = ['measurement/dty_center', 'measurement/dty', - 'measurement/diffty_center', 'measurement/diffty'] - - -class SparseScan( object ): - - - def __init__( self, hname, scan, start = 0, n=None, - names = ('row','col','intensity'), - omeganames = omeganames, - dtynames = dtynames ): + group[pxname].attrs = dict(frame.meta[pxname]) + + +omeganames = [ + "measurement/rot_center", + "measurement/rot", + "measurement/diffrz_center", + "measurement/diffrz", +] +dtynames = [ + "measurement/dty_center", + "measurement/dty", + "measurement/diffty_center", + "measurement/diffty", +] + + +class SparseScan(object): + + def __init__( + self, + hname, + scan, + start=0, + n=None, + names=("row", "col", "intensity"), + omeganames=omeganames, + dtynames=dtynames, + ): """ hname : file coming from a sparse segmentation scan : a scan within that file @@ -229,70 +240,77 @@ self.names = list(names) self.omeganames = list(omeganames) self.dtynames = list(dtynames) - if
+        if scan.find("::") >= 0:  # Format is "1.1::[start:end]"
             scan, indexes = scan.split("::")
-            start, end = [int(s) for s in indexes[1:-1].split(':')]
-            n = end - start
-        with h5py.File(hname,"r") as hin:
+            start, end = [int(s) for s in indexes[1:-1].split(":")]
+            n = end - start
+        with h5py.File(hname, "r") as hin:
             grp = hin[scan]
-            self.shape = tuple( [ int(v) for v in ( grp.attrs['nframes'],
-                                                    grp.attrs['shape0'],
-                                                    grp.attrs['shape1'] ) ] )
+            self.shape = tuple(
+                [
                    int(v)
+                    for v in (
+                        grp.attrs["nframes"],
+                        grp.attrs["shape0"],
+                        grp.attrs["shape1"],
+                    )
+                ]
+            )
             if n is None:
-                end = self.shape[0] # nframes
+                end = self.shape[0]  # nframes
             else:
                 end = start + n
-            self.shape = end-start, self.shape[1], self.shape[2]
+            self.shape = end - start, self.shape[1], self.shape[2]
             # read the motors - if any
             self.motors = {}
-            for name, motors in [ ('omega',self.omeganames),
-                                  ('dty',self.dtynames) ]:
+            for name, motors in [("omega", self.omeganames), ("dty", self.dtynames)]:
                 for motor in motors:
                     if motor in grp:
-                        self.motors[ name ] = grp[motor][start:end]
+                        self.motors[name] = grp[motor][start:end]
                         break
             # read the pixels - all pointers
-            nnz = grp['nnz'][:]
-            ipt = nnz_to_pointer( nnz )
+            nnz = grp["nnz"][:]
+            ipt = nnz_to_pointer(nnz)
            s = ipt[start]
            e = ipt[end]
            for name in self.names:
                if name in grp:
-                    setattr( self, name, grp[name][s:e] )
-        if 'intensity' in self.names:
+                    setattr(self, name, grp[name][s:e])
+        if "intensity" in self.names:
            self.intensity_input_dtype = self.intensity.dtype
            self.intensity = self.intensity.astype(np.float32)
        # pointers into this scan
        self.nnz = nnz[start:end]
-        self.ipt = nnz_to_pointer( self.nnz )
+        self.ipt = nnz_to_pointer(self.nnz)
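For reference, the "::" syntax parsed above lets the scan string carry its own frame range, so the following two constructions should load the same frames (file name purely illustrative):

    # both read 50 frames, starting at frame 0, from scan 1.1
    s1 = SparseScan("scan_sparse.h5", "1.1::[0:50]")
    s2 = SparseScan("scan_sparse.h5", "1.1", start=0, n=50)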
    @property
    def frame_id(self):
-        """ returns the frame ID of each pixel """
-        return np.repeat( np.arange(self.shape[0], dtype=np.intp ), self.nnz )
+        """returns the frame ID of each pixel"""
+        return np.repeat(np.arange(self.shape[0], dtype=np.intp), self.nnz)

    def to_dense(self, out=None):
-        """ returns a dense image stack """
+        """returns a dense image stack"""
        if out is None:
-            out = cImageD11.parallel_zeros( self.shape, self.intensity_input_dtype )
-        out[ self.frame_id, self.row, self.col ] = self.intensity
+            out = cImageD11.parallel_zeros(self.shape, self.intensity_input_dtype)
+        out[self.frame_id, self.row, self.col] = self.intensity
        return out

    def getframe(self, i, SAFE=SAFE):
        # (self, row, col, shape, itype=np.uint16, pixels=None):
        s = self.ipt[i]
-        e = self.ipt[i+1]
+        e = self.ipt[i + 1]
        if s == e:
-            return None # empty frame
-        return sparse_frame( self.row[ s: e],
-                             self.col[ s: e],
-                             self.shape[1:],
-                             pixels = { name : getattr( self, name)[s:e] for name in self.names },
-                             SAFE=SAFE )
-
-
-    def cplabel(self, threshold = 0, countall=True ):
-        """ Label pixels using the connectedpixels assigment code
+            return None  # empty frame
+        return sparse_frame(
+            self.row[s:e],
+            self.col[s:e],
+            self.shape[1:],
+            pixels={name: getattr(self, name)[s:e] for name in self.names},
+            SAFE=SAFE,
+        )

+    def cplabel(self, threshold=0, countall=True):
+        """Label pixels using the connectedpixels assignment code
        Fills in:
           self.nlabels = number of peaks per frame
           self.labels = peak labels (should be unique)
@@ -301,34 +319,35 @@ def cplabel(self, threshold = 0, countall=True ):
        if countall == True : labels all peaks from zero
                 == False : labels from 1 on each frame
        """
-        self.nlabels = np.zeros( len(self.nnz), np.int32 )
-        self.labels = np.zeros( len(self.row), "i")
-        if 'labels' not in self.names:
-            self.names.append('labels')
+        self.nlabels = np.zeros(len(self.nnz), np.int32)
+        self.labels = np.zeros(len(self.row), "i")
+        if "labels" not in self.names:
+            self.names.append("labels")
        nl = 0
        # TODO: run this in parallel with threads?
-        for i, npx in enumerate( self.nnz ):
+        for i, npx in enumerate(self.nnz):
            s = self.ipt[i]
-            e = self.ipt[i+1]
+            e = self.ipt[i + 1]
            if npx > 0:
                self.nlabels[i] = cImageD11.sparse_connectedpixels(
-                    self.intensity[ s : e ],
-                    self.row[ s : e ],
-                    self.col[ s : e ],
+                    self.intensity[s:e],
+                    self.row[s:e],
+                    self.col[s:e],
                    threshold,
-                    self.labels[ s : e ] )
+                    self.labels[s:e],
+                )
                # zero label is the background!
-                self.labels[ s : e ] = np.where( self.labels[ s : e ] > 0,
-                                                 self.labels[ s : e ] + nl, 0 )
+                self.labels[s:e] = np.where(
+                    self.labels[s:e] > 0, self.labels[s:e] + nl, 0
+                )
            else:
                self.nlabels[i] = 0
            if countall:
                nl += self.nlabels[i]
        self.total_labels = self.nlabels.sum()

-
-    def lmlabel(self, threshold = 0, countall=True, smooth=True ):
-        """ Label pixels using the localmax assigment code
+    def lmlabel(self, threshold=0, countall=True, smooth=True):
+        """Label pixels using the localmax assignment code
        Fills in:
           self.nlabels = number of peaks per frame
           self.labels = peak labels (should be unique)
@@ -336,38 +355,41 @@ def lmlabel(self, threshold = 0, countall=True, smooth=True ):
        if countall == True : labels all peaks from zero
                 == False : labels from 1 on each frame
        """
-        self.nlabels = np.zeros( len(self.nnz), np.int32 )
-        self.labels = np.zeros( len(self.row), "i")
-        if 'labels' not in self.names:
-            self.names.append('labels')
+        self.nlabels = np.zeros(len(self.nnz), np.int32)
+        self.labels = np.zeros(len(self.row), "i")
+        if "labels" not in self.names:
+            self.names.append("labels")
        if smooth:
-            self.signal = np.empty( self.intensity.shape, np.float32 )
+            self.signal = np.empty(self.intensity.shape, np.float32)
        else:
            self.signal = self.intensity.astype(np.float32)
        # temporary workspaces
        npxmax = self.nnz.max()
-        vmx = np.zeros( npxmax, np.float32 )
-        imx = np.zeros( npxmax, 'i' )
+        vmx = np.zeros(npxmax, np.float32)
+        imx = np.zeros(npxmax, "i")
        nl = 0
        # TODO: run this in parallel with threads?
- for i, npx in enumerate( self.nnz ): + for i, npx in enumerate(self.nnz): s = self.ipt[i] - e = self.ipt[i+1] + e = self.ipt[i + 1] if npx > 0: if smooth: - cImageD11.sparse_smooth( self.intensity[ s: e], - self.row[s:e], - self.col[s:e], - self.signal[s:e] ) + cImageD11.sparse_smooth( + self.intensity[s:e], + self.row[s:e], + self.col[s:e], + self.signal[s:e], + ) self.nlabels[i] = cImageD11.sparse_localmaxlabel( - self.signal[ s : e ], - self.row[ s : e ], - self.col[ s : e ], + self.signal[s:e], + self.row[s:e], + self.col[s:e], vmx[:npx], imx[:npx], - self.labels[s : e] ) + self.labels[s:e], + ) assert (self.labels[s:e] > 0).all() - self.labels[ s : e ] += nl + self.labels[s:e] += nl else: self.nlabels[i] = 0 if countall: @@ -375,106 +397,106 @@ def lmlabel(self, threshold = 0, countall=True, smooth=True ): self.total_labels = self.nlabels.sum() def moments(self): - """ Computes the center of mass in s/f/omega - """ + """Computes the center of mass in s/f/omega""" pks = {} i32 = self.intensity.astype(np.float32) - pks['Number_of_pixels'] = np.bincount(self.labels, - weights=None, - minlength = self.total_labels+1 )[1:] - pks['sum_intensity'] = np.bincount(self.labels, - weights=i32, - minlength = self.total_labels+1 )[1:] - pks['s_raw'] = np.bincount(self.labels, - weights=i32*self.row, - minlength = self.total_labels+1 )[1:] - pks['s_raw'] /= pks['sum_intensity'] - pks['f_raw'] = np.bincount(self.labels, - weights=i32*self.col, - minlength = self.total_labels+1 )[1:] - pks['f_raw'] /= pks['sum_intensity'] - frame = np.empty( self.row.shape, np.int32 ) + pks["Number_of_pixels"] = np.bincount( + self.labels, weights=None, minlength=self.total_labels + 1 + )[1:] + pks["sum_intensity"] = np.bincount( + self.labels, weights=i32, minlength=self.total_labels + 1 + )[1:] + pks["s_raw"] = np.bincount( + self.labels, weights=i32 * self.row, minlength=self.total_labels + 1 + )[1:] + pks["s_raw"] /= pks["sum_intensity"] + pks["f_raw"] = np.bincount( + self.labels, weights=i32 * self.col, minlength=self.total_labels + 1 + )[1:] + pks["f_raw"] /= pks["sum_intensity"] + frame = np.empty(self.row.shape, np.int32) for i in range(len(self.nnz)): - frame[ self.ipt[i]:self.ipt[i+1] ] = i - for name in 'omega','dty': + frame[self.ipt[i] : self.ipt[i + 1]] = i + for name in "omega", "dty": if name in self.motors: - pks[name] = np.bincount(self.labels, - weights=i32*self.motors[name][frame], - minlength = self.total_labels+1 )[1:] - pks[name] /= pks['sum_intensity'] + pks[name] = np.bincount( + self.labels, + weights=i32 * self.motors[name][frame], + minlength=self.total_labels + 1, + )[1:] + pks[name] /= pks["sum_intensity"] return pks -def from_data_mask( mask, data, header ): +def from_data_mask(mask, data, header): """ Create a sparse from a dense array """ assert mask.shape == data.shape # using uint16 here - perhaps make this general in the future # ... 
but not for now
-    assert data.shape[0] < pow(2,16)-1
-    assert data.shape[1] < pow(2,16)-1
-    nnz = (mask>0).sum()
-    tmp = np.empty( data.shape[0],'i') # tmp hold px per row cumsums
-    row = np.empty( nnz, np.uint16 )
-    col = np.empty( nnz, np.uint16 )
-    cImageD11.mask_to_coo( mask, row, col, tmp )
-    intensity = data[ mask > 0 ]
+    assert data.shape[0] < pow(2, 16) - 1
+    assert data.shape[1] < pow(2, 16) - 1
+    nnz = (mask > 0).sum()
+    tmp = np.empty(data.shape[0], "i")  # tmp holds px per row cumsums
+    row = np.empty(nnz, np.uint16)
+    col = np.empty(nnz, np.uint16)
+    cImageD11.mask_to_coo(mask, row, col, tmp)
+    intensity = data[mask > 0]
    #    intensity.attrs = dict(header) # FIXME USE xarray ?
-    spf = sparse_frame( row, col, data.shape, itype=np.uint16 )
-    spf.set_pixels( "intensity" , intensity, dict( header ) )
+    spf = sparse_frame(row, col, data.shape, itype=np.uint16)
+    spf.set_pixels("intensity", intensity, dict(header))
    return spf

-def from_data_cut( data, cut, header={}, detectormask=None):
+def from_data_cut(data, cut, header={}, detectormask=None):
    assert data.dtype in (np.uint16, np.float32)
    if detectormask is None:
-        msk = np.ones(data.shape, bool )
+        msk = np.ones(data.shape, bool)
    else:
        msk = detectormask
-    row = np.empty( data.shape, np.uint16 )
-    col = np.empty( data.shape, np.uint16 )
+    row = np.empty(data.shape, np.uint16)
+    col = np.empty(data.shape, np.uint16)
    if data.dtype == np.uint16:
-        val = np.empty( data.shape, np.uint16 )
-        nnz = cImageD11.tosparse_u16( data, msk, row, col, val, cut)
+        val = np.empty(data.shape, np.uint16)
+        nnz = cImageD11.tosparse_u16(data, msk, row, col, val, cut)
    if data.dtype == np.float32:
-        val = np.empty( data.shape, np.float32 )
-        nnz = cImageD11.tosparse_f32( data, msk, row, col, val, cut)
-    spf = sparse_frame( row.ravel()[:nnz].copy(),
-                        col.ravel()[:nnz].copy(),
-                        data.shape )
-    spf.set_pixels( 'intensity', val.ravel()[:nnz].copy(), dict(header) )
+        val = np.empty(data.shape, np.float32)
+        nnz = cImageD11.tosparse_f32(data, msk, row, col, val, cut)
+    spf = sparse_frame(row.ravel()[:nnz].copy(), col.ravel()[:nnz].copy(), data.shape)
+    spf.set_pixels("intensity", val.ravel()[:nnz].copy(), dict(header))
    return spf

-def from_hdf_group( group ):
-    itype = np.dtype( group.attrs['itype'] )
-    shape = group.attrs['shape0'], group.attrs['shape1']
-    row = group['row'][:] # read it
-    col = group['col'][:]
-    spf = sparse_frame( row, col, shape, itype=itype )
+def from_hdf_group(group):
+    itype = np.dtype(group.attrs["itype"])
+    shape = group.attrs["shape0"], group.attrs["shape1"]
+    row = group["row"][:]  # read it
+    col = group["col"][:]
+    spf = sparse_frame(row, col, shape, itype=itype)
    for pxname in list(group):
        if pxname in ["row", "col"]:
            continue
        data = group[pxname][:]
-        header = dict( group[pxname].attrs )
-        spf.set_pixels( pxname, data, header )
+        header = dict(group[pxname].attrs)
+        spf.set_pixels(pxname, data, header)
    return spf

-def sparse_moments( frame, intensity_name, labels_name ):
-    """ We rely on a labelling array carrying nlabel metadata (==labels.data.max())"""
-    nl = frame.meta[ labels_name ][ "nlabel" ]
+
+def sparse_moments(frame, intensity_name, labels_name):
+    """We rely on a labelling array carrying nlabel metadata (==labels.data.max())"""
+    nl = frame.meta[labels_name]["nlabel"]
    return cImageD11.sparse_blob2Dproperties(
-        frame.pixels[intensity_name].astype(np.float32), # limitations of f2py here.
+        frame.pixels[intensity_name].astype(np.float32),  # limitations of f2py here.
frame.row, frame.col, frame.pixels[labels_name], - nl ) + nl, + ) class overlaps_linear: - """ Memory caching object for the linear time algorithm to find + """Memory caching object for the linear time algorithm to find peak overlaps Given (row1, col1, label1) and (row2, col2, label2) it finds pixels @@ -482,46 +504,49 @@ class overlaps_linear: and returns (labels1[i], labels2[i], sum_pixels[i]) ... so the number of overlapping pixels for that pair of labels """ - def __init__(self, nnzmax=4096*4): - """ nnzmax = max pixels on a frame """ + + def __init__(self, nnzmax=4096 * 4): + """nnzmax = max pixels on a frame""" self.nnzmax = nnzmax self.realloc() def realloc(self): nnzmax = self.nnzmax - self.ki = np.empty( nnzmax,'i' ) - self.kj = np.empty( nnzmax,'i' ) - self.ect = np.empty( nnzmax, 'i' ) - self.tj = np.empty( nnzmax, 'i' ) - self.tmp = np.empty( nnzmax+1,'i') - - def __call__(self, row1, col1, labels1, n1, - row2, col2, labels2, n2, checkmem=True ): + self.ki = np.empty(nnzmax, "i") + self.kj = np.empty(nnzmax, "i") + self.ect = np.empty(nnzmax, "i") + self.tj = np.empty(nnzmax, "i") + self.tmp = np.empty(nnzmax + 1, "i") + + def __call__(self, row1, col1, labels1, n1, row2, col2, labels2, n2, checkmem=True): if checkmem: - assert len(row1)==len(col1)==len(labels1) - assert len(row2)==len(col2)==len(labels2) - nnz = max( max(len(row1), len(row2)), max(n1,n2)) + assert len(row1) == len(col1) == len(labels1) + assert len(row2) == len(col2) == len(labels2) + nnz = max(max(len(row1), len(row2)), max(n1, n2)) if nnz > self.nnzmax: self.nnzmax = nnz - print("realloc",nnz) + print("realloc", nnz) self.realloc() - npx = cImageD11.sparse_overlaps( row1, col1, self.ki[:len(row1)], - row2, col2, self.kj[:len(row2)] ) - if npx == 0: # there are no overlaps + npx = cImageD11.sparse_overlaps( + row1, col1, self.ki[: len(row1)], row2, col2, self.kj[: len(row2)] + ) + if npx == 0: # there are no overlaps return 0, None - r = labels1[ self.ki[:npx] ] # my labels - c = labels2[ self.kj[:npx] ] # your labels - nedge = cImageD11.compress_duplicates( r, c, self.ect[:npx], self.tj[:npx], self.tmp ) + r = labels1[self.ki[:npx]] # my labels + c = labels2[self.kj[:npx]] # your labels + nedge = cImageD11.compress_duplicates( + r, c, self.ect[:npx], self.tj[:npx], self.tmp + ) # overwrites r/c in place : ignore the zero label (hope it is not there) - rcl = np.zeros( (nedge, 3), 'i') - rcl[:,0] = r[:nedge] - rcl[:,1] = c[:nedge] - rcl[:,2] = self.ect[:nedge] + rcl = np.zeros((nedge, 3), "i") + rcl[:, 0] = r[:nedge] + rcl[:, 1] = c[:nedge] + rcl[:, 2] = self.ect[:nedge] return nedge, rcl class overlaps_matrix: - """ Memory caching object for the quadratic time algorithm to find + """Memory caching object for the quadratic time algorithm to find peak overlaps Given (row1, col1, label1) and (row2, col2, label2) it finds pixels @@ -531,33 +556,30 @@ class overlaps_matrix: This is easier to understand and faster for small number of peaks per frame """ + def __init__(self, npkmax=256): self.npkmax = npkmax self.realloc() def realloc(self): - self.matmem = np.empty( (self.npkmax* self.npkmax,), 'i') + self.matmem = np.empty((self.npkmax * self.npkmax,), "i") # potentially n^2 overlaps. Really? 
- self.results = np.empty( (3*self.npkmax*self.npkmax), 'i') - - def __call__(self, row1, col1, labels1, n1, - row2, col2, labels2, n2, checkmem=True ): - assert labels1.max()-1 < n1, "%d %d %d"%(labels1.min(), - labels1.max(), - n1) - assert labels2.max()-1 < n2 + self.results = np.empty((3 * self.npkmax * self.npkmax), "i") + + def __call__(self, row1, col1, labels1, n1, row2, col2, labels2, n2, checkmem=True): + assert labels1.max() - 1 < n1, "%d %d %d" % (labels1.min(), labels1.max(), n1) + assert labels2.max() - 1 < n2 mx = max(n1, n2) if max(n1, n2) > self.npkmax: self.npkmax = mx - print("realloc",mx) + print("realloc", mx) self.realloc() - mat = self.matmem[:n1*n2] + mat = self.matmem[: n1 * n2] mat.shape = n1, n2 - nov = cImageD11.coverlaps( row1, col1, labels1, - row2, col2, labels2, - mat, self.results ) - return nov, self.results[:nov*3].reshape((nov,3)) - + nov = cImageD11.coverlaps( + row1, col1, labels1, row2, col2, labels2, mat, self.results + ) + return nov, self.results[: nov * 3].reshape((nov, 3)) def overlaps(frame1, labels1, frame2, labels2): @@ -569,84 +591,79 @@ def overlaps(frame1, labels1, frame2, labels2): label in other (col) number of shared pixels (data) """ - ki = np.empty( frame1.nnz, 'i' ) - kj = np.empty( frame2.nnz, 'i' ) - npx = cImageD11.sparse_overlaps( frame1.row, frame1.col, ki, - frame2.row, frame2.col, kj) + ki = np.empty(frame1.nnz, "i") + kj = np.empty(frame2.nnz, "i") + npx = cImageD11.sparse_overlaps( + frame1.row, frame1.col, ki, frame2.row, frame2.col, kj + ) # self.data and other.data filled during init - row = frame1.pixels[labels1][ ki[:npx] ] # my labels - col = frame2.pixels[labels2][ kj[:npx] ] # your labels - ect = np.empty( npx, 'i') # ect = counts of overlaps - tj = np.empty( npx, 'i') # tj = temporary for sorting - n1 = frame1.meta[labels1][ "nlabel" ] - n2 = frame2.meta[labels2][ "nlabel" ] - tmp = np.empty( max(n1, n2)+1, 'i') # for histogram - nedge = cImageD11.compress_duplicates( row, col, ect, tj, tmp ) + row = frame1.pixels[labels1][ki[:npx]] # my labels + col = frame2.pixels[labels2][kj[:npx]] # your labels + ect = np.empty(npx, "i") # ect = counts of overlaps + tj = np.empty(npx, "i") # tj = temporary for sorting + n1 = frame1.meta[labels1]["nlabel"] + n2 = frame2.meta[labels2]["nlabel"] + tmp = np.empty(max(n1, n2) + 1, "i") # for histogram + nedge = cImageD11.compress_duplicates(row, col, ect, tj, tmp) # overwrites row/col in place : ignore the zero label (hope it is not there) - crow = row[:nedge]-1 - ccol = col[:nedge]-1 + crow = row[:nedge] - 1 + ccol = col[:nedge] - 1 cdata = ect[:nedge] - cedges = scipy.sparse.coo_matrix( ( cdata, (crow, ccol)), shape=(n1, n2) ) + cedges = scipy.sparse.coo_matrix((cdata, (crow, ccol)), shape=(n1, n2)) # really? 
return cedges

-def sparse_connected_pixels( frame,
-                             label_name="connectedpixels",
-                             data_name="intensity",
-                             threshold=None ):
+def sparse_connected_pixels(
+    frame, label_name="connectedpixels", data_name="intensity", threshold=None
+):
    """
    frame = a sparse frame
    label_name = the array to save labels to in that frame
    data_name = an array in that frame
    threshold = float value or take data.threshold
    """
-    labels = np.zeros( frame.nnz, "i" )
+    labels = np.zeros(frame.nnz, "i")
    if threshold is None:
        threshold = frame.meta[data_name]["threshold"]
    nlabel = cImageD11.sparse_connectedpixels(
-        frame.pixels[data_name], frame.row, frame.col,
-        threshold, labels )
-    frame.set_pixels( label_name, labels, { 'nlabel' : nlabel } )
+        frame.pixels[data_name], frame.row, frame.col, threshold, labels
+    )
+    frame.set_pixels(label_name, labels, {"nlabel": nlabel})
    return nlabel

-def sparse_localmax( frame,
-                     label_name="localmax",
-                     data_name = "intensity" ):
-    labels = np.zeros( frame.nnz, "i" )
-    vmx = np.zeros( frame.nnz, np.float32 )
-    imx = np.zeros( frame.nnz, 'i')
+def sparse_localmax(frame, label_name="localmax", data_name="intensity"):
+    labels = np.zeros(frame.nnz, "i")
+    vmx = np.zeros(frame.nnz, np.float32)
+    imx = np.zeros(frame.nnz, "i")
    nlabel = cImageD11.sparse_localmaxlabel(
-        frame.pixels[data_name], frame.row, frame.col,
-        vmx, imx, labels )
-    frame.set_pixels( label_name, labels, { "nlabel" : nlabel } )
+        frame.pixels[data_name].astype(np.float32, copy=False),
+        frame.row,
+        frame.col,
+        vmx,
+        imx,
+        labels,
+    )
+    frame.set_pixels(label_name, labels, {"nlabel": nlabel})
    return nlabel

-def sparse_smooth( frame, data_name='intensity' ):
-    smoothed = np.zeros( frame.nnz, np.float32 )
-    cImageD11.sparse_smooth( frame.pixels[data_name],
-                             frame.row,
-                             frame.col,
-                             smoothed )
+def sparse_smooth(frame, data_name="intensity"):
+    smoothed = np.zeros(frame.nnz, np.float32)
+    cImageD11.sparse_smooth(frame.pixels[data_name], frame.row, frame.col, smoothed)
    return smoothed

-def nnz_to_pointer( nnz, out = None ):
+def nnz_to_pointer(nnz, out=None):
    """
    nnz = number of pixels per frame
    pointer = position in a single flat array
    """
    if out is None:
-        out = np.empty( len(nnz)+1, int )
+        out = np.empty(len(nnz) + 1, int)
    else:
-        assert len(out) == len(nnz)+1
+        assert len(out) == len(nnz) + 1
    out[0] = 0
-    np.cumsum( nnz, out=out[1:] )
+    np.cumsum(nnz, out=out[1:])
    return out
-
-
-
-
-
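A worked example of the nnz_to_pointer convention used throughout this module: the pointer array is a cumulative sum with a leading zero, so frame i owns the flat slice ipt[i]:ipt[i+1] (values invented for illustration):

    import numpy as np
    nnz = np.array([3, 0, 2])   # pixels on each of three frames
    ipt = nnz_to_pointer(nnz)   # -> array([0, 3, 3, 5])
    # frame 0 -> flat[0:3], frame 1 -> empty slice [3:3], frame 2 -> flat[3:5]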
diff --git a/test/papermill_test_notebooks.py b/test/papermill_test_notebooks.py
index 6bb45db4..16a1e0e0 100644
--- a/test/papermill_test_notebooks.py
+++ b/test/papermill_test_notebooks.py
@@ -1,32 +1,107 @@
-# flake8: noqa
 """
 Python script to automatically end-to-end test our Jupyter notebooks
 Currently implemented: nothing (indev)
+
 To run this notebook, you need papermill in your Python environment.
-As of 2025/01/21, this is not available in the default Jupyter environment.
+This should be in the default Jupyter environment at ESRF now.
+
+It attempts to run tests on the ESRF file system and cluster.
+
+It should be able to run these tests using whatever version your system is finding,
+or from a git checkout, so you get the local ImageD11 for testing.
+
+If you want to test the system python installation:
-I suggest to do the following:
-cd to your ImageD11 git checkout folder
-$ pip install papermill ansicolors -t . --no-deps
+
+    python papermill_test_notebooks.py --system /my/space/on/the/file/system/for/results

-This file will add its parent folder (../) to the system path so it can be imported
-So you get local ImageD11 and local papermill
+Example, to test a specific git tag (only works if this commit is in the tree and you are at ESRF):
+
+    git clone https://github.com/FABLE-3DXRD/ImageD11 ImageD11_for_test
+    cd ImageD11_for_test
+    git checkout v2.3.4
+    python setup.py build_ext --inplace
+    cd test
+    python papermill_test_notebooks.py /my/space/on/the/file/system/for/results
+
+As of December 2025, both git and the jupyter-slurm conda are supposed to be working.
 """
+
 import sys, os

+def clean_esrf_path(
+    fname,
+    fakeroots=(
+        "/mnt/storage",
+        "/gpfs/easy",
+    ),
+    ):
+    for item in fakeroots:
+        if fname.startswith(item):
+            return fname.replace(item, "")
+    return fname

-def fix_esrf_path(p):
-    if p.startswith("/gpfs") and p.find("/data/") > 0:
-        return p[p.find("/data/") :]
-    return p

+if __name__ == "__main__":
+    # This has to come before importing ImageD11 if it changes sys.path
+    import argparse
+    parser = argparse.ArgumentParser(
+        prog=sys.argv[0],
+        description="Runs end-to-end testcases using papermill at ESRF",
+    )
+    parser.add_argument("destination_folder")
+    parser.add_argument(
+        "-s",
+        "--system",
+        action="store_true",
+        help="Use the system installation of ImageD11 and not this git checkout",
+    )
+    opts = parser.parse_args()
+    destination_folder = opts.destination_folder
+    print("I am going to create output in", destination_folder, opts.system)
+    if opts.system:
+        # If we want to test the system install (conda or pip venv that is activated)
+        # Then stop setting PYTHONPATH and send these variables in as None
+        import ImageD11
+        checkout_name = None
+        checkout_folder = None
+        folder = os.path.split(ImageD11.__file__)[0]
+        pythonpath = os.path.split(folder)[0]
+        # probably cvmfs ..., but if it holds /data/
+        if pythonpath.find("/data/") > 0:
+            pythonpath = pythonpath[pythonpath.find("/data/") :]
+        print("We should be using the system python", pythonpath)
+    else:
+        # If we are working from git, this is where to find the code
+        items = os.path.abspath(__file__).split( os.path.sep )  # this file's location (test)
+        # this file = items[-1]
+        # test = items[-2]
+        checkout_name = items[-3]
+        checkout_folder = clean_esrf_path( os.path.sep.join( items[:-3] ) )
+        print(items)
+        print( os.path.sep.join( items[:-3] ) )
+        print("Checkout folder", checkout_folder, "Checkout name", checkout_name)
+        pp = os.path.join( checkout_folder, checkout_name )
+        print("Expected pythonpath", pp )
+        assert os.path.exists(pp), "If you want to test a git version please check it out yourself"
+        sys.path.insert(0, pp)
+        import ImageD11.nbGui.install_ImageD11_from_git
+        pythonpath = ImageD11.nbGui.install_ImageD11_from_git.run_ImageD11_from_git(
+            checkout_folder, checkout_name
+        )

-checkout_name = os.path.split(os.path.abspath(".."))[-1]
-checkout_folder = fix_esrf_path(os.path.abspath("../.."))

+# FIXME: these are distributed in the releases.
+# ... we miss some kind of get_notebook thing for copying them into users' folders.
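clean_esrf_path only rewrites names that start with one of the known mount prefixes; anything else passes through untouched. Expected behaviour, worked by hand:

    clean_esrf_path("/mnt/storage/data/id11/nanoscope")   # -> "/data/id11/nanoscope"
    clean_esrf_path("/home/user/ImageD11")                # -> "/home/user/ImageD11" (unchanged)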
+nb_base_prefix = os.path.join("..", "ImageD11", "nbGui")
+scan_nb_prefix = os.path.join(nb_base_prefix, "S3DXRD")
+bb_nb_prefix = os.path.join(nb_base_prefix, "TDXRD")

-sys.path.insert(0, checkout_folder)
-print("ImageD11 path:", sys.path[0])

+def analysis_folder(aroot, name):
+    """ Create an analysis folder if needed """
+    p = os.path.join(aroot, name)
+    if not os.path.exists(p):
+        os.makedirs(p)
+    return p

 os.environ["PYDEVD_DISABLE_FILE_VALIDATION"] = "1"  # ignore papermill debugger warnings
@@ -38,10 +113,41 @@ def fix_esrf_path(p):
     notebook_exec_pmill,
 )

-nb_base_prefix = os.path.join("..", "ImageD11", "nbGui")
-scan_nb_prefix = os.path.join(nb_base_prefix, "S3DXRD")
-bb_nb_prefix = os.path.join(nb_base_prefix, "TDXRD")

+try:
+    from ImageD11.nbGui.nb_utils import prepare_notebooks
+except ImportError:
+    def prepare_notebooks(
+        samples_dict,
+        notebooks,
+        dataroot,
+        analysisroot,
+        CHECKOUT_PATH=None,
+        IMAGED11_PATH=None,
+        notebook_parent_dir=None,
+    ):
+        PYTHONPATH = None  # do nothing
+        if IMAGED11_PATH is not None and CHECKOUT_PATH is not None:
+            PYTHONPATH = os.path.join(CHECKOUT_PATH, IMAGED11_PATH)
+        # Insert IMAGED11_PATH and CHECKOUT_PATH into nbparams only if they are requested
+        # Some notebooks will never need git (e.g. standalone doc style)
+        import papermill
+        for nb_name, nb_params in notebooks:
+            # Check what the notebook is expecting
+            nb_path = os.path.join( notebook_parent_dir, nb_name )
+            expected_pars = papermill.inspect_notebook(nb_path)
+            if "CHECKOUT_PATH" in expected_pars:
+                nb_params["CHECKOUT_PATH"] = CHECKOUT_PATH
+            if "IMAGED11_PATH" in expected_pars:
+                nb_params["IMAGED11_PATH"] = IMAGED11_PATH
+        return prepare_notebooks_for_datasets(
+            samples_dict,
+            notebooks,
+            dataroot,
+            analysisroot,
+            PYTHONPATH=PYTHONPATH,
+            notebook_parent_dir=notebook_parent_dir,
+        )

 def notebook_route(
     base_dir,
@@ -87,12 +193,11 @@ def notebook_route(

 # test the full tomographic route from start to finish
-def test_tomographic_route():
-    tomo_dir = "tomo_route"
+def test_tomographic_route(aroot):
+    tomo_dir = analysis_folder(aroot, "tomo_route")
     dataroot = os.path.join(tomo_dir, "raw")
     analysisroot = os.path.join(tomo_dir, "processed")

-    CHECKOUT_PATH = sys.path[0]
     sample = "Si_cube"
     dataset = "S3DXRD_nt_moves_dty"
     samples_dict = {sample: [dataset]}
@@ -168,7 +273,7 @@ def test_tomographic_route():
         ),
     ]

-    notebooks_to_execute = prepare_notebooks_for_datasets(
+    notebooks_to_execute = prepare_notebooks(
         samples_dict,
         nb_params,
         dataroot,
@@ -183,8 +288,8 @@ def test_tomographic_route():


 # test the full point-by-point route from start to finish
-def test_pbp_route():
-    tomo_dir = "pbp_route"
+def test_pbp_route(aroot):
+    tomo_dir = analysis_folder(aroot, "pbp_route")
     dataroot = os.path.join(tomo_dir, "raw")
     analysisroot = os.path.join(tomo_dir, "processed")
@@ -258,7 +363,7 @@ def test_pbp_route():
         ),
     ]

-    notebooks_to_execute = prepare_notebooks_for_datasets(
+    notebooks_to_execute = prepare_notebooks(
         samples_dict,
         nb_params,
         dataroot,
@@ -272,10 +377,10 @@ def test_pbp_route():
         notebook_exec_pmill(nb_path, nb_path, None)


-def test_FeAu_JADB_tomo():
+def test_FeAu_JADB_tomo(aroot):
     # where is the data?
dataroot = "/data/id11/inhouse2/test_data_3DXRD/S3DXRD/FeAu/RAW_DATA" - analysisroot = "/data/id11/inhouse2/test_data_3DXRD/S3DXRD/FeAu/PROCESSED_DATA/20250402_JADB/tomo_route" + analysisroot = analysis_folder(aroot, "tomo_route") # find layers to process sample = "FeAu_0p5_tR_nscope" first_dataset = "top_200um" @@ -428,7 +533,7 @@ def test_FeAu_JADB_tomo(): ), ] - notebooks_to_execute = prepare_notebooks_for_datasets( + notebooks_to_execute = prepare_notebooks( samples_dict, nb_params, dataroot, @@ -446,8 +551,8 @@ def test_FeAu_JADB_tomo(): dset_path = os.path.join( analysisroot, sample, - f"{sample}_{first_dataset}", - f"{sample}_{first_dataset}_dataset.h5", + "%s_%s" % (sample, first_dataset), + "%s_%s_dataset.h5" % (sample, first_dataset), ) nb_param = { "CHECKOUT_PATH": checkout_folder, @@ -463,10 +568,10 @@ def test_FeAu_JADB_tomo(): notebook_route(analysisroot, [nb_path], [nb_param], skip_dir_check=True) -def test_FeAu_JADB_pbp(): +def test_FeAu_JADB_pbp(aroot): # where is the data? dataroot = "/data/id11/inhouse2/test_data_3DXRD/S3DXRD/FeAu/RAW_DATA" - analysisroot = "/data/id11/inhouse2/test_data_3DXRD/S3DXRD/FeAu/PROCESSED_DATA/20250402_JADB/pbp_route" + analysisroot = analysis_folder(aroot, "pbp_route") # find layers to process sample = "FeAu_0p5_tR_nscope" first_dataset = "top_200um" @@ -607,7 +712,7 @@ def test_FeAu_JADB_pbp(): ), ] - notebooks_to_execute = prepare_notebooks_for_datasets( + notebooks_to_execute = prepare_notebooks( samples_dict, nb_params, dataroot, @@ -625,8 +730,8 @@ def test_FeAu_JADB_pbp(): dset_path = os.path.join( analysisroot, sample, - f"{sample}_{first_dataset}", - f"{sample}_{first_dataset}_dataset.h5", + "%s_%s" % (sample, first_dataset), + "%s_%s_dataset.h5" % (sample, first_dataset), ) nb_param = { "CHECKOUT_PATH": checkout_folder, @@ -642,10 +747,10 @@ def test_FeAu_JADB_pbp(): notebook_route(analysisroot, [nb_path], [nb_param], skip_dir_check=True) -def test_FeAu_f2scan_JADB_pbp(): +def test_FeAu_f2scan_JADB_pbp(aroot): # where is the data? dataroot = "/data/id11/inhouse2/test_data_3DXRD/S3DXRD/FeAu_f2scan/RAW_DATA" - analysisroot = "/data/id11/inhouse2/test_data_3DXRD/S3DXRD/FeAu_f2scan/PROCESSED_DATA/20250402_JADB" + analysisroot = analysis_folder(aroot, "pbp_route") # find layers to process sample = "FeAu_No1_190um" first_dataset = "2um_redo_z_0" @@ -786,7 +891,7 @@ def test_FeAu_f2scan_JADB_pbp(): ), ] - notebooks_to_execute = prepare_notebooks_for_datasets( + notebooks_to_execute = prepare_notebooks( samples_dict, nb_params, dataroot, @@ -804,8 +909,8 @@ def test_FeAu_f2scan_JADB_pbp(): dset_path = os.path.join( analysisroot, sample, - f"{sample}_{first_dataset}", - f"{sample}_{first_dataset}_dataset.h5", + "%s_%s" % (sample, first_dataset), + "%s_%s_dataset.h5" % (sample, first_dataset), ) nb_param = { "CHECKOUT_PATH": checkout_folder, @@ -821,10 +926,10 @@ def test_FeAu_f2scan_JADB_pbp(): notebook_route(analysisroot, [nb_path], [nb_param], skip_dir_check=True) -def test_FeAu_JADB_bb(): +def test_FeAu_JADB_bb(aroot): # where is the data? 
dataroot = "/data/id11/inhouse2/test_data_3DXRD/TDXRD/FeAu/RAW_DATA/" - analysisroot = "/data/id11/inhouse2/test_data_3DXRD/TDXRD/FeAu/PROCESSED_DATA/20250304_JADB/default" + analysisroot = analysis_folder(aroot, "frelon") # find layers to process sample = "FeAu_0p5_tR" first_dataset = "ff1" @@ -893,7 +998,7 @@ def test_FeAu_JADB_bb(): ), ] - notebooks_to_execute = prepare_notebooks_for_datasets( + notebooks_to_execute = prepare_notebooks( samples_dict, nb_params, dataroot, @@ -911,8 +1016,8 @@ def test_FeAu_JADB_bb(): dset_path = os.path.join( analysisroot, sample, - f"{sample}_{first_dataset}", - f"{sample}_{first_dataset}_dataset.h5", + "%s_%s" % (sample, first_dataset), + "%s_%s_dataset.h5" % (sample, first_dataset), ) nb_param = { # 3_merge_slices.ipynb "CHECKOUT_PATH": checkout_folder, @@ -927,10 +1032,10 @@ def test_FeAu_JADB_bb(): notebook_route(analysisroot, [nb_path], [nb_param], skip_dir_check=True) -def test_FeAu_JADB_bb_grid(): +def test_FeAu_JADB_bb_grid(aroot): # where is the data? dataroot = "/data/id11/inhouse2/test_data_3DXRD/TDXRD/FeAu/RAW_DATA/" - analysisroot = "/data/id11/inhouse2/test_data_3DXRD/TDXRD/FeAu/PROCESSED_DATA/20250304_JADB/grid" + analysisroot = analysis_folder(aroot, "grid") # find layers to process sample = "FeAu_0p5_tR" first_dataset = "ff1" @@ -1014,7 +1119,7 @@ def test_FeAu_JADB_bb_grid(): ), ] - notebooks_to_execute = prepare_notebooks_for_datasets( + notebooks_to_execute = prepare_notebooks( samples_dict, nb_params, dataroot, @@ -1032,8 +1137,8 @@ def test_FeAu_JADB_bb_grid(): dset_path = os.path.join( analysisroot, sample, - f"{sample}_{first_dataset}", - f"{sample}_{first_dataset}_dataset.h5", + "%s_%s" % (sample, first_dataset), + "%s_%s_dataset.h5" % (sample, first_dataset), ) nb_param = { # 3_merge_slices.ipynb "CHECKOUT_PATH": checkout_folder, @@ -1048,10 +1153,10 @@ def test_FeAu_JADB_bb_grid(): notebook_route(analysisroot, [nb_path], [nb_param], skip_dir_check=True) -def test_FeAu_JADB_bb_friedel(): +def test_FeAu_JADB_bb_friedel(aroot): # where is the data? 
dataroot = "/data/id11/inhouse2/test_data_3DXRD/TDXRD/FeAu/RAW_DATA/"
-    analysisroot = "/data/id11/inhouse2/test_data_3DXRD/TDXRD/FeAu/PROCESSED_DATA/20250304_JADB/friedel"
+    analysisroot = analysis_folder(aroot, "friedel")
     # find layers to process
     sample = "FeAu_0p5_tR"
     first_dataset = "ff1"
@@ -1132,7 +1237,7 @@ def test_FeAu_JADB_bb_friedel():
         ),
     ]

-    notebooks_to_execute = prepare_notebooks_for_datasets(
+    notebooks_to_execute = prepare_notebooks(
         samples_dict,
         nb_params,
         dataroot,
@@ -1150,8 +1255,8 @@ def test_FeAu_JADB_bb_friedel():
     dset_path = os.path.join(
         analysisroot,
         sample,
-        f"{sample}_{first_dataset}",
-        f"{sample}_{first_dataset}_dataset.h5",
+        "%s_%s" % (sample, first_dataset),
+        "%s_%s_dataset.h5" % (sample, first_dataset),
     )
     nb_param = {  # 3_merge_slices.ipynb
         "CHECKOUT_PATH": checkout_folder,
@@ -1166,14 +1271,13 @@ def test_FeAu_JADB_bb_friedel():
     notebook_route(analysisroot, [nb_path], [nb_param], skip_dir_check=True)


-if __name__ == "__main__":
+if __name__ == "__main__":
     print(papermill.__path__)
-    test_tomographic_route()
-    test_pbp_route()
-    # FIXME: None of these are re-usable by anyone except James who owns those folders
-    # test_FeAu_JADB_tomo()
-    # test_FeAu_JADB_pbp()
-    # test_FeAu_f2scan_JADB_pbp()
-    # test_FeAu_JADB_bb()
-    # test_FeAu_JADB_bb_grid()
-    # test_FeAu_JADB_bb_friedel()
+    test_tomographic_route(destination_folder)
+    test_pbp_route(destination_folder)
+    test_FeAu_JADB_tomo(destination_folder)
+    test_FeAu_JADB_pbp(destination_folder)
+    test_FeAu_f2scan_JADB_pbp(destination_folder)
+    test_FeAu_JADB_bb(destination_folder)
+    test_FeAu_JADB_bb_grid(destination_folder)
+    test_FeAu_JADB_bb_friedel(destination_folder)
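The fallback prepare_notebooks above relies on papermill.inspect_notebook to check whether a notebook actually declares CHECKOUT_PATH / IMAGED11_PATH before injecting them. A quick interactive check of what a notebook advertises might look like this (path illustrative; the returned key layout is as papermill documents it, worth verifying against your papermill version):

    import papermill
    pars = papermill.inspect_notebook("../ImageD11/nbGui/S3DXRD/0_segment_and_label.ipynb")
    print(sorted(pars))  # names from the "parameters"-tagged cell
    # each entry carries metadata, e.g. pars["IMAGED11_PATH"]["default"] as source text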