diff --git a/specsanalyzer/develop/_modules/index.html b/specsanalyzer/develop/_modules/index.html
index d2e7b1b..49e6f25 100644
--- a/specsanalyzer/develop/_modules/index.html
+++ b/specsanalyzer/develop/_modules/index.html
@@ -7,7 +7,7 @@
-    Overview: module code — specsanalyzer 0.4.2.dev40+g8ed2f0c documentation
+    Overview: module code — specsanalyzer 0.5.2.dev10+g554f714 documentation
@@ -116,7 +116,7 @@
-    specsanalyzer 0.4.2.dev40+g8ed2f0c documentation
+    specsanalyzer 0.5.2.dev10+g554f714 documentation
diff --git a/specsanalyzer/develop/_modules/specsanalyzer/config.html b/specsanalyzer/develop/_modules/specsanalyzer/config.html
index 4ca22ab..39ff3e9 100644
--- a/specsanalyzer/develop/_modules/specsanalyzer/config.html
+++ b/specsanalyzer/develop/_modules/specsanalyzer/config.html
@@ -7,7 +7,7 @@
-    specsanalyzer.config — specsanalyzer 0.4.2.dev40+g8ed2f0c documentation
+    specsanalyzer.config — specsanalyzer 0.5.2.dev10+g554f714 documentation
@@ -116,7 +116,7 @@
-    specsanalyzer 0.4.2.dev40+g8ed2f0c documentation
+    specsanalyzer 0.5.2.dev10+g554f714 documentation
@@ -551,22 +551,39 @@

Source code for specsanalyzer.config

 """This module contains a config library for loading yaml/json files into dicts"""
-from __future__ import annotations
+from __future__ import annotations
 
-import json
-import os
-import platform
-from importlib.util import find_spec
-from pathlib import Path
+import json
+import os
+import platform
+from importlib.util import find_spec
+from pathlib import Path
 
-import yaml
+import yaml
+from platformdirs import user_config_path
+
+from specsanalyzer.logging import setup_logging
 
 package_dir = os.path.dirname(find_spec("specsanalyzer").origin)
 
+USER_CONFIG_PATH = user_config_path(
+    appname="specsanalyzer",
+    appauthor="OpenCOMPES",
+    ensure_exists=True,
+)
+SYSTEM_CONFIG_PATH = (
+    Path(os.environ["ALLUSERSPROFILE"]).joinpath("specsanalyzer")
+    if platform.system() == "Windows"
+    else Path("/etc/").joinpath("specsanalyzer")
+)
+
+# Configure logging
+logger = setup_logging("config")
+
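For orientation, a minimal sketch (not part of the change set) of where these new default locations resolve; the commented paths are typical Linux results and differ on Windows and macOS:

```python
# Sketch only: reproduce the two lookup locations defined above.
import os
import platform
from pathlib import Path

from platformdirs import user_config_path

user_dir = user_config_path(appname="specsanalyzer", appauthor="OpenCOMPES", ensure_exists=True)
system_dir = (
    Path(os.environ["ALLUSERSPROFILE"]) / "specsanalyzer"
    if platform.system() == "Windows"
    else Path("/etc/specsanalyzer")
)
print(user_dir / "config_v1.yaml")    # e.g. ~/.config/specsanalyzer/config_v1.yaml on Linux
print(system_dir / "config_v1.yaml")  # e.g. /etc/specsanalyzer/config_v1.yaml on Linux
```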
 
 
[docs]
-def parse_config(
+def parse_config(
     config: dict | str = None,
     folder_config: dict | str = None,
     user_config: dict | str = None,
@@ -590,12 +607,13 @@

Source code for specsanalyzer.config

         user_config (dict | str, optional): user-based config dictionary
             or file path. The loaded dictionary is completed with the user-based values,
             taking preference over system and default values.
-            Defaults to the file ".specsanalyzer/config.yaml" in the current user's home directory.
+            Defaults to the file ".config/specsanalyzer/config_v1.yaml" in the current user's home
+            directory.
         system_config (dict | str, optional): system-wide config dictionary
             or file path. The loaded dictionary is completed with the system-wide values,
             taking preference over default values.
-            Defaults to the file "/etc/specsanalyzer/config.yaml" on linux,
-            and "%ALLUSERSPROFILE%/specsanalyzer/config.yaml" on windows.
+            Defaults to the file "/etc/specsanalyzer/config_v1.yaml" on linux,
+            and "%ALLUSERSPROFILE%/specsanalyzer/config_v1.yaml" on windows.
         default_config (dict | str, optional): default config dictionary
             or file path. The loaded dictionary is completed with the default values.
             Defaults to "*package_dir*/config/default.yaml".
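A hedged usage sketch of the precedence described above; the config key and file names are illustrative, and passing an empty dict disables a level (as the SpecsScan constructor does further down):

```python
from specsanalyzer.config import parse_config

# Precedence, highest first: explicit dict > folder > user > system > default.
cfg = parse_config(
    config={"apply_fft_filter": False},  # explicit overrides (illustrative key)
    folder_config="specs_config.yaml",   # per-project file in the working directory
    user_config={},                      # empty dict: skip the user-level file
    system_config={},                    # empty dict: skip the system-level file
    verbose=True,                        # report which files were loaded
)
```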
@@ -615,7 +633,7 @@ 

Source code for specsanalyzer.config

     else:
         config_dict = load_config(config)
         if verbose:
-            print(f"Configuration loaded from: [{str(Path(config).resolve())}]")
+            logger.info(f"Configuration loaded from: [{str(Path(config).resolve())}]")
 
     folder_dict: dict = None
     if isinstance(folder_config, dict):
@@ -626,47 +644,36 @@ 

Source code for specsanalyzer.config

         if Path(folder_config).exists():
             folder_dict = load_config(folder_config)
             if verbose:
-                print(f"Folder config loaded from: [{str(Path(folder_config).resolve())}]")
+                logger.info(f"Folder config loaded from: [{str(Path(folder_config).resolve())}]")
 
     user_dict: dict = None
     if isinstance(user_config, dict):
         user_dict = user_config
     else:
         if user_config is None:
-            user_config = str(
-                Path.home().joinpath(".specsanalyzer").joinpath("config.yaml"),
-            )
+            user_config = str(USER_CONFIG_PATH.joinpath("config_v1.yaml"))
         if Path(user_config).exists():
             user_dict = load_config(user_config)
             if verbose:
-                print(f"User config loaded from: [{str(Path(user_config).resolve())}]")
+                logger.info(f"User config loaded from: [{str(Path(user_config).resolve())}]")
 
     system_dict: dict = None
     if isinstance(system_config, dict):
         system_dict = system_config
     else:
         if system_config is None:
-            if platform.system() in ["Linux", "Darwin"]:
-                system_config = str(
-                    Path("/etc/").joinpath("specsanalyzer").joinpath("config.yaml"),
-                )
-            elif platform.system() == "Windows":
-                system_config = str(
-                    Path(os.environ["ALLUSERSPROFILE"])
-                    .joinpath("specsanalyzer")
-                    .joinpath("config.yaml"),
-                )
+            system_config = str(SYSTEM_CONFIG_PATH.joinpath("config_v1.yaml"))
         if Path(system_config).exists():
             system_dict = load_config(system_config)
             if verbose:
-                print(f"System config loaded from: [{str(Path(system_config).resolve())}]")
+                logger.info(f"System config loaded from: [{str(Path(system_config).resolve())}]")
 
     if isinstance(default_config, dict):
         default_dict = default_config
     else:
         default_dict = load_config(default_config)
         if verbose:
-            print(f"Default config loaded from: [{str(Path(default_config).resolve())}]")
+            logger.info(f"Default config loaded from: [{str(Path(default_config).resolve())}]")
 
     if folder_dict is not None:
         config_dict = complete_dictionary(
@@ -694,7 +701,7 @@ 

Source code for specsanalyzer.config

 
 
[docs] -def load_config(config_path: str) -> dict: +def load_config(config_path: str) -> dict: """Loads config parameter files. Args: @@ -728,7 +735,7 @@

Source code for specsanalyzer.config

 
 
[docs] -def save_config(config_dict: dict, config_path: str, overwrite: bool = False): +def save_config(config_dict: dict, config_path: str, overwrite: bool = False): """Function to save a given config dictionary to a json or yaml file. Normally, it loads any existing file of the given name, and keeps any existing dictionary keys not present in the provided dictionary. The overwrite option creates a fully empty dictionary first. @@ -761,7 +768,7 @@

Source code for specsanalyzer.config

 
 
[docs] -def complete_dictionary(dictionary: dict, base_dictionary: dict) -> dict: +def complete_dictionary(dictionary: dict, base_dictionary: dict) -> dict: """Iteratively completes a dictionary from a base dictionary, by adding keys that are missing in the dictionary, and are present in the base dictionary. @@ -790,6 +797,94 @@

Source code for specsanalyzer.config

 
     return dictionary
+
+
+def _parse_env_file(file_path: Path) -> dict:
+    """Helper function to parse a .env file into a dictionary.
+
+    Args:
+        file_path (Path): Path to the .env file
+
+    Returns:
+        dict: Dictionary of environment variables from the file
+    """
+    env_content = {}
+    if file_path.exists():
+        with open(file_path) as f:
+            for line in f:
+                line = line.strip()
+                if line and "=" in line:
+                    key, val = line.split("=", 1)
+                    env_content[key.strip()] = val.strip()
+    return env_content
+
+
+[docs]
+def read_env_var(var_name: str) -> str | None:
+    """Read an environment variable from multiple locations in order:
+    1. OS environment variables
+    2. .env file in current directory
+    3. .env file in user config directory
+    4. .env file in system config directory
+
+    Args:
+        var_name (str): Name of the environment variable to read
+
+    Returns:
+        str | None: Value of the environment variable or None if not found
+    """
+    # 1. check OS environment variables
+    value = os.getenv(var_name)
+    if value is not None:
+        logger.debug(f"Found {var_name} in OS environment variables")
+        return value
+
+    # 2. check .env in current directory
+    local_vars = _parse_env_file(Path(".env"))
+    if var_name in local_vars:
+        logger.debug(f"Found {var_name} in ./.env file")
+        return local_vars[var_name]
+
+    # 3. check .env in user config directory
+    user_vars = _parse_env_file(USER_CONFIG_PATH / ".env")
+    if var_name in user_vars:
+        logger.debug(f"Found {var_name} in user config .env file")
+        return user_vars[var_name]
+
+    # 4. check .env in system config directory
+    system_vars = _parse_env_file(SYSTEM_CONFIG_PATH / ".env")
+    if var_name in system_vars:
+        logger.debug(f"Found {var_name} in system config .env file")
+        return system_vars[var_name]
+
+    logger.debug(f"Environment variable {var_name} not found in any location")
+    return None
+
+
+[docs]
+def save_env_var(var_name: str, value: str) -> None:
+    """Save an environment variable to the .env file in the user config directory.
+    If the file exists, preserves other variables. If not, creates a new file.
+
+    Args:
+        var_name (str): Name of the environment variable to save
+        value (str): Value to save for the environment variable
+    """
+    env_path = USER_CONFIG_PATH / ".env"
+    env_content = _parse_env_file(env_path)
+
+    # Update or add new variable
+    env_content[var_name] = value
+
+    # Write all variables back to file
+    with open(env_path, "w") as f:
+        for key, val in env_content.items():
+            f.write(f"{key}={val}\n")
+    logger.debug(f"Environment variable {var_name} saved to .env file")
+
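A short usage sketch for the new .env helpers; the variable name and value are placeholders:

```python
from specsanalyzer.config import read_env_var, save_env_var

# Persist a value once; it is written to <user config dir>/.env.
save_env_var("ELAB_TOKEN_EXAMPLE", "abc123")  # placeholder name and value

# Lookups check os.environ, ./.env, the user config .env, and the system
# config .env in that order, returning None if the variable is not found.
token = read_env_var("ELAB_TOKEN_EXAMPLE")
```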
diff --git a/specsanalyzer/develop/_modules/specsanalyzer/convert.html b/specsanalyzer/develop/_modules/specsanalyzer/convert.html
index 56f2a50..4de742a 100644
--- a/specsanalyzer/develop/_modules/specsanalyzer/convert.html
+++ b/specsanalyzer/develop/_modules/specsanalyzer/convert.html
@@ -7,7 +7,7 @@
-    specsanalyzer.convert — specsanalyzer 0.4.2.dev40+g8ed2f0c documentation
+    specsanalyzer.convert — specsanalyzer 0.5.2.dev10+g554f714 documentation
@@ -116,7 +116,7 @@
-    specsanalyzer 0.4.2.dev40+g8ed2f0c documentation
+    specsanalyzer 0.5.2.dev10+g554f714 documentation
@@ -551,15 +551,20 @@

Source code for specsanalyzer.convert

 """Specsanalyzer image conversion module"""
-from __future__ import annotations
+from __future__ import annotations
 
-import numpy as np
-from scipy.ndimage import map_coordinates
+import logging
+
+import numpy as np
+from scipy.ndimage import map_coordinates
+
+# Configure logging
+logger = logging.getLogger("specsanalyzer.specsscan")
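Because this module now logs through the shared "specsanalyzer.specsscan" logger, its verbosity can be tuned with the standard library alone; a minimal sketch:

```python
import logging

# Keep warnings but silence the INFO messages emitted during image conversion.
logging.getLogger("specsanalyzer.specsscan").setLevel(logging.WARNING)
```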
 
 
 
[docs] -def get_damatrix_from_calib2d( +def get_damatrix_from_calib2d( lens_mode: str, kinetic_energy: float, pass_energy: float, @@ -594,7 +599,7 @@

Source code for specsanalyzer.convert

     except KeyError as exc:
         raise KeyError(
             "The supported modes were not found in the calib2d dictionary",
-        ) from exc
+        ) from exc
 
     if lens_mode in supported_angle_modes:
         # given the lens mode get all the retardation ratios available
@@ -636,7 +641,7 @@ 

Source code for specsanalyzer.convert

 
     elif lens_mode in supported_space_modes:
         # use the mode defaults
-        print("This is a spatial mode, using default " + lens_mode + " config")
+        logger.info("This is a spatial mode, using default " + lens_mode + " config")
         rr_vec, da_matrix_full = get_rr_da(lens_mode, calib2d_dict)
         a_inner = da_matrix_full[0][0]
         da_matrix = da_matrix_full[1:][:]
@@ -653,7 +658,7 @@ 

Source code for specsanalyzer.convert

 
 
[docs] -def bisection(array: np.ndarray, value: float) -> int: +def bisection(array: np.ndarray, value: float) -> int: """ Auxiliary function to find the closest rr index from https://stackoverflow.com/questions/2566412/ find-nearest-value-in-numpy-array @@ -697,7 +702,7 @@

Source code for specsanalyzer.convert

 
 
[docs] -def second_closest_rr(rrvec: np.ndarray, closest_rr_index: int) -> int: +def second_closest_rr(rrvec: np.ndarray, closest_rr_index: int) -> int: """Return closest_rr_index+1 unless you are at the edge of the rrvec. Args: @@ -719,7 +724,7 @@

Source code for specsanalyzer.convert

 
 
[docs] -def get_rr_da( +def get_rr_da( lens_mode: str, calib2d_dict: dict, ) -> tuple[np.ndarray, np.ndarray]: @@ -748,7 +753,7 @@

Source code for specsanalyzer.convert

     except KeyError as exc:
         raise KeyError(
             "The supported modes were not found in the calib2d dictionary",
-        ) from exc
+        ) from exc
 
     if lens_mode in supported_angle_modes:
         rr_array = np.array(list(calib2d_dict[lens_mode]["rr"]))
@@ -762,7 +767,7 @@ 

Source code for specsanalyzer.convert

         except KeyError as exc:
             raise ValueError(
                 "Da values do not exist for the given mode.",
-            ) from exc
+            ) from exc
 
         da_matrix = np.zeros([dim1, dim2, dim3])
         for count, item in enumerate(rr_array):
@@ -796,7 +801,7 @@ 

Source code for specsanalyzer.convert

 
 
[docs] -def calculate_polynomial_coef_da( +def calculate_polynomial_coef_da( da_matrix: np.ndarray, kinetic_energy: float, pass_energy: float, @@ -838,7 +843,7 @@

Source code for specsanalyzer.convert

 
 
[docs] -def zinner( +def zinner( kinetic_energy: np.ndarray, angle: np.ndarray, da_poly_matrix: np.ndarray, @@ -869,7 +874,7 @@

Source code for specsanalyzer.convert

 
 
[docs] -def zinner_diff( +def zinner_diff( kinetic_energy: np.ndarray, angle: np.ndarray, da_poly_matrix: np.ndarray, @@ -904,7 +909,7 @@

Source code for specsanalyzer.convert

 
 
[docs] -def mcp_position_mm( +def mcp_position_mm( kinetic_energy: np.ndarray, angle: np.ndarray, a_inner: float, @@ -945,7 +950,7 @@

Source code for specsanalyzer.convert

 
 
[docs] -def calculate_matrix_correction( +def calculate_matrix_correction( kinetic_energy: float, pass_energy: float, nx_pixels: int, @@ -1039,7 +1044,7 @@

Source code for specsanalyzer.convert

 
 
[docs] -def calculate_jacobian( +def calculate_jacobian( angular_correction_matrix: np.ndarray, e_correction: np.ndarray, ek_axis: np.ndarray, @@ -1067,7 +1072,7 @@

Source code for specsanalyzer.convert

 
 
[docs]
-def physical_unit_data(
+def physical_unit_data(
     image: np.ndarray,
     angular_correction_matrix: np.ndarray,
     e_correction: float,
diff --git a/specsanalyzer/develop/_modules/specsanalyzer/core.html b/specsanalyzer/develop/_modules/specsanalyzer/core.html
index 692dc0d..5b2a040 100644
--- a/specsanalyzer/develop/_modules/specsanalyzer/core.html
+++ b/specsanalyzer/develop/_modules/specsanalyzer/core.html
@@ -7,7 +7,7 @@
-    specsanalyzer.core — specsanalyzer 0.4.2.dev40+g8ed2f0c documentation
+    specsanalyzer.core — specsanalyzer 0.5.2.dev10+g554f714 documentation
@@ -116,7 +116,7 @@
-    specsanalyzer 0.4.2.dev40+g8ed2f0c documentation
+    specsanalyzer 0.5.2.dev10+g554f714 documentation
@@ -551,35 +551,39 @@

Source code for specsanalyzer.core

 """This is the specsanalyzer core class"""
-from __future__ import annotations
-
-import os
-from typing import Any
-from typing import Generator
-
-import imutils
-import ipywidgets as ipw
-import matplotlib
-import matplotlib.pyplot as plt
-import numpy as np
-import xarray as xr
-from IPython.display import display
-
-from specsanalyzer import io
-from specsanalyzer.config import complete_dictionary
-from specsanalyzer.config import parse_config
-from specsanalyzer.convert import calculate_matrix_correction
-from specsanalyzer.convert import get_damatrix_from_calib2d
-from specsanalyzer.convert import physical_unit_data
-from specsanalyzer.img_tools import crop_xarray
-from specsanalyzer.img_tools import fourier_filter_2d
+from __future__ import annotations
+
+import os
+from typing import Any
+
+import imutils
+import ipywidgets as ipw
+import matplotlib
+import matplotlib.pyplot as plt
+import numpy as np
+import xarray as xr
+from IPython.display import display
+
+from specsanalyzer import io
+from specsanalyzer.config import complete_dictionary
+from specsanalyzer.config import parse_config
+from specsanalyzer.convert import calculate_matrix_correction
+from specsanalyzer.convert import get_damatrix_from_calib2d
+from specsanalyzer.convert import physical_unit_data
+from specsanalyzer.img_tools import crop_xarray
+from specsanalyzer.img_tools import fourier_filter_2d
+from specsanalyzer.logging import set_verbosity
+from specsanalyzer.logging import setup_logging
 
 package_dir = os.path.dirname(__file__)
 
+# Configure logging
+logger = setup_logging("specsanalyzer")
+
 
 
[docs]
-class SpecsAnalyzer:
+class SpecsAnalyzer:
     """SpecsAnalyzer: A class to convert photoemission data from a SPECS Phoibos analyzer
     from camera image coordinates into physical units (energy, angle, position).
@@ -589,10 +593,11 @@

Source code for specsanalyzer.core

         **kwds: Keyword arguments passed to ``parse_config``.
     """
 
-    def __init__(
+    def __init__(
         self,
         metadata: dict[Any, Any] = {},
         config: dict[Any, Any] | str = {},
+        verbose: bool = True,
         **kwds,
     ):
         """SpecsAnalyzer constructor.
@@ -600,12 +605,14 @@ 

Source code for specsanalyzer.core

         Args:
             metadata (dict, optional): Metadata dictionary. Defaults to {}.
             config (dict | str, optional): Metadata dictionary or file path. Defaults to {}.
+            verbose (bool, optional): Disable info logs if set to False. Defaults to True.
             **kwds: Keyword arguments passed to ``parse_config``.
         """
         self._config = parse_config(
             config,
             **kwds,
         )
+        set_verbosity(logger, verbose)
         self.metadata = metadata
         self._data_array = None
         self.print_msg = True
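A brief sketch of the new verbose switch; the config path is illustrative:

```python
from specsanalyzer import SpecsAnalyzer

# verbose=True (the default) keeps INFO messages; verbose=False suppresses them.
spa = SpecsAnalyzer(config="specs_config.yaml", verbose=False)
```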
@@ -619,7 +626,7 @@ 

Source code for specsanalyzer.core

 
         self._correction_matrix_dict: dict[Any, Any] = {}
 
-    def __repr__(self):
+    def __repr__(self):
         if self._config is None:
             pretty_str = "No configuration available"
         else:
@@ -630,23 +637,23 @@ 

Source code for specsanalyzer.core

         return pretty_str if pretty_str is not None else ""
 
     @property
-    def config(self) -> dict:
+    def config(self) -> dict:
         """Get config"""
         return self._config
 
     @property
-    def calib2d(self) -> dict:
+    def calib2d(self) -> dict:
         """Get calib2d dict"""
         return self._calib2d
 
     @property
-    def correction_matrix_dict(self) -> dict:
+    def correction_matrix_dict(self) -> dict:
         """Get correction_matrix_dict"""
         return self._correction_matrix_dict
 
 
[docs] - def convert_image( + def convert_image( self, raw_img: np.ndarray, lens_mode: str, @@ -843,7 +850,7 @@

Source code for specsanalyzer.core

                 ek_min = range_dict["ek_min"]
                 ek_max = range_dict["ek_max"]
                 if self.print_msg:
-                    print("Using saved crop parameters...")
+                    logger.info("Using saved crop parameters...")
                 data_array = crop_xarray(data_array, ang_min, ang_max, ek_min, ek_max)
             except KeyError:
                 try:
@@ -900,11 +907,13 @@ 

Source code for specsanalyzer.core

                         + data_array.coords[data_array.dims[1]][0]
                     )
                     if self.print_msg:
-                        print("Cropping parameters not found, using cropping ranges from config...")
+                        logger.info(
+                            "Cropping parameters not found, using cropping ranges from config...",
+                        )
                     data_array = crop_xarray(data_array, ang_min, ang_max, ek_min, ek_max)
                 except KeyError:
                     if self.print_msg:
-                        print(
+                        logger.warning(
                             "Warning: Cropping parameters not found, "
                             "use method crop_tool() after loading.",
                         )
@@ -914,7 +923,7 @@ 

Source code for specsanalyzer.core

 
 
[docs]
-    def crop_tool(
+    def crop_tool(
         self,
         raw_img: np.ndarray,
         lens_mode: str,
@@ -941,6 +950,8 @@

Source code for specsanalyzer.core

                 - ek_range_max
                 - ang_range_min
                 - ang_range_max
+                - angle_offset_px
+                - rotation_angle
 
                 Other parameters are passed to ``convert_image()``.
         """
@@ -960,7 +971,7 @@ 

Source code for specsanalyzer.core

         try:
             mesh_obj = data_array.plot(ax=ax)
         except AttributeError:
-            print("Load the scan first!")
+            logger.info("Load the scan first!")
             raise
 
         lineh1 = ax.axhline(y=data_array.Angle[0])
@@ -1032,6 +1043,15 @@ 

Source code for specsanalyzer.core

         vline_range = [ek_min, ek_max]
         hline_range = [ang_min, ang_max]
 
+        angle_offset_px = kwds.get("angle_offset_px", self._config.get("angle_offset_px", 0))
+        rotation_angle = kwds.get("rotation_angle", self._config.get("rotation_angle", 0))
+
+        clim_slider = ipw.FloatRangeSlider(
+            description="colorbar limits",
+            value=[data_array.data.min(), data_array.data.max()],
+            min=data_array.data.min(),
+            max=data_array.data.max(),
+        )
         vline_slider = ipw.FloatRangeSlider(
             description="Ekin",
             value=vline_range,
@@ -1046,14 +1066,33 @@ 

Source code for specsanalyzer.core

             max=data_array.Angle[-1],
             step=0.1,
         )
-        clim_slider = ipw.FloatRangeSlider(
-            description="colorbar limits",
-            value=[data_array.data.min(), data_array.data.max()],
-            min=data_array.data.min(),
-            max=data_array.data.max(),
+        ang_offset_slider = ipw.FloatSlider(
+            description="Angle offset",
+            value=angle_offset_px,
+            min=-20,
+            max=20,
+            step=1,
+        )
+        rotation_slider = ipw.FloatSlider(
+            description="Rotation angle",
+            value=rotation_angle,
+            min=-5,
+            max=5,
+            step=0.1,
         )
 
-        def update(hline, vline, v_vals):
+        def update(hline, vline, v_vals, angle_offset_px, rotation_angle):
+            data_array = self.convert_image(
+                raw_img=raw_img,
+                lens_mode=lens_mode,
+                kinetic_energy=kinetic_energy,
+                pass_energy=pass_energy,
+                work_function=work_function,
+                crop=False,
+                angle_offset_px=angle_offset_px,
+                rotation_angle=rotation_angle,
+            )
+            mesh_obj.update({"array": data_array.data})
             lineh1.set_ydata([hline[0]])
             lineh2.set_ydata([hline[1]])
             linev1.set_xdata([vline[0]])
@@ -1066,9 +1105,11 @@ 

Source code for specsanalyzer.core

             hline=hline_slider,
             vline=vline_slider,
             v_vals=clim_slider,
+            angle_offset_px=ang_offset_slider,
+            rotation_angle=rotation_slider,
         )
 
-        def cropit(val):  # pylint: disable=unused-argument
+        def cropit(val):  # noqa: ARG001
             ang_min = min(hline_slider.value)
             ang_max = max(hline_slider.value)
             ek_min = min(vline_slider.value)
@@ -1111,6 +1152,8 @@ 

Source code for specsanalyzer.core

                 )
             ).item()
             self._config["crop"] = True
+            self._config["angle_offset_px"] = ang_offset_slider.value
+            self._config["rotation_angle"] = rotation_slider.value
 
             ax.cla()
             self._data_array.plot(ax=ax, add_colorbar=False)
@@ -1120,6 +1163,8 @@ 

Source code for specsanalyzer.core

             hline_slider.close()
             clim_slider.close()
             apply_button.close()
+            ang_offset_slider.close()
+            rotation_slider.close()
 
         apply_button = ipw.Button(description="Crop")
         display(apply_button)
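A usage sketch of the extended crop tool; the image, lens mode, and energies are placeholders, and both new keywords fall back to the corresponding config entries when omitted:

```python
import numpy as np

from specsanalyzer import SpecsAnalyzer

spa = SpecsAnalyzer(config="specs_config.yaml")  # illustrative config path
raw_img = np.random.rand(512, 512)               # placeholder detector image

spa.crop_tool(
    raw_img,
    lens_mode="WideAngleMode",  # placeholder lens mode
    kinetic_energy=35.0,
    pass_energy=30.0,
    work_function=4.5,
    angle_offset_px=5,          # pre-sets the new angle-offset slider (pixels)
    rotation_angle=1.5,         # pre-sets the new rotation slider
)
```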
@@ -1131,7 +1176,7 @@ 

Source code for specsanalyzer.core

 
 
[docs] - def fft_tool( + def fft_tool( self, raw_image: np.ndarray, apply: bool = False, @@ -1177,7 +1222,7 @@

Source code for specsanalyzer.core

 
             filtered = fourier_filter_2d(raw_image, peaks=fft_filter_peaks, ret="filtered")
         except IndexError:
-            print("Load the scan first!")
+            logger.warning("Load the scan first!")
             raise
 
         fig = plt.figure()
@@ -1252,7 +1297,7 @@ 

Source code for specsanalyzer.core

             max=int(np.log10(np.abs(img).max())) + 1,
         )
 
-        def update(v_vals, pos_x, pos_y, sigma_x, sigma_y, amplitude):
+        def update(v_vals, pos_x, pos_y, sigma_x, sigma_y, amplitude):
             fft_filter_peaks = create_fft_params(amplitude, pos_x, pos_y, sigma_x, sigma_y)
             msk = fourier_filter_2d(raw_image, peaks=fft_filter_peaks, ret="mask")
             filtered_new = fourier_filter_2d(raw_image, peaks=fft_filter_peaks, ret="filtered")
@@ -1289,7 +1334,7 @@ 

Source code for specsanalyzer.core

             v_vals=clim_slider,
         )
 
-        def apply_fft(apply: bool):  # pylint: disable=unused-argument
+        def apply_fft(apply: bool):  # noqa: ARG001
             amplitude = amplitude_slider.value
             pos_x = pos_x_slider.value
             pos_y = pos_y_slider.value
@@ -1329,7 +1374,7 @@ 

Source code for specsanalyzer.core

 
 
[docs]
-def create_fft_params(
+def create_fft_params(
     amplitude: float,
     pos_x: float,
     pos_y: float,
diff --git a/specsanalyzer/develop/_modules/specsanalyzer/img_tools.html b/specsanalyzer/develop/_modules/specsanalyzer/img_tools.html
index 78931aa..ca940ae 100644
--- a/specsanalyzer/develop/_modules/specsanalyzer/img_tools.html
+++ b/specsanalyzer/develop/_modules/specsanalyzer/img_tools.html
@@ -7,7 +7,7 @@
-    specsanalyzer.img_tools — specsanalyzer 0.4.2.dev40+g8ed2f0c documentation
+    specsanalyzer.img_tools — specsanalyzer 0.5.2.dev10+g554f714 documentation
@@ -116,7 +116,7 @@
-    specsanalyzer 0.4.2.dev40+g8ed2f0c documentation
+    specsanalyzer 0.5.2.dev10+g554f714 documentation
@@ -551,17 +551,17 @@

Source code for specsanalyzer.img_tools

 """This module contains image manipulation tools for the specsanalyzer package"""
-from __future__ import annotations
+from __future__ import annotations
 
-from typing import Sequence
+from typing import Sequence
 
-import numpy as np
-import xarray as xr
+import numpy as np
+import xarray as xr
 
 
 
[docs] -def gauss2d( +def gauss2d( x: float | np.ndarray, y: float | np.ndarray, mx: float, @@ -592,7 +592,7 @@

Source code for specsanalyzer.img_tools

 
 
[docs] -def fourier_filter_2d( +def fourier_filter_2d( image: np.ndarray, peaks: Sequence[dict], ret: str = "filtered", @@ -641,7 +641,7 @@

Source code for specsanalyzer.img_tools

             raise KeyError(
                 f"The peaks input is supposed to be a list of dicts with the "
                 "following structure: pos_x, pos_y, sigma_x, sigma_y, amplitude.",
-            ) from exc
+            ) from exc
 
     # apply mask to the FFT, and transform back
     filtered = np.fft.irfft2(np.fft.ifftshift(image_fft * mask, axes=0))
@@ -661,7 +661,7 @@ 

Source code for specsanalyzer.img_tools

 
 
[docs]
-def crop_xarray(
+def crop_xarray(
     data_array: xr.DataArray,
     x_min: float,
     x_max: float,
diff --git a/specsanalyzer/develop/_modules/specsanalyzer/io.html b/specsanalyzer/develop/_modules/specsanalyzer/io.html
index 3fdc97b..fe83cd4 100644
--- a/specsanalyzer/develop/_modules/specsanalyzer/io.html
+++ b/specsanalyzer/develop/_modules/specsanalyzer/io.html
@@ -7,7 +7,7 @@
-    specsanalyzer.io — specsanalyzer 0.4.2.dev40+g8ed2f0c documentation
+    specsanalyzer.io — specsanalyzer 0.5.2.dev10+g554f714 documentation
@@ -116,7 +116,7 @@
-    specsanalyzer 0.4.2.dev40+g8ed2f0c documentation
+    specsanalyzer 0.5.2.dev10+g554f714 documentation
@@ -551,17 +551,17 @@

Source code for specsanalyzer.io

 """This module contains file input/output functions for the specsanalyzer module"""
-from __future__ import annotations
+from __future__ import annotations
 
-from pathlib import Path
-from typing import Any
-from typing import Sequence
+from pathlib import Path
+from typing import Any
+from typing import Sequence
 
-import h5py
-import numpy as np
-import tifffile
-import xarray as xr
-from pynxtools.dataconverter.convert import convert
+import h5py
+import numpy as np
+import tifffile
+import xarray as xr
+from pynxtools.dataconverter.convert import convert
 
 _IMAGEJ_DIMS_ORDER = "TZCYXS"
 _IMAGEJ_DIMS_ALIAS = {
@@ -591,7 +591,7 @@ 

Source code for specsanalyzer.io

 
 
[docs] -def recursive_write_metadata(h5group: h5py.Group, node: dict): +def recursive_write_metadata(h5group: h5py.Group, node: dict): """Recurses through a python dictionary and writes it into an hdf5 file. Args: @@ -631,13 +631,13 @@

Source code for specsanalyzer.io

             except BaseException as exc:
                 raise ValueError(
                     f"Unknown error occurred, cannot save {item} of type {type(item)}.",
-                ) from exc
+ ) from exc
[docs] -def recursive_parse_metadata( +def recursive_parse_metadata( node: h5py.Group | h5py.Dataset, ) -> dict: """Recurses through an hdf5 file, and parse it into a dictionary. @@ -668,7 +668,7 @@

Source code for specsanalyzer.io

 
 
[docs] -def to_h5(data: xr.DataArray, faddr: str, mode: str = "w"): +def to_h5(data: xr.DataArray, faddr: str, mode: str = "w"): """Save xarray formatted data to hdf5 Args: @@ -720,7 +720,7 @@

Source code for specsanalyzer.io

 
 
[docs] -def load_h5(faddr: str, mode: str = "r") -> xr.DataArray: +def load_h5(faddr: str, mode: str = "r") -> xr.DataArray: """Read xarray data from formatted hdf5 file Args: @@ -737,7 +737,7 @@

Source code for specsanalyzer.io

         except KeyError as exc:
             raise ValueError(
                 f"Wrong Data Format, the BinnedData were not found. The error was{exc}.",
-            ) from exc
+            ) from exc
 
         # Reading the axes
         bin_axes = []
@@ -750,7 +750,7 @@ 

Source code for specsanalyzer.io

         except KeyError as exc:
             raise ValueError(
                 f"Wrong Data Format, the axes were not found. The error was {exc}",
-            ) from exc
+            ) from exc
 
         # load metadata
         metadata = None
@@ -780,7 +780,7 @@ 

Source code for specsanalyzer.io

 
 
[docs] -def to_tiff( +def to_tiff( data: xr.DataArray | np.ndarray, faddr: Path | str, alias_dict: dict = None, @@ -845,7 +845,7 @@

Source code for specsanalyzer.io

 
 
[docs] -def _sort_dims_for_imagej(dims: list, alias_dict: dict = None) -> list: +def _sort_dims_for_imagej(dims: list, alias_dict: dict = None) -> list: """Guess the order of the dimensions from the alias dictionary Args: @@ -871,7 +871,7 @@

Source code for specsanalyzer.io

 
 
[docs] -def _fill_missing_dims(dims: list, alias_dict: dict = None) -> list: +def _fill_missing_dims(dims: list, alias_dict: dict = None) -> list: """Guess the order of the dimensions from the alias dictionary Args: @@ -925,7 +925,7 @@

Source code for specsanalyzer.io

 
 
[docs] -def load_tiff( +def load_tiff( faddr: str | Path, coords: dict = None, dims: Sequence = None, @@ -975,7 +975,7 @@

Source code for specsanalyzer.io

 
 
[docs] -def to_nexus( +def to_nexus( data: xr.DataArray, faddr: str, reader: str, @@ -1014,7 +1014,7 @@

Source code for specsanalyzer.io

 
 
[docs] -def get_pair_from_list(list_line: list) -> list: +def get_pair_from_list(list_line: list) -> list: """Returns key value pair for the read function where a line in the file contains '=' character. @@ -1047,7 +1047,7 @@

Source code for specsanalyzer.io

 
 
[docs] -def read_calib2d(filepath: str) -> list: +def read_calib2d(filepath: str) -> list: """Reads the calib2d file into a convenient list for the parser function containing useful and cleaned data. @@ -1090,7 +1090,7 @@

Source code for specsanalyzer.io

 
 
[docs] -def parse_calib2d_to_dict(filepath: str) -> dict: +def parse_calib2d_to_dict(filepath: str) -> dict: """Parses the given calib2d file into a nested dictionary structure to provide parameters for image conversion. @@ -1133,7 +1133,7 @@

Source code for specsanalyzer.io

 
 
[docs]
-def get_modes_from_calib_dict(calib_dict: dict) -> tuple[list, list]:
+def get_modes_from_calib_dict(calib_dict: dict) -> tuple[list, list]:
     """create a list of supported modes, divided in spatial and angular modes

     Args:
diff --git a/specsanalyzer/develop/_modules/specsscan/core.html b/specsanalyzer/develop/_modules/specsscan/core.html
index 1604edb..1cf8c9f 100644
--- a/specsanalyzer/develop/_modules/specsscan/core.html
+++ b/specsanalyzer/develop/_modules/specsscan/core.html
@@ -7,7 +7,7 @@
-    specsscan.core — specsanalyzer 0.4.2.dev40+g8ed2f0c documentation
+    specsscan.core — specsanalyzer 0.5.2.dev10+g554f714 documentation
@@ -116,7 +116,7 @@
-    specsanalyzer 0.4.2.dev40+g8ed2f0c documentation
+    specsanalyzer 0.5.2.dev10+g554f714 documentation
@@ -551,41 +551,47 @@

Source code for specsscan.core

 """This is the SpecsScan core class"""
-from __future__ import annotations
-
-import copy
-import os
-import pathlib
-from importlib.util import find_spec
-from logging import warn
-from pathlib import Path
-from typing import Any
-from typing import Sequence
-
-import matplotlib
-import numpy as np
-import xarray as xr
-from tqdm.auto import tqdm
-
-from specsanalyzer import SpecsAnalyzer
-from specsanalyzer.config import parse_config
-from specsanalyzer.io import to_h5
-from specsanalyzer.io import to_nexus
-from specsanalyzer.io import to_tiff
-from specsscan.helpers import get_coords
-from specsscan.helpers import get_scan_path
-from specsscan.helpers import handle_meta
-from specsscan.helpers import load_images
-from specsscan.helpers import parse_info_to_dict
-from specsscan.helpers import parse_lut_to_df
+from __future__ import annotations
+
+import copy
+import os
+import pathlib
+from importlib.util import find_spec
+from logging import warn
+from pathlib import Path
+from typing import Any
+from typing import Sequence
+
+import matplotlib
+import numpy as np
+import xarray as xr
+from tqdm.auto import tqdm
+
+from specsanalyzer import SpecsAnalyzer
+from specsanalyzer.config import parse_config
+from specsanalyzer.config import save_config
+from specsanalyzer.io import to_h5
+from specsanalyzer.io import to_nexus
+from specsanalyzer.io import to_tiff
+from specsanalyzer.logging import set_verbosity
+from specsanalyzer.logging import setup_logging
+from specsscan.helpers import get_coords
+from specsscan.helpers import get_scan_path
+from specsscan.helpers import handle_meta
+from specsscan.helpers import load_images
+from specsscan.helpers import parse_info_to_dict
+from specsscan.helpers import parse_lut_to_df
 
 
 package_dir = os.path.dirname(find_spec("specsscan").origin)
 
+# Configure logging
+logger = setup_logging("specsscan")
+
 
 
[docs]
-class SpecsScan:
+class SpecsScan:
     """SpecsScan class for loading scans and data from SPECS Phoibos electron analyzers,
     generated with the ARPESControl software at Fritz Haber Institute, Berlin, and EPFL, Lausanne.
@@ -595,10 +601,11 @@

Source code for specsscan.core

         **kwds: Keyword arguments passed to ``parse_config``.
     """
 
-    def __init__(
+    def __init__(
         self,
         metadata: dict = {},
         config: dict | str = {},
+        verbose: bool = True,
         **kwds,
     ):
         """SpecsScan constructor.
@@ -606,6 +613,7 @@ 

Source code for specsscan.core

         Args:
             metadata (dict, optional): Metadata dictionary. Defaults to {}.
             config (Union[dict, str], optional): Metadata dictionary or file path. Defaults to {}.
+            verbose (bool, optional): Disable info logs if set to False. Defaults to True.
             **kwds: Keyword arguments passed to ``parse_config``.
         """
         self._config = parse_config(
@@ -614,6 +622,8 @@ 

Source code for specsscan.core

             **kwds,
         )
 
+        set_verbosity(logger, verbose)
+
         self.metadata = metadata
 
         self._scan_info: dict[Any, Any] = {}
@@ -624,18 +634,20 @@ 

Source code for specsscan.core

                 folder_config={},
                 user_config={},
                 system_config={},
+                verbose=verbose,
             )
         except KeyError:
             self.spa = SpecsAnalyzer(
                 folder_config={},
                 user_config={},
                 system_config={},
+                verbose=verbose,
             )
 
         self._result: xr.DataArray = None
 
     # pylint: disable=duplicate-code
-    def __repr__(self):
+    def __repr__(self):
         if self._config is None:
             pretty_str = "No configuration available"
         else:
@@ -646,12 +658,12 @@ 

Source code for specsscan.core

         return pretty_str if pretty_str is not None else ""
 
     @property
-    def config(self):
+    def config(self):
         """Get config"""
         return self._config
 
     @config.setter
-    def config(self, config: dict | str):
+    def config(self, config: dict | str):
         """Set config"""
         self._config = parse_config(
             config,
@@ -663,13 +675,13 @@ 

Source code for specsscan.core

             self.spa = SpecsAnalyzer()
 
     @property
-    def result(self):
+    def result(self):
         """Get result xarray"""
         return self._result
 
 
[docs]
-    def load_scan(
+    def load_scan(
         self,
         scan: int,
         path: str | Path = "",
@@ -700,6 +712,7 @@

Source code for specsscan.core

             xr.DataArray: xarray DataArray object with kinetic energy, angle/position and
             optionally a third scanned axis (for ex., delay, temperature) as coordinates.
         """
+        token = kwds.pop("token", None)
         scan_path = get_scan_path(path, scan, self._config["data_path"])
         df_lut = parse_lut_to_df(scan_path)
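A usage sketch of the new token pass-through; scan number, config path, and token are placeholders, and the token is forwarded to the elabFTW metadata retrieval further down:

```python
from specsscan.core import SpecsScan

sps = SpecsScan(config="config.yaml", verbose=True)  # placeholder config path
res = sps.load_scan(
    scan=4450,                 # placeholder scan number
    collect_metadata=True,
    token="my-elabftw-token",  # placeholder; popped from kwds and passed to handle_meta
)
```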
 
@@ -792,10 +805,11 @@ 

Source code for specsscan.core

             k: coordinate_mapping[k] for k in coordinate_mapping.keys() if k in res_xarray.dims
         }
         depends_dict = {
-            rename_dict[k]: coordinate_depends[k]
+            rename_dict.get(k, k): coordinate_depends[k]
             for k in coordinate_depends.keys()
             if k in res_xarray.dims
         }
+
         res_xarray = res_xarray.rename(rename_dict)
         for k, v in coordinate_mapping.items():
             if k in fast_axes:
@@ -810,12 +824,13 @@ 

Source code for specsscan.core

             "/entry/sample/transformations/sample_polar": "Polar",
             "/entry/sample/transformations/sample_tilt": "Tilt",
             "/entry/sample/transformations/sample_azimuth": "Azimuth",
+            "/entry/instrument/beam_pump/pulse_delay": "delay",
         }
 
-        # store link information for resolved axis coordinates
+        # store data for resolved axis coordinates
         for k, v in depends_dict.items():
             if v in axis_dict:
-                self._scan_info[axis_dict[v]] = "@link:/entry/data/" + k
+                self._scan_info[axis_dict[v]] = res_xarray.coords[k].data
 
         for name in res_xarray.dims:
             try:
@@ -825,14 +840,16 @@ 

Source code for specsscan.core

 
         self.metadata.update(
             **handle_meta(
-                df_lut,
-                self._scan_info,
-                self.config,
+                df_lut=df_lut,
+                scan_info=self._scan_info,
+                config=self.config.get("metadata", {}),
+                scan=scan,
                 fast_axes=list(fast_axes),  # type: ignore
                 slow_axes=list(slow_axes),
                 projection=projection,
                 metadata=copy.deepcopy(metadata),
                 collect_metadata=collect_metadata,
+                token=token,
             ),
             **{"loader": loader_dict},
             **{"conversion_parameters": conversion_metadata},
@@ -846,7 +863,7 @@ 

Source code for specsscan.core

 
 
[docs] - def crop_tool(self, scan: int = None, path: Path | str = "", **kwds): + def crop_tool(self, scan: int = None, path: Path | str = "", **kwds): """Cropping tool interface to crop_tool method of the SpecsAnalyzer class. Args: @@ -871,7 +888,7 @@

Source code for specsscan.core

             try:
                 image = self.metadata["loader"]["raw_data"][0]
             except KeyError as exc:
-                raise ValueError("No image loaded, load image first!") from exc
+                raise ValueError("No image loaded, load image first!") from exc
 
         self.spa.crop_tool(
             image,
@@ -883,9 +900,44 @@ 

Source code for specsscan.core

         )
+
+[docs]
+    def save_crop_params(
+        self,
+        filename: str = None,
+        overwrite: bool = False,
+    ):
+        """Save the generated crop parameters to the folder config file.
+
+        Args:
+            filename (str, optional): Filename of the config dictionary to save to.
+                Defaults to "specs_config.yaml" in the current folder.
+            overwrite (bool, optional): Option to overwrite the present dictionary.
+                Defaults to False.
+        """
+        if filename is None:
+            filename = "specs_config.yaml"
+        if "ek_range_min" not in self.spa.config:
+            raise ValueError("No crop parameters to save!")
+
+        config = {
+            "spa_params": {
+                "crop": self.spa.config["crop"],
+                "ek_range_min": self.spa.config["ek_range_min"],
+                "ek_range_max": self.spa.config["ek_range_max"],
+                "ang_range_min": self.spa.config["ang_range_min"],
+                "ang_range_max": self.spa.config["ang_range_max"],
+                "angle_offset_px": self.spa.config["angle_offset_px"],
+                "rotation_angle": self.spa.config["rotation_angle"],
+            },
+        }
+        save_config(config, filename, overwrite)
+        logger.info(f'Saved crop parameters to "{filename}".')
+
+
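Usage sketch (scan number and file name are placeholders): after adjusting the interactive crop, the parameters can be persisted to a folder config that later sessions pick up automatically.

```python
from specsscan.core import SpecsScan

sps = SpecsScan(config="config.yaml")  # placeholder config path
sps.crop_tool(scan=4450)               # placeholder scan; opens the interactive crop tool
sps.save_crop_params(filename="specs_config.yaml", overwrite=True)
```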
[docs] - def fft_tool(self, scan: int = None, path: Path | str = "", **kwds): + def fft_tool(self, scan: int = None, path: Path | str = "", **kwds): """FFT tool to play around with the peak parameters in the Fourier plane. Built to filter out the meshgrid appearing in the raw data images. The optimized parameters are stored in the class config dict under fft_filter_peaks. @@ -915,7 +967,7 @@

Source code for specsscan.core

             try:
                 image = self.metadata["loader"]["raw_data"][0]
             except KeyError as exc:
-                raise ValueError("No image loaded, load image first!") from exc
+                raise ValueError("No image loaded, load image first!") from exc
 
         self.spa.fft_tool(
             image,
@@ -923,9 +975,39 @@ 

Source code for specsscan.core

         )
+
+[docs]
+    def save_fft_params(
+        self,
+        filename: str = None,
+        overwrite: bool = False,
+    ):
+        """Save the generated fft filter parameters to the folder config file.
+
+        Args:
+            filename (str, optional): Filename of the config dictionary to save to.
+                Defaults to "specs_config.yaml" in the current folder.
+            overwrite (bool, optional): Option to overwrite the present dictionary.
+                Defaults to False.
+        """
+        if filename is None:
+            filename = "specs_config.yaml"
+        if len(self.spa.config["fft_filter_peaks"]) == 0:
+            raise ValueError("No fft parameters to save!")
+
+        config = {
+            "spa_params": {
+                "fft_filter_peaks": self.spa.config["fft_filter_peaks"],
+                "apply_fft_filter": self.spa.config["apply_fft_filter"],
+            },
+        }
+        save_config(config, filename, overwrite)
+        logger.info(f'Saved fft parameters to "{filename}".')
+
+
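Usage sketch, analogous to save_crop_params (placeholders as before):

```python
from specsscan.core import SpecsScan

sps = SpecsScan(config="config.yaml")  # placeholder config path
sps.fft_tool(scan=4450)                # placeholder scan; tune the Fourier filter peaks
sps.save_fft_params(filename="specs_config.yaml", overwrite=True)
```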
[docs] - def check_scan( + def check_scan( self, scan: int, delays: Sequence[int] | int, @@ -955,6 +1037,7 @@

Source code for specsscan.core

         Returns:
             xr.DataArray: 3-D xarray of dimensions (Ekin, Angle, Iterations)
         """
+        token = kwds.pop("token", None)
         scan_path = get_scan_path(path, scan, self._config["data_path"])
         df_lut = parse_lut_to_df(scan_path)
 
@@ -1002,7 +1085,7 @@ 

Source code for specsscan.core

 
         conversion_metadata = xr_list[0].attrs["conversion_parameters"]
 
-        dims = get_coords(
+        dims = get_coords(  # noqa: F841
             scan_path=scan_path,
             scan_type=scan_type,
             scan_info=self._scan_info,
@@ -1030,14 +1113,16 @@ 

Source code for specsscan.core

 
         self.metadata.update(
             **handle_meta(
-                df_lut,
-                self._scan_info,
-                self.config,
+                df_lut=df_lut,
+                scan_info=self._scan_info,
+                config=self.config.get("metadata", {}),
+                scan=scan,
                 fast_axes=list(fast_axes),  # type: ignore
                 slow_axes=list(slow_axes),
                 projection=projection,
-                metadata=metadata,
+                metadata=copy.deepcopy(metadata),
                 collect_metadata=collect_metadata,
+                token=token,
             ),
             **{"loader": loader_dict},
             **{"conversion_parameters": conversion_metadata},
@@ -1054,7 +1139,7 @@ 

Source code for specsscan.core

 
 
[docs] - def save( + def save( self, faddr: str, **kwds, @@ -1140,7 +1225,7 @@

Source code for specsscan.core

 
 
[docs] - def process_sweep_scan( + def process_sweep_scan( self, raw_data: list[np.ndarray], kinetic_energy: np.ndarray, @@ -1187,7 +1272,7 @@

Source code for specsscan.core

             )
             or not self.spa.config["crop"]
         ):
-            warn("No valid cropping parameters found, consider using crop_tool() to set.")
+            logger.warning("No valid cropping parameters found, consider using crop_tool() to set.")
 
         e_step = converted.Ekin[1] - converted.Ekin[0]
         e0 = converted.Ekin[-1] - ekin_step
diff --git a/specsanalyzer/develop/_modules/specsscan/helpers.html b/specsanalyzer/develop/_modules/specsscan/helpers.html
index 1540213..cfb98d7 100644
--- a/specsanalyzer/develop/_modules/specsscan/helpers.html
+++ b/specsanalyzer/develop/_modules/specsscan/helpers.html
@@ -7,7 +7,7 @@
-    specsscan.helpers — specsanalyzer 0.4.2.dev40+g8ed2f0c documentation
+    specsscan.helpers — specsanalyzer 0.5.2.dev10+g554f714 documentation
@@ -116,7 +116,7 @@
-    specsanalyzer 0.4.2.dev40+g8ed2f0c documentation
+    specsanalyzer 0.5.2.dev10+g554f714 documentation
@@ -551,27 +551,28 @@

Source code for specsscan.helpers

 """This script contains helper functions used by the specsscan class"""
-from __future__ import annotations
+from __future__ import annotations
 
-import datetime as dt
-import json
-from pathlib import Path
-from typing import Any
-from typing import Sequence
-from urllib.error import HTTPError
-from urllib.error import URLError
-from urllib.request import urlopen
+import datetime as dt
+import logging
+from pathlib import Path
+from typing import Any
+from typing import Sequence
 
-import numpy as np
-import pandas as pd
-from tqdm.auto import tqdm
+import numpy as np
+import pandas as pd
+from tqdm.auto import tqdm
 
-from specsanalyzer.config import complete_dictionary
+from specsanalyzer.config import complete_dictionary
+from specsscan.metadata import MetadataRetriever
+
+# Configure logging
+logger = logging.getLogger("specsanalyzer.specsscan")
 
 
 
[docs] -def get_scan_path(path: Path | str, scan: int, basepath: Path | str) -> Path: +def get_scan_path(path: Path | str, scan: int, basepath: Path | str) -> Path: """Returns the path to the given scan. Args: @@ -607,7 +608,7 @@

Source code for specsscan.helpers

 
 
[docs] -def load_images( +def load_images( scan_path: Path, df_lut: pd.DataFrame = None, iterations: np.ndarray | slice | Sequence[int] | Sequence[slice] = None, @@ -681,9 +682,9 @@

Source code for specsscan.helpers

                 "the chosen data. In case of a single scan, "
                 f"try without passing iterations inside the "
                 "load_scan method.",
-            ) from exc
+            ) from exc
 
-        print(f"Averaging over {avg_dim}...")
+        logger.info(f"Averaging over {avg_dim}...")
         for dim in tqdm(raw_2d_sliced):
             avg_list = []
             for image in tqdm(dim, leave=False, disable=not tqdm_enable_nested):
@@ -718,7 +719,7 @@ 

Source code for specsscan.helpers

 
 
[docs] -def get_raw2d(scan_list: list[str], raw_array: np.ndarray) -> np.ndarray: +def get_raw2d(scan_list: list[str], raw_array: np.ndarray) -> np.ndarray: """Converts a 1-D array of raw scan names into 2-D based on the number of iterations Args: @@ -762,7 +763,7 @@

Source code for specsscan.helpers

 
 
[docs] -def parse_lut_to_df(scan_path: Path) -> pd.DataFrame: +def parse_lut_to_df(scan_path: Path) -> pd.DataFrame: """Loads the contents of LUT.txt file into a pandas data frame to be used as metadata. Args: @@ -776,14 +777,14 @@

Source code for specsscan.helpers

         df_lut.reset_index(inplace=True)
 
         new_cols = df_lut.columns.to_list()[1:]
-        new_cols[new_cols.index("delaystage")] = "Delay"
+        new_cols[new_cols.index("delaystage")] = "DelayStage"
         new_cols.insert(3, "delay (fs)")  # Create label to drop the column later
 
         df_lut = df_lut.set_axis(new_cols, axis="columns")
         df_lut.drop(columns="delay (fs)", inplace=True)
 
     except FileNotFoundError:
-        print(
+        logger.info(
             "LUT.txt not found. Storing metadata from info.txt",
         )
         return None
@@ -794,7 +795,7 @@ 

Source code for specsscan.helpers

 
 
[docs] -def get_coords( +def get_coords( scan_path: Path, scan_type: str, scan_info: dict[Any, Any], @@ -834,7 +835,7 @@

Source code for specsscan.helpers

             return (np.array([]), "")
 
         if df_lut is not None:
-            print("scanvector.txt not found. Obtaining coordinates from LUT")
+            logger.info("scanvector.txt not found. Obtaining coordinates from LUT")
 
             df_new: pd.DataFrame = df_lut.loc[:, df_lut.columns[2:]]
 
@@ -842,20 +843,28 @@ 

Source code for specsscan.helpers

             dim = df_new.columns[index]
 
         else:
-            raise FileNotFoundError("scanvector.txt file not found!") from exc
+            raise FileNotFoundError("scanvector.txt file not found!") from exc
 
     if scan_type == "delay":
-        t_0 = scan_info["TimeZero"]
-        coords -= t_0
-        coords *= 2 / 3e11 * 1e15
+        t0 = scan_info["TimeZero"]
+        coords = mm_to_fs(coords, t0)
 
     return coords, dim
+
+
+[docs]
+def mm_to_fs(delaystage, t0):
+    delay = delaystage - t0
+    delay *= 2 / 2.99792458e11 * 1e15
+    return delay
+
+
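A quick numerical check of the new helper; the factor 2 presumably accounts for the double pass over the delay stage, with c = 2.99792458e11 mm/s:

```python
from specsscan.helpers import mm_to_fs

# 0.15 mm of stage travel away from time zero corresponds to roughly 1 ps:
# 2 * 0.15 mm / (2.99792458e11 mm/s) * 1e15 fs/s ≈ 1000.7 fs
print(mm_to_fs(10.15, 10.0))  # ≈ 1000.7 (stage at 10.15 mm, time zero at 10.0 mm)
```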
[docs] -def compare_coords(axis_data: np.ndarray) -> tuple[np.ndarray, int]: +def compare_coords(axis_data: np.ndarray) -> tuple[np.ndarray, int]: """Identifies the most changing column in a given 2-D numpy array. Args: @@ -881,7 +890,7 @@

Source code for specsscan.helpers

 
 
[docs] -def parse_info_to_dict(path: Path) -> dict: +def parse_info_to_dict(path: Path) -> dict: """Parses the contents of info.txt file into a dictionary Args: @@ -911,7 +920,10 @@

Source code for specsscan.helpers

                     info_dict[key] = value
 
     except FileNotFoundError as exc:
-        raise FileNotFoundError("info.txt file not found.") from exc
+        raise FileNotFoundError("info.txt file not found.") from exc
+
+    if "DelayStage" in info_dict and "TimeZero" in info_dict:
+        info_dict["delay"] = mm_to_fs(info_dict["DelayStage"], info_dict["TimeZero"])
 
     return info_dict
@@ -919,15 +931,17 @@

Source code for specsscan.helpers

 
 
[docs]
-def handle_meta(
+def handle_meta(
     df_lut: pd.DataFrame,
     scan_info: dict,
     config: dict,
+    scan: int,
     fast_axes: list[str],
     slow_axes: list[str],
     projection: str,
     metadata: dict = None,
     collect_metadata: bool = False,
+    token: str = None,
 ) -> dict:
     """Helper function for handling the metadata from different files

@@ -936,22 +950,24 @@

Source code for specsscan.helpers

             from ``parse_lut_to_df()``
         scan_info (dict): scan_info class dict containing the contents of the info.txt file
         config (dict): config dictionary containing the contents of config.yaml file
+        scan (int): Scan number
         fast_axes (list[str]): The fast-axis dimensions of the scan
         slow_axes (list[str]): The slow-axis dimensions of the scan
         metadata (dict, optional): Metadata dictionary with additional metadata for the scan.
             Defaults to empty dictionary.
         collect_metadata (bool, optional): Option to collect further metadata e.g. from EPICS
             archiver needed for NeXus conversion. Defaults to False.
+        token (str, optional): The elabFTW API token to use for fetching metadata
 
     Returns:
         dict: metadata dictionary containing additional metadata from the EPICS
-        archive.
+        archive and elabFTW.
     """
 
     if metadata is None:
         metadata = {}
 
-    print("Gathering metadata from different locations")
+    logger.info("Gathering metadata from different locations")
     # get metadata from LUT dataframe
     lut_meta = {}
     energy_scan_mode = "snapshot"
@@ -969,10 +985,10 @@ 

Source code for specsscan.helpers

 
     metadata["scan_info"] = complete_dictionary(
         metadata.get("scan_info", {}),
-        complete_dictionary(lut_meta, scan_info),
+        complete_dictionary(scan_info, lut_meta),
     )  # merging dictionaries
 
-    print("Collecting time stamps...")
+    logger.info("Collecting time stamps...")
     if "time" in metadata["scan_info"]:
         time_list = [metadata["scan_info"]["time"][0], metadata["scan_info"]["time"][-1]]
     elif "StartTime" in metadata["scan_info"]:
@@ -996,53 +1012,18 @@ 

Source code for specsscan.helpers

     }
 
     if collect_metadata:
-        # Get metadata from Epics archive if not present already
-        start = dt.datetime.utcfromtimestamp(ts_from).isoformat()
+        metadata_retriever = MetadataRetriever(config, token)
 
-        # replace metadata names by epics channels
-        try:
-            replace_dict = config["epics_channels"]
-            for key in list(metadata["scan_info"]):
-                if key.lower() in replace_dict:
-                    metadata["scan_info"][replace_dict[key.lower()]] = metadata["scan_info"][key]
-                    metadata["scan_info"].pop(key)
-            epics_channels = replace_dict.values()
-        except KeyError:
-            epics_channels = []
-
-        channels_missing = set(epics_channels) - set(metadata["scan_info"].keys())
-        if channels_missing:
-            print("Collecting data from the EPICS archive...")
-            for channel in channels_missing:
-                try:
-                    _, vals = get_archiver_data(
-                        archiver_url=config.get("archiver_url"),
-                        archiver_channel=channel,
-                        ts_from=ts_from,
-                        ts_to=ts_to,
-                    )
-                    metadata["scan_info"][f"{channel}"] = np.mean(vals)
+        metadata = metadata_retriever.fetch_epics_metadata(
+            ts_from=ts_from,
+            ts_to=ts_to,
+            metadata=metadata,
+        )
 
-                except IndexError:
-                    metadata["scan_info"][f"{channel}"] = np.nan
-                    print(
-                        f"Data for channel {channel} doesn't exist for time {start}",
-                    )
-                except HTTPError as exc:
-                    print(
-                        f"Incorrect URL for the archive channel {channel}. "
-                        "Make sure that the channel name and file start and end times are "
-                        "correct.",
-                    )
-                    print("Error code: ", exc)
-                except URLError as exc:
-                    print(
-                        f"Cannot access the archive URL for channel {channel}. "
-                        f"Make sure that you are within the FHI network."
-                        f"Skipping over channels {channels_missing}.",
-                    )
-                    print("Error code: ", exc)
-                    break
+        metadata = metadata_retriever.fetch_elab_metadata(
+            scan=scan,
+            metadata=metadata,
+        )
 
     metadata["scan_info"]["energy_scan_mode"] = energy_scan_mode
 
@@ -1054,46 +1035,13 @@ 

Source code for specsscan.helpers

     metadata["scan_info"]["slow_axes"] = slow_axes
     metadata["scan_info"]["fast_axes"] = fast_axes
 
-    print("Done!")
-
     return metadata
-
[docs]
-def get_archiver_data(
-    archiver_url: str,
-    archiver_channel: str,
-    ts_from: float,
-    ts_to: float,
-) -> tuple[np.ndarray, np.ndarray]:
-    """Extract time stamps and corresponding data from and EPICS archiver instance
-
-    Args:
-        archiver_url (str): URL of the archiver data extraction interface
-        archiver_channel (str): EPICS channel to extract data for
-        ts_from (float): starting time stamp of the range of interest
-        ts_to (float): ending time stamp of the range of interest
-
-    Returns:
-        tuple[List, List]: The extracted time stamps and corresponding data
-    """
-    iso_from = dt.datetime.utcfromtimestamp(ts_from).isoformat()
-    iso_to = dt.datetime.utcfromtimestamp(ts_to).isoformat()
-    req_str = archiver_url + archiver_channel + "&from=" + iso_from + "Z&to=" + iso_to + "Z"
-    with urlopen(req_str) as req:
-        data = json.load(req)
-    secs = [x["secs"] + x["nanos"] * 1e-9 for x in data[0]["data"]]
-    vals = [x["val"] for x in data[0]["data"]]
-
-    return (np.asarray(secs), np.asarray(vals))
-
-
-
[docs]
-def find_scan(path: Path, scan: int) -> list[Path]:
+def find_scan(path: Path, scan: int) -> list[Path]:
     """Search function to locate the scan folder

     Args:
@@ -1103,7 +1051,7 @@

Source code for specsscan.helpers

     Returns:
         List[Path]: Path object(s) pointing to the scan folder
     """
-    print("Scan path not provided, searching directories...")
+    logger.info("Scan path not provided, searching directories...")
     for file in path.iterdir():
         if file.is_dir():
             try:
@@ -1117,7 +1065,7 @@ 

Source code for specsscan.helpers

                     file.glob(f"*/*/Raw Data/{scan}"),
                 )
                 if scan_path:
-                    print("Scan found at path:", scan_path[0])
+                    logger.info(f"Scan found at path: {scan_path[0]}")
                     break
     else:
         scan_path = []
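A self-contained sketch of the directory search performed by ``find_scan``, assuming the layout ``<base>/<year>/<month>/<day>/Raw Data/<scan>`` implied by the glob pattern; the numeric-folder filter stands in for the try/except block not fully visible in the hunk:

```python
# Stand-alone sketch of the scan-folder search; the directory layout and the
# numeric-folder filter are assumptions based on the glob pattern above.
from pathlib import Path


def find_scan_sketch(path: Path, scan: int) -> list[Path]:
    """Return paths matching <path>/<year>/*/*/Raw Data/<scan>; first hit wins."""
    for entry in path.iterdir():
        if not entry.is_dir():
            continue
        try:
            int(entry.name)  # only descend into numeric (e.g. year) folders
        except ValueError:
            continue
        matches = sorted(entry.glob(f"*/*/Raw Data/{scan}"))
        if matches:
            return matches
    return []


print(find_scan_sketch(Path("/data"), 4450))  # [] if the scan is not found
```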
@@ -1127,7 +1075,7 @@ 

Source code for specsscan.helpers

 
 
[docs]
-def find_scan_type(
+def find_scan_type(
     path: Path,
     scan_type: str,
 ):
diff --git a/specsanalyzer/develop/_sources/specsanalyzer/config.rst.txt b/specsanalyzer/develop/_sources/specsanalyzer/config.rst.txt
index 4165f69..34ce2f1 100644
--- a/specsanalyzer/develop/_sources/specsanalyzer/config.rst.txt
+++ b/specsanalyzer/develop/_sources/specsanalyzer/config.rst.txt
@@ -4,8 +4,8 @@ The config module contains a mechanics to collect configuration parameters from
 It will load an (optional) provided config file, or alternatively use a passed python dictionary as initial config dictionary, and subsequently look for the following additional config files to load:

 * ``folder_config``: A config file of name :file:`specs_config.yaml` in the current working directory. This is mostly intended to pass calibration parameters of the workflow between different notebook instances.
-* ``user_config``: A config file provided by the user, stored as :file:`.specsanalyzer/config.yaml` in the current user's home directly. This is intended to give a user the option for individual configuration modifications of system settings.
-* ``system_config``: A config file provided by the system administrator, stored as :file:`/etc/specsanalyzer/config.yaml` on Linux-based systems, and :file:`%ALLUSERSPROFILE%/specsanalyzer/config.yaml` on Windows. This should provide all necessary default parameters for using the specsanalyzer processor with a given setup. For an example for the setup at the Fritz Haber Institute setup, see :ref:`example_config`
+* ``user_config``: A config file provided by the user, stored as :file:`.config/specsanalyzer/config_v1.yaml` in the current user's home directory. This is intended to give a user the option for individual configuration modifications of system settings.
+* ``system_config``: A config file provided by the system administrator, stored as :file:`/etc/specsanalyzer/config_v1.yaml` on Linux-based systems, and :file:`%ALLUSERSPROFILE%/specsanalyzer/config_v1.yaml` on Windows. This should provide all necessary default parameters for using the specsanalyzer processor with a given setup. For an example of the setup at the Fritz Haber Institute, see :ref:`example_config`
 * ``default_config``: The default configuration shipped with the package. Typically, all parameters here should be overwritten by any of the other configuration files.

 The config mechanism returns the combined dictionary, and reports the loaded configuration files. In order to disable or overwrite any of the configuration files, they can also be given as optional parameters (path to a file, or python dictionary).
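A short sketch of overriding that cascade from user code, assuming the loader is ``parse_config`` from ``specsanalyzer.config`` and that, as stated above, each level accepts either a file path or a python dictionary; all keys and paths below are placeholders:

```python
# Hedged sketch: parse_config and its keyword names are assumed from the
# config module; the dictionary contents and file paths are placeholders.
from specsanalyzer.config import parse_config

config = parse_config(
    config={"some_section": {"some_parameter": 1.0}},  # inline dict instead of a file
    folder_config="specs_config.yaml",                 # file in the working directory
    user_config={},                                    # empty dict disables this level
    system_config="/etc/specsanalyzer/config_v1.yaml", # explicit system-wide file
)
```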
diff --git a/specsanalyzer/develop/_static/documentation_options.js b/specsanalyzer/develop/_static/documentation_options.js index 8813e08..cb94bc8 100644 --- a/specsanalyzer/develop/_static/documentation_options.js +++ b/specsanalyzer/develop/_static/documentation_options.js @@ -1,5 +1,5 @@ const DOCUMENTATION_OPTIONS = { - VERSION: '0.4.2.dev40+g8ed2f0c', + VERSION: '0.5.2.dev10+g554f714', LANGUAGE: 'en', COLLAPSE_INDEX: false, BUILDER: 'html', diff --git a/specsanalyzer/develop/_static/pygments.css b/specsanalyzer/develop/_static/pygments.css index 012e6a0..d7dd577 100644 --- a/specsanalyzer/develop/_static/pygments.css +++ b/specsanalyzer/develop/_static/pygments.css @@ -6,11 +6,11 @@ html[data-theme="light"] .highlight span.linenos.special { color: #000000; backg html[data-theme="light"] .highlight .hll { background-color: #fae4c2 } html[data-theme="light"] .highlight { background: #fefefe; color: #080808 } html[data-theme="light"] .highlight .c { color: #515151 } /* Comment */ -html[data-theme="light"] .highlight .err { color: #a12236 } /* Error */ -html[data-theme="light"] .highlight .k { color: #6730c5 } /* Keyword */ -html[data-theme="light"] .highlight .l { color: #7f4707 } /* Literal */ +html[data-theme="light"] .highlight .err { color: #A12236 } /* Error */ +html[data-theme="light"] .highlight .k { color: #6730C5 } /* Keyword */ +html[data-theme="light"] .highlight .l { color: #7F4707 } /* Literal */ html[data-theme="light"] .highlight .n { color: #080808 } /* Name */ -html[data-theme="light"] .highlight .o { color: #00622f } /* Operator */ +html[data-theme="light"] .highlight .o { color: #00622F } /* Operator */ html[data-theme="light"] .highlight .p { color: #080808 } /* Punctuation */ html[data-theme="light"] .highlight .ch { color: #515151 } /* Comment.Hashbang */ html[data-theme="light"] .highlight .cm { color: #515151 } /* Comment.Multiline */ @@ -18,135 +18,135 @@ html[data-theme="light"] .highlight .cp { color: #515151 } /* Comment.Preproc */ html[data-theme="light"] .highlight .cpf { color: #515151 } /* Comment.PreprocFile */ html[data-theme="light"] .highlight .c1 { color: #515151 } /* Comment.Single */ html[data-theme="light"] .highlight .cs { color: #515151 } /* Comment.Special */ -html[data-theme="light"] .highlight .gd { color: #005b82 } /* Generic.Deleted */ +html[data-theme="light"] .highlight .gd { color: #005B82 } /* Generic.Deleted */ html[data-theme="light"] .highlight .ge { font-style: italic } /* Generic.Emph */ -html[data-theme="light"] .highlight .gh { color: #005b82 } /* Generic.Heading */ +html[data-theme="light"] .highlight .gh { color: #005B82 } /* Generic.Heading */ html[data-theme="light"] .highlight .gs { font-weight: bold } /* Generic.Strong */ -html[data-theme="light"] .highlight .gu { color: #005b82 } /* Generic.Subheading */ -html[data-theme="light"] .highlight .kc { color: #6730c5 } /* Keyword.Constant */ -html[data-theme="light"] .highlight .kd { color: #6730c5 } /* Keyword.Declaration */ -html[data-theme="light"] .highlight .kn { color: #6730c5 } /* Keyword.Namespace */ -html[data-theme="light"] .highlight .kp { color: #6730c5 } /* Keyword.Pseudo */ -html[data-theme="light"] .highlight .kr { color: #6730c5 } /* Keyword.Reserved */ -html[data-theme="light"] .highlight .kt { color: #7f4707 } /* Keyword.Type */ -html[data-theme="light"] .highlight .ld { color: #7f4707 } /* Literal.Date */ -html[data-theme="light"] .highlight .m { color: #7f4707 } /* Literal.Number */ -html[data-theme="light"] .highlight .s { color: #00622f } /* Literal.String 
*/ +html[data-theme="light"] .highlight .gu { color: #005B82 } /* Generic.Subheading */ +html[data-theme="light"] .highlight .kc { color: #6730C5 } /* Keyword.Constant */ +html[data-theme="light"] .highlight .kd { color: #6730C5 } /* Keyword.Declaration */ +html[data-theme="light"] .highlight .kn { color: #6730C5 } /* Keyword.Namespace */ +html[data-theme="light"] .highlight .kp { color: #6730C5 } /* Keyword.Pseudo */ +html[data-theme="light"] .highlight .kr { color: #6730C5 } /* Keyword.Reserved */ +html[data-theme="light"] .highlight .kt { color: #7F4707 } /* Keyword.Type */ +html[data-theme="light"] .highlight .ld { color: #7F4707 } /* Literal.Date */ +html[data-theme="light"] .highlight .m { color: #7F4707 } /* Literal.Number */ +html[data-theme="light"] .highlight .s { color: #00622F } /* Literal.String */ html[data-theme="light"] .highlight .na { color: #912583 } /* Name.Attribute */ -html[data-theme="light"] .highlight .nb { color: #7f4707 } /* Name.Builtin */ -html[data-theme="light"] .highlight .nc { color: #005b82 } /* Name.Class */ -html[data-theme="light"] .highlight .no { color: #005b82 } /* Name.Constant */ -html[data-theme="light"] .highlight .nd { color: #7f4707 } /* Name.Decorator */ -html[data-theme="light"] .highlight .ni { color: #00622f } /* Name.Entity */ -html[data-theme="light"] .highlight .ne { color: #6730c5 } /* Name.Exception */ -html[data-theme="light"] .highlight .nf { color: #005b82 } /* Name.Function */ -html[data-theme="light"] .highlight .nl { color: #7f4707 } /* Name.Label */ +html[data-theme="light"] .highlight .nb { color: #7F4707 } /* Name.Builtin */ +html[data-theme="light"] .highlight .nc { color: #005B82 } /* Name.Class */ +html[data-theme="light"] .highlight .no { color: #005B82 } /* Name.Constant */ +html[data-theme="light"] .highlight .nd { color: #7F4707 } /* Name.Decorator */ +html[data-theme="light"] .highlight .ni { color: #00622F } /* Name.Entity */ +html[data-theme="light"] .highlight .ne { color: #6730C5 } /* Name.Exception */ +html[data-theme="light"] .highlight .nf { color: #005B82 } /* Name.Function */ +html[data-theme="light"] .highlight .nl { color: #7F4707 } /* Name.Label */ html[data-theme="light"] .highlight .nn { color: #080808 } /* Name.Namespace */ html[data-theme="light"] .highlight .nx { color: #080808 } /* Name.Other */ -html[data-theme="light"] .highlight .py { color: #005b82 } /* Name.Property */ -html[data-theme="light"] .highlight .nt { color: #005b82 } /* Name.Tag */ -html[data-theme="light"] .highlight .nv { color: #a12236 } /* Name.Variable */ -html[data-theme="light"] .highlight .ow { color: #6730c5 } /* Operator.Word */ +html[data-theme="light"] .highlight .py { color: #005B82 } /* Name.Property */ +html[data-theme="light"] .highlight .nt { color: #005B82 } /* Name.Tag */ +html[data-theme="light"] .highlight .nv { color: #A12236 } /* Name.Variable */ +html[data-theme="light"] .highlight .ow { color: #6730C5 } /* Operator.Word */ html[data-theme="light"] .highlight .pm { color: #080808 } /* Punctuation.Marker */ html[data-theme="light"] .highlight .w { color: #080808 } /* Text.Whitespace */ -html[data-theme="light"] .highlight .mb { color: #7f4707 } /* Literal.Number.Bin */ -html[data-theme="light"] .highlight .mf { color: #7f4707 } /* Literal.Number.Float */ -html[data-theme="light"] .highlight .mh { color: #7f4707 } /* Literal.Number.Hex */ -html[data-theme="light"] .highlight .mi { color: #7f4707 } /* Literal.Number.Integer */ -html[data-theme="light"] .highlight .mo { color: #7f4707 } /* Literal.Number.Oct */ 
-html[data-theme="light"] .highlight .sa { color: #00622f } /* Literal.String.Affix */ -html[data-theme="light"] .highlight .sb { color: #00622f } /* Literal.String.Backtick */ -html[data-theme="light"] .highlight .sc { color: #00622f } /* Literal.String.Char */ -html[data-theme="light"] .highlight .dl { color: #00622f } /* Literal.String.Delimiter */ -html[data-theme="light"] .highlight .sd { color: #00622f } /* Literal.String.Doc */ -html[data-theme="light"] .highlight .s2 { color: #00622f } /* Literal.String.Double */ -html[data-theme="light"] .highlight .se { color: #00622f } /* Literal.String.Escape */ -html[data-theme="light"] .highlight .sh { color: #00622f } /* Literal.String.Heredoc */ -html[data-theme="light"] .highlight .si { color: #00622f } /* Literal.String.Interpol */ -html[data-theme="light"] .highlight .sx { color: #00622f } /* Literal.String.Other */ -html[data-theme="light"] .highlight .sr { color: #a12236 } /* Literal.String.Regex */ -html[data-theme="light"] .highlight .s1 { color: #00622f } /* Literal.String.Single */ -html[data-theme="light"] .highlight .ss { color: #005b82 } /* Literal.String.Symbol */ -html[data-theme="light"] .highlight .bp { color: #7f4707 } /* Name.Builtin.Pseudo */ -html[data-theme="light"] .highlight .fm { color: #005b82 } /* Name.Function.Magic */ -html[data-theme="light"] .highlight .vc { color: #a12236 } /* Name.Variable.Class */ -html[data-theme="light"] .highlight .vg { color: #a12236 } /* Name.Variable.Global */ -html[data-theme="light"] .highlight .vi { color: #a12236 } /* Name.Variable.Instance */ -html[data-theme="light"] .highlight .vm { color: #7f4707 } /* Name.Variable.Magic */ -html[data-theme="light"] .highlight .il { color: #7f4707 } /* Literal.Number.Integer.Long */ +html[data-theme="light"] .highlight .mb { color: #7F4707 } /* Literal.Number.Bin */ +html[data-theme="light"] .highlight .mf { color: #7F4707 } /* Literal.Number.Float */ +html[data-theme="light"] .highlight .mh { color: #7F4707 } /* Literal.Number.Hex */ +html[data-theme="light"] .highlight .mi { color: #7F4707 } /* Literal.Number.Integer */ +html[data-theme="light"] .highlight .mo { color: #7F4707 } /* Literal.Number.Oct */ +html[data-theme="light"] .highlight .sa { color: #00622F } /* Literal.String.Affix */ +html[data-theme="light"] .highlight .sb { color: #00622F } /* Literal.String.Backtick */ +html[data-theme="light"] .highlight .sc { color: #00622F } /* Literal.String.Char */ +html[data-theme="light"] .highlight .dl { color: #00622F } /* Literal.String.Delimiter */ +html[data-theme="light"] .highlight .sd { color: #00622F } /* Literal.String.Doc */ +html[data-theme="light"] .highlight .s2 { color: #00622F } /* Literal.String.Double */ +html[data-theme="light"] .highlight .se { color: #00622F } /* Literal.String.Escape */ +html[data-theme="light"] .highlight .sh { color: #00622F } /* Literal.String.Heredoc */ +html[data-theme="light"] .highlight .si { color: #00622F } /* Literal.String.Interpol */ +html[data-theme="light"] .highlight .sx { color: #00622F } /* Literal.String.Other */ +html[data-theme="light"] .highlight .sr { color: #A12236 } /* Literal.String.Regex */ +html[data-theme="light"] .highlight .s1 { color: #00622F } /* Literal.String.Single */ +html[data-theme="light"] .highlight .ss { color: #005B82 } /* Literal.String.Symbol */ +html[data-theme="light"] .highlight .bp { color: #7F4707 } /* Name.Builtin.Pseudo */ +html[data-theme="light"] .highlight .fm { color: #005B82 } /* Name.Function.Magic */ +html[data-theme="light"] .highlight .vc { color: 
#A12236 } /* Name.Variable.Class */ +html[data-theme="light"] .highlight .vg { color: #A12236 } /* Name.Variable.Global */ +html[data-theme="light"] .highlight .vi { color: #A12236 } /* Name.Variable.Instance */ +html[data-theme="light"] .highlight .vm { color: #7F4707 } /* Name.Variable.Magic */ +html[data-theme="light"] .highlight .il { color: #7F4707 } /* Literal.Number.Integer.Long */ html[data-theme="dark"] .highlight pre { line-height: 125%; } html[data-theme="dark"] .highlight td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } html[data-theme="dark"] .highlight span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } html[data-theme="dark"] .highlight td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } html[data-theme="dark"] .highlight span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } html[data-theme="dark"] .highlight .hll { background-color: #ffd9002e } -html[data-theme="dark"] .highlight { background: #2b2b2b; color: #f8f8f2 } -html[data-theme="dark"] .highlight .c { color: #ffd900 } /* Comment */ -html[data-theme="dark"] .highlight .err { color: #ffa07a } /* Error */ -html[data-theme="dark"] .highlight .k { color: #dcc6e0 } /* Keyword */ -html[data-theme="dark"] .highlight .l { color: #ffd900 } /* Literal */ -html[data-theme="dark"] .highlight .n { color: #f8f8f2 } /* Name */ -html[data-theme="dark"] .highlight .o { color: #abe338 } /* Operator */ -html[data-theme="dark"] .highlight .p { color: #f8f8f2 } /* Punctuation */ -html[data-theme="dark"] .highlight .ch { color: #ffd900 } /* Comment.Hashbang */ -html[data-theme="dark"] .highlight .cm { color: #ffd900 } /* Comment.Multiline */ -html[data-theme="dark"] .highlight .cp { color: #ffd900 } /* Comment.Preproc */ -html[data-theme="dark"] .highlight .cpf { color: #ffd900 } /* Comment.PreprocFile */ -html[data-theme="dark"] .highlight .c1 { color: #ffd900 } /* Comment.Single */ -html[data-theme="dark"] .highlight .cs { color: #ffd900 } /* Comment.Special */ -html[data-theme="dark"] .highlight .gd { color: #00e0e0 } /* Generic.Deleted */ +html[data-theme="dark"] .highlight { background: #2b2b2b; color: #F8F8F2 } +html[data-theme="dark"] .highlight .c { color: #FFD900 } /* Comment */ +html[data-theme="dark"] .highlight .err { color: #FFA07A } /* Error */ +html[data-theme="dark"] .highlight .k { color: #DCC6E0 } /* Keyword */ +html[data-theme="dark"] .highlight .l { color: #FFD900 } /* Literal */ +html[data-theme="dark"] .highlight .n { color: #F8F8F2 } /* Name */ +html[data-theme="dark"] .highlight .o { color: #ABE338 } /* Operator */ +html[data-theme="dark"] .highlight .p { color: #F8F8F2 } /* Punctuation */ +html[data-theme="dark"] .highlight .ch { color: #FFD900 } /* Comment.Hashbang */ +html[data-theme="dark"] .highlight .cm { color: #FFD900 } /* Comment.Multiline */ +html[data-theme="dark"] .highlight .cp { color: #FFD900 } /* Comment.Preproc */ +html[data-theme="dark"] .highlight .cpf { color: #FFD900 } /* Comment.PreprocFile */ +html[data-theme="dark"] .highlight .c1 { color: #FFD900 } /* Comment.Single */ +html[data-theme="dark"] .highlight .cs { color: #FFD900 } /* Comment.Special */ +html[data-theme="dark"] .highlight .gd { color: #00E0E0 } /* Generic.Deleted */ html[data-theme="dark"] .highlight .ge { font-style: italic } /* Generic.Emph */ -html[data-theme="dark"] .highlight .gh { color: #00e0e0 } /* 
Generic.Heading */ +html[data-theme="dark"] .highlight .gh { color: #00E0E0 } /* Generic.Heading */ html[data-theme="dark"] .highlight .gs { font-weight: bold } /* Generic.Strong */ -html[data-theme="dark"] .highlight .gu { color: #00e0e0 } /* Generic.Subheading */ -html[data-theme="dark"] .highlight .kc { color: #dcc6e0 } /* Keyword.Constant */ -html[data-theme="dark"] .highlight .kd { color: #dcc6e0 } /* Keyword.Declaration */ -html[data-theme="dark"] .highlight .kn { color: #dcc6e0 } /* Keyword.Namespace */ -html[data-theme="dark"] .highlight .kp { color: #dcc6e0 } /* Keyword.Pseudo */ -html[data-theme="dark"] .highlight .kr { color: #dcc6e0 } /* Keyword.Reserved */ -html[data-theme="dark"] .highlight .kt { color: #ffd900 } /* Keyword.Type */ -html[data-theme="dark"] .highlight .ld { color: #ffd900 } /* Literal.Date */ -html[data-theme="dark"] .highlight .m { color: #ffd900 } /* Literal.Number */ -html[data-theme="dark"] .highlight .s { color: #abe338 } /* Literal.String */ -html[data-theme="dark"] .highlight .na { color: #ffd900 } /* Name.Attribute */ -html[data-theme="dark"] .highlight .nb { color: #ffd900 } /* Name.Builtin */ -html[data-theme="dark"] .highlight .nc { color: #00e0e0 } /* Name.Class */ -html[data-theme="dark"] .highlight .no { color: #00e0e0 } /* Name.Constant */ -html[data-theme="dark"] .highlight .nd { color: #ffd900 } /* Name.Decorator */ -html[data-theme="dark"] .highlight .ni { color: #abe338 } /* Name.Entity */ -html[data-theme="dark"] .highlight .ne { color: #dcc6e0 } /* Name.Exception */ -html[data-theme="dark"] .highlight .nf { color: #00e0e0 } /* Name.Function */ -html[data-theme="dark"] .highlight .nl { color: #ffd900 } /* Name.Label */ -html[data-theme="dark"] .highlight .nn { color: #f8f8f2 } /* Name.Namespace */ -html[data-theme="dark"] .highlight .nx { color: #f8f8f2 } /* Name.Other */ -html[data-theme="dark"] .highlight .py { color: #00e0e0 } /* Name.Property */ -html[data-theme="dark"] .highlight .nt { color: #00e0e0 } /* Name.Tag */ -html[data-theme="dark"] .highlight .nv { color: #ffa07a } /* Name.Variable */ -html[data-theme="dark"] .highlight .ow { color: #dcc6e0 } /* Operator.Word */ -html[data-theme="dark"] .highlight .pm { color: #f8f8f2 } /* Punctuation.Marker */ -html[data-theme="dark"] .highlight .w { color: #f8f8f2 } /* Text.Whitespace */ -html[data-theme="dark"] .highlight .mb { color: #ffd900 } /* Literal.Number.Bin */ -html[data-theme="dark"] .highlight .mf { color: #ffd900 } /* Literal.Number.Float */ -html[data-theme="dark"] .highlight .mh { color: #ffd900 } /* Literal.Number.Hex */ -html[data-theme="dark"] .highlight .mi { color: #ffd900 } /* Literal.Number.Integer */ -html[data-theme="dark"] .highlight .mo { color: #ffd900 } /* Literal.Number.Oct */ -html[data-theme="dark"] .highlight .sa { color: #abe338 } /* Literal.String.Affix */ -html[data-theme="dark"] .highlight .sb { color: #abe338 } /* Literal.String.Backtick */ -html[data-theme="dark"] .highlight .sc { color: #abe338 } /* Literal.String.Char */ -html[data-theme="dark"] .highlight .dl { color: #abe338 } /* Literal.String.Delimiter */ -html[data-theme="dark"] .highlight .sd { color: #abe338 } /* Literal.String.Doc */ -html[data-theme="dark"] .highlight .s2 { color: #abe338 } /* Literal.String.Double */ -html[data-theme="dark"] .highlight .se { color: #abe338 } /* Literal.String.Escape */ -html[data-theme="dark"] .highlight .sh { color: #abe338 } /* Literal.String.Heredoc */ -html[data-theme="dark"] .highlight .si { color: #abe338 } /* Literal.String.Interpol */ 
-html[data-theme="dark"] .highlight .sx { color: #abe338 } /* Literal.String.Other */ -html[data-theme="dark"] .highlight .sr { color: #ffa07a } /* Literal.String.Regex */ -html[data-theme="dark"] .highlight .s1 { color: #abe338 } /* Literal.String.Single */ -html[data-theme="dark"] .highlight .ss { color: #00e0e0 } /* Literal.String.Symbol */ -html[data-theme="dark"] .highlight .bp { color: #ffd900 } /* Name.Builtin.Pseudo */ -html[data-theme="dark"] .highlight .fm { color: #00e0e0 } /* Name.Function.Magic */ -html[data-theme="dark"] .highlight .vc { color: #ffa07a } /* Name.Variable.Class */ -html[data-theme="dark"] .highlight .vg { color: #ffa07a } /* Name.Variable.Global */ -html[data-theme="dark"] .highlight .vi { color: #ffa07a } /* Name.Variable.Instance */ -html[data-theme="dark"] .highlight .vm { color: #ffd900 } /* Name.Variable.Magic */ -html[data-theme="dark"] .highlight .il { color: #ffd900 } /* Literal.Number.Integer.Long */ \ No newline at end of file +html[data-theme="dark"] .highlight .gu { color: #00E0E0 } /* Generic.Subheading */ +html[data-theme="dark"] .highlight .kc { color: #DCC6E0 } /* Keyword.Constant */ +html[data-theme="dark"] .highlight .kd { color: #DCC6E0 } /* Keyword.Declaration */ +html[data-theme="dark"] .highlight .kn { color: #DCC6E0 } /* Keyword.Namespace */ +html[data-theme="dark"] .highlight .kp { color: #DCC6E0 } /* Keyword.Pseudo */ +html[data-theme="dark"] .highlight .kr { color: #DCC6E0 } /* Keyword.Reserved */ +html[data-theme="dark"] .highlight .kt { color: #FFD900 } /* Keyword.Type */ +html[data-theme="dark"] .highlight .ld { color: #FFD900 } /* Literal.Date */ +html[data-theme="dark"] .highlight .m { color: #FFD900 } /* Literal.Number */ +html[data-theme="dark"] .highlight .s { color: #ABE338 } /* Literal.String */ +html[data-theme="dark"] .highlight .na { color: #FFD900 } /* Name.Attribute */ +html[data-theme="dark"] .highlight .nb { color: #FFD900 } /* Name.Builtin */ +html[data-theme="dark"] .highlight .nc { color: #00E0E0 } /* Name.Class */ +html[data-theme="dark"] .highlight .no { color: #00E0E0 } /* Name.Constant */ +html[data-theme="dark"] .highlight .nd { color: #FFD900 } /* Name.Decorator */ +html[data-theme="dark"] .highlight .ni { color: #ABE338 } /* Name.Entity */ +html[data-theme="dark"] .highlight .ne { color: #DCC6E0 } /* Name.Exception */ +html[data-theme="dark"] .highlight .nf { color: #00E0E0 } /* Name.Function */ +html[data-theme="dark"] .highlight .nl { color: #FFD900 } /* Name.Label */ +html[data-theme="dark"] .highlight .nn { color: #F8F8F2 } /* Name.Namespace */ +html[data-theme="dark"] .highlight .nx { color: #F8F8F2 } /* Name.Other */ +html[data-theme="dark"] .highlight .py { color: #00E0E0 } /* Name.Property */ +html[data-theme="dark"] .highlight .nt { color: #00E0E0 } /* Name.Tag */ +html[data-theme="dark"] .highlight .nv { color: #FFA07A } /* Name.Variable */ +html[data-theme="dark"] .highlight .ow { color: #DCC6E0 } /* Operator.Word */ +html[data-theme="dark"] .highlight .pm { color: #F8F8F2 } /* Punctuation.Marker */ +html[data-theme="dark"] .highlight .w { color: #F8F8F2 } /* Text.Whitespace */ +html[data-theme="dark"] .highlight .mb { color: #FFD900 } /* Literal.Number.Bin */ +html[data-theme="dark"] .highlight .mf { color: #FFD900 } /* Literal.Number.Float */ +html[data-theme="dark"] .highlight .mh { color: #FFD900 } /* Literal.Number.Hex */ +html[data-theme="dark"] .highlight .mi { color: #FFD900 } /* Literal.Number.Integer */ +html[data-theme="dark"] .highlight .mo { color: #FFD900 } /* Literal.Number.Oct */ 
+html[data-theme="dark"] .highlight .sa { color: #ABE338 } /* Literal.String.Affix */ +html[data-theme="dark"] .highlight .sb { color: #ABE338 } /* Literal.String.Backtick */ +html[data-theme="dark"] .highlight .sc { color: #ABE338 } /* Literal.String.Char */ +html[data-theme="dark"] .highlight .dl { color: #ABE338 } /* Literal.String.Delimiter */ +html[data-theme="dark"] .highlight .sd { color: #ABE338 } /* Literal.String.Doc */ +html[data-theme="dark"] .highlight .s2 { color: #ABE338 } /* Literal.String.Double */ +html[data-theme="dark"] .highlight .se { color: #ABE338 } /* Literal.String.Escape */ +html[data-theme="dark"] .highlight .sh { color: #ABE338 } /* Literal.String.Heredoc */ +html[data-theme="dark"] .highlight .si { color: #ABE338 } /* Literal.String.Interpol */ +html[data-theme="dark"] .highlight .sx { color: #ABE338 } /* Literal.String.Other */ +html[data-theme="dark"] .highlight .sr { color: #FFA07A } /* Literal.String.Regex */ +html[data-theme="dark"] .highlight .s1 { color: #ABE338 } /* Literal.String.Single */ +html[data-theme="dark"] .highlight .ss { color: #00E0E0 } /* Literal.String.Symbol */ +html[data-theme="dark"] .highlight .bp { color: #FFD900 } /* Name.Builtin.Pseudo */ +html[data-theme="dark"] .highlight .fm { color: #00E0E0 } /* Name.Function.Magic */ +html[data-theme="dark"] .highlight .vc { color: #FFA07A } /* Name.Variable.Class */ +html[data-theme="dark"] .highlight .vg { color: #FFA07A } /* Name.Variable.Global */ +html[data-theme="dark"] .highlight .vi { color: #FFA07A } /* Name.Variable.Instance */ +html[data-theme="dark"] .highlight .vm { color: #FFD900 } /* Name.Variable.Magic */ +html[data-theme="dark"] .highlight .il { color: #FFD900 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/specsanalyzer/develop/genindex.html b/specsanalyzer/develop/genindex.html index 9688754..d47651f 100644 --- a/specsanalyzer/develop/genindex.html +++ b/specsanalyzer/develop/genindex.html @@ -7,7 +7,7 @@ - Index — specsanalyzer 0.4.2.dev40+g8ed2f0c documentation + Index — specsanalyzer 0.5.2.dev10+g554f714 documentation @@ -29,7 +29,7 @@ - + @@ -37,7 +37,7 @@ - + @@ -46,7 +46,7 @@ @@ -54,7 +54,7 @@ - + @@ -116,7 +116,7 @@ -

specsanalyzer 0.4.2.dev40+g8ed2f0c documentation

+

specsanalyzer 0.5.2.dev10+g554f714 documentation

@@ -627,17 +627,15 @@

G

- +