
add logging and delay as metadata
rettigl committed Jan 19, 2025
1 parent 987c315 commit e88f080
Showing 5 changed files with 57 additions and 22 deletions.
7 changes: 6 additions & 1 deletion src/specsanalyzer/convert.py
@@ -1,9 +1,14 @@
"""Specsanalyzer image conversion module"""
from __future__ import annotations

import logging

import numpy as np
from scipy.ndimage import map_coordinates

# Configure logging
logger = logging.getLogger("specsanalyzer.specsscan")


def get_damatrix_from_calib2d(
lens_mode: str,
@@ -82,7 +87,7 @@ def get_damatrix_from_calib2d(

elif lens_mode in supported_space_modes:
# use the mode defaults
print("This is a spatial mode, using default " + lens_mode + " config")
logger.info("This is a spatial mode, using default " + lens_mode + " config")
rr_vec, da_matrix_full = get_rr_da(lens_mode, calib2d_dict)
a_inner = da_matrix_full[0][0]
da_matrix = da_matrix_full[1:][:]
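The print-to-logger switch above ties convert.py into the package's named logger. As a usage sketch that is not part of the commit (standard-library logging only; handler and format choices are illustrative), a downstream script can control how much of this output it sees:

import logging

# Sketch: tune visibility of the converted messages from user code.
logging.basicConfig(format="%(name)s %(levelname)s: %(message)s")

logger = logging.getLogger("specsanalyzer.specsscan")
logger.setLevel(logging.INFO)      # show info messages such as the spatial-mode notice
# logger.setLevel(logging.WARNING)  # or keep only warnings and errors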
20 changes: 15 additions & 5 deletions src/specsanalyzer/core.py
@@ -20,9 +20,14 @@
from specsanalyzer.convert import physical_unit_data
from specsanalyzer.img_tools import crop_xarray
from specsanalyzer.img_tools import fourier_filter_2d
from specsanalyzer.logging import set_verbosity
from specsanalyzer.logging import setup_logging

package_dir = os.path.dirname(__file__)

# Configure logging
logger = setup_logging("specsanalyzer")


class SpecsAnalyzer:
"""SpecsAnalyzer: A class to convert photoemission data from a SPECS Phoibos analyzer from
@@ -38,19 +43,22 @@ def __init__(
self,
metadata: dict[Any, Any] = {},
config: dict[Any, Any] | str = {},
verbose: bool = True,
**kwds,
):
"""SpecsAnalyzer constructor.
Args:
metadata (dict, optional): Metadata dictionary. Defaults to {}.
config (dict | str, optional): Metadata dictionary or file path. Defaults to {}.
verbose (bool, optional): Disable info logs if set to False.
**kwds: Keyword arguments passed to ``parse_config``.
"""
self._config = parse_config(
config,
**kwds,
)
set_verbosity(logger, verbose)
self.metadata = metadata
self._data_array = None
self.print_msg = True
@@ -286,7 +294,7 @@ def convert_image(
ek_min = range_dict["ek_min"]
ek_max = range_dict["ek_max"]
if self.print_msg:
print("Using saved crop parameters...")
logger.info("Using saved crop parameters...")
data_array = crop_xarray(data_array, ang_min, ang_max, ek_min, ek_max)
except KeyError:
try:
@@ -343,11 +351,13 @@ def convert_image(
+ data_array.coords[data_array.dims[1]][0]
)
if self.print_msg:
print("Cropping parameters not found, using cropping ranges from config...")
logger.info(
"Cropping parameters not found, using cropping ranges from config...",
)
data_array = crop_xarray(data_array, ang_min, ang_max, ek_min, ek_max)
except KeyError:
if self.print_msg:
print(
logger.warning(
"Warning: Cropping parameters not found, "
"use method crop_tool() after loading.",
)
@@ -402,7 +412,7 @@ def crop_tool(
try:
mesh_obj = data_array.plot(ax=ax)
except AttributeError:
print("Load the scan first!")
logger.info("Load the scan first!")
raise

lineh1 = ax.axhline(y=data_array.Angle[0])
@@ -650,7 +660,7 @@ def fft_tool(

filtered = fourier_filter_2d(raw_image, peaks=fft_filter_peaks, ret="filtered")
except IndexError:
print("Load the scan first!")
logger.warning("Load the scan first!")
raise

fig = plt.figure()
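The constructor now takes a verbose flag and forwards it to set_verbosity from specsanalyzer.logging. That helper's implementation is not shown in this diff; presumably it lowers the logger or handler level so that info messages like "Using saved crop parameters..." are suppressed when verbose=False. A hedged sketch (top-level import path and config file name are assumptions):

from specsanalyzer import SpecsAnalyzer  # re-export at package level assumed

spa = SpecsAnalyzer(config="config.yaml", verbose=True)         # info-level messages visible
spa_quiet = SpecsAnalyzer(config="config.yaml", verbose=False)  # presumably warnings/errors only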
11 changes: 10 additions & 1 deletion src/specsscan/core.py
@@ -21,6 +21,7 @@
from specsanalyzer.io import to_h5
from specsanalyzer.io import to_nexus
from specsanalyzer.io import to_tiff
from specsanalyzer.logging import set_verbosity
from specsanalyzer.logging import setup_logging
from specsscan.helpers import get_coords
from specsscan.helpers import get_scan_path
@@ -50,13 +51,15 @@ def __init__(
self,
metadata: dict = {},
config: dict | str = {},
verbose: bool = True,
**kwds,
):
"""SpecsScan constructor.
Args:
metadata (dict, optional): Metadata dictionary. Defaults to {}.
config (Union[dict, str], optional): Metadata dictionary or file path. Defaults to {}.
verbose (bool, optional): Disable info logs if set to False.
**kwds: Keyword arguments passed to ``parse_config``.
"""
self._config = parse_config(
@@ -65,6 +68,8 @@ def __init__(
**kwds,
)

set_verbosity(logger, verbose)

self.metadata = metadata

self._scan_info: dict[Any, Any] = {}
@@ -75,12 +80,14 @@ def __init__(
folder_config={},
user_config={},
system_config={},
verbose=verbose,
)
except KeyError:
self.spa = SpecsAnalyzer(
folder_config={},
user_config={},
system_config={},
verbose=verbose,
)

self._result: xr.DataArray = None
@@ -242,10 +249,11 @@ def load_scan(
k: coordinate_mapping[k] for k in coordinate_mapping.keys() if k in res_xarray.dims
}
depends_dict = {
rename_dict[k]: coordinate_depends[k]
rename_dict.get(k, k): coordinate_depends[k]
for k in coordinate_depends.keys()
if k in res_xarray.dims
}

res_xarray = res_xarray.rename(rename_dict)
for k, v in coordinate_mapping.items():
if k in fast_axes:
@@ -260,6 +268,7 @@
"/entry/sample/transformations/sample_polar": "Polar",
"/entry/sample/transformations/sample_tilt": "Tilt",
"/entry/sample/transformations/sample_azimuth": "Azimuth",
"/entry/instrument/beam_pump/pulse_delay": "delay",
}

# store data for resolved axis coordinates
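Besides the logging plumbing, this file gains two functional changes: the NeXus mapping now includes /entry/instrument/beam_pump/pulse_delay -> delay, and depends_dict is built with rename_dict.get(k, k), so dimensions without a rename entry keep their original name instead of raising a KeyError. A self-contained illustration of the latter (all names below are invented for the example):

dims = ["Angle", "mirrorX"]
coordinate_mapping = {"Angle": "angular0"}               # "mirrorX" has no rename entry
coordinate_depends = {"Angle": "/entry/a", "mirrorX": "/entry/b"}

rename_dict = {k: coordinate_mapping[k] for k in coordinate_mapping if k in dims}

# old: rename_dict["mirrorX"] raises KeyError
# new: unmapped dimensions fall back to their original name
depends_dict = {
    rename_dict.get(k, k): coordinate_depends[k] for k in coordinate_depends if k in dims
}
print(depends_dict)  # {'angular0': '/entry/a', 'mirrorX': '/entry/b'}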
39 changes: 25 additions & 14 deletions src/specsscan/helpers.py
@@ -2,17 +2,22 @@
from __future__ import annotations

import datetime as dt
import logging
from pathlib import Path
from typing import Any
from typing import Sequence

import numpy as np
import pandas as pd
import xarray as xr
from tqdm.auto import tqdm

from specsanalyzer.config import complete_dictionary
from specsscan.metadata import MetadataRetriever

# Configure logging
logger = logging.getLogger("specsanalyzer.specsscan")


def get_scan_path(path: Path | str, scan: int, basepath: Path | str) -> Path:
"""Returns the path to the given scan.
@@ -123,7 +128,7 @@ def load_images(
"load_scan method.",
) from exc

print(f"Averaging over {avg_dim}...")
logger.info(f"Averaging over {avg_dim}...")
for dim in tqdm(raw_2d_sliced):
avg_list = []
for image in tqdm(dim, leave=False, disable=not tqdm_enable_nested):
@@ -210,14 +215,14 @@ def parse_lut_to_df(scan_path: Path) -> pd.DataFrame:
df_lut.reset_index(inplace=True)

new_cols = df_lut.columns.to_list()[1:]
new_cols[new_cols.index("delaystage")] = "Delay"
new_cols[new_cols.index("delaystage")] = "DelayStage"
new_cols.insert(3, "delay (fs)") # Create label to drop the column later

df_lut = df_lut.set_axis(new_cols, axis="columns")
df_lut.drop(columns="delay (fs)", inplace=True)

except FileNotFoundError:
print(
logger.info(
"LUT.txt not found. Storing metadata from info.txt",
)
return None
@@ -265,7 +270,7 @@ def get_coords(
return (np.array([]), "")

if df_lut is not None:
print("scanvector.txt not found. Obtaining coordinates from LUT")
logger.info("scanvector.txt not found. Obtaining coordinates from LUT")

df_new: pd.DataFrame = df_lut.loc[:, df_lut.columns[2:]]

@@ -276,13 +281,18 @@ def get_coords(
raise FileNotFoundError("scanvector.txt file not found!") from exc

if scan_type == "delay":
t_0 = scan_info["TimeZero"]
coords -= t_0
coords *= 2 / 3e11 * 1e15
t0 = scan_info["TimeZero"]
coords = mm_to_fs(coords, t0)

return coords, dim


def mm_to_fs(delaystage: xr.DataArray | np.ndarray | float, t0: float) -> float:
delay = delaystage - t0
delay *= 2 / 2.99792458e11 * 1e15
return delay


def compare_coords(axis_data: np.ndarray) -> tuple[np.ndarray, int]:
"""Identifies the most changing column in a given 2-D numpy array.
@@ -338,6 +348,9 @@ def parse_info_to_dict(path: Path) -> dict:
except FileNotFoundError as exc:
raise FileNotFoundError("info.txt file not found.") from exc

if "DelayStage" in info_dict and "TimeZero" in info_dict:
info_dict["delay"] = mm_to_fs(info_dict["DelayStage"], info_dict["TimeZero"])

return info_dict


@@ -377,7 +390,7 @@ def handle_meta(
if metadata is None:
metadata = {}

print("Gathering metadata from different locations")
logger.info("Gathering metadata from different locations")
# get metadata from LUT dataframe
lut_meta = {}
energy_scan_mode = "snapshot"
@@ -395,10 +408,10 @@

metadata["scan_info"] = complete_dictionary(
metadata.get("scan_info", {}),
complete_dictionary(lut_meta, scan_info),
complete_dictionary(scan_info, lut_meta),
) # merging dictionaries

print("Collecting time stamps...")
logger.info("Collecting time stamps...")
if "time" in metadata["scan_info"]:
time_list = [metadata["scan_info"]["time"][0], metadata["scan_info"]["time"][-1]]
elif "StartTime" in metadata["scan_info"]:
@@ -445,8 +458,6 @@ def handle_meta(
metadata["scan_info"]["slow_axes"] = slow_axes
metadata["scan_info"]["fast_axes"] = fast_axes

print("Done!")

return metadata


Expand All @@ -460,7 +471,7 @@ def find_scan(path: Path, scan: int) -> list[Path]:
Returns:
List[Path]: scan_path: Path object pointing to the scan folder
"""
print("Scan path not provided, searching directories...")
logger.info("Scan path not provided, searching directories...")
for file in path.iterdir():
if file.is_dir():
try:
Expand All @@ -474,7 +485,7 @@ def find_scan(path: Path, scan: int) -> list[Path]:
file.glob(f"*/*/Raw Data/{scan}"),
)
if scan_path:
print("Scan found at path:", scan_path[0])
logger.info(f"Scan found at path: {scan_path[0]}")
break
else:
scan_path = []
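The new mm_to_fs helper factors out the stage-position-to-delay conversion that get_coords previously inlined, and parse_info_to_dict reuses it to add a delay entry whenever DelayStage and TimeZero are present. As a sanity check of the formula (a sketch, not part of the commit): the factor 2 accounts for the double pass over the delay stage and 2.99792458e11 is the speed of light in mm/s, so a 0.15 mm offset from time zero comes out to roughly 1 ps.

def mm_to_fs(delaystage, t0):
    # mirrors the helper added above: double pass, c in mm/s, seconds -> fs
    delay = delaystage - t0
    delay *= 2 / 2.99792458e11 * 1e15
    return delay

print(mm_to_fs(15.15, 15.0))  # ~1000.7 fs for a 0.15 mm stage offset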
2 changes: 1 addition & 1 deletion src/specsscan/metadata.py
@@ -90,7 +90,7 @@ def fetch_epics_metadata(self, ts_from: float, ts_to: float, metadata: dict) ->
for key in list(metadata["scan_info"]):
if key.lower() in replace_dict:
metadata["scan_info"][replace_dict[key.lower()]] = metadata["scan_info"][key]
metadata["scan_info"].pop(key)
del metadata["scan_info"][key]
epics_channels = replace_dict.values()
except KeyError:
epics_channels = []
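The pop-to-del swap in fetch_epics_metadata is behavior-neutral here: the loop iterates over list(metadata["scan_info"]), a snapshot of the keys, so removing entries from the live dict during the walk is safe either way. A minimal sketch with invented keys and mapping:

scan_info = {"KineticEnergy": 35.0, "PassEnergy": 30.0}
replace_dict = {"kineticenergy": "electronanalyser/kinetic_energy"}  # invented

for key in list(scan_info):                  # snapshot of the keys
    if key.lower() in replace_dict:
        scan_info[replace_dict[key.lower()]] = scan_info[key]
        del scan_info[key]                   # equivalent to scan_info.pop(key) here

print(scan_info)  # {'PassEnergy': 30.0, 'electronanalyser/kinetic_energy': 35.0}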
