Skip to content

Commit

Permalink
Merge branch 'develop' of github.com:HarryHeres/SlicerBoneMorphing in…
Browse files Browse the repository at this point in the history
…to develop
  • Loading branch information
evaherbst committed Jan 11, 2025
2 parents 0ea85ac + b318f92 commit 856753a
Show file tree
Hide file tree
Showing 8 changed files with 344 additions and 188 deletions.
3 changes: 0 additions & 3 deletions .github/workflows/create-pre-release.yaml
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
name: Create and publish pre-release

on:
push:
branches:
- 'cicd'
workflow_dispatch:

jobs:
Expand Down
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- CHANGELOG
- CODEOWNERS

### Improved
- Improving rigid registration [https://github.com/HarryHeres/SlicerBoneMorphing/issues/27]

### Changed
- Naming of the generated model based on the selected target node
- Updated documentation [https://github.com/HarryHeres/SlicerBoneMorphing/issues/11]
Expand Down
316 changes: 183 additions & 133 deletions SlicerBoneMorphing/Resources/UI/SlicerBoneMorphing.ui

Large diffs are not rendered by default.

24 changes: 13 additions & 11 deletions SlicerBoneMorphing/src/logic/Constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,28 +19,30 @@

### PREPROCESSING PARAMETERS ###
PREPROCESSING_KEY = "preprocessing"
PREPROCESSING_KEY_DOWNSAMPLING_VOXEL_SIZE = "dvs"
PREPROCESSING_KEY_DOWNSAMPLING = "down"
PREPROCESSING_KEY_DOWNSAMPLING_SOURCE_TO_TARGET = "dstt"
PREPROCESSING_KEY_DOWNSAMPLING_TARGET_TO_SOURCE = "dtts"
PREPROCESSING_KEY_NORMALS_ESTIMATION_RADIUS = "ner"
PREPROCESSING_KEY_FPFH_ESTIMATION_RADIUS = "fer"
PREPROCESSING_KEY_MAX_NN_NORMALS = "mnnn"
PREPROCESSING_KEY_MAX_NN_FPFH = "mnf"
PREPROCESSING_KEY_NORMALS_MAX_NN = "mnnn"
PREPROCESSING_KEY_FPFH_MAX_NN = "mnf"

PREPROCESSING_DEFAULT_VALUE_DOWNSAMPLING_VOXEL_SIZE = 0.0
PREPROCESSING_DEFAULT_VALUE_RADIUS_NORMAL_SCALE = 0.5
PREPROCESSING_DEFAULT_VALUE_RADIUS_FEATURE_SCALE = 10
PREPROCESSING_DEFAULT_VALUE_MAX_NN_NORMALS = 10
PREPROCESSING_DEFAULT_VALUE_MAX_NN_FPFH = 100
PREPROCESSING_DEFAULT_VALUE_NORMALS_ESTIMATION_RADIUS = 8
PREPROCESSING_DEFAULT_VALUE_FPFPH_ESTIMATION_RADIUS = 20
PREPROCESSING_DEFAULT_VALUE_NORMALS_MAX_NN = 25
PREPROCESSING_DEFAULT_VALUE_FPFH_MAX_NN = 50

### REGISTRATION PARAMETERS ###
REGISTRATION_KEY_RANSAC_DISTANCE_THRESHOLD = "rrdt"
REGISTRATION_KEY_FITNESS_THRESHOLD = "rft"
REGISTRATION_KEY_MAX_ITERATIONS = "rmi"
REGISTRATION_KEY_ICP_DISTANCE_THRESHOLD = "idt"

REGISTRATION_DEFAULT_VALUE_RANSAC_DISTANCE_THRESHOLD = 1
REGISTRATION_DEFAULT_VALUE_RANSAC_DISTANCE_THRESHOLD = 2.7
REGISTRATION_DEFAULT_VALUE_FITNESS_THRESHOLD = 0.999
REGISTRATION_DEFAULT_VALUE_MAX_ITERATIONS = 100000
REGISTRATION_DEFAULT_VALUE_ICP_DISTANCE_THRESHOLD = 1
REGISTRATION_DEFAULT_VALUE_ICP_DISTANCE_THRESHOLD = 0.7
REGISTRATION_DEFAULT_VALUE_RANSAC_CONVERGENCE_CONFIDENCE = 1.0 # Leave at 1.0 to NOT terminate early

### BCPD PARAMETERS ###
Expand Down Expand Up @@ -79,7 +81,7 @@

## Tuning parameters ##
BCPD_DEFAULT_VALUE_OMEGA = 0.1
BCPD_DEFAULT_VALUE_LAMBDA = 10
BCPD_DEFAULT_VALUE_LAMBDA = 50
BCPD_DEFAULT_VALUE_BETA = 10
BCPD_DEFAULT_VALUE_GAMMA = 0.1
BCPD_DEFAULT_VALUE_KAPPA = 1000
Expand All @@ -98,7 +100,7 @@
## Acceleration mode ##
BCPD_DEFAULT_VALUE_ACCELERATION_MODE = BcpdAccelerationMode.AUTOMATIC.value
BCPD_DEFAULT_VALUE_ACCELERATION_NYSTORM_SAMPLES_G = 140
BCPD_DEFAULT_VALUE_ACCELERATION_NYSTORM_SAMPLES_J = 600
BCPD_DEFAULT_VALUE_ACCELERATION_NYSTORM_SAMPLES_J = 500
BCPD_DEFAULT_VALUE_ACCELERATION_NYSTORM_SAMPLES_R = 1
BCPD_DEFAULT_VALUE_ACCELERATION_KD_TREE_SCALE = 7
BCPD_DEFAULT_VALUE_ACCELERATION_KD_TREE_RADIUS = 0.3
Expand Down
76 changes: 56 additions & 20 deletions SlicerBoneMorphing/src/logic/SlicerBoneMorphingLogic.py
Original file line number Diff line number Diff line change
Expand Up @@ -194,8 +194,6 @@ def __convert_mesh_to_point_cloud(self, mesh: o3d.geometry.TriangleMesh) -> o3d.
Converted PointCloud
"""

# mesh_center = mesh.get_center()
# mesh.translate(-mesh_center, relative=False) # Not needed for Slicer
pcd = o3d.geometry.PointCloud()
pcd.points = mesh.vertices
pcd.colors = mesh.vertex_colors
Expand Down Expand Up @@ -225,51 +223,85 @@ def __preprocess_model(
source_pcd = self.__convert_mesh_to_point_cloud(source_mesh)
target_pcd = self.__convert_mesh_to_point_cloud(target_mesh)

points_min = np.min([len(source_pcd.points), len(target_pcd.points)])
max_nn_normals = int(points_min * (parameters[const.PREPROCESSING_KEY_NORMALS_MAX_NN] / 100))
max_nn_fpfh = int(points_min * (parameters[const.PREPROCESSING_KEY_FPFH_MAX_NN] / 100))

object_size = 0.0
if parameters[const.PREPROCESSING_KEY_DOWNSAMPLING] is True:
if parameters[const.PREPROCESSING_KEY_DOWNSAMPLING_SOURCE_TO_TARGET] is True:
object_size = self.__calculate_object_size(target_pcd)
elif parameters[const.PREPROCESSING_KEY_DOWNSAMPLING_TARGET_TO_SOURCE] is True:
object_size = self.__calculate_object_size(source_pcd)
else:
print("ERROR: Downsampling is enabled but neither of the downsampling options was selected")
return [const.EXIT_FAILURE, None]

print("Preprocessing source mesh...")
source_pcd_downsampled, source_pcd_fpfh = self.__preprocess_point_cloud(
source_pcd,
parameters[const.PREPROCESSING_KEY_DOWNSAMPLING_VOXEL_SIZE],
object_size,
parameters[const.PREPROCESSING_KEY_NORMALS_ESTIMATION_RADIUS],
parameters[const.PREPROCESSING_KEY_FPFH_ESTIMATION_RADIUS],
parameters[const.PREPROCESSING_KEY_MAX_NN_NORMALS],
parameters[const.PREPROCESSING_KEY_MAX_NN_FPFH]
max_nn_normals,
max_nn_fpfh
)

print("Preprocessing target mesh...")
target_pcd_downsampled, target_pcd_fpfh = self.__preprocess_point_cloud(
target_pcd,
parameters[const.PREPROCESSING_KEY_DOWNSAMPLING_VOXEL_SIZE],
object_size,
parameters[const.PREPROCESSING_KEY_NORMALS_ESTIMATION_RADIUS],
parameters[const.PREPROCESSING_KEY_FPFH_ESTIMATION_RADIUS],
parameters[const.PREPROCESSING_KEY_MAX_NN_NORMALS],
parameters[const.PREPROCESSING_KEY_MAX_NN_FPFH]
max_nn_normals,
max_nn_fpfh
)

object_size = np.max([self.__calculate_object_size(source_pcd), self.__calculate_object_size(target_pcd)])

try:
result_ransac = self.__ransac_pcd_registration(
source_pcd_downsampled, target_pcd_downsampled,
source_pcd_fpfh, target_pcd_fpfh,
parameters[const.REGISTRATION_KEY_RANSAC_DISTANCE_THRESHOLD],
object_size * (parameters[const.REGISTRATION_KEY_RANSAC_DISTANCE_THRESHOLD] / 100),
parameters[const.REGISTRATION_KEY_FITNESS_THRESHOLD],
parameters[const.REGISTRATION_KEY_MAX_ITERATIONS]
)
if result_ransac is None:
raise RuntimeError
except RuntimeError:
print("No registration fit was found using the RANSAC algorithm. Please, try adjusting the preprocessing parameters")
print("No rigid registration fit was found using the RANSAC algorithm. Try adjusting the preprocessing parameters")
return const.EXIT_FAILURE, None

result_icp = o3d.pipelines.registration.registration_icp(
source_pcd_downsampled, target_pcd_downsampled,
parameters[const.REGISTRATION_KEY_ICP_DISTANCE_THRESHOLD],
object_size * (parameters[const.REGISTRATION_KEY_ICP_DISTANCE_THRESHOLD] / 100),
result_ransac.transformation,
o3d.pipelines.registration.TransformationEstimationPointToPlane()
)

return const.EXIT_OK, result_icp

def __calculate_object_size(self, source: o3d.geometry.Geometry) -> float:
    """
    Compute a scalar "size" for a geometry: the Euclidean length of the
    diagonal of its minimal oriented bounding box.

    Parameters
    ----------
    source: Open3D.geometry.Geometry
        Geometrical entity to measure

    Returns
    -------
    Euclidean length of the bounding box diagonal
    """

    obb = source.get_minimal_oriented_bounding_box(robust=False)
    upper = np.asarray(obb.get_max_bound())
    lower = np.asarray(obb.get_min_bound())
    return np.linalg.norm(upper - lower)

def __preprocess_point_cloud(
self,
pcd: o3d.geometry.PointCloud,
downsampling_voxel_size: float,
downsampling_object_size: float,
normals_estimation_radius: float,
fpfh_estimation_radius: float,
max_nn_normals: int,
Expand All @@ -280,8 +312,8 @@ def __preprocess_point_cloud(
Parameters
----------
o3d.geometry.PointCloud pcd: Source point cloud
float downsampling_distance_threshold: Distance threshold for downsampling
o3d.geometry.PointCloud pcd: Point cloud to preprocess
float downsampling_object_size: Size of the object to downsample to
float normals_estimation_radius: Radius for estimating normals
float fpfh_estimation_radius: Radius for the FPFH computation
int max_nn_normals: Maximum number of neighbours considered for normals estimation
Expand All @@ -294,13 +326,15 @@ def __preprocess_point_cloud(
- [1] = FPFH
'''

if downsampling_voxel_size > 0.0:
pcd = pcd.voxel_down_sample(downsampling_voxel_size)

pcd.estimate_normals(o3d.geometry.KDTreeSearchParamHybrid(radius=normals_estimation_radius, max_nn=max_nn_normals))

pcd_fpfh = o3d.pipelines.registration.compute_fpfh_feature(pcd, o3d.geometry.KDTreeSearchParamHybrid(radius=fpfh_estimation_radius, max_nn=max_nn_fpfh))
pcd_object_size = self.__calculate_object_size(pcd)
if downsampling_object_size > 0.0 and downsampling_object_size != pcd_object_size and pcd_object_size > downsampling_object_size:
print("Downsampling point cloud with size: " + str(pcd_object_size) + " to target object size: " + str(downsampling_object_size))
pcd = pcd.object_down_sample(downsampling_object_size)
else:
print("Downsampling will not be performed. The target voxel size is either less than 0, equal to the calculated voxel size and/or larger, than current voxel size")

pcd.estimate_normals(o3d.geometry.KDTreeSearchParamHybrid(radius=(pcd_object_size * (normals_estimation_radius / 100)), max_nn=max_nn_normals))
pcd_fpfh = o3d.pipelines.registration.compute_fpfh_feature(pcd, o3d.geometry.KDTreeSearchParamHybrid(radius=pcd_object_size * (fpfh_estimation_radius / 100), max_nn=max_nn_fpfh))
return pcd, pcd_fpfh

def __ransac_pcd_registration(
Expand Down Expand Up @@ -383,6 +417,8 @@ def __deformable_registration(
cmd = f'{BCPD_EXEC} -h -x {target_path} -y {source_path}'

for key in bcpd_parameters.keys():
if key == const.BCPD_VALUE_KEY_LAMBDA:
bcpd_parameters[key] *= self.__calculate_object_size(source_pcd)
cmd += f' {key}{bcpd_parameters[key]}'

cmd += f' -o {output_path}'
Expand Down
41 changes: 20 additions & 21 deletions SlicerBoneMorphing/src/widget/SlicerBoneMorphingWidget.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,8 @@ def __setup_ui(self) -> None:
self.__ui.sourceNodeSelectionBox.setMRMLScene(slicer.mrmlScene)
self.__ui.targetNodeSelectionBox.setMRMLScene(slicer.mrmlScene)

self.__ui.preprocessingDownsamplingGroupBox.setVisible(False)

self.__ui.bcpdAdvancedControlsGroupBox.setVisible(False)

self.__setup_combo_box(self.__ui.bcpdKernelTypeComboBox, BcpdKernelType, self.__show_kernel_type)
Expand All @@ -45,8 +47,6 @@ def __setup_ui(self) -> None:

self.__setup_combo_box(self.__ui.bcpdNormalizationComboBox, BcpdNormalizationOptions, None)

self.__ui.bcpdDownsamplingCollapsibleGroupBox.visible = False

self.__ui.bcpdResetParametersPushButton.clicked.connect(self.__reset_parameters_to_default)
self.__ui.generateModelButton.clicked.connect(self.__generate_model)

Expand All @@ -60,11 +60,12 @@ def __reset_parameters_to_default(self) -> None:
self.__ui.optionsImportRegistrationModelCheckBox.setChecked(False)

## Preprocessing parameters ##
self.__ui.preprocessingDownsamplingVoxelSizeDoubleSpinBox.value = const.PREPROCESSING_DEFAULT_VALUE_DOWNSAMPLING_VOXEL_SIZE
self.__ui.preprocessingNormalsEstimationRadiusDoubleSpinBox.value = const.PREPROCESSING_DEFAULT_VALUE_RADIUS_NORMAL_SCALE
self.__ui.preprocessingNormalsEstimationMaxNeighboursSpinBox.value = const.PREPROCESSING_DEFAULT_VALUE_MAX_NN_NORMALS
self.__ui.preprocessingFpfhRadiusDoubleSpinBox.value = const.PREPROCESSING_DEFAULT_VALUE_RADIUS_FEATURE_SCALE
self.__ui.preprocessingFpfhMaxNeighboursSpinBox.value = const.PREPROCESSING_DEFAULT_VALUE_MAX_NN_FPFH
self.__ui.preprocessingDownsamplingCheckBox.checked = False
self.__ui.preprocessingDownsamplingSourceToTargetRadioButton.checked = True
self.__ui.preprocessingNormalsEstimationRadiusDoubleSpinBox.value = const.PREPROCESSING_DEFAULT_VALUE_NORMALS_ESTIMATION_RADIUS
self.__ui.preprocessingNormalsEstimationMaxNeighboursSpinBox.value = const.PREPROCESSING_DEFAULT_VALUE_NORMALS_MAX_NN
self.__ui.preprocessingFpfhRadiusDoubleSpinBox.value = const.PREPROCESSING_DEFAULT_VALUE_FPFPH_ESTIMATION_RADIUS
self.__ui.preprocessingFpfhMaxNeighboursSpinBox.value = const.PREPROCESSING_DEFAULT_VALUE_FPFH_MAX_NN

## Registration parameters ##
self.__ui.registrationMaxIterationsSpinBox.value = const.REGISTRATION_DEFAULT_VALUE_MAX_ITERATIONS
Expand Down Expand Up @@ -102,9 +103,6 @@ def __reset_parameters_to_default(self) -> None:
self.__ui.bcpdAccelerationManualKdTreeRadiusDoubleSpinBox.value = const.BCPD_DEFAULT_VALUE_ACCELERATION_KD_TREE_RADIUS
self.__ui.bcpdAccelerationManualKdTreeThresholdDoubleSpinBox.value = const.BCPD_DEFAULT_VALUE_ACCELERATION_KD_TREE_SIGMA_THRESHOLD

## Downsampling options ##
self.__ui.bcpdDownsamplingLineEdit.text = const.BCPD_DEFAULT_VALUE_DOWNSAMPLING_OPTIONS

## Convergence options ##
self.__ui.bcpdConvergenceToleranceDoubleSpinBox.value = const.BCPD_DEFAULT_VALUE_CONVERGENCE_TOLERANCE
self.__ui.bcpdConvergenceMaxIterationsSpinBox.value = const.BCPD_DEFAULT_VALUE_CONVERGENCE_MAX_ITERATIONS
Expand Down Expand Up @@ -179,11 +177,14 @@ def __parse_parameters_preprocessing(self) -> dict:
params = {}

# Preprocessing
params[const.PREPROCESSING_KEY_DOWNSAMPLING_VOXEL_SIZE] = self.__ui.preprocessingDownsamplingVoxelSizeDoubleSpinBox.value

params[const.PREPROCESSING_KEY_DOWNSAMPLING] = self.__ui.preprocessingDownsamplingCheckBox.checked
params[const.PREPROCESSING_KEY_DOWNSAMPLING_SOURCE_TO_TARGET] = self.__ui.preprocessingDownsamplingSourceToTargetRadioButton.checked
params[const.PREPROCESSING_KEY_DOWNSAMPLING_TARGET_TO_SOURCE] = self.__ui.preprocessingDownsamplingTargetToSourceRadioButton.checked
params[const.PREPROCESSING_KEY_NORMALS_ESTIMATION_RADIUS] = self.__ui.preprocessingNormalsEstimationRadiusDoubleSpinBox.value
params[const.PREPROCESSING_KEY_MAX_NN_NORMALS] = self.__ui.preprocessingNormalsEstimationMaxNeighboursSpinBox.value
params[const.PREPROCESSING_KEY_NORMALS_MAX_NN] = self.__ui.preprocessingNormalsEstimationMaxNeighboursSpinBox.value
params[const.PREPROCESSING_KEY_FPFH_ESTIMATION_RADIUS] = self.__ui.preprocessingFpfhRadiusDoubleSpinBox.value
params[const.PREPROCESSING_KEY_MAX_NN_FPFH] = self.__ui.preprocessingFpfhMaxNeighboursSpinBox.value
params[const.PREPROCESSING_KEY_FPFH_MAX_NN] = self.__ui.preprocessingFpfhMaxNeighboursSpinBox.value

# Registration
params[const.REGISTRATION_KEY_MAX_ITERATIONS] = self.__ui.registrationMaxIterationsSpinBox.value
Expand All @@ -201,7 +202,7 @@ def __parse_parameters_bcpd(self) -> dict:

## Tuning parameters ##
params[const.BCPD_VALUE_KEY_OMEGA] = self.__ui.bcpdOmegaDoubleSpinBox.value
params[const.BCPD_VALUE_KEY_LAMBDA] = self.__ui.bcpdLambdaDoubleSpinBox.value
params[const.BCPD_VALUE_KEY_LAMBDA] = self.__ui.bcpdLambdaDoubleSpinBox.value / 100
params[const.BCPD_VALUE_KEY_BETA] = self.__ui.bcpdBetaDoubleSpinBox.value
params[const.BCPD_VALUE_KEY_GAMMA] = self.__ui.bcpdGammaDoubleSpinBox.value

Expand Down Expand Up @@ -248,15 +249,17 @@ def __parse_advanced_parameters(self, params: dict) -> None:
## Acceleration settings ##
if self.__ui.bcpdAccelerationModeComboBox.currentIndex == BcpdAccelerationMode.AUTOMATIC.value:
if self.__ui.bcpdAccelerationAutomaticVbiCheckBox.checked is True:
params[const.BCPD_VALUE_KEY_NYSTORM_G] = 70
params[const.BCPD_VALUE_KEY_NYSTORM_P] = 300
params[const.BCPD_VALUE_KEY_NYSTORM_G] = const.BCPD_DEFAULT_VALUE_ACCELERATION_NYSTORM_SAMPLES_G
params[const.BCPD_VALUE_KEY_NYSTORM_P] = const.BCPD_DEFAULT_VALUE_ACCELERATION_NYSTORM_SAMPLES_J
params[const.BCPD_VALUE_KEY_NYSTORM_R] = const.BCPD_DEFAULT_VALUE_ACCELERATION_NYSTORM_SAMPLES_R

# Option switch without a value
params[const.BCPD_VALUE_KEY_KD_TREE] = ""
params[const.BCPD_VALUE_KEY_KD_TREE_SCALE] = 7
params[const.BCPD_VALUE_KEY_KD_TREE_RADIUS] = 0.15

if self.__ui.bcpdAccelerationAutomaticPlusPlusCheckBox.checked is True:
params[const.BCPD_VALUE_KEY_DOWNSAMPLING] = "B,10000,0.08"
params[const.BCPD_VALUE_KEY_DOWNSAMPLING] = const.BCPD_DEFAULT_VALUE_DOWNSAMPLING_OPTIONS
else: # Manual acceleration
if self.__ui.bcpdAccelerationManualNystormGroupBox.checked is True:
params[const.BCPD_VALUE_KEY_NYSTORM_G] = self.__ui.bcpdAccelerationManualNystormGSpinBox.value
Expand All @@ -270,10 +273,6 @@ def __parse_advanced_parameters(self, params: dict) -> None:
params[const.BCPD_VALUE_KEY_KD_TREE_RADIUS] = self.__ui.bcpdAccelerationManualKdTreeRadiusDoubleSpinBox.value
params[const.BCPD_VALUE_KEY_KD_TREE_THRESHOLD] = self.__ui.bcpdAccelerationManualKdTreeThresholdDoubleSpinBox.value

## Downsampling settings ##
if params.get(const.BCPD_VALUE_KEY_DOWNSAMPLING) is None:
params[const.BCPD_VALUE_KEY_DOWNSAMPLING] = self.__ui.bcpdDownsamplingLineEdit.text

## Convergence options ##
params[const.BCPD_VALUE_KEY_CONVERGENCE_TOLERANCE] = self.__ui.bcpdConvergenceToleranceDoubleSpinBox.value
params[const.BCPD_VALUE_KEY_CONVERGENCE_MIN_ITERATIONS] = self.__ui.bcpdConvergenceMinIterationsSpinBox.value
Expand Down
41 changes: 41 additions & 0 deletions docs/Notes/To_Do.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
Eva
- [ ] upsample meshes
- [ ] fill holes
- [ ] upsample
- [ ] clean
- [ ] double check point numbers pre and post crop to make sure no issues

Jan
- [ ] add scaling factor in GUI
- in global_registration:
- Arthur used:
- `distance_threshold = voxel_size * 1.5`
- `voxel_size = size / 55` (where size is the bounding box diagonal)
- so our default distance_threshold in GUI should be `size * 1.5 / 55`


- so to replicate Arthur's settings
- if we do 0.007 factor (0.7%) of bounding box size for distance_threshold for ICP
- then 0.027 (2.7%) of bounding box size for distance_threshold for Ransac
- can interconvert; I think it makes sense for the user to put in the goal for ICP, since that is the final step, and calculate the RANSAC value from that



*Idea for setting max deformation allowed*

- check after rigid alignment max distance between two corresponding points (on cropped models only)
- could use DeCa for this since the models are already aligned
- then use that to inform lambda value for deformation
- ie model allowed to deform enough to match proximal surfaces but distal surface can only deform by this amount
_Note_ hopefully this would not cause anisotropic morphing on the distal end, which could cause unrealistic epicondyle shapes. Maybe we can also constrain the distal end to not deform much in morphology


*Other notes from Meeting 29/11/24*
- we discussed that SSMs will not solve our problem (as Jan already stated in thesis defense)
- gives variation in length but would not necessarily help predict where within that feasible space the individual falls
- indeed, other SSM methods for distal humerus reconstruction have issues with getting length exact
- we would be happy to get similar errors to them
- but we can do so by limiting deformation as discussed above
- true length could only really be predicted if some part of proximal morphology can be used as predictor of distal
- future idea find closest match in proximal morphology (after scaling and rigid alignment)
- check if finding closest match from "library" of models used to create mean model gives better results than using mean model
Loading

0 comments on commit 856753a

Please sign in to comment.