Skip to content

Commit 1ae304a

Browse files
committed
refactor
1 parent 9c19027 commit 1ae304a

File tree

1 file changed

+53
-59
lines changed

1 file changed

+53
-59
lines changed

pylabrobot/liquid_handling/liquid_handler.py

Lines changed: 53 additions & 59 deletions
Original file line numberDiff line numberDiff line change
@@ -2478,14 +2478,12 @@ async def consolidate_tip_inventory(self, tip_racks: List[TipRack]):
24782478
"""
24792479

24802480
def merge_sublists(lists: List[List[int]], max_len: int) -> List[List[int]]:
2481-
"""
2482-
Merge adjacent sublists if combined length <= max_len,
2483-
without splitting sublists."""
2481+
"""Merge adjacent sublists if combined length <= max_len, without splitting sublists."""
24842482
merged: List[List[int]] = []
24852483
buffer: List[int] = []
24862484

24872485
for sublist in lists:
2488-
if not sublist:
2486+
if len(sublist) == 0:
24892487
continue # skip empty sublists
24902488

24912489
if len(buffer) + len(sublist) <= max_len:
@@ -2495,23 +2493,22 @@ def merge_sublists(lists: List[List[int]], max_len: int) -> List[List[int]]:
24952493
merged.append(buffer)
24962494
buffer = sublist # start new buffer
24972495

2498-
if buffer:
2496+
if len(buffer) > 0:
24992497
merged.append(buffer)
25002498

25012499
return merged
25022500

25032501
def divide_list_into_chunks(
    list_l: List[Any], chunk_size: int
) -> Generator[List[Any], None, None]:
    """Lazily split a list into consecutive chunks of at most ``chunk_size`` items.

    Parameters:
    - list_l: The list to be divided into chunks.
    - chunk_size: The size of each chunk (the final chunk may be shorter).

    Returns:
    A generator that yields the chunks of the list, in order.
    """
    # Step through the list one chunk at a time; slicing past the end is safe,
    # so the last chunk simply holds whatever remains.
    for chunk_start in range(0, len(list_l), chunk_size):
        chunk = list_l[chunk_start : chunk_start + chunk_size]
        yield chunk
@@ -2520,33 +2517,35 @@ def divide_list_into_chunks(
25202517

25212518
for idx, tip_rack in enumerate(tip_racks):
25222519
# Only consider partially-filled tip_racks
2523-
tip_status = [tip_spot.tracker.has_tip for tip_spot in tip_rack.children]
2524-
partially_filled = any(tip_status) and not all(tip_status)
2525-
2526-
if partially_filled:
2527-
tipspots_w_tips = [i for b, i in zip(tip_status, tip_rack.children) if b]
2520+
tip_status = [tip_spot.tracker.has_tip for tip_spot in tip_rack.get_all_items()]
25282521

2529-
# Identify model by hashed unique physical characteristics
2530-
current_model = hash(tipspots_w_tips[0].tracker.get_tip())
2522+
if not (any(tip_status) and not all(tip_status)):
2523+
continue # ignore non-partially-filled tip_racks
25312524

2532-
num_empty_tipspots = len(tip_status) - len(tipspots_w_tips)
2525+
tipspots_w_tips = [tip_spot for has_tip, tip_spot in zip(tip_status, tip_rack.children) if has_tip]
25332526

2534-
sanity_check = all(
2535-
hash(tip_spot.tracker.get_tip()) == current_model for tip_spot in tipspots_w_tips[1:]
2527+
# Identify model by hashed unique physical characteristics
2528+
current_model = hash(tipspots_w_tips[0].tracker.get_tip())
2529+
if not all(
2530+
hash(tip_spot.tracker.get_tip()) == current_model for tip_spot in tipspots_w_tips[1:]
2531+
):
2532+
raise ValueError(
2533+
f"Tip rack {tip_rack.name} has mixed tip models, cannot consolidate: "
2534+
f"{[tip_spot.tracker.get_tip() for tip_spot in tipspots_w_tips]}"
25362535
)
25372536

2538-
if sanity_check:
2539-
clusters_by_model.setdefault(current_model, []).append((tip_rack, num_empty_tipspots))
2537+
num_empty_tipspots = len(tip_status) - len(tipspots_w_tips)
2538+
clusters_by_model.setdefault(current_model, []).append((tip_rack, num_empty_tipspots))
25402539

2541-
# Sort partially-filled tipracks by minimal fill_len
2540+
# Sort partially-filled tipracks from most to least empty
25422541
for model, rack_list in clusters_by_model.items():
25432542
rack_list.sort(key=lambda x: x[1])
25442543

25452544
# Consolidate one tip model at a time across all tip_racks of that model
25462545
for model, rack_list in clusters_by_model.items():
2547-
print(f"Consolidating:\n - {', '.join([rack.name for rack, num in rack_list])}")
2546+
print(f"Consolidating: - {', '.join([rack.name for rack, _ in rack_list])}")
25482547

2549-
all_tip_spots_list = [tip for tip_rack, _ in rack_list for tip in tip_rack.children]
2548+
all_tip_spots_list = [tip_spot for tip_rack, _ in rack_list for tip_spot in tip_rack.children]
25502549

25512550
# 1: Record current tip state
25522551
current_tip_presence_list = [tip_spot.has_tip() for tip_spot in all_tip_spots_list]
@@ -2555,11 +2554,7 @@ def divide_list_into_chunks(
25552554
total_length = len(all_tip_spots_list)
25562555
num_tips_per_model = sum(current_tip_presence_list)
25572556

2558-
target_tip_presence_list = [
2559-
# True if i < num_tips_per_model else False for i in range(total_length)
2560-
i < num_tips_per_model
2561-
for i in range(total_length)
2562-
]
2557+
target_tip_presence_list = [i < num_tips_per_model for i in range(total_length)]
25632558

25642559
# 3: Calculate tip_spots involved in tip movement
25652560
tip_movement_list = [
@@ -2572,23 +2567,24 @@ def divide_list_into_chunks(
25722567
tip_target_indices = [i for i, v in enumerate(tip_movement_list) if v == -1]
25732568
all_target_tip_spots = [all_tip_spots_list[idx] for idx in tip_target_indices]
25742569

2570+
# Only continue if tip_racks are not already consolidated
2571+
if len(all_target_tip_spots) == 0:
2572+
print("Tips already optimally consolidated!")
2573+
continue
2574+
25752575
# 4: Cluster target tip_spots by BOTH parent tip_rack & x-coordinate
25762576
sorted_tip_spots = sorted(
2577-
all_target_tip_spots, key=lambda tip: (str(tip.parent), round(tip.location.x, 3))
2577+
all_target_tip_spots, key=lambda tip: (tip.parent.name, round(tip.location.x, 3))
25782578
)
25792579

25802580
target_tip_clusters_by_parent_x: Dict[Tuple[str, float], List[TipSpot]] = {}
25812581

25822582
for tip_spot in sorted_tip_spots:
2583-
key = (str(tip_spot.parent), round(tip_spot.location.x, 3))
2583+
key = (tip_spot.parent.name, round(tip_spot.location.x, 3))
25842584
if key not in target_tip_clusters_by_parent_x:
25852585
target_tip_clusters_by_parent_x[key] = []
25862586
target_tip_clusters_by_parent_x[key].append(tip_spot)
25872587

2588-
# Only continue if tip_racks are not already consolidated
2589-
if len(target_tip_clusters_by_parent_x) > 0:
2590-
raise ValueError(f"No channel capable of handling tips on deck: {current_tip_model}")
2591-
25922588
current_tip_model = all_origin_tip_spots[0].tracker.get_tip()
25932589

25942590
# Ensure there are channels that can pick up the tip model
@@ -2600,31 +2596,29 @@ def divide_list_into_chunks(
26002596
]
26012597
)
26022598

2603-
# 5: Optimise speed
2604-
if num_channels_available > 0:
2605-
# by aggregating drop columns i.e. same drop column should not be visited twice!
2606-
if num_channels_available >= 8: # physical constraint of tip_rack's having 8 rows
2607-
merged_target_tip_clusters = merge_sublists(
2608-
target_tip_clusters_by_parent_x.values(), max_len=8
2609-
)
2610-
2611-
else: # by chunking drop tip_spots list into size of available channels
2612-
merged_target_tip_clusters = list(
2613-
divide_list_into_chunks(all_target_tip_spots, chunk_size=num_channels_available)
2614-
)
2599+
# 5: Optimize speed
2600+
if num_channels_available == 0:
2601+
raise ValueError(f"No channel capable of handling tips on deck: {current_tip_model}")
26152602

2616-
len_transfers = len(merged_target_tip_clusters)
2603+
# by aggregating drop columns i.e. same drop column should not be visited twice!
2604+
if num_channels_available >= 8: # physical constraint of tip_rack's having 8 rows
2605+
merged_target_tip_clusters = merge_sublists(
2606+
target_tip_clusters_by_parent_x.values(), max_len=8
2607+
)
2608+
else: # by chunking drop tip_spots list into size of available channels
2609+
merged_target_tip_clusters = list(
2610+
divide_list_into_chunks(all_target_tip_spots, chunk_size=num_channels_available)
2611+
)
26172612

2618-
# 6: Execute tip movement/consolidation
2619-
for idx, target_tip_spots in enumerate(merged_target_tip_clusters):
2620-
print(f" - tip transfer cycle: {idx} / {len_transfers - 1}")
2621-
num_channels = len(target_tip_spots)
2622-
use_channels = list(range(num_channels))
2613+
len_transfers = len(merged_target_tip_clusters)
26232614

2624-
origin_tip_spots = [all_origin_tip_spots.pop(0) for idx in range(num_channels)]
2615+
# 6: Execute tip movement/consolidation
2616+
for idx, target_tip_spots in enumerate(merged_target_tip_clusters):
2617+
print(f" - tip transfer cycle: {idx+1} / {len_transfers}")
2618+
num_channels = len(target_tip_spots)
2619+
use_channels = list(range(num_channels))
26252620

2626-
await self.pick_up_tips(origin_tip_spots, use_channels=use_channels)
2621+
origin_tip_spots = [all_origin_tip_spots.pop(0) for _ in range(num_channels)]
26272622

2628-
await self.drop_tips(target_tip_spots, use_channels=use_channels)
2629-
else:
2630-
print("Tips already optimally consolidated!")
2623+
await self.pick_up_tips(origin_tip_spots, use_channels=use_channels)
2624+
await self.drop_tips(target_tip_spots, use_channels=use_channels)

0 commit comments

Comments (0)