 
 import numpy as np
 import pandas as pd
+import fastremap
 from flask import current_app, g, jsonify, make_response, request
 from pytz import UTC
 
 from pychunkedgraph import __version__
 from pychunkedgraph.app import app_utils
-from pychunkedgraph.graph import (
-    attributes,
-    cutting,
-    segmenthistory,
-)
+from pychunkedgraph.graph import attributes, cutting, segmenthistory, ChunkedGraph
 from pychunkedgraph.graph import (
     edges as cg_edges,
 )
 )
 from pychunkedgraph.graph.analysis import pathing
 from pychunkedgraph.graph.attributes import OperationLogs
+from pychunkedgraph.graph.edits_sv import split_supervoxel
 from pychunkedgraph.graph.misc import get_contact_sites
 from pychunkedgraph.graph.operation import GraphEditOperation
 from pychunkedgraph.graph.utils import basetypes
@@ -393,7 +391,7 @@ def handle_merge(table_id, allow_same_segment_merge=False):
     current_app.operation_id = ret.operation_id
     if ret.new_root_ids is None:
         raise cg_exceptions.InternalServerError(
-            "Could not merge selected " "supervoxel."
+            f"{ret.operation_id}: Could not merge selected supervoxels."
         )
 
     current_app.logger.debug(("lvl2_nodes:", ret.new_lvl2_ids))
@@ -407,24 +405,10 @@ def handle_merge(table_id, allow_same_segment_merge=False):
 ### SPLIT ----------------------------------------------------------------------
 
 
-def handle_split(table_id):
-    current_app.table_id = table_id
-    user_id = str(g.auth_user.get("id", current_app.user_id))
-
-    data = json.loads(request.data)
-    is_priority = request.args.get("priority", True, type=str2bool)
-    remesh = request.args.get("remesh", True, type=str2bool)
-    mincut = request.args.get("mincut", True, type=str2bool)
-
+def _get_sources_and_sinks(cg: ChunkedGraph, data):
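+    # Resolve the "sources"/"sinks" entries of the request payload to
+    # supervoxel IDs and their coordinates.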
     current_app.logger.debug(data)
-
-    # Call ChunkedGraph
-    cg = app_utils.get_cg(table_id, skip_cache=True)
     node_idents = []
-    node_ident_map = {
-        "sources": 0,
-        "sinks": 1,
-    }
+    node_ident_map = {"sources": 0, "sinks": 1}
     coords = []
     node_ids = []
 
@@ -437,18 +421,74 @@ def handle_split(table_id):
     node_ids = np.array(node_ids, dtype=np.uint64)
     coords = np.array(coords)
     node_idents = np.array(node_idents)
+
+    start = time.time()
     sv_ids = app_utils.handle_supervoxel_id_lookup(cg, coords, node_ids)
+    current_app.logger.info(f"SV lookup took {time.time() - start}s.")
     current_app.logger.debug(
         {"node_id": node_ids, "sv_id": sv_ids, "node_ident": node_idents}
     )
 
+    source_ids = sv_ids[node_idents == 0]
+    sink_ids = sv_ids[node_idents == 1]
+    source_coords = coords[node_idents == 0]
+    sink_coords = coords[node_idents == 1]
+    return (source_ids, sink_ids, source_coords, sink_coords)
+
+
+def handle_split(table_id):
+    current_app.table_id = table_id
+    user_id = str(g.auth_user.get("id", current_app.user_id))
+
+    data = json.loads(request.data)
+    is_priority = request.args.get("priority", True, type=str2bool)
+    remesh = request.args.get("remesh", True, type=str2bool)
+    mincut = request.args.get("mincut", True, type=str2bool)
+
+    cg = app_utils.get_cg(table_id, skip_cache=True)
+    sources, sinks, source_coords, sink_coords = _get_sources_and_sinks(cg, data)
     try:
         ret = cg.remove_edges(
             user_id=user_id,
-            source_ids=sv_ids[node_idents == 0],
-            sink_ids=sv_ids[node_idents == 1],
-            source_coords=coords[node_idents == 0],
-            sink_coords=coords[node_idents == 1],
+            source_ids=sources,
+            sink_ids=sinks,
+            source_coords=source_coords,
+            sink_coords=sink_coords,
+            mincut=mincut,
+        )
+    except cg_exceptions.SupervoxelSplitRequiredError as e:
+        current_app.logger.info(e)
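+        # Remap the requested supervoxels with the mapping carried by the exception;
+        # IDs absent from the mapping are left unchanged (preserve_missing_labels=True).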
+        sources_remapped = fastremap.remap(
+            sources,
+            e.sv_remapping,
+            preserve_missing_labels=True,
+            in_place=False,
+        )
+        sinks_remapped = fastremap.remap(
+            sinks,
+            e.sv_remapping,
+            preserve_missing_labels=True,
+            in_place=False,
+        )
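+        # A supervoxel that now appears on both the source and the sink side must
+        # itself be split before the requested edges can be removed.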
+        overlap_mask = np.isin(sources_remapped, sinks_remapped)
+        for sv_to_split in np.unique(sources_remapped[overlap_mask]):
+            _mask0 = sources_remapped == sv_to_split
+            _mask1 = sinks_remapped == sv_to_split
+            split_supervoxel(
+                cg,
+                sv_to_split,
+                source_coords[_mask0],
+                sink_coords[_mask1],
+                e.operation_id,
+            )
+
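+        # Re-resolve sources and sinks against the updated graph, then retry the split.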
+        sources, sinks, source_coords, sink_coords = _get_sources_and_sinks(cg, data)
+        ret = cg.remove_edges(
+            user_id=user_id,
+            source_ids=sources,
+            sink_ids=sinks,
+            source_coords=source_coords,
+            sink_coords=sink_coords,
             mincut=mincut,
         )
     except cg_exceptions.LockingError as e:
@@ -459,7 +499,7 @@ def handle_split(table_id):
     current_app.operation_id = ret.operation_id
     if ret.new_root_ids is None:
         raise cg_exceptions.InternalServerError(
-            "Could not split selected segment groups."
+            f"{ret.operation_id}: Could not split selected segment groups."
         )
 
     current_app.logger.debug(("after split:", ret.new_root_ids))