import numpy as np
import quantities as pq

+from neo.core.baseneo import _check_annotations
from neo.core import Segment, SpikeTrain, Epoch, Event, AnalogSignal, IrregularlySampledSignal, Block, ImageSequence
from neo.io.baseio import BaseIO
from neo.io.proxyobjects import (
@@ -195,6 +196,18 @@ def _recompose_unit(base_unit_name, conversion):
    return pq.dimensionless


+def nwb_obj_to_dict(obj):
+    if not hasattr(obj, "fields"):
+        raise TypeError("Does not seem to be an NWB object")
+    result = {}
+    for key, value in obj.fields.items():
+        if hasattr(value, "fields"):
+            result[key] = nwb_obj_to_dict(value)
+        else:
+            result[key] = value
+    return result
+
+
class NWBIO(BaseIO):
    """
    Class for "reading" experimental data from a .nwb file, and "writing" a .nwb file from Neo
@@ -244,6 +257,7 @@ def read_all_blocks(self, lazy=False, **kwargs):
        """
        Load all blocks in the file.
        """
+        import hdmf
        import pynwb

        if self.nwb_file_mode not in ("r",):
@@ -264,6 +278,11 @@ def read_all_blocks(self, lazy=False, **kwargs):
            if value is not None:
                if annotation_name in POSSIBLE_JSON_FIELDS:
                    value = try_json_field(value)
+                elif isinstance(value, hdmf.utils.StrDataset):
+                    value = list(value)
+                # placing this check here for easier debugging, but it's redundant so we should remove it
+                # once we're handling all possible annotation types
+                _check_annotations(value)
                self.global_block_metadata[annotation_name] = value
        if "session_description" in self.global_block_metadata:
            self.global_block_metadata["description"] = self.global_block_metadata["session_description"]
@@ -691,6 +710,8 @@ class AnalogSignalProxy(BaseAnalogSignalProxy):
    )

    def __init__(self, timeseries, nwb_group):
+        import pynwb
+
        self._timeseries = timeseries
        self.units = timeseries.unit
        if timeseries.conversion:
@@ -722,8 +743,11 @@ def __init__(self, timeseries, nwb_group):
            pass
        for field_name in metadata_fields:
            value = getattr(timeseries, field_name)
+            if hasattr(value, "fields"):
+                value = nwb_obj_to_dict(value)
            if value is not None:
                self.annotations[f"nwb:{field_name}"] = value
+                _check_annotations(value)  # tmp for easier debugging
        self.annotations["nwb_neurodata_type"] = (timeseries.__class__.__module__, timeseries.__class__.__name__)
        if hasattr(timeseries, "electrode"):
            # todo: once the Group class is available, we could add electrode metadata
@@ -865,15 +889,22 @@ def __init__(self, units_table, id):
        self._units_table = units_table
        self.id = id
        self.units = pq.s
-        obs_intervals = units_table.get_unit_obs_intervals(id)
-        if len(obs_intervals) == 0:
+        try:
+            obs_intervals = units_table.get_unit_obs_intervals(id)
+        except KeyError:
+            logger.warn("Unable to retrieve obs_intervals")
            t_start, t_stop = None, None
-        elif len(obs_intervals) == 1:
-            t_start, t_stop = obs_intervals[0]
        else:
-            raise NotImplementedError("Can't yet handle multiple observation intervals")
-        self.t_start = t_start * pq.s
-        self.t_stop = t_stop * pq.s
+            if len(obs_intervals) == 0:
+                t_start, t_stop = None, None
+            elif len(obs_intervals) == 1:
+                t_start, t_stop = obs_intervals[0]
+                t_start = t_start * pq.s
+                t_stop = t_stop * pq.s
+            else:
+                raise NotImplementedError("Can't yet handle multiple observation intervals")
+        self.t_start = t_start
+        self.t_stop = t_stop
        self.annotations = {"nwb_group": "acquisition"}
        try:
            # NWB files created by Neo store the name as an extra column
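
For reference, a minimal sketch of what the new nwb_obj_to_dict helper does: any object exposing a fields dict (as pynwb container objects do) is flattened recursively into plain dicts, so the values pass _check_annotations and can be stored as Neo annotations. The FakeNWBObject class below is a hypothetical stand-in, used only to avoid constructing a full pynwb file; nwb_obj_to_dict is assumed to be importable from this module.

# a minimal sketch, assuming only that NWB containers expose a `fields` dict
class FakeNWBObject:
    # hypothetical stand-in for a pynwb container object
    def __init__(self, **fields):
        self.fields = fields

electrode = FakeNWBObject(location="CA1", filtering="none")
series = FakeNWBObject(unit="volts", electrode=electrode)

# nested containers are converted to nested plain dicts
print(nwb_obj_to_dict(series))
# {'unit': 'volts', 'electrode': {'location': 'CA1', 'filtering': 'none'}}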