@@ -1,5 +1,4 @@
import os
-import json
import numpy as np
from datetime import datetime
from shutil import copyfile
@@ -19,47 +18,34 @@ def set_state_machine(self, sm_info):
        self.sm_info = sm_info
        self.ID2name_fw = self.sm_info['ID2name'] # Dict mapping framework IDs to names.

-    def open_data_file(self, data_dir, experiment_name, setup_ID, subject_ID,
-                       file_type, datetime_now=None):
+    def open_data_file(self, data_dir, experiment_name, setup_ID, subject_ID, datetime_now=None):
        '''Open file tsv/txt file for event data and write header information.
        If state machine uses analog inputs instantiate analog data writers.'''
        self.data_dir = data_dir
        self.experiment_name = experiment_name
        self.subject_ID = subject_ID
        self.setup_ID = setup_ID
-        self.file_type = file_type
        if datetime_now is None: datetime_now = datetime.now()
-        self.end_time = -1
-        file_name = self.subject_ID + datetime_now.strftime('-%Y-%m-%d-%H%M%S') + '.' + self.file_type
+        self.end_timestamp = -1
+        file_name = self.subject_ID + datetime_now.strftime('-%Y-%m-%d-%H%M%S') + '.tsv'
        self.file_path = os.path.join(self.data_dir, file_name)
        self.data_file = open(self.file_path, 'w', newline='\n')
-        if self.file_type == 'tsv': # Write header.
-            self.data_file.write(self.tsv_row_str(
-                rtype='type', time='time', name='name', value='value'))
-            self.write_info_line('Experiment name', self.experiment_name)
-            self.write_info_line('Task name', self.sm_info['name'])
-            self.write_info_line('Task file hash', self.sm_info['task_hash'])
-            self.write_info_line('Setup ID', self.setup_ID)
-            self.write_info_line('Framework version', self.sm_info['framework_version'])
-            self.write_info_line('Micropython version', self.sm_info['micropython_version'])
-            self.write_info_line('Subject ID', self.subject_ID)
-        if self.file_type == 'txt':
-            self.write_info_line('Start date', datetime_now.strftime('%Y/%m/%d %H:%M:%S'))
-            self.data_file.write('\n')
-            self.data_file.write('S {}\n\n'.format(json.dumps(self.sm_info['states'])))
-            self.data_file.write('E {}\n\n'.format(json.dumps(self.sm_info['events'])))
-        else:
-            self.write_info_line('start_time', datetime.utcnow().isoformat(timespec='milliseconds'))
+        self.data_file.write(self.tsv_row_str( # Write header with row names.
+            rtype='type', time='time', name='name', value='value'))
+        self.write_info_line('experiment_name', self.experiment_name)
+        self.write_info_line('task_name', self.sm_info['name'])
+        self.write_info_line('task_file_hash', self.sm_info['task_hash'])
+        self.write_info_line('setup_ID', self.setup_ID)
+        self.write_info_line('framework_version', self.sm_info['framework_version'])
+        self.write_info_line('micropython_version', self.sm_info['micropython_version'])
+        self.write_info_line('subject_ID', self.subject_ID)
+        self.write_info_line('start_time', datetime.utcnow().isoformat(timespec='milliseconds'))
        self.analog_writers = {ID:
            Analog_writer(ai['name'], ai['fs'], ai['dtype'], self.file_path)
            for ID, ai in self.sm_info['analog_inputs'].items()}

    def write_info_line(self, name, value, time=0):
-        if self.file_type == 'tsv':
-            name = name.lower().replace(' ', '_')
-            self.data_file.write(self.tsv_row_str('info', time=time, name=name, value=value))
-        elif self.file_type == 'txt':
-            self.data_file.write(f'I {name} : {value}\n')
+        self.data_file.write(self.tsv_row_str('info', time=time, name=name, value=value))

    def tsv_row_str(self, rtype, time='', name='', value=''):
        time_str = f'{time/1000:.3f}' if type(time) == int else time
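
For reference, the header block that the new open_data_file() writes should come out looking roughly like the sketch below. It assumes tsv_row_str() separates its four fields with single tabs in the order type, time, name, value (its body is outside this hunk), and the sample values are invented; spaces are used here in place of tabs for alignment:

type    time     name                  value
info    0.000    experiment_name       example_experiment
info    0.000    task_name             example_task
info    0.000    subject_ID            m001
info    0.000    start_time            2021-06-01T12:00:00.000

The remaining info rows (task_file_hash, setup_ID, framework_version, micropython_version) follow the same pattern, and state/event/print rows are appended below them as the session runs.
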
@@ -78,6 +64,7 @@ def copy_task_file(self, data_dir, tasks_dir, dir_name='task_files'):

    def close_files(self):
        if self.data_file:
+            self.write_info_line('end_time', self.end_datetime.isoformat(timespec='milliseconds'), self.end_timestamp)
            self.data_file.close()
            self.data_file = None
            self.file_path = None
@@ -91,7 +78,7 @@ def process_data(self, new_data):
        if self.data_file:
            self.write_to_file(new_data)
        if self.print_func:
-            self.print_func(self.data_to_string(new_data, verbose=True), end='')
+            self.print_func(self.data_to_string(new_data).replace('\t\t', '\t'), end='')
        if self.data_consumers:
            for data_consumer in self.data_consumers:
                data_consumer.process_data(new_data)
@@ -105,49 +92,27 @@ def write_to_file(self, new_data):
            if nd.type == 'A':
                self.analog_writers[nd.ID].save_analog_chunk(timestamp=nd.time, data_array=nd.data)

-    def data_to_string(self, new_data, verbose=False):
+    def data_to_string(self, new_data):
        '''Convert list of data tuples into a string. If verbose=True state and event names are used,
        if verbose=False state and event IDs are used.'''
        data_string = ''
        for nd in new_data:
-            if verbose or self.file_type == 'txt':
-                if nd.type == 'D': # State entry or event.
-                    if verbose: # Print state or event name.
-                        data_string += f'D {nd.time} {self.ID2name_fw[nd.ID]}\n'
-                    else: # Print state or event ID.
-                        data_string += f'D {nd.time} {nd.ID}\n'
-                elif nd.type == 'P': # User print output.
-                    data_string += f'P {nd.time} {nd.data}\n'
-                elif nd.type == 'V': # Variables
-                    if nd.ID == 'print':
-                        data_string += f'P {nd.time} {nd.data}\n'
-                    elif nd.ID in ('set','get'):
-                        for v_name, v_value in json.loads(nd.data).items():
-                            data_string += f'V {nd.time} {v_name} {v_value}\n'
-                elif nd.type == '!': # Warning
-                    data_string += f'! {nd.data}\n'
-                elif nd.type == '!!': # Crash traceback.
-                    error_string = nd.data
-                    if not verbose: # In data files multi-line tracebacks have ! prepended to all lines aid parsing data file.
-                        error_string = '! ' + error_string.replace('\n', '\n! ')
-                    data_string += '\n' + error_string + '\n'
-            elif self.file_type == 'tsv':
-                if nd.type == 'D': # State entry or event.
-                    if nd.ID in self.sm_info['states'].values():
-                        data_string += self.tsv_row_str('state', time=nd.time, name=self.ID2name_fw[nd.ID])
-                    else:
-                        data_string += self.tsv_row_str('event', time=nd.time, name=self.ID2name_fw[nd.ID])
-                elif nd.type == 'P': # User print output.
-                    data_string += self.tsv_row_str('print', time=nd.time, value=nd.data)
-                elif nd.type == 'V': # Variable.
-                    data_string += self.tsv_row_str('variable', time=nd.time, name=nd.ID, value=nd.data)
-                elif nd.type == '!': # Warning
-                    data_string += self.tsv_row_str('warning', value=nd.data)
-                elif nd.type == '!!': # Error
-                    data_string += self.tsv_row_str('error', value=nd.data.replace('\n','|').replace('\r','|'))
-                elif nd.type == 'S': # Framework stop.
-                    self.write_info_line('end_time', datetime.utcnow().isoformat(timespec='milliseconds'), time=nd.time)
-                    self.end_time = nd.time # Used by run_experiment_tab for printing summary variables to file.
+            if nd.type == 'D': # State entry or event.
+                if nd.ID in self.sm_info['states'].values():
+                    data_string += self.tsv_row_str('state', time=nd.time, name=self.ID2name_fw[nd.ID])
+                else:
+                    data_string += self.tsv_row_str('event', time=nd.time, name=self.ID2name_fw[nd.ID])
+            elif nd.type == 'P': # User print output.
+                data_string += self.tsv_row_str('print', time=nd.time, value=nd.data)
+            elif nd.type == 'V': # Variable.
+                data_string += self.tsv_row_str('variable', time=nd.time, name=nd.ID, value=nd.data)
+            elif nd.type == '!': # Warning
+                data_string += self.tsv_row_str('warning', value=nd.data)
+            elif nd.type == '!!': # Error
+                data_string += self.tsv_row_str('error', value=nd.data.replace('\n','|').replace('\r','|'))
+            elif nd.type == 'S': # Framework stop.
+                self.end_datetime = datetime.utcnow()
+                self.end_timestamp = nd.time
        return data_string

@@ -163,46 +128,29 @@ def __init__(self, name, sampling_rate, data_type, session_filepath):
    def open_data_files(self, session_filepath):
        ses_path_stem, file_ext = os.path.splitext(session_filepath)
        self.path_stem = ses_path_stem + f'_{self.name}'
-        self.file_type = 'npy' if file_ext[-3:] == 'tsv' else 'pca'
-        if self.file_type == 'pca':
-            file_path = self.path_stem + '.pca'
-            self.pca_file = open(file_path, 'wb')
-        elif self.file_type == 'npy':
-            self.t_tempfile_path = self.path_stem + '.time.temp'
-            self.d_tempfile_path = self.path_stem + f'.data.1{self.data_type}.temp'
-            self.time_tempfile = open(self.t_tempfile_path, 'wb')
-            self.data_tempfile = open(self.d_tempfile_path, 'wb')
+        self.t_tempfile_path = self.path_stem + '.time.temp'
+        self.d_tempfile_path = self.path_stem + f'.data.1{self.data_type}.temp'
+        self.time_tempfile = open(self.t_tempfile_path, 'wb')
+        self.data_tempfile = open(self.d_tempfile_path, 'wb')

    def close_files(self):
        '''Close data files. Convert temp files to numpy.'''
-        if self.file_type == 'pca':
-            self.pca_file.close()
-        elif self.file_type == 'npy':
-            self.time_tempfile.close()
-            self.data_tempfile.close()
-            with open(self.t_tempfile_path, 'rb') as f:
-                times = np.frombuffer(f.read(), dtype='float64')
-                np.save(self.path_stem + '.time.npy', times)
-            with open(self.d_tempfile_path, 'rb') as f:
-                data = np.frombuffer(f.read(), dtype=self.data_type)
-                np.save(self.path_stem + '.data.npy', data)
-            os.remove(self.t_tempfile_path)
-            os.remove(self.d_tempfile_path)
+        self.time_tempfile.close()
+        self.data_tempfile.close()
+        with open(self.t_tempfile_path, 'rb') as f:
+            times = np.frombuffer(f.read(), dtype='float64')
+            np.save(self.path_stem + '.time.npy', times)
+        with open(self.d_tempfile_path, 'rb') as f:
+            data = np.frombuffer(f.read(), dtype=self.data_type)
+            np.save(self.path_stem + '.data.npy', data)
+        os.remove(self.t_tempfile_path)
+        os.remove(self.d_tempfile_path)

    def save_analog_chunk(self, timestamp, data_array):
-        '''Save a chunk of analog data to .pca data file. File is created if not
-        already open for that analog input.'''
-        if self.file_type == 'pca':
-            ms_per_sample = 1000 / self.sampling_rate
-            for i, x in enumerate(data_array):
-                t = int(timestamp + i * ms_per_sample)
-                self.pca_file.write(t.to_bytes(4, 'little', signed=True))
-                self.pca_file.write(x.to_bytes(4, 'little', signed=True))
-            self.pca_file.flush()
-        elif self.file_type == 'npy':
-            times = (np.arange(len(data_array), dtype='float64')
-                     / self.sampling_rate) + timestamp / 1000 # Seconds
-            self.time_tempfile.write(times.tobytes())
-            self.data_tempfile.write(data_array.tobytes())
-            self.time_tempfile.flush()
-            self.data_tempfile.flush()
+        '''Save a chunk of analog data to temp files, converted to .npy on close.'''
+        times = (np.arange(len(data_array), dtype='float64')
+                 / self.sampling_rate) + timestamp / 1000 # Seconds
+        self.time_tempfile.write(times.tobytes())
+        self.data_tempfile.write(data_array.tobytes())
+        self.time_tempfile.flush()
+        self.data_tempfile.flush()
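
A minimal sketch of how data saved in the new format could be read back for analysis, assuming tsv_row_str() writes single-tab-separated rows and using invented file and input names ('m001-2021-06-01-120000.tsv', an analog input called 'photometry'); everything else follows from the code above:

import csv
import numpy as np

# Read the tab-separated session file written by open_data_file()/write_to_file() (example file name).
with open('m001-2021-06-01-120000.tsv', newline='') as f:
    rows = list(csv.DictReader(f, delimiter='\t'))  # Columns: type, time, name, value.

info = {r['name']: r['value'] for r in rows if r['type'] == 'info'}  # Header info lines.
states = [r for r in rows if r['type'] == 'state']                   # State entry rows.
events = [r for r in rows if r['type'] == 'event']                   # Event rows.

# Load one analog input saved by Analog_writer.close_files() (example input name).
times = np.load('m001-2021-06-01-120000_photometry.time.npy')  # float64 timestamps in seconds.
data = np.load('m001-2021-06-01-120000_photometry.data.npy')   # Samples, dtype from sm_info['analog_inputs'].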