10
10
logger = logging .getLogger (__name__ )
11
11
12
12
class RateCounterBase(abc.ABC):
    """
    Abstract base class for count-rate counters.

    Subclasses must implement a clock_rate attribute or property.
    """

    def __init__(self):
        self.running = False  # True while acquisition is active
        self.clock_rate = 0  # Hz; subclasses are expected to set a real value
18
19
def stop (self ):
19
20
"""
20
21
subclasses may override this for custom behavior
@@ -34,27 +35,92 @@ def close(self):
34
35
pass
35
36
36
37
@abc .abstractmethod
37
- def sample_counts (self , n_samples = 1 ) -> np . ndarray :
38
+ def _read_samples (self ) :
38
39
"""
39
- Should return a numpy array of size n_samples, with each row being
40
- an array (or tuple) of two values, The first value is equal to the number of counts,
41
- and the second value is the number of clock samples that were used to measure the counts.
40
+ subclasses must implement this method
41
+
42
+ Should return total_counts, num_clock_samples
43
+ """
44
+ pass
45
+
46
def sample_counts(self, n_batches=1, sum_counts=True):
    """
    Perform n_batches of batch reads from the _read_samples method.

    This is useful when hardware (such as a NIDAQ card) is pre-configured to
    acquire a fixed number of samples per read and the caller wishes to read
    more data than a single batch provides. For example, if the NiDAQ is
    configured to acquire 1000 clock samples but the caller wishes to read
    10000 samples, this function may be called with n_batches=10.

    For each batch read (of size `num_data_samples_per_batch`), the total
    counts are summed. Because it's possible (though unlikely) for the
    hardware to return fewer than `num_data_samples_per_batch` measurements,
    the actual number of clock samples per batch is also recorded.

    Parameters
    ----------
    n_batches : int
        Number of batch reads to perform.
    sum_counts : bool
        If False, returns a numpy array of shape (n_batches, 2) where each
        row is (sum of counts in batch, actual clock samples in batch).
        This may be useful for callers performing their own time-dependent
        statistical analysis. If True (default), returns
        np.sum(data, axis=0, keepdims=True) — a (1, 2) array of
        (total counts, total clock samples).

    Example
    -------
    With `num_data_samples_per_batch` = 5 and n_batches = 3, sum_counts=False
    might return

        data = [
            [22, 5],
            [24, 5],
            [20, 4]   # short batch: only 4 clock samples were read
        ]

    and sum_counts=True would return [[66, 14]].

    With these data, and knowing the clock_rate, one can easily compute
    the count rate. See sample_count_rate.
    """
    data = np.zeros((n_batches, 2))
    for i in range(n_batches):
        data_sample, samples_read = self._read_samples()
        if samples_read > 0:
            # only the first samples_read entries of the buffer are valid
            data[i, 0] = np.sum(data_sample[:samples_read])
        data[i, 1] = samples_read
        # lazy %-formatting: avoids building the string when INFO is disabled
        logger.info('batch data (sum counts, num clock cycles per batch): %s', data[i])

    if sum_counts:
        return np.sum(data, axis=0, keepdims=True)
    return data
51
116
52
117
def sample_count_rate (self , data_counts : np .ndarray ):
53
118
"""
54
119
Converts the output of sample_counts to a count rate. Expects data_counts to be a 2d numpy array
55
- of [[counts, clock_samples], [counts, clock_samples], ...] as is returned by sample_counts.
120
+ of [[counts, clock_samples], [counts, clock_samples], ...] or a 2d array with one row: [[counts, clock_samples]]
121
+ as is returned by sample_counts.
56
122
57
- Under normal conditions, will return a single value
123
+ Returns the count rate in counts/second = clock_rate * total counts/ total clock_samples)
58
124
59
125
If the sum of all clock_samples is 0, will return np.nan.
60
126
"""
@@ -64,7 +130,6 @@ def sample_count_rate(self, data_counts: np.ndarray):
64
130
else :
65
131
return np .nan
66
132
67
-
68
133
def yield_count_rate (self ):
69
134
while self .running :
70
135
count_data = self .sample_counts ()
@@ -78,32 +143,41 @@ class RandomRateCounter(RateCounterBase):
78
143
79
144
This is similar to a PL source moving in and out of focus.
80
145
'''
81
def __init__(self, simulate_single_light_source=False, num_data_samples_per_batch=10):
    """
    Simulated rate counter.

    simulate_single_light_source -- when True, the signal occasionally jumps
        to a random "bright" offset and otherwise sits at the dark baseline.
    num_data_samples_per_batch -- number of simulated samples per batch read.
    """
    super().__init__()

    # dark baseline and multiplicative noise amplitude of the simulated signal
    self.default_offset = 100
    self.signal_noise_amp = 0.2
    self.current_offset = self.default_offset
    self.current_direction = 1

    self.clock_rate = 0.9302010  # a totally random number :P
    self.simulate_single_light_source = simulate_single_light_source
    self.possible_offset_values = np.arange(5000, 100000, 1000)  # these create the "bright" positions
    self.num_data_samples_per_batch = num_data_samples_per_batch
90
157
91
- def sample_counts (self , n_samples = 1 ):
158
+ def _read_samples (self ):
92
159
"""
93
160
Returns a random number of counts
94
161
"""
95
- if np .random .random (1 )[0 ] < 0.05 :
96
- if np .random .random (1 )[0 ] < 0.1 :
97
- self .current_direction = - 1 * self .current_direction
98
- self .current_offset += self .current_direction * np .random .choice (self .possible_offset_values )
162
+ if self .simulate_single_light_source :
163
+ if np .random .random (1 )[0 ] < 0.005 :
164
+ self .current_offset = np .random .choice (self .possible_offset_values )
165
+ else :
166
+ self .current_offset = self .default_offset
167
+
168
+ else :
169
+ if np .random .random (1 )[0 ] < 0.05 :
170
+ if np .random .random (1 )[0 ] < 0.1 :
171
+ self .current_direction = - 1 * self .current_direction
172
+ self .current_offset += self .current_direction * np .random .choice (self .possible_offset_values )
99
173
100
- if self .current_offset < self .default_offset :
101
- self .current_offset = self .default_offset
102
- self .current_direction = 1
174
+ if self .current_offset < self .default_offset :
175
+ self .current_offset = self .default_offset
176
+ self .current_direction = 1
103
177
104
- counts = self .signal_noise_amp * self .current_offset * np .random .random (n_samples ) + self .current_offset
105
- count_size = np . ones ( n_samples )
106
- return np . column_stack (( counts , count_size ))
178
+ counts = self .signal_noise_amp * self .current_offset * np .random .random (self . num_data_samples_per_batch ) + self .current_offset
179
+
180
+ return counts , self . num_data_samples_per_batch
107
181
108
182
109
183
class NiDaqDigitalInputRateCounter (RateCounterBase ):
@@ -126,7 +200,6 @@ def __init__(self, daq_name = 'Dev1',
126
200
self .read_write_timeout = read_write_timeout
127
201
self .num_data_samples_per_batch = num_data_samples_per_batch
128
202
self .trigger_terminal = trigger_terminal
129
- self .running = False
130
203
131
204
self .read_lock = False
132
205
@@ -188,7 +261,6 @@ def _read_samples(self):
188
261
self .read_lock = False
189
262
return data_buffer , samples_read
190
263
191
-
192
264
def start (self ):
193
265
if self .running :
194
266
self .stop ()
@@ -208,76 +280,19 @@ def _burn_and_log_exception(self, f):
208
280
def stop(self):
    """
    Stop acquisition and release the NIDAQ tasks.

    NOTE(review): control flow reconstructed from a garbled source — confirm
    that counter_task.close is intended to run whenever acquisition was
    running, even when no clock task was configured.
    """
    if self.running:
        # block until any in-flight read finishes before tearing down tasks
        while self.read_lock:
            time.sleep(0.1)  # wait for current read to complete

        cfg = self.nidaq_config
        if cfg.clock_task:
            self._burn_and_log_exception(cfg.clock_task.stop)
            self._burn_and_log_exception(cfg.clock_task.close)  # close the task to free resource on NIDAQ
            # self._burn_and_log_exception(cfg.counter_task.stop)  # will need to stop task if we move to continuous buffered acquisition
        self._burn_and_log_exception(cfg.counter_task.close)

    self.running = False
220
292
221
293
def close(self):
    """Release hardware resources by delegating to stop()."""
    self.stop()
223
295
224
- def sample_counts (self , n_samples = 1 ):
225
- '''
226
- Performs n_samples of batch reads from the NiDAQ.
227
-
228
- For each batch read (of size `num_data_samples_per_batch`), the
229
- total counts are summed. Additionally, because it's possible (though unlikely)
230
- for the NiDAQ to return fewer than `num_data_samples_per_batch` measurements,
231
- the actual number of data samples per batch are also recorded.
232
-
233
- Finally, a numpy array of shape (n_samples, 2) is returned, where
234
- the first element is the sum of the counts, and the second element is
235
- the actual number of data samples per batch.
236
-
237
- For example, if `num_data_samples_per_batch` is 5 and n_samples is 3,
238
- (typical values are 100 and 10, 100 and 1, 1000 and 1, etc)
239
-
240
- reading counts from the NiDAQ may return
241
-
242
- #sample 1
243
- raw_counts_1 = [3,5,4,6,4]
244
- sum_counts_1 = 22
245
- size_counts_1 = 5
246
- (22, 5)
247
- #sample 2
248
- raw_counts_2 = [5,5,7,3,4]
249
- sum_counts_2 = 24
250
- size_counts_2 = 5
251
- (24, 5)
252
- #sample 3
253
- raw_counts_3 = [5,3,5,7]
254
- sum_counts_3 = 20
255
- size_counts_2 = 4
256
- (20, 4)
257
-
258
- In this example, the numpy array is of shape (3, 2) and will be
259
- data = [
260
- [22, 5],
261
- [24, 5],
262
- [20, 4]
263
- ]
264
-
265
- With these data, and knowing the clock_rate, one can easily compute
266
- the count rate
267
296
268
- #removes rows where num samples per batch were zero (which would be a bug in the code)
269
- data = data[np.where(data[:,1] > 0)]
270
297
271
- #count rate is the mean counts per clock cycle multiplied by the clock rate.
272
- count_rate = clock_rate * data[:,0]/data[:,1]
273
- '''
274
-
275
- data = np .zeros ((n_samples , 2 ))
276
- for i in range (n_samples ):
277
- data_sample , samples_read = self ._read_samples ()
278
- if samples_read > 0 :
279
- data [i ][0 ] = np .sum (data_sample [:samples_read ])
280
- data [i ][1 ] = samples_read
281
- logger .info (f'batch data (sum counts, num clock cycles per batch): { data [i ]} ' )
282
- return data
283
298
0 commit comments