import argparse
import logging
- from time import time, perf_counter, sleep
- from dataclasses import dataclass
+ from time import time, ctime, perf_counter, sleep
+ from types import SimpleNamespace  # attribute-style containers used for text/pn below
import socket  # udp networking
import struct  # binary packing
from picamera2 import Picamera2, Preview, MappedArray  # Raspberry Pi camera
from libcamera import Transform  # taking selfies, so used to mirror image
import cv2  # OpenCV, for blob detection

- print("\n\nSERVER: Starting PhilNav\n")
- preview_text = "Adjust the camera controls listed with --help such that you get a mostly black picture with bright white reflective IR sticker in the center. The controls default to what worked for me via trial and error."
+ # SimpleNamespace gives attribute access (text.intro); a plain dict literal here would
+ # raise AttributeError on the assignments below.
+ text = SimpleNamespace()
+ text.intro = "\n\nSERVER: Starting PhilNav\n\nWelcome to PhilNav, I'm Phil!\n\nIf running PhilNav for the first time, use --help and --preview to set up your camera.\n"
+ text.preview = "\nAdjust the camera controls (listed with --help) until you get a mostly black picture with a bright white reflective IR sticker in the center. The controls default to what worked for Phil via trial and error.\n"
+ print(text.intro)
+

# parse command line arguments
parser = argparse.ArgumentParser()
parser.add_argument(
    "--ip",
    required=True,
    type=str,
-     help="remote ip address of PC that will receive mouse movements",
+     help="remote ip address of PC that will receive mouse movements (find your PC's home network ip, not internet ip; usually 192.x.x.x, 172.x.x.x, or 10.x.x.x)",
)
parser.add_argument(
    "-p", "--port", type=int, default=4245, help="send to remote port, default 4245"
)
parser.add_argument(
-     "-v", "--verbose", action="store_true", help="provide verbose logging"
+     "-v", "--verbose", action="store_true", help="enable verbose logging"
)
parser.add_argument(
-     "--preview", action="store_true", help="Use when logged into Raspberry Pi Gui; will show camera preview. " + preview_text
+     "--preview", action="store_true", help="use when logged into the Raspberry Pi GUI; shows the camera preview"
)
parser.add_argument(
    "--fps", type=float, default=75.0, help="camera FrameRate, default 75"
@@ ... @@ (width, height, gain, brightness, contrast, exposure and saturation arguments not shown in this hunk)
"--no-hflip" , action = "store_true" , help = "images are selfies and flipped horizontally by default"
58
60
)
59
61
parser .add_argument (
60
- "--blob-color" , type = int , default = 255 , help = "OpenCV blob detection color, default 255 (white; I believe it's grayscale 0-255)"
62
+ "--blob-size" , type = int , default = 15 , help = "OpenCV blob minimum size, default 15"
63
+ )
64
+ parser .add_argument (
65
+ "--blob-color" , type = int , default = 255 , help = "OpenCV blob detection color, default 255 (white = 255, or black = 0)"
66
+ )
67
+ parser .add_argument (
68
+ "--timeout" , type = int , default = (60 * 60 * 8 ), help = "exit after n seconds, default 60*60*8 = 8 hours, one workday"
61
69
)
62
70
args = parser .parse_args ()
63
71
64
72
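
+ # Example invocation (script name and IP are illustrative):
+ #   python3 philnav_server.py --ip 192.168.1.50 --fps 75 --preview --verbose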

if args.verbose:
    logging.getLogger().setLevel(logging.DEBUG)
-     logging.info("Logging verbosely\n")
+     logging.info("\n>>>>> Logging verbosely <<<<<\n")

if args.preview:
-     print(preview_text + "\n")
- else:
-     print("If running PhilNav for the first time, use --help and --preview to set up your camera.\n")
+     print(text.preview)

- # The camera can be configured and controlled with different settings in each.
- # Not entirely sure the difference.
- config_main = {
-     "size": (args.width, args.height)
- }
- picam2 = Picamera2()
- # Not entirely sure how configurations work, preview/main etc.
hflip_num = 1
if args.no_hflip:
    hflip_num = 0
- config = picam2.create_preview_configuration(main=config_main, transform=Transform(hflip=hflip_num))
+
+
+ picam2 = Picamera2()
+ # The camera can be "configured" and "controlled" with different settings in each.
+ config_main = {"size": (args.width, args.height)}
+ # Not entirely sure how configurations work, preview/main etc.
+ config = picam2.create_preview_configuration(
+     main=config_main, transform=Transform(hflip=hflip_num))
picam2.configure(config)
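+ # (In Picamera2, a configuration fixes stream properties such as size, format and
+ # transform and is applied before start(); set_controls() below adjusts runtime
+ # camera controls like gain, exposure and frame rate.)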

- controls_main = {
+ controls = {
    "AnalogueGain": args.gain,
    "Brightness": args.brightness,
    "Contrast": args.contrast,
    "ExposureValue": args.exposure,
    "Saturation": args.saturation,
    "FrameRate": args.fps
}
- picam2.set_controls(controls_main)
+ picam2.set_controls(controls)

if args.preview:
    picam2.start_preview(Preview.QT)
else:
    picam2.start_preview(Preview.NULL)

- # Not entirely sure the difference between start_preview and start.
+ # Not sure if we need both start_preview and start.
picam2.start()
sleep(1)  # let camera warm up

+
+ # show intro again
+ print(text.intro)
+ if args.preview:
+     print(text.preview)
+
+

# OpenCV blob detection config
params = cv2.SimpleBlobDetector_Params()
- params.filterByColor = True
params.filterByArea = True
+ params.minArea = args.blob_size
+ params.filterByColor = True
params.blobColor = args.blob_color
- params.minArea = 15
params.minThreshold = 200
- params.minRepeatability = 1
+ params.thresholdStep = 50
+ params.minRepeatability = 2
+ params.minDistBetweenBlobs = 100
params.filterByCircularity = False
params.filterByConvexity = False
params.filterByInertia = False
- params.minDistBetweenBlobs = 100
detector = cv2.SimpleBlobDetector_create(params)

+
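+ # How the detector uses these: it thresholds the frame from minThreshold upward in
+ # steps of thresholdStep, finds connected components in each binary image, merges
+ # centers closer than minDistBetweenBlobs, and keeps a blob only if it appears in
+ # at least minRepeatability of the thresholded images.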

# Set up UDP socket to receiving computer
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)  # datagrams over UDP
+ sock_addr = (args.ip, args.port)

+ # (pn = PhilNav) Global for storing data from loop to loop, plus stats for debugging.
+ # SimpleNamespace (not a plain dict) so the fields below can be set and read with
+ # attribute access like pn.x and pn.frame_num.
+ pn = SimpleNamespace()
+ pn.started_at = time()
+ pn.frame_started_at = time()
+ pn.frame_perf = perf_counter()
+ pn.frame_between = perf_counter()
+ pn.frame_num = 0
+ pn.x = 0.0
+ pn.y = 0.0
+ pn.debug_num = 0
+ pn.keypoint = None  # for debugging inspection

- # Globals for storing data from loop-to-loop, also stats for debugging
- @dataclass
- class PhilNav:
-     started_at = time()
-     frame_started_at = time()
-     frame_start = perf_counter()
-     frame_num = 0
-     x = 0.0
-     y = 0.0
-     keypoint = None  # for debugging inspection

# This is where the Magic happens! The camera should pick up nothing but a white
# dot from your reflective IR sticker. I use opencv blob detection to track its
# (x, y) coordinates and send the changes to the receiving computer, which moves
# the mouse.
def blobby(request):
+     global pn, detector, args, sock_addr
+     pn.frame_perf = perf_counter()
+     pn.frame_started_at = time()
+     pn.frame_num += 1
+     ms_frame_between = (perf_counter() - pn.frame_between) * 1000
+     x_diff = 0.0
+     y_diff = 0.0
+
    # MappedArray gives direct access to the captured camera frame
    with MappedArray(request, "main") as m:
-         PhilNav.frame_num += 1
-         x_diff = 0.0
-         y_diff = 0.0
-
        # Track the IR sticker
        keypoints = detector.detect(m.array)
        if args.preview:
@@ -156,17 +177,17 @@ def blobby(request):
                cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS,
            )

-         # Ideally should be exactly one keypoint
+         # Ideally should be exactly one keypoint, or use the biggest
        if len(keypoints) > 0:
+             # max() needs a callable key, not the string "size"
+             kp = pn.keypoint = max(keypoints, key=lambda k: k.size)
            # Compare the (x, y) coordinates from last frame
-             kp = PhilNav.keypoint = keypoints[0]
            x_new, y_new = kp.pt
-             x_diff = x_new - PhilNav.x
-             y_diff = y_new - PhilNav.y
-             PhilNav.x = x_new
-             PhilNav.y = y_new
+             x_diff = x_new - pn.x
+             y_diff = y_new - pn.y
+             pn.x = x_new
+             pn.y = y_new

-         # If the mouse has moved smoothly, but not "jumped"...
+         # If the IR sticker has moved smoothly, but not "jumped"...
        # Jumping can occur if multiple blobs are detected, such as other
        # IR reflective surfaces in the camera's view, like glasses lenses.
        if (
@@ -175,43 +196,48 @@ def blobby(request):
            and y_diff ** 2 < 50
        ):
            # Send the (x_diff, y_diff) to the receiving computer.
-             # For performance stats, I'm also sending the frame time on
-             # Raspberry Pi; both absolute and relative. Absolute time doesn't
-             # work well because the Raspberry Pi clock and PC clock will not
-             # be synced to within 1 ms of each other.
+             # For performance stats, I'm also sending the time spent on
+             # Raspberry Pi.
            #
            # 48 bytes of 6 doubles in binary C format. Why? Because it's
            # OpenTrack's protocol.
            # struct.pack('dddddd', x, y, z, pitch, yaw, roll)
            # PhilNav uses x, y as x_diff, y_diff and moves the mouse
            # relative to its current position.
            # https://github.com/opentrack/opentrack/issues/747
-             time_spent = perf_counter() - PhilNav.frame_start
-             MESSAGE = struct.pack(
-                 "dddddd", x_diff, y_diff, 0, 0, time_spent, PhilNav.frame_started_at)
-             sock.sendto(MESSAGE, (args.ip, args.port))
+             ms_time_spent = (perf_counter() - pn.frame_perf) * 1000
+             msg = struct.pack("dddddd",
+                               x_diff, y_diff,
+                               0, 0,
+                               pn.frame_started_at, ms_time_spent)
+             sock.sendto(msg, sock_addr)

        # Log once per second
-         if PhilNav.frame_num % args.fps == 0:
-             fps = PhilNav.frame_num / (time() - PhilNav.started_at)
-             ms = (perf_counter() - PhilNav.frame_start) * 1000
+         if args.verbose and (pn.frame_num % int(args.fps) == 0):
+             pn.debug_num += 1
+             c_time = ctime()
+             fps_measured = pn.frame_num / (time() - pn.started_at)
+             ms_measured = (perf_counter() - pn.frame_perf) * 1000
+             # display legend every 5 seconds
+             if pn.debug_num % 5 == 1:
+                 logging.info(
+                     f"{c_time} - {'Frame':>8}, ({'x_diff':>8}, {'y_diff':>8}), {'FPS':>8}, {'cv ms':>8}, {'btw ms':>8}")
            logging.info(
-                 f"Frame: {PhilNav.frame_num}, Diff: ({int(x_diff)}, {int(y_diff)}), FPS: {int(fps)}, loc ms: {int(ms)}"
-             )
+                 f"{c_time} - {pn.frame_num:>8}, ({x_diff:> 8.2f}, {y_diff:> 8.2f}), {int(fps_measured):>8}, {int(ms_measured):>8}, {int(ms_frame_between):>8}")

-         # I'm setting these at the end rather than the beginning, because I want
-         # to make sure to include the time capturing the image from the camera.
-         PhilNav.frame_started_at = time()
-         PhilNav.frame_start = perf_counter()
+         # Time between capturing frames from the camera.
+         pn.frame_between = perf_counter()


- # Run the loop forever until Ctrl-C
+ # Run the loop until timeout or Ctrl-C
try:
    picam2.pre_callback = blobby
-     sleep(60 * 60 * 8)  # run for 8 hours, or 1 workday
+     sleep(args.timeout)  # turn off at some point
except KeyboardInterrupt:
    pass

+
+ # cleanup
picam2.stop_preview()
picam2.stop()
picam2.close()
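
Receiving side (sketch): each datagram sent above is 48 bytes, six C doubles packed with struct.pack("dddddd", ...) in OpenTrack's format, where PhilNav repurposes the first two doubles as x_diff and y_diff and the last two as the frame timestamp and the milliseconds spent on the Pi. The snippet below is an illustrative sketch of a minimal receiver, not the actual PhilNav client; the bind address, the hard-coded port, and printing instead of moving the mouse are assumptions.

import socket
import struct

PORT = 4245  # must match the server's --port

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(("0.0.0.0", PORT))  # listen on all interfaces

while True:
    data, addr = sock.recvfrom(48)  # one "dddddd" datagram = 6 * 8 bytes
    x_diff, y_diff, _, _, frame_started_at, ms_spent = struct.unpack("dddddd", data)
    # A real client would move the mouse relative to its current position here,
    # e.g. with a library such as pynput or pyautogui.
    print(f"dx={x_diff:.2f} dy={y_diff:.2f} (Pi spent {ms_spent:.1f} ms on this frame)")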