
Commit 7ad1392

refactoring server
1 parent 82bcd62 · commit 7ad1392

1 file changed: +93 -67 lines

server_raspberrypi/main.py

@@ -1,33 +1,35 @@
 import argparse
 import logging
-from time import time, perf_counter, sleep
-from dataclasses import dataclass
+from time import time, ctime, perf_counter, sleep
 import socket # udp networking
 import struct # binary packing
 from picamera2 import Picamera2, Preview, MappedArray # Raspberry Pi camera
 from libcamera import Transform # taking selfies, so used to mirror image
 import cv2 # OpenCV, for blob detection
 
-print("\n\nSERVER: Starting PhilNav\n")
 
-preview_text = "Adjust the camera controls listed with --help such that you get a mostly black picture with bright white reflective IR sticker in the center. The controls default to what worked for me via trial and error."
+text = argparse.Namespace()
+text.intro = "\n\nSERVER: Starting PhilNav\n\nWelcome to PhilNav, I'm Phil!\n\nIf running PhilNav for the first time, use --help and --preview to set up your camera.\n"
+text.preview = "\nAdjust the camera controls (listed with --help) until you get a mostly black picture with bright white reflective IR sticker in the center. The controls default to what worked for Phil via trial and error.\n"
+print(text.intro)
+
 
 # parse command line arguments
 parser = argparse.ArgumentParser()
 parser.add_argument(
     "--ip",
     required=True,
     type=str,
-    help="remote ip address of PC that will receive mouse movements",
+    help="remote ip address of PC that will receive mouse movements (find your PC's home network ip, not internet ip; usually 192.x.x.x, 172.x.x.x, or 10.x.x.x)",
 )
 parser.add_argument(
     "-p", "--port", type=int, default=4245, help="send to remote port, default 4245"
 )
 parser.add_argument(
-    "-v", "--verbose", action="store_true", help="provide verbose logging"
+    "-v", "--verbose", action="store_true", help="enable verbose logging"
 )
 parser.add_argument(
-    "--preview", action="store_true", help="Use when logged into Raspberry Pi Gui; will show camera preview. " + preview_text
+    "--preview", action="store_true", help="Use when logged into Raspberry Pi Gui; will show camera preview."
 )
 parser.add_argument(
     "--fps", type=float, default=75.0, help="camera FrameRate, default 75"
@@ -57,91 +59,110 @@
     "--no-hflip", action="store_true", help="images are selfies and flipped horizontally by default"
 )
 parser.add_argument(
-    "--blob-color", type=int, default=255, help="OpenCV blob detection color, default 255 (white; I believe it's grayscale 0-255)"
+    "--blob-size", type=int, default=15, help="OpenCV blob minimum size, default 15"
+)
+parser.add_argument(
+    "--blob-color", type=int, default=255, help="OpenCV blob detection color, default 255 (white = 255, or black = 0)"
+)
+parser.add_argument(
+    "--timeout", type=int, default=(60*60*8), help="exit after n seconds, default 60*60*8 = 8 hours, one workday"
 )
 args = parser.parse_args()
 
 if args.verbose:
     logging.getLogger().setLevel(logging.DEBUG)
-    logging.info(" Logging verbosely\n")
+    logging.info("\n>>>>> Logging verbosely <<<<<\n")
 
 if args.preview:
-    print(preview_text + "\n")
-else:
-    print("If running PhilNav for the first time, use --help and --preview to set up your camera.\n")
+    print(text.preview)
 
-# The camera can be configured and controlled with different settings in each.
-# Not entirely sure the difference.
-config_main = {
-    "size": (args.width, args.height)
-}
-picam2 = Picamera2()
-# Not entirely sure how configurations work, preview/main etc.
 hflip_num = 1
 if args.no_hflip:
     hflip_num = 0
-config = picam2.create_preview_configuration(main=config_main, transform=Transform(hflip=hflip_num))
+
+
+picam2 = Picamera2()
+# The camera can be "configured" and "controlled" with different settings in each.
+config_main = {"size": (args.width, args.height)}
+# Not entirely sure how configurations work, preview/main etc.
+config = picam2.create_preview_configuration(
+    main=config_main, transform=Transform(hflip=hflip_num))
 picam2.configure(config)
 
-controls_main = {
+controls = {
     "AnalogueGain": args.gain,
     "Brightness": args.brightness,
     "Contrast": args.contrast,
     "ExposureValue": args.exposure,
     "Saturation": args.saturation,
     "FrameRate": args.fps
 }
-picam2.set_controls(controls_main)
+picam2.set_controls(controls)
 
 if args.preview:
     picam2.start_preview(Preview.QT)
 else:
     picam2.start_preview(Preview.NULL)
 
-# Not entirely sure the difference between start_preview and start.
+# Not sure if we need both start_preview and start.
 picam2.start()
 sleep(1) # let camera warm up
 
+
+# show intro again
+print(text.intro)
+if args.preview:
+    print(text.preview)
+
+
 # OpenCV blob detection config
 params = cv2.SimpleBlobDetector_Params()
-params.filterByColor = True
 params.filterByArea = True
+params.minArea = args.blob_size
+params.filterByColor = True
 params.blobColor = args.blob_color
-params.minArea = 15
 params.minThreshold = 200
-params.minRepeatability = 1
+params.thresholdStep = 50
+params.minRepeatability = 2
+params.minDistBetweenBlobs = 100
 params.filterByCircularity = False
 params.filterByConvexity = False
 params.filterByInertia = False
-params.minDistBetweenBlobs = 100
 detector = cv2.SimpleBlobDetector_create(params)
 
+
 # Set up UDP socket to receiving computer
 sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # datagrams over UDP
+sock_addr = (args.ip, args.port)
 
+# (pn = PhilNav) Global for storing data from loop-to-loop, also stats for debugging
+pn = argparse.Namespace()
+pn.started_at = time()
+pn.frame_started_at = time()
+pn.frame_perf = perf_counter()
+pn.frame_between = perf_counter()
+pn.frame_num = 0
+pn.x = 0.0
+pn.y = 0.0
+pn.debug_num = 0
+pn.keypoint = None # for debugging inspection
 
-# Globals for storing data from loop-to-loop, also stats for debugging
-@dataclass
-class PhilNav:
-    started_at = time()
-    frame_started_at = time()
-    frame_start = perf_counter()
-    frame_num = 0
-    x = 0.0
-    y = 0.0
-    keypoint = None # for debugging inspection
 
 # This is where the Magic happens! The camera should pick up nothing but a white
 # dot from your reflective IR sticker. I use opencv blob detection to track its
 # (x, y) coordinates and send the changes to the receiving computer, which moves
 # the mouse.
 def blobby(request):
+    global pn, detector, args, sock_addr
+    pn.frame_perf = perf_counter()
+    pn.frame_started_at = time()
+    pn.frame_num += 1
+    ms_frame_between = (perf_counter() - pn.frame_between) * 1000
+    x_diff = 0.0
+    y_diff = 0.0
+
     # MappedArray gives direct access to the captured camera frame
     with MappedArray(request, "main") as m:
-        PhilNav.frame_num += 1
-        x_diff = 0.0
-        y_diff = 0.0
-
         # Track the IR sticker
         keypoints = detector.detect(m.array)
@@ -156,17 +177,17 @@ def blobby(request):
                 cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS,
             )
 
-        # Ideally should be exactly one keypoint
+        # Ideally should be exactly one keypoint, or use biggest
         if len(keypoints) > 0:
+            kp = pn.keypoint = max(keypoints, key=lambda k: k.size)
             # Compare the (x, y) coordinates from last frame
-            kp = PhilNav.keypoint = keypoints[0]
             x_new, y_new = kp.pt
-            x_diff = x_new - PhilNav.x
-            y_diff = y_new - PhilNav.y
-            PhilNav.x = x_new
-            PhilNav.y = y_new
+            x_diff = x_new - pn.x
+            y_diff = y_new - pn.y
+            pn.x = x_new
+            pn.y = y_new
 
-            # If the mouse has moved smoothly, but not "jumped"...
+            # If the IR sticker has moved smoothly, but not "jumped"...
             # Jumping can occur if multiple blobs are detected, such as other
             # IR reflective surfaces in the camera's view, like glasses lenses.
             if (
@@ -175,43 +196,48 @@ def blobby(request):
                 and y_diff**2 < 50
             ):
                 # Send the (x_diff, y_diff) to the receiving computer.
-                # For performance stats, I'm also sending the frame time on
-                # Raspberry Pi; both absolute and relative. Absolute time doesn't
-                # work well because the Raspberry Pi clock and PC clock will not
-                # be synced to within 1 ms of each other.
+                # For performance stats, I'm also sending the time spent on
+                # Raspberry Pi.
                 #
                 # 48 bytes of 6 doubles in binary C format. Why? Because it's
                 # OpenTrack's protocol.
                 # struct.pack('dddddd', x, y, z, pitch, yaw, roll)
                 # PhilNav uses x, y as x_diff, y_diff and moves the mouse
                 # relative to its current position.
                 # https://github.com/opentrack/opentrack/issues/747
-                time_spent = perf_counter() - PhilNav.frame_start
-                MESSAGE = struct.pack(
-                    "dddddd", x_diff, y_diff, 0, 0, time_spent, PhilNav.frame_started_at)
-                sock.sendto(MESSAGE, (args.ip, args.port))
+                ms_time_spent = (perf_counter() - pn.frame_perf)*1000
+                msg = struct.pack("dddddd",
+                                  x_diff, y_diff,
+                                  0, 0,
+                                  pn.frame_started_at, ms_time_spent)
+                sock.sendto(msg, sock_addr)
 
         # Log once per second
-        if PhilNav.frame_num % args.fps == 0:
-            fps = PhilNav.frame_num / (time() - PhilNav.started_at)
-            ms = (perf_counter() - PhilNav.frame_start) * 1000
+        if args.verbose and (pn.frame_num % int(args.fps) == 0):
+            pn.debug_num += 1
+            c_time = ctime()
+            fps_measured = pn.frame_num / (time() - pn.started_at)
+            ms_measured = (perf_counter() - pn.frame_perf) * 1000
+            # display legend every 5 seconds
+            if pn.debug_num % 5 == 1:
+                logging.info(
+                    f"{c_time} - {'Frame':>8}, ({'x_diff':>8}, {'y_diff':>8}), {'FPS':>8}, {'cv ms':>8}, {'btw ms':>8}")
             logging.info(
-                f"Frame: {PhilNav.frame_num}, Diff: ({int(x_diff)}, {int(y_diff)}), FPS: {int(fps)}, loc ms: {int(ms)}"
-            )
+                f"{c_time} - {pn.frame_num:>8}, ({x_diff:> 8.2f}, {y_diff:> 8.2f}), {int(fps_measured):>8}, {int(ms_measured):>8}, {int(ms_frame_between):>8}")
 
-        # I'm setting these at the end rather than the beginning, because I want
-        # to make sure to include the time capturing the image from the camera.
-        PhilNav.frame_started_at = time()
-        PhilNav.frame_start = perf_counter()
+        # Time between capturing frames from the camera.
+        pn.frame_between = perf_counter()
 
 
-# Run the loop forever until Ctrl-C
+# Run the loop until timeout or Ctrl-C
 try:
     picam2.pre_callback = blobby
-    sleep(60 * 60 * 8) # run for 8 hours, or 1 workday
+    sleep(args.timeout) # turn off at some point
 except KeyboardInterrupt:
     pass
 
+
+# cleanup
 picam2.stop_preview()
 picam2.stop()
 picam2.close()
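
Note on the blob-detection changes above: minArea is now driven by --blob-size, minRepeatability goes from 1 to 2, and thresholdStep and minDistBetweenBlobs are set explicitly. Below is a minimal standalone sketch of the same OpenCV configuration for experimenting outside the camera loop. The test-image path, the explicit maxThreshold, and the "pick the biggest keypoint" step are assumptions for illustration; they are not part of this commit.

import cv2

# Mirror the detector settings used in main.py (values match the new defaults).
params = cv2.SimpleBlobDetector_Params()
params.filterByArea = True
params.minArea = 15                # --blob-size default
params.filterByColor = True
params.blobColor = 255             # white = 255, black = 0
params.minThreshold = 200
params.maxThreshold = 255          # assumption: set explicitly so two threshold passes (200, 250) run
params.thresholdStep = 50
params.minRepeatability = 2        # blob must appear in at least 2 threshold passes
params.minDistBetweenBlobs = 100
params.filterByCircularity = False
params.filterByConvexity = False
params.filterByInertia = False
detector = cv2.SimpleBlobDetector_create(params)

# Hypothetical grayscale frame saved from the camera; any mostly-black image
# with one bright dot will do.
frame = cv2.imread("ir_frame.png", cv2.IMREAD_GRAYSCALE)
keypoints = detector.detect(frame)
if keypoints:
    biggest = max(keypoints, key=lambda k: k.size)  # same "use biggest" idea as blobby()
    print(f"blob at ({biggest.pt[0]:.1f}, {biggest.pt[1]:.1f}), size {biggest.size:.1f}")
else:
    print("no blob found - adjust camera controls or lower --blob-size")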
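
Note on the UDP packet: the hunk above keeps the comment describing OpenTrack's 48-byte message of six doubles, and the new struct.pack call sends x_diff, y_diff, 0, 0, frame_started_at, ms_time_spent. Below is a minimal receiver sketch, assuming the default --port 4245 and that field order. It is illustrative only, not the repo's actual client, and the mouse-moving step is left out.

import socket
import struct

# Listen on the same port the server sends to (--port, default 4245).
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(("0.0.0.0", 4245))

while True:
    data, addr = sock.recvfrom(48)  # 6 doubles * 8 bytes each
    # Field order assumed from the sendto() call in this commit.
    x_diff, y_diff, _, _, frame_started_at, ms_time_spent = struct.unpack("dddddd", data)
    print(f"from {addr[0]}: dx={x_diff:.2f} dy={y_diff:.2f} cv={ms_time_spent:.1f}ms")
    # a real client would now move the mouse cursor by (x_diff, y_diff)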
