57 | 57 | "outputs": [],
58 | 58 | "source": [
59 | 59 | "def euclidean_distance(x,y):\n",
60 |    | - " eud = math.sqrt((x[0]-y[0])**2 + (x[1]-y[1])**2)\n",
61 |    | - " return eud"
   | 60 | + " eud = math.sqrt((x[0]-y[0])**2 + (x[1]-y[1])**2)\n",
   | 61 | + " return eud"
62 | 62 | ]
63 | 63 | },
64 | 64 | {
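
For reference, here is a runnable sketch of the helper as it reads after this hunk. The sample call at the end is illustrative only (the notebook's actual inputs are not shown here), and the `math.hypot` note is just an equivalent alternative, not what the notebook uses.

```python
import math

def euclidean_distance(x, y):
    """Straight-line distance between two 2-D points x and y."""
    eud = math.sqrt((x[0] - y[0]) ** 2 + (x[1] - y[1]) ** 2)
    return eud

# Illustrative call; math.hypot(x[0] - y[0], x[1] - y[1]) computes the same value.
print(euclidean_distance((0, 0), (3, 4)))  # 5.0
```
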
122 | 122 | },
123 | 123 | {
124 | 124 | "cell_type": "code",
125 |     | - "execution_count": 7,
    | 125 | + "execution_count": 5,
126 | 126 | "id": "1441a0e2-c044-4550-9f1f-1fe900bb3b0c",
127 | 127 | "metadata": {},
128 | 128 | "outputs": [],

138 | 138 | },
139 | 139 | {
140 | 140 | "cell_type": "code",
141 |     | - "execution_count": 8,
    | 141 | + "execution_count": 6,
142 | 142 | "id": "b228ba22-f80d-423c-b95b-5d12e1fb316c",
143 | 143 | "metadata": {},
144 | 144 | "outputs": [

148 | 148 | "82.81924421854173"
149 | 149 | ]
150 | 150 | },
151 |     | - "execution_count": 8,
    | 151 | + "execution_count": 6,
152 | 152 | "metadata": {},
153 | 153 | "output_type": "execute_result"
154 | 154 | }

168 | 168 | },
169 | 169 | {
170 | 170 | "cell_type": "code",
171 |     | - "execution_count": 9,
    | 171 | + "execution_count": 7,
172 | 172 | "id": "8883e4db-acc0-4d5c-85db-e864757f4b5d",
173 | 173 | "metadata": {},
174 | 174 | "outputs": [
175 | 175 | {
176 |     | - "ename": "AttributeError",
177 |     | - "evalue": "'NoneType' object has no attribute 'shape'",
178 |     | - "output_type": "error",
179 |     | - "traceback": [
180 |     | - "---------------------------------------------------------------------------",
181 |     | - "AttributeError                            Traceback (most recent call last)",
182 |     | - "/tmp/ipykernel_271863/4086475475.py in <module>\n      1 im = cv2.imread('samples/obt.png')\n      2 #im = cv2.resize(im, (600,480))\n----> 3 height,width,_ = im.shape\n      4 im_cx,im_cy = int(width/2),int(height/2)\n      5 print(im_cx,im_cy)\n",
183 |     | - "AttributeError: 'NoneType' object has no attribute 'shape'"
    | 176 | + "name": "stdout",
    | 177 | + "output_type": "stream",
    | 178 | + "text": [
    | 179 | + "319 238\n"
184 | 180 | ]
    | 181 | + },
    | 182 | + {
    | 183 | + "data": {
    | 184 | + "text/plain": [
    | 185 | + "113"
    | 186 | + ]
    | 187 | + },
    | 188 | + "execution_count": 7,
    | 189 | + "metadata": {},
    | 190 | + "output_type": "execute_result"
185 | 191 | }
186 | 192 | ],
187 | 193 | "source": [

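The removed error output above came from `cv2.imread` returning `None` (the image at `samples/obt.png` could not be read), so the following `im.shape` raised `AttributeError`. Below is a minimal sketch of that step with an explicit guard; the path is taken from the removed traceback, and the printed centre `319 238` is the value recorded in the updated output.

```python
import cv2

im = cv2.imread('samples/obt.png')      # path taken from the removed traceback
if im is None:                          # imread returns None instead of raising on a bad path
    raise FileNotFoundError("cv2.imread could not read 'samples/obt.png'")

height, width, _ = im.shape
im_cx, im_cy = int(width/2), int(height/2)   # image centre; the new output records 319 238
print(im_cx, im_cy)
```
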
200 | 206 | "res = cv2.GaussianBlur(res, (1,1), 0)\n",
201 | 207 | "res_gray = cv2.cvtColor(res, cv2.COLOR_BGR2GRAY)\n",
202 | 208 | "_, threshold = cv2.threshold(res_gray, 0, 255, cv2.THRESH_BINARY+cv2.THRESH_OTSU)\n",
203 |     | - "_, contours, _ = cv2.findContours(threshold, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)\n",
    | 209 | + "contours, _ = cv2.findContours(threshold, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)\n",
204 | 210 | "min_area = 30**2\n",
205 | 211 | "for contour in contours:\n",
206 | 212 | " area = cv2.contourArea(contour)\n",
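
This change tracks the OpenCV API difference: `cv2.findContours` returns `(image, contours, hierarchy)` in OpenCV 3.x but `(contours, hierarchy)` in 4.x. A small version-agnostic sketch follows; the synthetic image is only a stand-in for the notebook's Otsu-thresholded frame.

```python
import cv2
import numpy as np

# Stand-in binary image; the notebook uses its Otsu-thresholded grayscale frame here.
threshold = np.zeros((100, 100), dtype=np.uint8)
cv2.rectangle(threshold, (20, 20), (80, 80), 255, -1)   # one filled white square

# Indexing from the end works for both OpenCV 3.x (3 return values) and 4.x (2).
contours = cv2.findContours(threshold, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)[-2]
print(len(contours))   # 1 contour around the square
```
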
257 | 263 | },
258 | 264 | {
259 | 265 | "cell_type": "code",
260 |     | - "execution_count": 11,
    | 266 | + "execution_count": 8,
261 | 267 | "id": "ccb6f227-8abb-4975-99dc-1da2ba06e729",
262 | 268 | "metadata": {},
263 |     | - "outputs": [
264 |     | - {
265 |     | - "name": "stderr",
266 |     | - "output_type": "stream",
267 |     | - "text": [
268 |     | - "[ WARN:0] global /tmp/pip-req-build-h45n7_hz/opencv/modules/videoio/src/cap_v4l.cpp (890) open VIDEOIO(V4L2:/dev/video0): can't open camera by index\n"
269 |     | - ]
270 |     | - },
271 |     | - {
272 |     | - "ename": "AttributeError",
273 |     | - "evalue": "'NoneType' object has no attribute 'shape'",
274 |     | - "output_type": "error",
275 |     | - "traceback": [
276 |     | - "---------------------------------------------------------------------------",
277 |     | - "AttributeError                            Traceback (most recent call last)",
278 |     | - "/tmp/ipykernel_271863/154838433.py in <module>\n      5 while(cap.isOpened):\n      6     _,frame = cap.read()\n----> 7     height,width,_ = frame.shape\n      8     im_cx,im_cy = int(width/2),int(height/2)\n      9 \n",
279 |     | - "AttributeError: 'NoneType' object has no attribute 'shape'"
280 |     | - ]
281 |     | - }
282 |     | - ],
    | 269 | + "outputs": [],
283 | 270 | "source": [
284 | 271 | "cap = cv2.VideoCapture(0)\n",
285 |     | - "cap.set(cv2.CAP_PROP_FRAME_WIDTH, 2000)\n",
286 |     | - "cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 2000)\n",
    | 272 | + "# cap.set(cv2.CAP_PROP_FRAME_WIDTH, 2000)\n",
    | 273 | + "# cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 2000)\n",
287 | 274 | "min_area = 50**2\n",
288 | 275 | "while(cap.isOpened):\n",
289 | 276 | " _,frame = cap.read()\n",