|
155 | 155 | "\n", |
156 | 156 | " inferencer = OpenVINOInferencer(\n", |
157 | 157 | " path=openvino_model_path, # Path to the OpenVINO IR model.\n", |
158 | | - " metadata=metadata_path, # Path to the metadata file.\n", |
159 | 158 | " device=\"CPU\", # We would like to run it on an Intel CPU.\n", |
160 | 159 | " )\n", |
161 | 160 | "\n", |
162 | 161 | " if dataset_path.exists() is False:\n", |
163 | | - " print(\"Make sure you have the dataset in a proper folder or it i already created\")\n", |
| 162 | + " print(\n", |
| 163 | + "Make sure you have the dataset in a proper folder or it is already created"\n", |
| 164 | + " )\n", |
164 | 165 | "else:\n", |
165 | 166 | " dataset_path.mkdir(parents=True, exist_ok=True)" |
166 | 167 | ] |
|
270 | 271 | " # Dobot/general imports\n", |
271 | 272 | " # pylint: disable=wrong-import-order\n", |
272 | 273 | " import DobotDllType as dType\n", |
273 | | - " \n", |
| 274 | + "\n", |
274 | 275 | " CON_STR = {\n", |
275 | 276 | " dType.DobotConnect.DobotConnect_NoError: \"DobotConnect_NoError\",\n", |
276 | 277 | " dType.DobotConnect.DobotConnect_NotFound: \"DobotConnect_NotFound\",\n", |
277 | 278 | " dType.DobotConnect.DobotConnect_Occupied: \"DobotConnect_Occupied\",\n", |
278 | 279 | " }\n", |
279 | | - " \n", |
| 280 | + "\n", |
280 | 281 | " # Load Dll and get the CDLL object\n", |
281 | 282 | " api = dType.load()\n", |
282 | | - " \n", |
| 283 | + "\n", |
283 | 284 | " # Connect Dobot\n", |
284 | 285 | " state = dType.ConnectDobot(api, \"\", 115200)[0]\n", |
285 | 286 | " print(\"Connect status:\", CON_STR[state])\n", |
286 | | - " \n", |
| 287 | + "\n", |
287 | 288 | " use_popup = True # True\n", |
288 | | - " \n", |
| 289 | + "\n", |
289 | 290 | " if state == dType.DobotConnect.DobotConnect_NoError:\n", |
290 | 291 | " print(\n", |
291 | 292 | " \"[HOME] Restore to home position at first launch, please wait 30 seconds after turnning on the Dobot Magician.\",\n", |
|
295 | 296 | " \" the same side where the pick and place arm is.\",\n", |
296 | 297 | " )\n", |
297 | 298 | " print(\"[PLACING BLOCKS] Place the blocks by 3x3.\")\n", |
298 | | - " print(\"[CALIBRATION POINT] Looking from the back of Dobot, the top left block is the calibration point.\")\n", |
299 | | - " print(\"[CALIBRATION] Set the first variable to 0 to test the calibration point, then set 1 to start running.\")\n", |
| 299 | + " print(\n", |
| 300 | + " \"[CALIBRATION POINT] Looking from the back of Dobot, the top left block is the calibration point.\"\n", |
| 301 | + " )\n", |
| 302 | + " print(\n", |
| 303 | + " \"[CALIBRATION] Set the first variable to 0 to test the calibration point, then set 1 to start running.\"\n", |
| 304 | + " )\n", |
300 | 305 | " print(\n", |
301 | 306 | " \"[DIRECTION] Standing behind Dobot Magician facing its front direction, X is front and back direction, \"\n", |
302 | 307 | " \"Y is left and right direction. \",\n", |
303 | 308 | " )\n", |
304 | 309 | " print(\"[CONNECTION] Motor of the conveyor belt connects to port Stepper1.\")\n", |
305 | | - " \n", |
| 310 | + "\n", |
306 | 311 | " Calibration__0__Run__1 = 1\n", |
307 | 312 | " Calibration_X = 221.2288\n", |
308 | 313 | " Calibration_Y = -117.0036\n", |
|
325 | 330 | " MM_PER_CRICLE = 3.1415926535898 * 36.0\n", |
326 | 331 | " vel = float(0) * STEP_PER_CRICLE / MM_PER_CRICLE\n", |
327 | 332 | " dType.SetEMotorEx(api, 1, 0, int(vel), 1)\n", |
328 | | - " \n", |
| 333 | + "\n", |
329 | 334 | " if Calibration__0__Run__1:\n", |
330 | 335 | " for _ in range(9):\n", |
331 | 336 | " # initializing and starting multi-threaded webcam input stream\n", |
332 | 337 | " cam_stream = CameraStream(stream_id=0) # 0 id for main camera\n", |
333 | 338 | " cam_stream.start()\n", |
334 | | - " \n", |
335 | | - " dType.SetPTPCmdEx(api, 0, (Calibration_X - j), (Calibration_Y - k), (Calibration_Z - 10), 0, 1)\n", |
| 339 | + "\n", |
| 340 | + " dType.SetPTPCmdEx(\n", |
| 341 | + " api,\n", |
| 342 | + " 0,\n", |
| 343 | + " (Calibration_X - j),\n", |
| 344 | + " (Calibration_Y - k),\n", |
| 345 | + " (Calibration_Z - 10),\n", |
| 346 | + " 0,\n", |
| 347 | + " 1,\n", |
| 348 | + " )\n", |
336 | 349 | " dType.SetEndEffectorSuctionCupEx(api, 1, 1)\n", |
337 | | - " dType.SetPTPCmdEx(api, 0, (Place_X - 0), (Place_Y - 0), (Place_Z + 90), 0, 1)\n", |
338 | | - " \n", |
| 350 | + " dType.SetPTPCmdEx(\n", |
| 351 | + " api, 0, (Place_X - 0), (Place_Y - 0), (Place_Z + 90), 0, 1\n", |
| 352 | + " )\n", |
| 353 | + "\n", |
339 | 354 | " # adding a delay for simulating video processing time\n", |
340 | 355 | " delay = 0.3 # delay value in seconds\n", |
341 | 356 | " time.sleep(delay)\n", |
342 | 357 | " # Capture a frame from the video player - start thread\n", |
343 | 358 | " frame = cam_stream.read()\n", |
344 | | - " \n", |
| 359 | + "\n", |
345 | 360 | " if acquisition:\n", |
346 | 361 | " # create filename to next frame\n", |
347 | 362 | " filename = create_filename(path=(dataset_path / folder))\n", |
348 | 363 | " cv2.imwrite(filename, frame)\n", |
349 | 364 | " dType.SetPTPCmdEx(api, 0, Place_X, Place_Y, Place_Z, 0, 1)\n", |
350 | | - " \n", |
| 365 | + "\n", |
351 | 366 | " else:\n", |
352 | 367 | " # Get the inference results.\n", |
353 | 368 | " frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)\n", |
354 | 369 | " # INFERENCE WITH OPENVINO\n", |
355 | 370 | " predictions = inferencer.predict(image=frame)\n", |
356 | 371 | " print(predictions.pred_score)\n", |
357 | | - " if predictions.pred_score > 0.48: # modify the threshold depending of your needs\n", |
358 | | - " dType.SetPTPCmdEx(api, 0, Anomaly_X, Anomaly_Y, Anomaly_Z, 0, 1) # define point for abnormalities\n", |
| 372 | + " if (\n", |
| 373 | + " predictions.pred_score > 0.48\n", |
| 374 | + "            ):  # modify the threshold depending on your needs\n", |
| 375 | + " dType.SetPTPCmdEx(\n", |
| 376 | + " api, 0, Anomaly_X, Anomaly_Y, Anomaly_Z, 0, 1\n", |
| 377 | + " ) # define point for abnormalities\n", |
359 | 378 | " else:\n", |
360 | 379 | " dType.SetPTPCmdEx(api, 0, Place_X, Place_Y, Place_Z, 0, 1)\n", |
361 | | - " \n", |
| 380 | + "\n", |
362 | 381 | " dType.SetEndEffectorSuctionCupEx(api, 0, 1)\n", |
363 | 382 | " j = j + 25\n", |
364 | 383 | " if j == 75:\n", |
|
373 | 392 | " filename = None\n", |
374 | 393 | " score = 0\n", |
375 | 394 | " while True:\n", |
376 | | - " if (dType.gettime()[0]) - time_start >= 0.5: # Time over conveyor belt\n", |
| 395 | + " if (\n", |
| 396 | + " (dType.gettime()[0]) - time_start >= 0.5\n", |
| 397 | + " ): # Time over conveyor belt\n", |
377 | 398 | " STEP_PER_CRICLE = 360.0 / 1.8 * 10.0 * 16.0\n", |
378 | 399 | " MM_PER_CRICLE = 3.1415926535898 * 36.0\n", |
379 | 400 | " vel = float(0) * STEP_PER_CRICLE / MM_PER_CRICLE\n", |
|
397 | 418 | ], |
398 | 419 | "metadata": { |
399 | 420 | "kernelspec": { |
400 | | - "display_name": "Python 3 (ipykernel)", |
| 421 | + "display_name": ".venv", |
401 | 422 | "language": "python", |
402 | 423 | "name": "python3" |
403 | 424 | }, |
|
411 | 432 | "name": "python", |
412 | 433 | "nbconvert_exporter": "python", |
413 | 434 | "pygments_lexer": "ipython3", |
414 | | - "version": "3.12.7" |
415 | | - }, |
416 | | - "vscode": { |
417 | | - "interpreter": { |
418 | | - "hash": "ae223df28f60859a2f400fae8b3a1034248e0a469f5599fd9a89c32908ed7a84" |
419 | | - } |
| 435 | + "version": "3.12.8" |
420 | 436 | } |
421 | 437 | }, |
422 | 438 | "nbformat": 4, |
|
0 commit comments