From 47d915d71b244881c58b644e7b615d1dcaaf9dcc Mon Sep 17 00:00:00 2001
From: Kenzo Lobos-Tsunekawa
Date: Wed, 9 Apr 2025 11:12:58 +0900
Subject: [PATCH] feat: added a pickle dump for the intrinsics

Signed-off-by: Kenzo Lobos-Tsunekawa
---
 .../camera_calibrator.py                      | 42 ++++++++++++++++++-
 1 file changed, 40 insertions(+), 2 deletions(-)

diff --git a/calibrators/intrinsic_camera_calibrator/intrinsic_camera_calibrator/intrinsic_camera_calibrator/camera_calibrator.py b/calibrators/intrinsic_camera_calibrator/intrinsic_camera_calibrator/intrinsic_camera_calibrator/camera_calibrator.py
index 5d253826..f00174ad 100644
--- a/calibrators/intrinsic_camera_calibrator/intrinsic_camera_calibrator/intrinsic_camera_calibrator/camera_calibrator.py
+++ b/calibrators/intrinsic_camera_calibrator/intrinsic_camera_calibrator/intrinsic_camera_calibrator/camera_calibrator.py
@@ -947,8 +947,16 @@ def on_save_clicked(self):
         if not os.path.exists(evaluation_folder):
             os.mkdir(evaluation_folder)
 
+        database_dict = {}
+        database_dict["camera_name"] = self.data_source.get_camera_name()
+        database_dict["training_folder"] = training_folder
+        database_dict["evaluation_folder"] = evaluation_folder
+        database_dict["training_samples"] = []
+        database_dict["evaluation_samples"] = []
+
         for index, image in enumerate(self.data_collector.get_training_images()):
-            cv2.imwrite(os.path.join(training_folder, f"{index:04d}.jpg"), image)  # noqa E231
+            img_name = f"{index:04d}.jpg"
+            cv2.imwrite(os.path.join(training_folder, img_name), image)  # noqa E231
             np.savetxt(
                 os.path.join(training_folder, f"{index:04d}_training_img_points.txt"),
                 self.data_collector.get_training_detection(index).get_flattened_image_points(),
@@ -958,8 +966,19 @@
                 self.data_collector.get_training_detection(index).get_flattened_object_points(),
             )
 
+            sample = {}
+            sample["image_name"] = img_name
+            sample["img_points"] = self.data_collector.get_training_detection(
+                index
+            ).get_flattened_image_points()
+            sample["obj_points"] = self.data_collector.get_training_detection(
+                index
+            ).get_flattened_object_points()
+            database_dict["training_samples"].append(sample)
+
         for index, image in enumerate(self.data_collector.get_evaluation_images()):
-            cv2.imwrite(os.path.join(evaluation_folder, f"{index:04d}.jpg"), image)  # noqa E231
+            img_name = f"{index:04d}.jpg"
+            cv2.imwrite(os.path.join(evaluation_folder, img_name), image)  # noqa E231
             np.savetxt(
                 os.path.join(evaluation_folder, f"{index:04d}_eval_img_points.txt"),
                 self.data_collector.get_evaluation_detection(index).get_flattened_image_points(),
@@ -969,6 +988,25 @@
                 self.data_collector.get_evaluation_detection(index).get_flattened_object_points(),
             )
 
+            sample = {}
+            sample["image_name"] = img_name
+            sample["img_points"] = self.data_collector.get_evaluation_detection(
+                index
+            ).get_flattened_image_points()
+            sample["obj_points"] = self.data_collector.get_evaluation_detection(
+                index
+            ).get_flattened_object_points()
+            database_dict["evaluation_samples"].append(sample)
+
+        try:
+            import pickle
+
+            with open(os.path.join(output_folder, "database.pkl"), "wb") as f:
+                pickle.dump(database_dict, f)
+
+        except Exception:
+            logging.warning("Pickle not available, skipping pickle output")
+
     def process_detection_results(self, img: np.array, detection: BoardDetection, img_stamp: float):
         """Process the results from an object detection."""
         # Signal that the detector is free
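
For reference only (not part of the patch): a minimal sketch of how the database.pkl written above could be loaded back. The dictionary keys mirror those built in on_save_clicked; the output_folder path below is a placeholder.

import os
import pickle

import numpy as np

output_folder = "path/to/output_folder"  # placeholder: the folder the calibrator wrote to

# Load the dictionary dumped by on_save_clicked.
with open(os.path.join(output_folder, "database.pkl"), "rb") as f:
    database_dict = pickle.load(f)

print(database_dict["camera_name"])
print(database_dict["training_folder"], database_dict["evaluation_folder"])

# Each sample stores the image file name plus the flattened image/object points
# of its board detection.
for sample in database_dict["training_samples"]:
    img_points = np.asarray(sample["img_points"])
    obj_points = np.asarray(sample["obj_points"])
    print(sample["image_name"], img_points.shape, obj_points.shape)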