From dc699e987ee04ad0d579d20aeb53e982887e1f16 Mon Sep 17 00:00:00 2001 From: Tiffany Cappellari Date: Fri, 27 Feb 2026 14:50:03 -0500 Subject: [PATCH 01/12] reorganizing calibration to incorporate Lu's work from bdai_calibration --- .github/workflows/ci.yml | 2 +- spot_wrapper/calibration/README.md | 216 --- spot_wrapper/calibration/__init__.py | 0 ...te_multistereo_cameras_with_charuco_cli.py | 332 ---- .../calibrate_spot_hand_camera_cli.py | 290 ---- spot_wrapper/calibration/calibration_clis.py | 429 +++++ .../calibration/calibration_helpers.py | 229 +++ spot_wrapper/calibration/calibration_util.py | 1442 +++++------------ .../calibration/charuco_board_detection.py | 949 +++++++++++ .../calibration/in_depth_calibration_doc.md | 324 ---- .../registration_qualitative_example.jpg | Bin 47508 -> 0 bytes .../calibration/spot_eye_in_hand_setup.jpg | Bin 39733 -> 0 bytes .../spot_hand_camera_calibration.py | 124 ++ .../spot_in_hand_camera_calibration.py | 42 +- 14 files changed, 2141 insertions(+), 2238 deletions(-) delete mode 100644 spot_wrapper/calibration/README.md delete mode 100644 spot_wrapper/calibration/__init__.py delete mode 100644 spot_wrapper/calibration/calibrate_multistereo_cameras_with_charuco_cli.py delete mode 100644 spot_wrapper/calibration/calibrate_spot_hand_camera_cli.py create mode 100644 spot_wrapper/calibration/calibration_clis.py create mode 100644 spot_wrapper/calibration/calibration_helpers.py create mode 100644 spot_wrapper/calibration/charuco_board_detection.py delete mode 100644 spot_wrapper/calibration/in_depth_calibration_doc.md delete mode 100644 spot_wrapper/calibration/registration_qualitative_example.jpg delete mode 100644 spot_wrapper/calibration/spot_eye_in_hand_setup.jpg create mode 100644 spot_wrapper/calibration/spot_hand_camera_calibration.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ab21b565..d2655233 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -69,7 +69,7 @@ jobs: uses: 
coverallsapp/github-action@v2 with: github-token: ${{ secrets.GITHUB_TOKEN }} - fail-on-error: true + fail-on-error: false flag-name: unittests debug: true files: $(find ../../ -name "coverage.xml" -type f) diff --git a/spot_wrapper/calibration/README.md b/spot_wrapper/calibration/README.md deleted file mode 100644 index 418a28e1..00000000 --- a/spot_wrapper/calibration/README.md +++ /dev/null @@ -1,216 +0,0 @@ -# Automatic Robotic Stereo Camera Calibration with Charuco Target - -# Table of Contents - -1. [***Overview***](#overview) -2. [***Quick Start***](#quick-start) -3. [***Check if you have a Legacy Charuco Board***](#check-if-you-have-a-legacy-charuco-board) -4. [***All Options***](#all-options) - -# Overview - -![side by side comparison](registration_qualitative_example.jpg) - -This utility streamlines automatic -camera calibration to **solve for the intrinsic and extrinsic parameters for two or more -cameras mounted in a fixed pose relative to each other on a robot** -based off of moving the robot to view a Charuco target from different poses. You can also calculate the calibration from an existing dataset of photos or make a new dataset from a pre-defined list of poses (if available). Additionally, the new calibrations can be directly written to the robot. - -The CLI tool's saving capability allows to store multiple unique calibration runs in one configuration file -with calibration metadata, to document related runs with different setups or parameters. - -For more info, see ```calibration_util.py```, where the functions -```get_multiple_perspective_camera_calibration_dataset``` and ```multistereo_calibration_charuco``` -do most of the heavy lifting. - -> [!NOTE] -> For a more in-depth explanation of this calibration tool, see ![this document](in_depth_calibration_doc.md). 
- -# Quick Start - -## Full Calibration - -To run through the full calibration script to calibrate your Spot's hand camera (depth to rgb, with rgb at default resolution), first set up your robot and charuco board like below - -![spot eye in hand cal](spot_eye_in_hand_setup.jpg) - -You can then run the following command: - -> [!WARNING] -> The robot will move. Be sure the robot is sitting (not docked) in front of the charuco board and that no one else is holding the lease before beginning (you will have to release control from the tablet). - -```bash -python3 calibrate_spot_hand_camera_cli.py --ip -u -pw -``` - -To save the collected images, add the ```--save_data``` and ```--data_path``` flags: - -```bash -python3 calibrate_spot_hand_camera_cli.py --ip -u -pw \ ---data_path --save_data -``` - -To also save the calculated calibrations, add the ```--result_path``` flag: - -```bash -python3 calibrate_spot_hand_camera_cli.py --ip -u -pw \ ---data_path --save_data --result_path -``` - -To overwrite the robot's internal calibration with your newly calculated one, also add the ```--save_to_robot``` flag: - -```bash -python3 calibrate_spot_hand_camera_cli.py --ip -u -pw \ ---data_path --save_data --result_path --save_to_robot -``` - -> [!NOTE] -> If you are using a legacy charuco board, add ```--legacy_charuco_pattern True```. If you're not sure, go to [Check if you have a Legacy Charuco Board](#check-if-you-have-a-legacy-charuco-board). - -Other options are listed in [All Options](#all-options) below. - -## Calibration with Existing Dataset - -If you already have a folder of images to calibrate on, then add the ```--from_data``` flag and remove the ```--save_data``` flag. 
This will *not* move the robot: - -```bash -python3 calibrate_spot_hand_camera_cli.py --ip -u -pw \ ---data_path --from_data -``` - -You can still optionally add the ```--result_path ``` and/or ```--save_to_robot``` flags to save the calculated calibration to a yaml file and/or overwrite the robot's internally saved calibrations, respectively. - -## Overwrite Robot Calibration with Existing Yaml - -If you have previously run the calibration script and saved the results, you can send those directly to the robot to be saved on its internal hardware without needing to run the entire calibration script by including the ```--from_yaml``` and ```--data_path``` flags and ensuring the `````` goes to the correct calibration yaml file: - -```bash -python3 calibrate_spot_hand_camera_cli.py --ip -u -pw \ ---from_yaml --data_path --save_to_robot -``` - -## All Options - -Below are all the various flags available to you to further fine-tune your calibration. You can also see them all in your terminal by running: - -```bash -python3 calibrate_spot_hand_camera_cli.py -h -``` - -```bash -options: - -h, --help show this help message and exit - --stereo_pairs STEREO_PAIRS, -sp STEREO_PAIRS - Capture images returns a list of images. Stereo pairs correspond towhat - index in the list of images' corresponding camerato calibrate to what - camera. Say capture_images returns [rgb_img, depth_img]and you want to - register depth to rgb, then the desired stereo pairis "[(1,0)]". If you - want to register more than one pair, you can do it like "[(1, 0), - (2,0)]."Make sure to put the stereo pairs in quotes so bash doesn't - complain - --legacy_charuco_pattern LEGACY_CHARUCO_PATTERN, -l LEGACY_CHARUCO_PATTERN - Whether to use the legacy charuco pattern. For Spot Default board, this - should be True.If you aren't sure if your board is legacy, try supplying - the --check_board_patternarg to verify that the cv2 board matches your - board. 
- --check_board_pattern - Whether to visually verify the board pattern (to check legacy and internal - corner ordering. - --allow_default_internal_corner_ordering - Whether to allow default internal corner ordering. For new versions of - OpenCV, it is recommended to NOT set this parameter to make sure that - corners are ordered in a known pattern. Try supplying the - --check_board_pattern flag to check whether you should enable this flag - When checking, Z-Axis should point out of board. - --photo_utilization_ratio PHOTO_UTILIZATION_RATIO, -pur PHOTO_UTILIZATION_RATIO - Photos that are collected/loaded vs. used for calibration are in a 1 - tophoto utilization ratio. For getting a rough guess on cheaper - hardwarewithout losing collection fidelity. For example, set to 2 to only - use half the photos. - --num_checkers_width NUM_CHECKERS_WIDTH, -ncw NUM_CHECKERS_WIDTH - How many checkers wide is your board - --num_checkers_height NUM_CHECKERS_HEIGHT, -nch NUM_CHECKERS_HEIGHT - How many checkers tall is your board - --dict_size {DICT_4X4_50,DICT_4X4_100,DICT_4X4_250,DICT_4X4_1000,DICT_5X5_50,DICT_5X5_100,DICT_5X5_250,DICT_5X5_1000,DICT_6X6_50,DICT_6X6_100,DICT_6X6_250,DICT_6X6_1000,DICT_7X7_50,DICT_7X7_100,DICT_7X7_250,DICT_7X7_1000,DICT_ARUCO_ORIGINAL} - Choose the ArUco dictionary size. - --checker_dim CHECKER_DIM, -cd CHECKER_DIM - Checker size in meters - --marker_dim MARKER_DIM, -md MARKER_DIM - Aruco Marker size in meters - --data_path DATA_PATH, -dp DATA_PATH - The path in which to save images - --result_path RESULT_PATH, -rp RESULT_PATH - Where to store calibration result as yaml file - --tag TAG, -t TAG What heading to put for the calibration in the config file.If this is your - first time running, the tag should be set to default for the sake of - interoperability with other functionality.If this is a shared config file - with other people, perhaps puta unique identifier, or default, if you'd - like to overridefor everyone. 
- --unsafe_tag_save If set, skips safety checks for tagging calibration. - --dist_from_board_viewpoint_range DIST_FROM_BOARD_VIEWPOINT_RANGE [DIST_FROM_BOARD_VIEWPOINT_RANGE ...], -dfbvr DIST_FROM_BOARD_VIEWPOINT_RANGE [DIST_FROM_BOARD_VIEWPOINT_RANGE ...] - What distances to conduct calibrations at relative to the board. (along the - normal vector) Three value array arg defines the [Start, Stop), step. for - the viewpoint sweep. These are used to sample viewpoints for automatic - collection. - --degrees, -d Use degrees for rotation ranges (default) - --radians, -r Use radians for rotation ranges - --x_axis_rot_viewpoint_range X_AXIS_ROT_VIEWPOINT_RANGE [X_AXIS_ROT_VIEWPOINT_RANGE ...], -xarvr X_AXIS_ROT_VIEWPOINT_RANGE [X_AXIS_ROT_VIEWPOINT_RANGE ...] - What range of viewpoints around x-axis to sample relative to boards normal - vector. Three value array arg defines the [Start, Stop), step. for the - viewpoint sweep These are used to sample viewpoints for automatic - collection. Assuming that the camera pose is in opencv/ROS format. - --y_axis_rot_viewpoint_range Y_AXIS_ROT_VIEWPOINT_RANGE [Y_AXIS_ROT_VIEWPOINT_RANGE ...], -yarvr Y_AXIS_ROT_VIEWPOINT_RANGE [Y_AXIS_ROT_VIEWPOINT_RANGE ...] - What range of viewpoints around y-axis to sample relative to boards normal - vector. Three value array arg defines the [Start, Stop), step. for the - viewpoint sweep These are used to sample viewpoints for automatic - collection. Assuming that the camera pose is in opencv/ROS format. - --z_axis_rot_viewpoint_range Z_AXIS_ROT_VIEWPOINT_RANGE [Z_AXIS_ROT_VIEWPOINT_RANGE ...], -zarvr Z_AXIS_ROT_VIEWPOINT_RANGE [Z_AXIS_ROT_VIEWPOINT_RANGE ...] - What range of viewpoints around z-axis to sample relative to boards normal - vector. Three value array arg defines the [Start, Stop), step. for the - viewpoint sweep These are used to sample viewpoints for automatic - collection. Assuming that the camera pose is in opencv/ROS format. 
- --max_num_images MAX_NUM_IMAGES - The maximum number of images - --settle_time SETTLE_TIME, -st SETTLE_TIME - How long to wait after movement to take a picture; don't want motion blur - --save_data, -sd whether to save the images to file - --from_data, -fd Whether to only calibrate from recorded dataset on file. - --save_to_robot, -send - Whether to save the calibration to the robot. - --from_yaml, -yaml Whether the data is from a yaml file. Use this and the '--from_data' and ' - --send' args to send a previously saved calibration yaml to the robot - --ip IP, -i IP, -ip IP - The IP address of the Robot to calibrate - --user USERNAME, -u USERNAME, --username USERNAME - Robot Username - --pass PASSWORD, -pw PASSWORD, --password PASSWORD - Robot Password - --spot_rgb_photo_width SPOT_RGB_PHOTO_WIDTH, -dpw SPOT_RGB_PHOTO_WIDTH - What resolution use for Spot's RGB Hand Camera (width). Currently, only 640 - and 1920 are supported - --spot_rgb_photo_height SPOT_RGB_PHOTO_HEIGHT, -dph SPOT_RGB_PHOTO_HEIGHT - What resolution use for Spot's RGB Hand Camera (width). Currently, only 480 - and 1080 are supported -``` - -# Check if you have a Legacy Charuco Board - -You only need to do this if using an opencv version after ```4.7```( -check with```python3 -c "import cv2; print(cv2.__version__)"```) - -Through using the CLI tool (```python3 calibrate_multistereo_cameras_with_charuco_cli.py -h```), you can check if you have a legacy board through visually comparing the generated drawn virtual board to your physical charuco board target. Some legacy boards have an aruco tag in the top -left corner, whether as some non-legacy boards have a checker in the top left corner. -Also, check to see that the aruco tags match between virtual and physical boards. -It is important that the virtual board matches the physical board, otherwise this calibration -will not work. 
- -``` -python3 calibrate_multistereo_cameras_with_charuco_cli.py --check_board_pattern --legacy_charuco_pattern t -``` - -There should be an axis at the center of the board, where the Y axis (green) -points upwards, the X axis (red) points to the right, and the figure should be labelled -as Z-axis out of board. If it isn't then try without legacy (```--legacy_charuco_pattern f```). - -If you are using the default Spot Calibation board, and there is an aruco marker -in the top left corner, then it legacy (so supply true argument to legacy.) diff --git a/spot_wrapper/calibration/__init__.py b/spot_wrapper/calibration/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/spot_wrapper/calibration/calibrate_multistereo_cameras_with_charuco_cli.py b/spot_wrapper/calibration/calibrate_multistereo_cameras_with_charuco_cli.py deleted file mode 100644 index c070fe4d..00000000 --- a/spot_wrapper/calibration/calibrate_multistereo_cameras_with_charuco_cli.py +++ /dev/null @@ -1,332 +0,0 @@ -# Copyright (c) 2025 Robotics and AI Institute LLC dba RAI Institute. All rights reserved. - -# Copyreference (c) 2024 Boston Dynamics AI Institute LLC. All references reserved. 
- -import argparse -import ast -import logging -from typing import List, Tuple, Union - -import cv2 -import numpy as np - -from spot_wrapper.calibration.calibration_util import ( - charuco_pose_sanity_check, - create_charuco_board, - create_ideal_charuco_image, - detect_charuco_corners, - load_dataset_from_path, - multistereo_calibration_charuco, - save_calibration_parameters, -) - -logging.basicConfig( - level=logging.INFO, -) - -logger = logging.getLogger(__name__) -# - - -def setup_calibration_param( - parser: argparse.ArgumentParser, -) -> Tuple[argparse.Namespace, cv2.aruco_Dictionary, cv2.aruco_CharucoBoard]: - args = parser.parse_args() - if hasattr(cv2.aruco, args.dict_size): - aruco_dict = cv2.aruco.getPredefinedDictionary(getattr(cv2.aruco, args.dict_size)) - else: - raise ValueError(f"Invalid ArUco dictionary: {args.dict_size}") - charuco = create_charuco_board( - num_checkers_width=args.num_checkers_width, - num_checkers_height=args.num_checkers_height, - checker_dim=args.checker_dim, - marker_dim=args.marker_dim, - aruco_dict=aruco_dict, - legacy=args.legacy_charuco_pattern, - ) - - if not args.allow_default_internal_corner_ordering: - logger.warning("Enforcing bottom up charuco ordering. 
Pre-computing correlation now...") - detect_charuco_corners( - create_ideal_charuco_image(charuco_board=charuco), - charuco_board=charuco, - aruco_dict=aruco_dict, - enforce_ascending_ids_from_bottom_left_corner=True, - ) - if args.check_board_pattern: - logger.warning("Checking board, you'll need to close a window in a sec (press any key)") - charuco_pose_sanity_check( - create_ideal_charuco_image(charuco_board=charuco, colorful=True), - charuco_board=charuco, - aruco_dict=aruco_dict, - ) - return args, aruco_dict, charuco - - -def calibration_helper( - images: Union[List[np.ndarray], np.ndarray], - args: argparse.Namespace, - charuco: cv2.aruco_CharucoBoard, - aruco_dict: cv2.aruco_Dictionary, - poses: np.ndarray, - result_path: str = None, -) -> dict: - logger.warning( - f"Calibrating from {len(images)} images.. for every " - f"{args.photo_utilization_ratio} recorded photos 1 is used to calibrate" - ) - if not args.allow_default_internal_corner_ordering: - logger.warning("Turning off corner swap (needed for localization) for calibration solution...") - logger.warning("Corner swap needed for initial localization, but breaks calibration.") - logger.warning("See https://github.com/opencv/opencv/issues/26126") - detect_charuco_corners( - create_ideal_charuco_image(charuco_board=charuco), - charuco_board=charuco, - aruco_dict=aruco_dict, - enforce_ascending_ids_from_bottom_left_corner=False, - ) - calibration = multistereo_calibration_charuco( - images[:: args.photo_utilization_ratio], - desired_stereo_pairs=args.stereo_pairs, - charuco_board=charuco, - aruco_dict=aruco_dict, - poses=poses, - ) - logger.info(f"Finished script, obtained {calibration}") - logger.info("Saving calibration param...") - - if result_path is None: - result_path = input("Please provide a path to save the calibration results (or type 'No' to skip): ") - - args.result_path = result_path - - # Save the calibration parameters if a valid result path is provided - calibration_dict = 
save_calibration_parameters( - data=calibration, - output_path=args.result_path, - num_images=len(images[:: args.photo_utilization_ratio]), - tag=args.tag, - parser_args=args, - unsafe=args.unsafe_tag_save, - ) - return calibration_dict - - -def main(): - parser = calibrator_cli() - args, aruco_dict, charuco = setup_calibration_param(parser) - logger.info(f"Loading images from {args.data_path}") - - if args.data_path is not None: - images, poses = load_dataset_from_path(args.data_path) - calibration_helper( - images=images, args=args, charuco=charuco, aruco_dict=aruco_dict, poses=poses, result_path=args.result_path - ) - else: - logger.warning("Could not load any images to calibrate from, supply --data_path") - - -def calibrator_cli() -> argparse.ArgumentParser: - parser = argparse.ArgumentParser("Calibrate Eye-In Hand Camera System :)") - # Calculations - parser.add_argument( - "--stereo_pairs", - "-sp", - type=parse_tuple_list, - required=False, - default=[(1, 0)], - help=( - "Capture images returns a list of images. Stereo pairs correspond to" - "what index in the list of images' corresponding camera" - "to calibrate to what camera. Say capture_images returns [rgb_img, depth_img]" - "and you want to register depth to rgb, then the desired stereo pair" - 'is "[(1,0)]". If you want to register more than one pair, you can do it like "[(1, 0), (2,0)]."' - "Make sure to put the stereo pairs in quotes so bash doesn't complain" - ), - ) - - parser.add_argument( - "--legacy_charuco_pattern", - "-l", - type=str2bool, - required=True, - help=( - "Whether to use the legacy charuco pattern. For Spot Default board, this should be True." - "If you aren't sure if your board is legacy, try supplying the --check_board_pattern" - "arg to verify that the cv2 board matches your board." 
- ), - ) - parser.add_argument( - "--check_board_pattern", - action="store_true", - default=False, - help="Whether to visually verify the board pattern (to check legacy and internal corner ordering.", - ) - - parser.add_argument( - "--allow_default_internal_corner_ordering", - action="store_true", - default=False, - help=( - "Whether to allow default internal corner ordering. " - "For new versions of OpenCV, it is recommended " - "to NOT set this parameter to make sure that corners " - "are ordered in a known pattern. " - "Try supplying the --check_board_pattern flag " - "to check whether you should enable this flag " - "When checking, Z-Axis should point out of board. " - ), - ) - - parser.add_argument( - "--photo_utilization_ratio", - "-pur", - dest="photo_utilization_ratio", - type=int, - default=1, - help=( - "Photos that are collected/loaded vs. used for calibration are in a 1 to" - "photo utilization ratio. For getting a rough guess on cheaper hardware" - "without losing collection fidelity. For example, set to 2 to only use half the photos." 
- ), - ) - - # calibration param - parser.add_argument( - "--num_checkers_width", - "-ncw", - dest="num_checkers_width", - type=int, - help="How many checkers wide is your board", - default=9, - ) - - parser.add_argument( - "--num_checkers_height", - "-nch", - dest="num_checkers_height", - type=int, - help="How many checkers tall is your board", - default=4, - ) - - parser.add_argument( - "--dict_size", - type=str, - choices=[ - "DICT_4X4_50", - "DICT_4X4_100", - "DICT_4X4_250", - "DICT_4X4_1000", - "DICT_5X5_50", - "DICT_5X5_100", - "DICT_5X5_250", - "DICT_5X5_1000", - "DICT_6X6_50", - "DICT_6X6_100", - "DICT_6X6_250", - "DICT_6X6_1000", - "DICT_7X7_50", - "DICT_7X7_100", - "DICT_7X7_250", - "DICT_7X7_1000", - "DICT_ARUCO_ORIGINAL", - ], - default="DICT_4X4_50", - help="Choose the ArUco dictionary size.", - ) - - parser.add_argument( - "--checker_dim", - "-cd", - dest="checker_dim", - type=float, - default=0.115, - help="Checker size in meters", - ) - - parser.add_argument( - "--marker_dim", - "-md", - dest="marker_dim", - type=float, - default=0.09, - help="Aruco Marker size in meters", - ) - - # path and saving - parser.add_argument( - "--data_path", - "-dp", - dest="data_path", - type=str, - help="The path in which to save images", - default=None, - ) - - parser.add_argument( - "--result_path", - "-rp", - dest="result_path", - default="", - type=str, - required=False, - help="Where to store calibration result as yaml file", - ) - - parser.add_argument( - "--tag", - "-t", - dest="tag", - type=str, - required=False, - default="default", - help=( - "What heading to put for the calibration in the config file." - "If this is your first time running, the tag should be set to default " - "for the sake of interoperability with other functionality." - "If this is a shared config file with other people, perhaps put" - "a unique identifier, or default, if you'd like to override" - "for everyone." 
- ), - ) - - parser.add_argument( - "--unsafe_tag_save", - action="store_true", - help="If set, skips safety checks for tagging calibration.", - ) - - return parser - - -def str2bool(value: str) -> bool: - """ - Convert a string to a boolean. - Accepts 'true', 'false', 'yes', 'no', '1', '0', etc. - """ - if isinstance(value, bool): - return value - if value.lower() in ("yes", "true", "t", "y", "1"): - return True - elif value.lower() in ("no", "false", "f", "n", "0"): - return False - else: - raise argparse.ArgumentTypeError("Boolean value expected.") - - -# collection -def parse_tuple_list(string: str) -> List[Tuple[int, int]]: - try: - # Use ast.literal_eval to safely evaluate the string as a Python expression - value = ast.literal_eval(string) - if isinstance(value, list) and all(isinstance(t, tuple) and len(t) == 2 for t in value): - return value - else: - raise argparse.ArgumentTypeError(f"Expected a list of tuples, but got: {string}") - except (ValueError, SyntaxError): - raise argparse.ArgumentTypeError(f"Invalid tuple list format: {string}") - - -if __name__ == "__main__": - main() diff --git a/spot_wrapper/calibration/calibrate_spot_hand_camera_cli.py b/spot_wrapper/calibration/calibrate_spot_hand_camera_cli.py deleted file mode 100644 index 74727063..00000000 --- a/spot_wrapper/calibration/calibrate_spot_hand_camera_cli.py +++ /dev/null @@ -1,290 +0,0 @@ -# Copyright (c) 2025 Robotics and AI Institute LLC dba RAI Institute. All rights reserved. - -# Copyreference (c) 2024 Boston Dynamics AI Institute LLC. All references reserved. 
- -import argparse -import logging -from typing import Tuple - -import cv2 -import numpy as np -import yaml - -from spot_wrapper.calibration.automatic_camera_calibration_robot import AutomaticCameraCalibrationRobot -from spot_wrapper.calibration.calibrate_multistereo_cameras_with_charuco_cli import ( - calibration_helper, - calibrator_cli, - setup_calibration_param, -) -from spot_wrapper.calibration.calibration_util import ( - get_multiple_perspective_camera_calibration_dataset, - load_dataset_from_path, -) -from spot_wrapper.calibration.spot_in_hand_camera_calibration import ( - SpotInHandCalibration, -) - -logging.basicConfig( - level=logging.INFO, -) - -logger = logging.getLogger(__name__) - - -def create_robot( - args: argparse.ArgumentParser, charuco: cv2.aruco_CharucoBoard, aruco_dict: cv2.aruco_Dictionary -) -> Tuple[AutomaticCameraCalibrationRobot, argparse.Namespace]: - # Replace with your AutomaticCameraCalibrationRobot - in_hand_bot = SpotInHandCalibration(args.ip, args.username, args.password) - in_hand_bot._set_localization_param( - charuco_board=charuco, - aruco_dict=aruco_dict, - resolution=( - args.spot_rgb_photo_width, - args.spot_rgb_photo_height, - ), - ) - try: - args.robot_name = in_hand_bot.robot.get_cached_robot_id().nickname - except Exception: - logger.warning("Could not determine cached robot nickname, saving name as unknown") - args.robot_name = "unknown" - return in_hand_bot, args - - -def create_robot_parser() -> argparse.ArgumentParser: - parser = calibrate_robot_cli() - return spot_cli(parser=parser) # Replace with robot specific parsing - - -def spot_main() -> None: - parser = create_robot_parser() - args, aruco_dict, charuco = setup_calibration_param(parser) - in_hand_bot, args = create_robot(args, charuco=charuco, aruco_dict=aruco_dict) - - # Collect new data and calibrate - if not args.from_data: - logger.warning("This script moves the robot around. !!! 
USE AT YOUR OWN RISK !!!") - logger.warning("HOLD Ctrl + C NOW TO CANCEL") - logger.warning("The calibration board should be about a meter away with nothing within a meter of the robot.") - logger.warning("The robot should NOT be docked, and nobody should have robot control") - # sleep(5) - - images, poses = get_multiple_perspective_camera_calibration_dataset( - auto_cam_cal_robot=in_hand_bot, - max_num_images=args.max_num_images, - distances_z=np.arange(*args.dist_from_board_viewpoint_range), - distances_x=np.arange(*args.dist_along_board_width), - x_axis_rots=np.arange(*args.x_axis_rot_viewpoint_range), - y_axis_rots=np.arange(*args.y_axis_rot_viewpoint_range), - z_axis_rots=np.arange(*args.z_axis_rot_viewpoint_range), - use_degrees=args.use_degrees, - settle_time=args.settle_time, - data_path=args.data_path, - save_data=args.save_data, - ) - calibration = calibration_helper( - images=images, args=args, charuco=charuco, aruco_dict=aruco_dict, poses=poses, result_path=args.result_path - ) - if args.save_to_robot: - logger.info("Saving calibration to robot...") - in_hand_bot.write_calibration_to_robot(calibration) - in_hand_bot.shutdown() - # Send previously computed and saved calibration data to the robot - elif args.from_yaml: - try: - with open(args.data_path, "r") as file: - calibration = yaml.safe_load(file) - logger.info(f"Loaded calibration data:\n{calibration}") - if args.save_to_robot: - logger.info("Saving calibration to robot...") - in_hand_bot.write_calibration_to_robot(calibration) - except Exception as e: - raise ValueError(f"Failed to load calibration from {args.data_path}: {e}\nIs it a calibration yaml file?") - # Load previously collected data and compute calibration - else: - logger.info(f"Loading images from {args.data_path}") - images, poses = load_dataset_from_path(args.data_path) - calibration = calibration_helper( - images=images, args=args, charuco=charuco, aruco_dict=aruco_dict, poses=poses, result_path=args.result_path - ) - if 
args.save_to_robot: - logger.info("Saving calibration to robot...") - in_hand_bot.write_calibration_to_robot(calibration) - - logger.info("Calibration complete!") - - -def calibrate_robot_cli(parser: argparse.ArgumentParser = None) -> argparse.ArgumentParser: - if parser is None: - parser = calibrator_cli() - - parser.add_argument( - "--dist_from_board_viewpoint_range", - "-dfbvr", - nargs="+", - type=float, - dest="dist_from_board_viewpoint_range", - default=[0.5, 0.6, 0.1], - help=( - "What distances to conduct calibrations at relative to the board. (along the normal vector) " - "Three value array arg defines the [Start, Stop), step. for the viewpoint sweep. " - "These are used to sample viewpoints for automatic collection. " - ), - ) - parser.add_argument( - "--dist_along_board_width", - "-dabw", - nargs="+", - type=float, - dest="dist_along_board_width", - default=[-0.3, 0.4, 0.2], - help=( - "What distances to conduct calibrations at relative to the board. (along the board width) " - "Three value array arg defines the [Start, Stop), step. for the viewpoint sweep. " - "These are used to sample viewpoints for automatic collection. " - ), - ) - group = parser.add_mutually_exclusive_group() - group.add_argument( - "--degrees", - "-d", - dest="use_degrees", - action="store_true", - default=True, - help="Use degrees for rotation ranges (default)", - ) - group.add_argument( - "--radians", - "-r", - dest="use_degrees", - action="store_false", - help="Use radians for rotation ranges", - ) - defaults = [[-10, 11, 10], [-10, 11, 10], [-10, 11, 10]] - for idx, axis in enumerate(["x", "y", "z"]): - parser.add_argument( - f"--{axis}_axis_rot_viewpoint_range", - f"-{axis}arvr", - nargs="+", - type=float, - default=defaults[idx], - dest=f"{axis}_axis_rot_viewpoint_range", - help=( - f"What range of viewpoints around {axis}-axis to sample relative to boards normal vector. " - "Three value array arg defines the [Start, Stop), step. 
for the viewpoint sweep " - "These are used to sample viewpoints for automatic collection. " - "Assuming that the camera pose is in opencv/ROS format. " - ), - ) - - parser.add_argument( - "--max_num_images", - dest="max_num_images", - type=int, - default=10000, - help="The maximum number of images", - required=False, - ) - - parser.add_argument( - "--settle_time", - "-st", - dest="settle_time", - type=float, - default=1.0, - help="How long to wait after movement to take a picture; don't want motion blur", - ) - - # path things - parser.add_argument( - "--save_data", - "-sd", - dest="save_data", - action="store_true", - help="whether to save the images to file", - ) - - parser.add_argument( - "--from_data", - "-fd", - dest="from_data", - action="store_true", - help="Whether to only calibrate from recorded dataset on file.", - ) - - parser.add_argument( - "--save_to_robot", - "-send", - dest="save_to_robot", - action="store_true", - help="Whether to save the calibration to the robot.", - ) - - parser.add_argument( - "--from_yaml", - "-yaml", - dest="from_yaml", - action="store_true", - help=( - "Whether the data is from a yaml file. 
Use this and the '--from_data' and '--send' args to send a" - " previously saved calibration yaml to the robot" - ), - ) - - return parser - - -def spot_cli(parser: argparse.ArgumentParser = None) -> argparse.ArgumentParser: - if parser is None: - parser = calibrate_robot_cli() - - parser.add_argument( - "--ip", - "-i", - "-ip", - dest="ip", - type=str, - help="The IP address of the Robot to calibrate", - required=True, - ) - parser.add_argument( - "--user", - "-u", - "--username", - dest="username", - type=str, - help="Robot Username", - required=True, - ) - parser.add_argument( - "--pass", - "-pw", - "--password", - dest="password", - type=str, - help="Robot Password", - required=True, - ) - - parser.add_argument( - "--spot_rgb_photo_width", - "-dpw", - type=int, - default=640, - dest="spot_rgb_photo_width", - help="What resolution use for Spot's RGB Hand Camera (width). Currently, only 640 and 1920 are supported", - ) - - parser.add_argument( - "--spot_rgb_photo_height", - "-dph", - type=int, - default=480, - help="What resolution use for Spot's RGB Hand Camera (width). Currently, only 480 and 1080 are supported", - ) - return parser - - -if __name__ == "__main__": - spot_main() diff --git a/spot_wrapper/calibration/calibration_clis.py b/spot_wrapper/calibration/calibration_clis.py new file mode 100644 index 00000000..c2c0f900 --- /dev/null +++ b/spot_wrapper/calibration/calibration_clis.py @@ -0,0 +1,429 @@ +#!/usr/bin/env python3 + +# Copyright (c) 2025 Robotics and AI Institute LLC dba RAI Institute. All rights reserved. 
+ + +import argparse +import logging +from typing import Tuple + +import cv2 + +from spot_wrapper.calibration.charuco_board_detection import ( + charuco_pose_sanity_check, + create_charuco_board, + create_ideal_charuco_image, + detect_charuco_corners, +) + +logging.basicConfig( + level=logging.INFO, +) + +logger = logging.getLogger(__name__) + + +def setup_calibration_param( + args: argparse.Namespace, +) -> Tuple[argparse.Namespace, cv2.aruco_Dictionary, cv2.aruco_CharucoBoard]: + """Set up calibration parameters from command line arguments. + + Args: + parser (argparse.ArgumentParser): The argument parser to set up from command line. + + Raises: + ValueError: If the provided ArUco dictionary is invalid. + + Returns: + Tuple[argparse.Namespace, cv2.aruco_Dictionary, cv2.aruco_CharucoBoard]: + The parsed arguments, ArUco dictionary, and Charuco board. + """ + if hasattr(cv2.aruco, args.dict_size): + aruco_dict = cv2.aruco.getPredefinedDictionary(getattr(cv2.aruco, args.dict_size)) + else: + raise ValueError(f"Invalid ArUco dictionary: {args.dict_size}") + charuco = create_charuco_board( + num_checkers_width=args.num_checkers_width, + num_checkers_height=args.num_checkers_height, + checker_dim=args.checker_dim, + marker_dim=args.marker_dim, + aruco_dict=aruco_dict, + legacy=args.legacy_charuco_pattern, + ) + + if not args.allow_default_internal_corner_ordering: + logger.warning("Enforcing bottom up charuco ordering. 
Pre-computing correlation now...") + detect_charuco_corners( + create_ideal_charuco_image(charuco_board=charuco), + charuco_board=charuco, + aruco_dict=aruco_dict, + enforce_ascending_ids_from_bottom_left_corner=True, + ) + if args.show_board_pattern: + logger.warning("Checking board, you'll need to close a window in a sec (press any key)") + charuco_pose_sanity_check( + create_ideal_charuco_image(charuco_board=charuco, colorful=True), + charuco_board=charuco, + aruco_dict=aruco_dict, + ) + return args, aruco_dict, charuco + + +def calibrator_cli() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser("CalibrateEyeInHandCameraSystem") + # Calculations + + parser.add_argument( + "--legacy_charuco_pattern", + action="store_true", + help=( + "Whether to use the legacy charuco pattern. For Spot Default board, this should be set." + "If you aren't sure if your board is legacy, try supplying the --show_board_pattern" + "arg to verify that the cv2 board matches your board." + ), + ) + parser.add_argument( + "--show_board_pattern", + action="store_true", + default=False, + help="Whether to visually verify the board pattern (to check legacy and internal corner ordering.", + ) + + parser.add_argument( + "--allow_default_internal_corner_ordering", + action="store_true", + default=False, + help=( + "Whether to allow default internal corner ordering. " + "For new versions of OpenCV, it is recommended " + "to NOT set this parameter to make sure that corners " + "are ordered in a known pattern. " + "Try supplying the --show_board_pattern flag " + "to check whether you should enable this flag " + "When checking, Z-Axis should point out of board. " + ), + ) + + parser.add_argument( + "--photo_utilization_ratio", + "-pur", + dest="photo_utilization_ratio", + type=int, + default=1, + help=( + "Photos that are collected/loaded vs. used for calibration are in a 1 to" + "photo utilization ratio. 
For getting a rough guess on cheaper hardware" + "without losing collection fidelity. For example, set to 2 to only use half the photos." + ), + ) + + # calibration param + parser.add_argument( + "--num_checkers_width", + "-ncw", + dest="num_checkers_width", + type=int, + help="How many checkers is the widest dimension of your board", + default=9, + ) + + parser.add_argument( + "--num_checkers_height", + "-nch", + dest="num_checkers_height", + type=int, + help="How many checkers is the other dimension of your board", + default=4, + ) + + parser.add_argument( + "--dict_size", + type=str, + choices=[ + "DICT_4X4_50", + "DICT_4X4_100", + "DICT_4X4_250", + "DICT_4X4_1000", + "DICT_5X5_50", + "DICT_5X5_100", + "DICT_5X5_250", + "DICT_5X5_1000", + "DICT_6X6_50", + "DICT_6X6_100", + "DICT_6X6_250", + "DICT_6X6_1000", + "DICT_7X7_50", + "DICT_7X7_100", + "DICT_7X7_250", + "DICT_7X7_1000", + "DICT_ARUCO_ORIGINAL", + ], + default="DICT_4X4_50", + help="Choose the ArUco dictionary size.", + ) + + parser.add_argument( + "--checker_dim_meters", + "-cd", + dest="checker_dim", + type=float, + default=0.115, + help="Checker size in meters", + ) + + parser.add_argument( + "--marker_dim_meters", + "-md", + dest="marker_dim", + type=float, + default=0.09, + help="Aruco Marker size in meters", + ) + + # path and saving + parser.add_argument( + "--data_path", + "-dp", + dest="data_path", + type=str, + help="The absolute path in which to save images", + default=None, + ) + + parser.add_argument( + "--result_path", + "-rp", + dest="result_path", + type=str, + required=False, + help="The absolute path where to store calibration result file", + ) + + parser.add_argument( + "--tag", + "-t", + dest="tag", + type=str, + required=False, + default="default", + help=( + "What heading to put for the calibration in the config file." + "If this is your first time running, the tag should be set to default " + "for the sake of interoperability with other functionality." 
+ "If this is a shared config file with other people, perhaps put" + "a unique identifier, or default, if you'd like to override" + "for everyone." + ), + ) + + parser.add_argument( + "--unsafe_tag_save", + action="store_true", + help="If set, skips safety checks for tagging calibration.", + ) + + parser.add_argument( + "--use_kabsch", + action="store_true", + default=False, + help="Whether to use the Kabsch algorithm for rotation estimation.", + ) + + return parser + + +def calibrate_robot_cli(parser: argparse.ArgumentParser | None) -> argparse.ArgumentParser: + if parser is None: + parser = calibrator_cli() + + parser.add_argument( + "--dist_from_board_viewpoint_range", + "-dfbvr", + nargs="+", + type=float, + dest="dist_from_board_viewpoint_range", + default=[0.5, 0.6, 0.1], + help=( + "What distances to conduct calibrations at relative to the board. (along the normal vector) " + "Three value array arg defines the [Start, Stop), step. for the viewpoint sweep. " + "These are used to sample viewpoints for automatic collection. " + ), + ) + group = parser.add_mutually_exclusive_group() + group.add_argument( + "--degrees", + "-d", + dest="use_degrees", + action="store_true", + default=True, + help="Use degrees for rotation ranges (default)", + ) + group.add_argument( + "--radians", + "-r", + dest="use_degrees", + action="store_false", + help="Use radians for rotation ranges", + ) + defaults = [[-10, 11, 10], [-10, 11, 10], [-10, 11, 10]] + for idx, axis in enumerate(["x", "y", "z"]): + parser.add_argument( + f"--{axis}_axis_rot_viewpoint_range", + f"-{axis}arvr", + nargs="+", + type=float, + default=defaults[idx], + dest=f"{axis}_axis_rot_viewpoint_range", + help=( + f"What range of viewpoints around {axis}-axis to sample relative to boards normal vector. " + "Three value array arg defines the [Start, Stop), step. for the viewpoint sweep " + "These are used to sample viewpoints for automatic collection. " + "Assuming that the camera pose is in opencv/ROS format. 
" + ), + ) + + parser.add_argument( + "--max_num_images", + dest="max_num_images", + type=int, + default=10000, + help="The maximum number of images", + required=False, + ) + + parser.add_argument( + "--settle_time", + "-st", + dest="settle_time", + type=float, + default=1.0, + help="How long to wait after movement to take a picture; don't want motion blur", + ) + + parser.add_argument( + "--from_data", + "-fd", + dest="from_data", + action="store_true", + help="Whether to only calibrate from recorded dataset on file.", + ) + + return parser + + +def spot_cli(parser: argparse.ArgumentParser | None) -> argparse.ArgumentParser: + if parser is None: + parser = calibrate_robot_cli(None) + + parser.add_argument( + "--robot_name", + "-rn", + dest="robot_name", + type=str, + help="The name of the Robot to calibrate", + required=True, + ) + + parser.add_argument( + "--ip", + "-i", + "-ip", + dest="ip", + type=str, + help="The IP address of the Robot to calibrate", + required=True, + ) + parser.add_argument( + "--user", + "-u", + "--username", + dest="username", + type=str, + help="Robot Username", + required=True, + ) + parser.add_argument( + "--pass", + "-pw", + "--password", + dest="password", + type=str, + help="Robot Password", + required=True, + ) + + parser.add_argument( + "--spot_rgb_photo_width", + "-dpw", + type=int, + default=640, + dest="spot_rgb_photo_width", + help="What resolution use for Spot's RGB Hand Camera (width). Currently, only 640 and 1920 are supported", + ) + + parser.add_argument( + "--spot_rgb_photo_height", + "-dph", + type=int, + default=480, + help="What resolution use for Spot's RGB Hand Camera (width). 
Currently, only 480 and 1080 are supported", + ) + + parser.add_argument("--ext_cam_topic", help="External Camera topic", required=True) + + parser.add_argument("--ext_cam_info_topic", help="External camera info topic", required=True) + + parser.add_argument( + "--ext_cam_tf_name", help="name of the camera link of the RS camera", default="pole_camera_link" + ) + + parser.add_argument( + "--save_to_robot", + "-save", + dest="save_to_robot", + action="store_true", + help="Whether to save the calibration to the robot.", + ) + + return parser + + +def ext_cli() -> argparse.ArgumentParser: + """Command-line interface.""" + parser = calibrator_cli() + parser.add_argument("--robot", help="Name of the robot ROS2 namespce.", default="Ethernet0") + parser.add_argument( + "--ext_cam_topic", help="External Camera topic", default="/pole_rs/camera/pole_camera/color/image_raw" + ) + parser.add_argument( + "--ext_cam_info_topic", + help="External camera info topic", + default="/pole_rs/camera/pole_camera/color/camera_info", + ) + parser.add_argument("--start_dist", help="Starting distance from the board", default=1.5, type=float) + parser.add_argument( + "--dist_step", help="Step size of the distance to the board till max_dist", default=0.5, type=float + ) + parser.add_argument( + "--max_dist", help="Step size of the distance to the board till max_dist", default=2.5, type=float + ) + + parser.add_argument( + "--max_circle_angle", help="Max angle of the circle around the board", default=0.872665, type=float + ) + parser.add_argument("--circle_step_size", help="Step size around the circle", default=0.174533, type=float) + parser.add_argument( + "--ext_cam_tf_name", help="name of the camera link of the RS camera", default="pole_camera_link" + ) + parser.add_argument("--parent_frame", help="name of frame the camera link is rigidly attached to", default="body") + parser.add_argument("--camera_name", help="name of the camera", default="front_realsense") + 
parser.add_argument("--spot_name", help="name of spot", default="quinn") + parser.add_argument("--open_gripper", help="Whether to open the gripper during calibration", action="store_true") + + parser.add_argument( + "--collect_only", + action="store_true", + default=False, + help="Whether to only collect data, saved off to data_path.", + ) + return parser diff --git a/spot_wrapper/calibration/calibration_helpers.py b/spot_wrapper/calibration/calibration_helpers.py new file mode 100644 index 00000000..2eb17858 --- /dev/null +++ b/spot_wrapper/calibration/calibration_helpers.py @@ -0,0 +1,229 @@ +# Copyright (c) 2025-2026 Robotics and AI Institute LLC dba RAI Institute. All rights reserved. + +from __future__ import annotations + +import logging +from dataclasses import dataclass + +import numpy as np +import numpy.typing as npt +import torch +from jaxtyping import Float, Shaped + +logger = logging.getLogger(__name__) + +DISTORTION_COEFFICIENT_SIZES = [0, 4, 5, 8, 12, 14] + + +@dataclass +class Image: + # Note: this is a jaxtyping-style annotation; + # when jaxtyping is active (currently only under pytest), + # the dimensions of the input are bound to the names provided, + # but only in the context of dynamic type-checking of the + # generated __init__ method. + # These same names are later defined as @property methods. + image_data: Shaped[torch.Tensor, "channels height width"] # noqa: F722 + """Image tensor with shape (C, H, W), dtype uint8, values in [0,255]""" + + def __post_init__(self) -> None: + # allow for single channel images + if len(self.image_data.shape) < 2: + raise ValueError(f"Not enough dimensions in image data: {self.image_data.shape}. Should be 2d or 3d.") + elif len(self.image_data.shape) == 2: + self.image_data = self.image_data.unsqueeze(0) + + elif len(self.image_data.shape) > 3: + raise ValueError(f"Too many dimensions in image data: {self.image_data.shape}. 
Should be 2d or 3d.") + + def to_image(self) -> Image: + return self + + @property + def device(self): + return self.image_data.device + + @classmethod + def from_numpy( + cls, + np_image: np.ndarray, + dtype: torch.dtype | None = None, + force_copy: bool = True, + ) -> Image: + """ + Create an Image from numpy array. + + Note that conventions differ between torch and numpy for how to index an image; + this accepts a numpy array with channels as the optional last index. + + Because this method uses `torch.Tensor.contiguous()`, it does not interact well + with multiprocessing, specifically using `fork`. See the README for details. + """ + if not force_copy: + logger.warning(f"Cannot pass without copy between numpy and {cls}") + + tensor = torch.as_tensor(np_image, dtype=dtype) + + if len(tensor.shape) == 2: + tensor = tensor.unsqueeze(-1) + + tensor = tensor.permute((2, 0, 1)).contiguous() + + return cls(tensor) + + @classmethod + def zeros(cls, width: int, height: int, channels: int) -> Image: + """Create an all black image.""" + image_data = torch.zeros((channels, height, width)) + return cls.from_tensor(image_data, force_copy=False) + + @property + def channels(self) -> int: + """ + Gets the number of channels in the image + """ + return self.image_data.shape[0] + + @property + def width(self) -> int: + """ + Gets the width of the image + """ + return self.image_data.shape[2] + + @property + def height(self) -> int: + """ + Gets the height of the image + """ + return self.image_data.shape[1] + + @property + def shape(self) -> tuple[int, int]: + """ + Gets a tuple of (height, width) + """ + return self.height, self.width + + @property + def pixel_count(self) -> int: + """ + Gets the number of pixels in the image. This is (height * width), _not_ (height * width * channels). + """ + return self.height * self.width + + +class CameraIntrinsics: + """This is an interface to OpenCV's camera model. 
It contains a 3x3 camera matrix, and a vector of distortion + coefficients, which must be of length 0, 4, 5, 8, 12 or 14, matching OpenCV's convention. If this vector is too + short, then it is padded with zeros to the next largest size. If the vector is too long, then an exception is + thrown. + (see https://docs.opencv.org/3.4/d9/d0c/group__calib3d.html). + + There are also conversions to and from Open3D's CameraPinholeIntrinsic, with two caveats: + 1. Open3d does _not_ support lens distortion, while CameraIntrinsics does. An exception is thrown if a + CameraIntrinsics object with a non-zero-length distortion vector is converted to an Open3D representation. + 2. Open3d _does_ support skewed pixels (i.e., camera matrices with non-zero entries in (0,1) and (1,0)). while + CameraIntrinsics does not. An exception is thrown if a CameraIntrinsics object with any skews. + """ + + # jaxtyping, but not in a dataclass, so they're not checked + camera_matrix: Float[np.ndarray, "3 3"] # noqa: F722 + distortion_coeffs: Float[np.ndarray, "*N"] # noqa: F722 F821 + + def __init__( + self, + camera_matrix: npt.ArrayLike | None = None, + distortion_coeffs: npt.ArrayLike | None = None, + width: int = -1, + height: int = -1, + ): + self.camera_matrix = np.array(camera_matrix) if camera_matrix is not None else np.eye(3) + self.distortion_coeffs = np.array(distortion_coeffs) if distortion_coeffs is not None else np.empty(0) + self.width = width + self.height = height + + if self.camera_matrix.shape != (3, 3): + raise ValueError(f"Camera matrix must be 3x3, got {self.camera_matrix}.") + elif not np.allclose(self.camera_matrix[2, :], np.array([0, 0, 1])): + raise ValueError(f"Camera matrix must be affine, got {self.camera_matrix}.") + CameraIntrinsics._check_skews(self.camera_matrix) + + if len(self.distortion_coeffs.shape) != 1: + raise ValueError(f"Distortion coefficients must be Kx1, got {self.distortion_coeffs.shape}") + elif self.distortion_coeffs.shape[0] > 
DISTORTION_COEFFICIENT_SIZES[-1]: + raise ValueError( + f"Distortion coefficients list too long, got {self.distortion_coeffs.shape} but the maximum length is" + f" {DISTORTION_COEFFICIENT_SIZES[-1]}." + ) + + if self.distortion_coeffs.shape[0] not in DISTORTION_COEFFICIENT_SIZES: + for next_greatest_dist_size in DISTORTION_COEFFICIENT_SIZES: + if next_greatest_dist_size >= self.distortion_coeffs.shape[0]: + break + + self.distortion_coeffs = np.pad( + self.distortion_coeffs, (0, next_greatest_dist_size - self.distortion_coeffs.shape[0]), mode="constant" + ) + + @staticmethod + def _check_skews(camera_matrix: Float[np.ndarray, "3 3"]): # noqa: F722 + if not np.allclose(camera_matrix[np.array([0, 1]), np.array([1, 0])], np.zeros(2)): + raise ValueError(f"Skewed pixels are not supported, got {camera_matrix}.") + + @property + def fx(self) -> float: + return self.camera_matrix[0, 0] + + @fx.setter + def fx(self, val: float) -> None: + if val <= 0.0: + raise ValueError(f"Focal lengths must be positive; got {val}.") + else: + self.camera_matrix[0, 0] = val + + @property + def fy(self) -> float: + return self.camera_matrix[1, 1] + + @fy.setter + def fy(self, val: float) -> None: + if val <= 0.0: + raise ValueError(f"Focal lengths must be positive; got {val}.") + else: + self.camera_matrix[1, 1] = val + + @property + def cx(self) -> float: + return self.camera_matrix[0, 2] + + @cx.setter + def cx(self, val: float) -> None: + if val <= 0.0: + raise ValueError(f"Principal point values lengths must be positive; got {val}.") + elif self.width != -1 and val >= self.width: + raise ValueError( + f"Principal point values lengths must be less than the corresponding image dimension; got {val} but" + f" width is {self.width}." 
+ ) + else: + self.camera_matrix[0, 2] = val + + @property + def cy(self) -> float: + return self.camera_matrix[1, 2] + + @cy.setter + def cy(self, val: float) -> None: + if val <= 0.0: + raise ValueError(f"Principal point values lengths must be positive; got {val}.") + elif self.height != -1 and val >= self.width: + raise ValueError( + f"Principal point values lengths must be less than the corresponding image dimension; got {val} but" + f" height is {self.height}." + ) + else: + self.camera_matrix[1, 2] = val + + def scale(self, scale_factor: float) -> None: + self.camera_matrix = self.camera_matrix * scale_factor diff --git a/spot_wrapper/calibration/calibration_util.py b/spot_wrapper/calibration/calibration_util.py index 19cc33a7..0ac37605 100644 --- a/spot_wrapper/calibration/calibration_util.py +++ b/spot_wrapper/calibration/calibration_util.py @@ -1,1076 +1,293 @@ -# Copyright (c) 2025 Robotics and AI Institute LLC dba RAI Institute. All rights reserved. +# Copyright (c) 2025-2026 Robotics and AI Institute LLC dba RAI Institute. All rights reserved. -# Copyreference (c) 2024 Boston Dynamics AI Institute LLC. All references reserved. +# Copyreference (c) 2024 Robotics and AI Institute LLC dba RAI Institute. All references reserved. 
import argparse import logging import os import re -from copy import deepcopy from datetime import datetime from glob import glob -from math import radians +from pathlib import Path from time import sleep -from typing import Dict, List, Optional, Tuple, Union +from typing import Any, Dict, List, Optional, Tuple, TypedDict, Union import cv2 import numpy as np import yaml +from cv_bridge import CvBridge +from sensor_msgs.msg import CameraInfo +from sensor_msgs.msg import Image as RosImage from spot_wrapper.calibration.automatic_camera_calibration_robot import ( AutomaticCameraCalibrationRobot, ) - -logging.basicConfig( - level=logging.INFO, +from spot_wrapper.calibration.calibration_helpers import CameraIntrinsics, Image +from spot_wrapper.calibration.charuco_board_detection import ( + create_ideal_charuco_image, + detect_charuco_corners, + get_relative_viewpoints_from_board_pose_and_param, + multistereo_calibration_charuco, ) logger = logging.getLogger(__name__) -SPOT_DEFAULT_ARUCO_DICT = cv2.aruco.getPredefinedDictionary(cv2.aruco.DICT_4X4_50) -OPENCV_VERSION = tuple(map(int, cv2.__version__.split("."))) -OPENCV_CHARUCO_LIBRARY_CHANGE_VERSION = (4, 7, 0) - - -def create_charuco_board( - num_checkers_width: int, - num_checkers_height: int, - checker_dim: float, - marker_dim: float, - aruco_dict: cv2.aruco_Dictionary, - legacy: bool = True, -) -> cv2.aruco_CharucoBoard: - """ - Create a Charuco board using the provided parameters and Aruco dictionary. - Issues a deprecation warning if using the older 'CharucoBoard_create' method. - - Args: - num_checkers_width (int): Number of checkers along the width of the board. - num_checkers_height (int): Number of checkers along the height of the board. - checker_dim (float): Size of the checker squares. - marker_dim (float): Size of the Aruco marker squares. - aruco_dict (cv2.aruco_Dictionary): The Aruco dictionary to use for marker generation. - - Returns: - charuco (cv2.aruco_CharucoBoard): The generated Charuco board. 
- """ - - opencv_version = tuple(map(int, cv2.__version__.split("."))) - - if opencv_version < (4, 7, 0): - logger.warning( - ( - "Creating Charuco Board..." - "You're using an older version of OpenCV requires the additional OpenCV Modules. " - "This will not work without the additional modules (opencv-contrib-python). " - "Consider upgrading to OpenCV >= 4.7.0 to enable " - "the use of this tool with base opencv (opencv-python) " - "without relying on additional modules." - ), - ) - - # Create Charuco board using the older method - charuco = cv2.aruco.CharucoBoard_create( - num_checkers_width, - num_checkers_height, - checker_dim, - marker_dim, - aruco_dict, - ) - - else: - # Create Charuco board using the newer method - charuco = cv2.aruco_CharucoBoard( - (num_checkers_width, num_checkers_height), - checker_dim, - marker_dim, - aruco_dict, - ) - charuco.setLegacyPattern(legacy) - - return charuco - - -SPOT_DEFAULT_CHARUCO = create_charuco_board( - num_checkers_width=9, - num_checkers_height=4, - checker_dim=0.115, - marker_dim=0.09, - aruco_dict=SPOT_DEFAULT_ARUCO_DICT, - legacy=True, -) - - -def get_multiple_perspective_camera_calibration_dataset( - auto_cam_cal_robot: AutomaticCameraCalibrationRobot, - max_num_images: int = 10000, - distances_x: Optional[np.ndarray] = None, - distances_z: Optional[np.ndarray] = None, - x_axis_rots: Optional[np.ndarray] = None, - y_axis_rots: Optional[np.ndarray] = None, - z_axis_rots: Optional[np.ndarray] = None, - use_degrees: bool = True, - settle_time: float = 0.1, - data_path: str = os.path.expanduser("~"), - save_data: Optional[bool] = True, -) -> Tuple[np.ndarray, np.ndarray]: - """ - Move the robot around to look at the calibration target from different viewpoints, - capturing time-synchronized - consistent-ordering images from every camera at each viewpoint. - - Args: - auto_cam_cal_robot (AutomaticCameraCalibrationRobot): The robot to automatically - calibrate. 
- max_num_images (int, optional): The maximum amount of images to take with EACH camera, - (so max total number of captures) cuts out the calibration early if this number is - reached prior to all viewpoints being reached . Defaults to 100. - distances (np.ndarray, optional): What distances - away from the calibration board's pose (along the Z axis) - to sample calibration viewpoints from. Defaults to None. - x_axis_rots (np.ndarray, optional): What - x-axis rotations relative to the camera viewing the board orthogonally - to apply to sample viewpoints. Defaults to None. - y_axis_rots (np.ndarray, optional): What - y-axis rotations relative to the camera viewing the board orthogonally - to apply to sample viewpoints. Defaults to None. - z_axis_rots (np.ndarray, optional): What - z-axis rotations relative to the camera viewing the board orthogonally - to apply to sample viewpoints. Defaults to None. - use_degrees (bool, optional): Whether to use degrees for the rotations - about the axis to sample viewpoints from. Defaults to True. - settle_time (float, optional): How long to wait in seconds after moving the robot - to a new viewpoint prior to taking a picture. Defaults to 0.1. - data_path (str, optional): where to save - the calibration dataset, if it's being saved. Defaults to os.path.expanduser("~"). - save_data (Optional[bool], optional): whether to save the calibration - dataset. Defaults to True. - - Returns: - np.ndarray: A list of lists, where the internal list is a time-synchronized - camera-ordering-consistent list of images of the calibration target. 
- """ - primed_pose = auto_cam_cal_robot.move_cameras_to_see_calibration_target() - logger.info("Primed arm...") - sleep(settle_time) - images = auto_cam_cal_robot.capture_images() - capture_data = {} - capture_data["primed_images"] = images - capture_data["primed_pose"] = primed_pose - - R_vision_to_target, tvec_vision_to_target = auto_cam_cal_robot.localize_target_to_principal_camera(images) - viewpoints = get_relative_viewpoints_from_board_pose_and_param( - R_vision_to_target, - tvec_vision_to_target, - distances_x=distances_x, - distances_z=distances_z, - x_axis_rots=x_axis_rots, - y_axis_rots=y_axis_rots, - z_axis_rots=z_axis_rots, - degree_offset_rotations=use_degrees, - ) - calibration_images = [] - logger.info("Beginning Calibration") - idx = 0 - poses = [] - while idx < max_num_images and idx < len(viewpoints): - logger.info(f"Visiting viewpoint {idx+1} of {min(len(viewpoints), max_num_images)}") - viewpoint = viewpoints[idx] - initial_pose, new_pose = auto_cam_cal_robot.offset_cameras_from_current_view( - transform_offset=viewpoint, - origin_t_planning_frame=primed_pose, - duration_sec=0.1, - ) - poses.append(new_pose) - logger.info("At viewpoint, waiting to settle") - sleep(settle_time) - images = auto_cam_cal_robot.capture_images() - logger.info("Snapped pics ;)") - calibration_images.append(images) - idx = len(calibration_images) - if save_data: - if idx == 1: - create_calibration_save_folders(data_path, len(images)) - logger.info(f"Saving image batch {idx+1}") - for jdx, image in enumerate(images): - cv2.imwrite( - os.path.join(data_path, str(jdx), f"{idx+1}.png"), - image, - ) - np.save(os.path.join(data_path, "poses", f"{idx+1}.npy"), new_pose) - calibration_images = np.array(calibration_images, dtype=object) - return (np.array(calibration_images, dtype=object), poses) - - -def multistereo_calibration_charuco( - images: np.ndarray, - desired_stereo_pairs: Optional[List[Tuple[int, int]]] = None, - charuco_board: cv2.aruco_CharucoBoard = 
SPOT_DEFAULT_CHARUCO, - aruco_dict: cv2.aruco_Dictionary = SPOT_DEFAULT_ARUCO_DICT, - camera_matrices: Optional[Dict] = {}, - dist_coeffs: Optional[Dict] = {}, - poses: Union[np.ndarray, None] = None, -) -> Dict: - """ - Calibrates the intrinsic and extrinsic parameters for multiple stereo camera pairs - using Charuco board markers. This function performs stereo calibration on a series of - time-synchronized images - captured by multiple cameras. It uses a Charuco board for calibration and returns - the calibrated intrinsic and extrinsic parameters for the specified stereo camera pairs. - - Args: - images (np.ndarray): A 2D array of time-synchronized camera captures. The array should - be of shape (n, m), where 'n' represents the number of time-synchronized captures - and 'm' represents the number of cameras. Each internal list corresponds to a set - of images captured at the same time across multiple cameras. - - desired_stereo_pairs (Optional[List[Tuple[int, int]]], optional): A list of tuples - specifying the pairs of camera indices to be calibrated. The indices refer to the - cameras based on their positions in the `images` array. For example, index `0` - corresponds to the first camera, index `1` to the second camera, and so forth, - up to `n-1` (where `n` is the number of cameras). - If None, all cameras are registered to the first camera (index `0`). Defaults to None. - - The relationship between `desired_stereo_pairs` and `images` is crucial. Each tuple in - `desired_stereo_pairs` should be in the form - `(origin_camera_idx, reference_camera_idx)`, - where `origin_camera_idx` and `reference_camera_idx` correspond to the - indices of the cameras in the `images` array. For each stereo pair, - the function attempts to calibrate the `origin_camera_idx` - relative to the `reference_camera_idx`. If no pairs are provided, - the function defaults to registering - all cameras to the first camera (index `0`). 
- - charuco_board (cv2.aruco_CharucoBoard, optional): The Charuco board configuration used - for calibration. Defaults to SPOT_DEFAULT_CHARUCO. - - aruco_dict (cv2.aruco_Dictionary, optional): The Aruco dictionary used - to detect the markers. Defaults to SPOT_DEFAULT_ARUCO_DICT. - - camera_matrices (Optional[Dict], optional): A dictionary mapping camera indices - to their existing camera matrices. If a matrix is not provided for a camera, - it will be computed during calibration - if that camera is part of a stereo pair. Defaults to an empty dictionary. - - dist_coeffs (Optional[Dict], optional): A dictionary mapping camera indices - to their distortion coefficients. - If coefficients are not provided for a camera, they will be computed during calibration. - Defaults to an empty dictionary. - poses (Union[np.ndarray, None]): Either a list of 4x4 homogenous transforms from which - pictures where taken, or None if unknown. Needs to be supplied for robot to camera cal. - (planning frame to base frame), or None - - Raises: - ValueError: Raised if fewer than two cameras are provided, as stereo calibration - requires at least two cameras. - ValueError: Raised if a stereo pair includes the same camera twice. - ValueError: Raised if a camera in a stereo pair cannot be calibrated. - - Returns: - Dict: A dictionary containing the calibrated intrinsic and extrinsic parameters - for the specified stereo pairs. - The keys are formatted as "origin_camera_idx_reference_camera_idx", - and the values contain the calibration data. - - """ - calibration = {} - num_cams = images.shape[1] - if num_cams == 0 or num_cams == 1: - raise ValueError("Stereo Madness requires more than one camera. 
Use the monocular method instead.") - else: # More than one camera - Multi-Stereo Madness ;p - for idx in range(num_cams): - if idx not in camera_matrices: - camera_matrices[idx] = None - if idx not in dist_coeffs: - dist_coeffs[idx] = None - - if desired_stereo_pairs is None: - desired_stereo_pairs = [] - for idx in range(1, num_cams): - """ - Desired stereo pairs is a mapping, where the 0th index is the origin (base) - frame, where the reference frame is defined within the origin (base) frame's - coordinate system. The reference frame is the 1st index. - - By Default, if desired_stereo_pairs is None, - then each camera's intrinsic is defined in the 0th camera's frame, - so this is set as the origin for all stereo calibrations - """ - desired_stereo_pairs.append((0, idx)) # just register all in 0th cam frame - - for idx, pair in enumerate(desired_stereo_pairs): - for index in pair: - if index < 0 or index > num_cams: - raise ValueError( - f"Requested to stereo register non-existent camera index {index} out of {num_cams}" - ) - origin_camera_idx = pair[0] - reference_camera_idx = pair[1] - - if pair[0] == pair[1]: - logging.warning( - "You requested to register a camera to itself. Ignoring,and continuing to the next stereo pair." 
- ) - continue - logging.info( - f"Attempting to register {origin_camera_idx}" - f"to {reference_camera_idx}, pair {idx} of" - f" {len(desired_stereo_pairs)}" - ) - try: - key = str(origin_camera_idx) + "_" + str(reference_camera_idx) - stereo_dict = stereo_calibration_charuco( - origin_images=images[:, origin_camera_idx], - reference_images=images[:, reference_camera_idx], - charuco_board=charuco_board, - aruco_dict=aruco_dict, - camera_matrix_origin=camera_matrices[origin_camera_idx], - dist_coeffs_origin=dist_coeffs[origin_camera_idx], - camera_matrix_reference=camera_matrices[reference_camera_idx], - dist_coeffs_reference=dist_coeffs[reference_camera_idx], - poses=poses, - ) - camera_matrices[origin_camera_idx] = stereo_dict["camera_matrix_origin"] - dist_coeffs[origin_camera_idx] = stereo_dict["dist_coeffs_origin"] - camera_matrices[reference_camera_idx] = stereo_dict["camera_matrix_reference"] - dist_coeffs[reference_camera_idx] = stereo_dict["dist_coeffs_reference"] - - calibration[key] = stereo_dict - except ValueError as ve: # maybe could keep going here instead for partial calibration? 
- raise ValueError(f"Failed to calibrate pair {pair} : {ve}") - - return calibration - +directories = ["parent", "child", "poses", "depth"] + + +class CalibrationResults(TypedDict): + dist_coeffs_origin: np.ndarray + camera_matrix_origin: np.ndarray + image_dim_origin: np.ndarray + dist_coeffs_reference: np.ndarray + camera_matrix_reference: np.ndarray + image_dim_reference: np.ndarray + R: np.ndarray + T: np.ndarray + R_handeye: Optional[np.ndarray] + T_handeye: Optional[np.ndarray] + average_reprojection_error: float + + +def camera_info_to_dict(camera_info: CameraInfo, camera_name: str) -> dict[str, Any]: + return { + "image_width": camera_info.width, + "image_height": camera_info.height, + "camera_name": camera_name, + "camera_matrix": {"rows": 3, "cols": 3, "data": camera_info.k}, + "distortion_model": camera_info.distortion_model, + "distortion_coefficients": {"rows": 1, "cols": len(camera_info.d), "data": camera_info.d}, + "rectification_matrix": {"rows": 3, "cols": 3, "data": camera_info.r}, + "projection_matrix": {"rows": 3, "cols": 4, "data": camera_info.p}, + "binning_x": camera_info.binning_x, + "binning_y": camera_info.binning_y, + "roi": { + "x_offset": camera_info.roi.x_offset, + "y_offset": camera_info.roi.y_offset, + "height": camera_info.roi.height, + "width": camera_info.roi.width, + "do_rectify": camera_info.roi.do_rectify, + }, + } -def get_correlation_map_to_enforce_ascending_ids_from_bottom_left_corner( - all_charuco_corners: List, - all_charuco_ids: List, - charuco_board: cv2.aruco_CharucoBoard, -) -> List: - """ - This is needed only for OpenCV versions > 4.7.0. Basically, - this determines the needed correlation - to ensure that internal corners are numbered left to right bottom - up as opposed to left to right top down. 
- For more info see https://github.com/opencv/opencv/issues/26126 +def save_CameraInfo_2_file(msg: CameraInfo, camera_name: str, file_path: Path) -> None: + """Saves a CameraInfo message to a YAML file.""" + cam_info_msg_dict = camera_info_to_dict(camera_info=msg, camera_name=camera_name) - Args: - all_charuco_corners (List): All charuco corners of the board. - all_charuco_ids (List): All charuco ids of the board, as returned by the Charuco detector - for an ideal board with a full view. Expected to be a list ascending - from 0 but could be otherwise - charuco_board (cv2.aruco_CharucoBoard): the charuco board to utilize - to construct the correlation map + with open(file_path, "w") as f: + yaml.dump(cam_info_msg_dict, f, default_flow_style=False) - Returns: - List: the correlation map where the indicies represent to the original - ordering of corners, and the values at each index represent the new - ordering index of a corner. Can be used to ensure bottom up - left to right ordering of internal corners. - """ - num_checker_width, num_checker_height = charuco_board.getChessboardSize() - num_interior_corners = (num_checker_width - 1) * (num_checker_height - 1) - correlation_map = np.array([i for i in range(num_interior_corners)]) - # Adjust indexing to account for nested arrays - first_corner_height = all_charuco_corners[all_charuco_ids[0][0]][0][1] - last_corner_height = all_charuco_corners[all_charuco_ids[-1][0]][0][1] - row_num_a = 0 - row_num_b = num_checker_height - 2 - - if first_corner_height < last_corner_height: - logger.warning( - "Detected Charuco Configuration " - "where internal corners (detected checker corners) are numbered top down, " - "(left to right) as opposed to bottom up (left to right). " - "Enforcing bottom up numbering instead. 
" - "This ensures that the Z axis points out of the board " - "As opposed to -180 degrees about the X axis " - "relative to the Z out of the board" - ) - else: - logger.warning( - "You have selected to enforce ascending ids from the bottom left corner " - "But it seems as if your ids are already in that form " - "Returning identity correlation map" - ) - return [int(mapping) for mapping in correlation_map] +def load_CameraInfo_from_file(file_path: Path) -> CameraInfo: + """Loads a CameraInfo message from a YAML file.""" + logging.info(f"Loading CameraInfo from {file_path}") + with open(file_path, "r") as f: + cam_info_msg_dict = yaml.unsafe_load(f) - while row_num_a < row_num_b: - row_num_a_correlation_slice = slice( - row_num_a * (num_checker_width - 1), (row_num_a * (num_checker_width - 1) + (num_checker_width - 1)), 1 - ) + cam_info_msg = CameraInfo() + cam_info_msg.width = cam_info_msg_dict["image_width"] + cam_info_msg.height = cam_info_msg_dict["image_height"] + cam_info_msg.header.frame_id = cam_info_msg_dict["camera_name"] + cam_info_msg.k = cam_info_msg_dict["camera_matrix"]["data"] + cam_info_msg.distortion_model = cam_info_msg_dict["distortion_model"] + cam_info_msg.d = cam_info_msg_dict["distortion_coefficients"]["data"] + cam_info_msg.r = cam_info_msg_dict["rectification_matrix"]["data"] + cam_info_msg.p = cam_info_msg_dict["projection_matrix"]["data"] + cam_info_msg.binning_x = cam_info_msg_dict["binning_x"] + cam_info_msg.binning_y = cam_info_msg_dict["binning_y"] + roi_dict = cam_info_msg_dict["roi"] + cam_info_msg.roi.x_offset = roi_dict["x_offset"] + cam_info_msg.roi.y_offset = roi_dict["y_offset"] + cam_info_msg.roi.height = roi_dict["height"] + cam_info_msg.roi.width = roi_dict["width"] + cam_info_msg.roi.do_rectify = roi_dict["do_rectify"] - row_num_b_correlation_slice = slice( - row_num_b * (num_checker_width - 1), ((row_num_b * (num_checker_width - 1)) + (num_checker_width - 1)), 1 - ) - # Record A - precopy_row_a = 
deepcopy(correlation_map[row_num_a_correlation_slice]) - # Copy B onto A - correlation_map[row_num_a_correlation_slice] = correlation_map[row_num_b_correlation_slice] - # copy old A into B - correlation_map[row_num_b_correlation_slice] = precopy_row_a - row_num_a += 1 - row_num_b -= 1 - return [int(mapping) for mapping in correlation_map] - - -def detect_charuco_corners( - img: np.ndarray, - charuco_board: cv2.aruco_CharucoBoard, - aruco_dict: cv2.aruco_Dictionary, - enforce_ascending_ids_from_bottom_left_corner: Union[bool, None] = None, -) -> Tuple[Optional[np.ndarray], Optional[np.ndarray]]: - """ - Detect the Charuco Corners and their IDs in an image, with support for OpenCV versions before and after 4.7.0. + return cam_info_msg - Args: - img (np.ndarray): The image to find Charuco corners in. - charuco_board (cv2.aruco_CharucoBoard, optional): The Charuco board to look for. - Defaults to SPOT_DEFAULT_CHARUCO. - aruco_dict (cv2.aruco_Dictionary, optional): The Aruco dictionary to use. - Defaults to SPOT_DEFAULT_ARUCO_DICT. - enforce_ascending_ids_from_bottom_left_corner (Union[bool, None]): whether to - ensure that internal charuco corners are numbered left to right bottom up - (only ensures bottom up, assumes already left to right). - Returns: - Tuple[Optional[np.ndarray], Optional[np.ndarray]]: The detected corners and their IDs, - or (None, None) if not found. 
+def load_images_from_path(path: Path) -> Dict[str, Dict[str, np.ndarray]]: """ - # Convert the image to grayscale if it's not already - if len(img.shape) == 2: - gray = img - else: - gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) - - if OPENCV_VERSION < OPENCV_CHARUCO_LIBRARY_CHANGE_VERSION: - # Older OpenCV version - corners, ids, _ = cv2.aruco.detectMarkers(gray, aruco_dict) - if ids is not None: - _, charuco_corners, charuco_ids = cv2.aruco.interpolateCornersCharuco( - corners, ids, gray, charuco_board, minMarkers=0 - ) - return charuco_corners, charuco_ids - else: - return None, None - else: - # Newer OpenCV version with charuco_detector - detector_params = cv2.aruco.CharucoParameters() - detector_params.minMarkers = 0 - detector_params.tryRefineMarkers = True - charuco_detector = cv2.aruco.CharucoDetector(charuco_board, detector_params) - charuco_detector.setBoard(charuco_board) - charuco_corners, charuco_ids, _, _ = charuco_detector.detectBoard(gray) - - if charuco_ids is None: - return None, None - - enforce_ids = enforce_ascending_ids_from_bottom_left_corner - if enforce_ids is not None and hasattr(detect_charuco_corners, "enforce_ids"): - logger.warning( - "Previously, for detecting internal charuco corners, the enforce " - "ascending from bottom left corner id policy was set to: " - f"{detect_charuco_corners.enforce_ids}" - f"it will now be set to {enforce_ids}" - ) - detect_charuco_corners.enforce_ids = enforce_ids - elif enforce_ids is not None and not hasattr(detect_charuco_corners, "enforce_ids"): - logger.warning( - "For detecting charuco corners, the enforce " - "ascending from bottom left corner id policy is set to: " - f"{enforce_ids}. For this call, and future calls until set otherwise." - ) - detect_charuco_corners.enforce_ids = enforce_ids - - # Create the identity correlation map... 
- num_checker_width, num_checker_height = charuco_board.getChessboardSize() - num_interior_corners = (num_checker_width - 1) * (num_checker_height - 1) - correlation_map = np.array([i for i in range(num_interior_corners)]) - - if ( - hasattr(detect_charuco_corners, "enforce_ids") - and detect_charuco_corners.enforce_ids - and not hasattr(detect_charuco_corners, "corr_map") - ): # correlation map not computed - ideal_charuco = charuco_board.generateImage(outSize=(1000, 1000)) - all_charuco_corners, all_charuco_ids, _, _ = charuco_detector.detectBoard(ideal_charuco) - - detect_charuco_corners.corr_map = get_correlation_map_to_enforce_ascending_ids_from_bottom_left_corner( - all_charuco_corners, all_charuco_ids, charuco_board - ) - - if ( - hasattr(detect_charuco_corners, "enforce_ids") - and detect_charuco_corners.enforce_ids - and hasattr(detect_charuco_corners, "corr_map") - ): # correlation map computed - logger.warning("Using cached correlation map to order IDs") - correlation_map = detect_charuco_corners.corr_map # grab the map - - reworked_charuco_ids = [] - for charuco_id in charuco_ids: - reworked_charuco_ids.append([correlation_map[charuco_id[0]]]) - - return charuco_corners, reworked_charuco_ids - - -def get_charuco_board_object_points( - charuco_board: cv2.aruco_CharucoBoard, - corners_ids: Union[List, np.ndarray], -) -> np.ndarray: - """ - From a charuco board, and corner ids, generate the object points for use - in a calibration sequence to solve the perspective and point problem. + Load images dataset from path in a way that's compatible with multistereo_calibration_charuco. - Args: - charuco_board (cv2.aruco_CharucoBoard): the charuco board - to generate object points for - corners_ids (Union[List, np.ndarray]): which corner ids to generate points for + Also, load the poses if they are available. 
- Returns: - np.ndarray: the object points - """ - if OPENCV_VERSION < OPENCV_CHARUCO_LIBRARY_CHANGE_VERSION: - corners = charuco_board.chessboardCorners - else: - corners = charuco_board.getChessboardCorners() - object_points = [] - for idx in corners_ids: - object_points.append(corners[idx]) - return np.array(object_points, dtype=np.float32) - - -def calibrate_single_camera_charuco( - images: List[np.ndarray], - charuco_board: cv2.aruco_CharucoBoard = SPOT_DEFAULT_CHARUCO, - aruco_dict: cv2.aruco_Dictionary = SPOT_DEFAULT_ARUCO_DICT, - debug_str: str = "", -) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]: - """ - Calibrate a monocular camera from a charuco board. + See Using the CLI Tool To Calibrate On an Existing Dataset section in the README + to see the expected folder/data structure for this method to work Args: - images (List[np.ndarray]): A list of images of the board - charuco_board (cv2.aruco_CharucoBoard, optional): which - board to look for in the images. Defaults to SPOT_DEFAULT_CHARUCO. - aruco_dict (cv2.aruco_Dictionary, optional): which - aruco dict to look for in the images. Defaults to SPOT_DEFAULT_ARUCO_DICT. - debug_str (str, optional): what to add to the logging - to differentiate this monocular calibration from others, for the sake - of stereo calibrations. Defaults to "". 
- - Raises: - ValueError: Not enough data to calibrate - - Returns: - Tuple[np.ndarray, np.ndarray]: the camera matrix, distortion coefficients, - rotation matrices, tvecs - """ - all_corners = [] - all_ids = [] - img_size = None - for idx, img in enumerate(images): - if img_size is None: - img_size = img.shape[:2][::-1] - - charuco_corners, charuco_ids = detect_charuco_corners(img, charuco_board, aruco_dict) - - if charuco_corners is not None and len(charuco_corners) >= 8: - all_corners.append(charuco_corners) - all_ids.append(charuco_ids) - else: - logger.warning(f"Not enough corners detected in image index {idx} {debug_str}; ignoring") - - if len(all_corners) > 1: - obj_points_all = [] - img_points = [] - ids = [] - for corners, ids in zip(all_corners, all_ids): - obj_points = get_charuco_board_object_points(charuco_board, ids) - obj_points_all.append(obj_points) - img_points.append(corners) - logger.info(f"About to process {len(obj_points_all)} points for single camera cal | {debug_str}") - # Long term improvement: could pull tvec for use to localize camera relative to robot? 
- _, camera_matrix, dist_coeffs, rvecs, tvecs = cv2.calibrateCamera( # use LU flag critical for speed - obj_points_all, - img_points, - img_size, - None, - None, - flags=cv2.CALIB_USE_LU, - ) - rmats = np.array([cv2.Rodrigues(rvec)[0] for rvec in rvecs]) - return camera_matrix, dist_coeffs, rmats, tvecs - else: - raise ValueError(f"Not enough valid points to individually calibrate {debug_str}") - - -def stereo_calibration_charuco( - origin_images: List[np.ndarray], - reference_images: List[np.ndarray], - charuco_board: cv2.aruco_CharucoBoard = SPOT_DEFAULT_CHARUCO, - aruco_dict: cv2.aruco_Dictionary = SPOT_DEFAULT_ARUCO_DICT, - camera_matrix_origin: Optional[np.ndarray] = None, - dist_coeffs_origin: Optional[np.ndarray] = None, - camera_matrix_reference: Optional[np.ndarray] = None, - dist_coeffs_reference: Optional[np.ndarray] = None, - poses: Union[np.ndarray, None] = None, -) -> Dict: - """ - Perform a stereo calibration from a set of synchronized stereo images of a charuco calibration - board. + path (str): The parent path - Args: - origin_images (List[np.ndarray]): A list of images synced to reference images from camera 0 - reference_images (List[np.ndarray]): A list of images synced to origin images from camera 1 - charuco_board (cv2.aruco_CharucoBoard, optional): What charuco board to - use for the cal. Defaults to SPOT_DEFAULT_CHARUCO. - aruco_dict (cv2.aruco_Dictionary, optional): What aruco board to use for the cal. - Defaults to SPOT_DEFAULT_ARUCO_DICT. - camera_matrix_origin (Optional[np.ndarray], optional): What camera - matrix to assign to camera 0. If none, is computed. Defaults to None. - dist_coeffs_origin (Optional[np.ndarray], optional): What distortion coefficients - to assign to camera 0. If None, is computed. Defaults to None. - camera_matrix_reference (Optional[np.ndarray], optional): What camera - matrix to assign to camera 1. If none, is computed. . Defaults to None. 
- dist_coeffs_reference (Optional[np.ndarray], optional): What distortion coefficients - to assign to camera 1. If None, is computed. Defaults to None. - poses (Union[np.ndarray, None]): Either a list of 4x4 homogenous transforms from which - pictures where taken, or None if unknown. Needs to be supplied for robot to camera cal. - (planning frame to base frame), or None Raises: - ValueError: Could not calibrate a camera individually - ValueError: Not enough points to stereo calibrate - ValueError: Could not stereo calibrate + ValueError: Not possible to load the images Returns: - Dict: _description_ + dict[str, np.ndarray]: The image dataset """ - if camera_matrix_origin is None or dist_coeffs_origin is None: - logger.info("Calibrating Origin Camera individually") - (camera_matrix_origin, dist_coeffs_origin, rmats_origin, tvecs_origin) = calibrate_single_camera_charuco( - images=origin_images, - charuco_board=charuco_board, - aruco_dict=aruco_dict, - debug_str="for origin camera", - ) - if camera_matrix_reference is None or dist_coeffs_origin is None: - logger.info("Calibrating reference Camera individually ") - (camera_matrix_reference, dist_coeffs_reference, rmats_reference, tvecs_reference) = ( - calibrate_single_camera_charuco( - images=reference_images, - charuco_board=charuco_board, - aruco_dict=aruco_dict, - debug_str="for reference camera", - ) - ) - if camera_matrix_origin is None or camera_matrix_reference is None: - raise ValueError("Could not calibrate one of the cameras as desired") - - all_corners_origin = [] - all_corners_reference = [] - all_ids_origin = [] - all_ids_reference = [] - img_size = None - - no_poses = poses is None - if no_poses: - poses = [x for x in range(len(origin_images))] - else: - filtered_poses = [] - - no_poses = poses is None - if no_poses: # fill up poses with dummy values so that you can iterate over poses - # with images zip(origin_images, reference_images, poses) together regardless of if poses - # are actually supplied 
(for the sake of brevity) - poses = [x for x in range(len(origin_images))] - else: - filtered_poses = [] - - for origin_img, reference_img, pose in zip(origin_images, reference_images, poses): - if img_size is None: - img_size = origin_img.shape[:2][::-1] - - origin_charuco_corners, origin_charuco_ids = detect_charuco_corners(origin_img, charuco_board, aruco_dict) - reference_charuco_corners, reference_charuco_ids = detect_charuco_corners( - reference_img, charuco_board, aruco_dict + def alpha_numeric(x: str) -> Any: + matcha = re.search("(\\d+)(?=\\D*$)", x) + # \\d+ Matches one or more digits (0-9), + # \\D* Matches zero or more non-digit characters, + # $ asserts position at the end of the string. + if matcha: + return int(matcha.group()) + return x + + def load_images_from_dir(path: Path) -> Dict[str, np.ndarray]: + print(f"-----------------------Loading images from {path}") + files = sorted( + glob(os.path.join(path, "*.png")), + key=alpha_numeric, ) - - if not no_poses: - filtered_poses.append(pose) - if origin_charuco_corners is not None and reference_charuco_corners is not None: - all_corners_origin.append(origin_charuco_corners) - all_corners_reference.append(reference_charuco_corners) - all_ids_origin.append(origin_charuco_ids) - all_ids_reference.append(reference_charuco_ids) - - if len(all_corners_origin) > 0: - obj_points_all = [] - img_points_origin = [] - img_points_reference = [] - for ( - origin_corners, - reference_corners, - origin_ids, - reference_ids, - ) in zip( - all_corners_origin, - all_corners_reference, - all_ids_origin, - all_ids_reference, - ): - common_ids = np.intersect1d(origin_ids, reference_ids) - if len(common_ids) >= 6: # Ensure there are at least 6 points - obj_points = get_charuco_board_object_points(charuco_board, common_ids) - obj_points_all.append(obj_points) - img_points_origin.append(origin_corners[np.isin(origin_ids, common_ids)]) - img_points_reference.append(reference_corners[np.isin(reference_ids, common_ids)]) - - if 
len(obj_points_all) > 0: - logger.info(f"Collected {len(obj_points_all)} shared point sets for stereo calibration.") - _, _, _, _, _, R, T, _, _ = cv2.stereoCalibrate( - obj_points_all, - img_points_origin, - img_points_reference, - camera_matrix_origin, - dist_coeffs_origin, - camera_matrix_reference, - dist_coeffs_reference, - img_size, - criteria=( - cv2.TERM_CRITERIA_MAX_ITER + cv2.TERM_CRITERIA_EPS, - 100, - 1e-6, - ), - flags=cv2.CALIB_USE_LU, - ) - logger.info("Stereo calibration completed.") - result_dict = { - "dist_coeffs_origin": dist_coeffs_origin, - "camera_matrix_origin": camera_matrix_origin, - "image_dim_origin": np.array(origin_images[0].shape[:2]), - "dist_coeffs_reference": dist_coeffs_reference, - "camera_matrix_reference": camera_matrix_reference, - "image_dim_reference": np.array(reference_images[0].shape[:2]), - "R": R, - "T": T, + try: + return { + Path(fn).name: cv2.imread(fn, cv2.IMREAD_GRAYSCALE).astype(np.uint8) + for fn in files + if fn.lower().endswith(".png") } + except Exception as e: + logging.error(f"Error loading images from {files}: {e}") + return {} - if not no_poses: - logger.info("Attempting hand-eye calibation....") - # filtered_poses = np.array([np.linalg.inv(pose) for pose in filtered_poses]) - filtered_poses = np.array(filtered_poses) - # Use the filtered poses for the target-to-camera transformation - R_gripper2base = np.array([pose[:3, :3] for pose in filtered_poses]) - t_gripper2base = np.array([pose[:3, 3] for pose in filtered_poses]) - - R_handeye, T_handeye = cv2.calibrateHandEye( - R_gripper2base=R_gripper2base, - t_gripper2base=t_gripper2base, - R_target2cam=rmats_origin, - t_target2cam=tvecs_origin, - method=cv2.CALIB_HAND_EYE_DANIILIDIS, - ) - robot_to_cam = np.eye(4) # Initialize 4x4 identity matrix - robot_to_cam[:3, :3] = R_handeye # Populate rotation - robot_to_cam[:3, 3] = T_handeye.flatten() # Populate translation - - # Invert the homogeneous matrix - cam_to_robot = np.linalg.inv(robot_to_cam) - - # Extract 
rotation and translation from the inverted matrix - camera_to_robot_R = cam_to_robot[:3, :3] # Extract rotation - camera_to_robot_T = cam_to_robot[:3, 3] # Extract translation - logger.info("Hand-eye calibration completed.") - - # Add the hand-eye calibration results to the final result dictionary - result_dict["R_handeye"] = camera_to_robot_R - result_dict["T_handeye"] = camera_to_robot_T - return result_dict - else: - raise ValueError("Not enough valid points for stereo calibration.") - else: - raise ValueError("Not enough shared points for stereo calibration.") + # Initialize an empty dict to store images + images = dict() + # directories we care about here + parent_path = os.path.join(path, "parent") + child_path = os.path.join(path, "child") -def est_camera_t_charuco_board_center( - img: np.ndarray, - charuco_board: cv2.aruco_CharucoBoard, - aruco_dict: cv2.aruco_Dictionary, - camera_matrix: np.ndarray, - dist_coeffs: np.ndarray, -) -> Tuple[np.ndarray, np.ndarray]: - """ - Localizes the 6D pose of the checkerboard center using Charuco corners with OpenCV's solvePnP. + # load images from both directories + images["parent"] = load_images_from_dir(Path(parent_path)) + images["child"] = load_images_from_dir(Path(child_path)) - The board pose's translation should be at the center of the board, with the orientation in OpenCV format, - where the +Z points out of the board with the other axes being parallel to the sides of the board. + return images - Args: - img (np.ndarray): The input image containing the checkerboard. - charuco_board (cv2.aruco_CharucoBoard): The Charuco board configuration. - aruco_dict (cv2.aruco_Dictionary): The Aruco dictionary used to detect markers. - camera_matrix (np.ndarray): The camera matrix from calibration. - dist_coeffs (np.ndarray): The distortion coefficients from calibration. 
- Returns: - Optional[Tuple[np.ndarray, np.ndarray]]: The rotation vector and translation vector - representing the 6D pose of the checkerboard center if found, else None. +def load_calibration_parameters(input_path: Path) -> CalibrationResults: """ - charuco_corners, charuco_ids = detect_charuco_corners(img, charuco_board, aruco_dict) - if charuco_corners is not None and charuco_ids is not None: - object_points = get_charuco_board_object_points(charuco_board, charuco_ids) - image_points = charuco_corners - - # Use solvePnP to estimate the pose of the Charuco board - success, rvec, tvec = cv2.solvePnP(object_points, np.array(image_points), camera_matrix, dist_coeffs) - - if success: - # Convert to the camera frame: Adjust the translation vector to be relative to the center of the board - x_trans = (charuco_board.getSquareLength() * charuco_board.getChessboardSize()[0]) / 2.0 - y_trans = (charuco_board.getSquareLength() * charuco_board.getChessboardSize()[1]) / 2.0 - center_trans = np.array([x_trans, y_trans, 0.0]).reshape((3, 1)) - rmat, _ = cv2.Rodrigues(rvec) - - tvec = tvec + rmat.dot(center_trans) - tvec_to_camera = tvec - return np.array(rmat), np.array(tvec_to_camera).ravel() - else: - raise ValueError("Pose estimation failed. You likely primed the robot too close to the board.") - else: - raise ValueError( - "Couldn't detect any Charuco boards in the image, " - "localization failed. Ensure the board is visible from the" - " primed pose." - ) - - -def convert_camera_t_viewpoint_to_origin_t_planning_frame( - origin_t_planning_frame: np.ndarray = np.eye(4), - planning_frame_t_opencv_camera: np.ndarray = np.eye(4), - opencv_camera_t_viewpoint: np.ndarray = np.eye(4), -) -> np.ndarray: - """ - Convert a camera viewpoint given in a camera frame, to the planning frame. - - Camera frame should be in OpenCV/ ROS format, where - +x should point to the right in the image - +y should point down in the image - +z should point into to plane of the image. 
- - Assuming that origin_t_planning_frame isn't aligned with this format, align it - so that the viewpoint can be localized as a hand pose command. + Load calibration parameters from a YAML file. Args: - origin_t_planning_frame (np.ndarray, optional): 4x4 homogenous transform - from origin to planning frame. Defaults to np.eye(4). - planning_frame_t_opencv_camera (np.ndarray, optional): 4x4 homogenous transform - from planning frame to the camera. Defaults to np.eye(4). - opencv_camera_t_viewpoint (np.ndarray, optional): 4x4 homogenous transform - from camera to the future viewpoint. Defaults to np.eye(4). - + input_path (Path): The path to the YAML file containing calibration parameters. Returns: - np.ndarray: 4x4 homogenous transform of the planning frame of a future viewpoint - expressed in the origin frame. + CalibrationResults: The loaded calibration parameters. + Throws: + FileNotFoundError: If the specified file does not exist. + KeyError: If required keys are missing in the YAML file. 
""" - origin_t_opencv_camera_pose = origin_t_planning_frame @ planning_frame_t_opencv_camera - origin_t_viewpoint = origin_t_opencv_camera_pose @ opencv_camera_t_viewpoint - origin_t_planning_frame_next = origin_t_viewpoint @ np.linalg.inv(planning_frame_t_opencv_camera) - return origin_t_planning_frame_next + with open(input_path, "r") as file: + calib_data = yaml.safe_load(file) + + parent_camera = np.array(calib_data["default"]["intrinsic"][0]["camera_matrix"]).reshape((3, 3)) + parent_dist_coeffs = np.array(calib_data["default"]["intrinsic"][0]["dist_coeffs"]).reshape((-1, 1)) + parent_image_dim = np.array(calib_data["default"]["intrinsic"][0]["image_dim"]) + child_camera = np.array(calib_data["default"]["intrinsic"][1]["camera_matrix"]).reshape((3, 3)) + child_dist_coeffs = np.array(calib_data["default"]["intrinsic"][1]["dist_coeffs"]).reshape((-1, 1)) + child_image_dim = np.array(calib_data["default"]["intrinsic"][1]["image_dim"]) + R = np.array(calib_data["default"]["extrinsic"][0][1]["R"]).reshape((3, 3)) + T = np.array(calib_data["default"]["extrinsic"][0][1]["T"]).reshape((-1, 3)) + + # saving out reproj err not supported, currently. + # save_calibration_parameters in calibration_utils.py in spot_wrapper + # does not save out reproj err. + # So we set it to 0 here. 
+ calib_results: CalibrationResults = { + "camera_matrix_origin": parent_camera, + "dist_coeffs_origin": parent_dist_coeffs, + "image_dim_origin": parent_image_dim, + "camera_matrix_reference": child_camera, + "dist_coeffs_reference": child_dist_coeffs, + "image_dim_reference": child_image_dim, + "R": R, + "T": T, + "R_handeye": np.eye(3), + "T_handeye": np.zeros((3, 1)), + "average_reprojection_error": 0, + } + return calib_results -def get_relative_viewpoints_from_board_pose_and_param( - R_board: np.ndarray, - tvec: np.ndarray, - distances_x: Optional[np.ndarray] = None, - distances_z: Optional[np.ndarray] = None, - x_axis_rots: Optional[np.ndarray] = None, - y_axis_rots: Optional[np.ndarray] = None, - z_axis_rots: Optional[np.ndarray] = None, - R_align_board_frame_with_camera: Optional[np.ndarray] = None, - degree_offset_rotations: bool = True, -) -> List[np.ndarray]: - """ - Given the position of charuco board, sample viewpoints facing the calibration board for - the robot's "principal" camera to visit. When the robot moves to these viewpoints, - it takes synchronized photos with all cameras to use for solving the calibration. - - ROS and OpenCV have the same camera format, where: - # +x should point to the right in the image - # +y should point down in the image - # +z should point into to plane of the image. - All transforms to cameras are in this format. +def load_dataset_from_path(pathdir: Path) -> Tuple[Dict[str, Dict[str, np.ndarray]], CameraInfo, CameraInfo]: + """ + load the data for images, hand_cam_info, ext_cam_info Args: - R_board (np.ndarray): the rotation of the calibration board in opencv notation relative - to the "principal" camera. 
- (See AutomaticCameraCalibration.localize_target_to_principal_camera) - tvec (np.ndarray): the translation of the calibration board in opencv notation relative to - the principal camera - distances (np.ndarray, optional): What distances - away from the calibration board's pose (along the Z axis) - to sample calibration viewpoints from. Defaults to np.arange(0.5, 0.8, 0.1). - x_axis_rots (np.ndarray, optional): What - x-axis rotations relative to the camera viewing the board orthogonally - to apply to sample viewpoints. Defaults to np.arange(-21, 21, 5). - y_axis_rots (np.ndarray, optional): What - y-axis rotations relative to the camera viewing the board orthogonally - to apply to sample viewpoints. Defaults to np.arange(-21, 21, 5). - z_axis_rots (np.ndarray, optional): What - z-axis rotations relative to the camera viewing the board orthogonally - to apply to sample viewpoints. Defaults to np.array([-21, 21, 5]). - R_align_board_frame_with_camera (np.ndarray, optional): By default, - the charuco pose rotation Z-axis points out of the pattern, - and into the camera. However, to sample viewpoints, the camera must face the board, - so viewpoints can't be sampled directly from the board pose as they would face - away from the board. - This rotates the board frame such that the Z axis points away - from the pattern, so that camera frames facing the board may be sampled - after applying a distance and offset rotation. - Defaults to np.array([[1, 0, 0], [0, -1, 0], [0, 0, -1]]). - degree_offset_rotations (bool, optional): whether to use degree for the - sampling rotation parameters. Defaults to True. + pathdir (Path): The absolute pathname to directory containing the dataset. Returns: - List[np.ndarray]: a list of 4x4 homogenous transforms to visit in the "principal" cameras - frame + Tuple[np.ndarray, CameraInfo, CameraInfo]: The loaded images, hand camera info, and external camera info. 
""" - if distances_x is None: - distances_x = np.arange(0.3, 0.7, 0.1) - if distances_z is None: - distances_z = np.arange(-0.2, 0.3, 0.1) - if x_axis_rots is None: - x_axis_rots = np.arange(-20, 21, 5) - if y_axis_rots is None: - y_axis_rots = np.arange(-20, 21, 5) - if z_axis_rots is None: - z_axis_rots = np.arange(-20, 21, 5) - if R_align_board_frame_with_camera is None: - R_align_board_frame_with_camera = np.array([[1, 0, 0], [0, -1, 0], [0, 0, -1]]) - - if degree_offset_rotations: - x_axis_rots = [radians(deg) for deg in x_axis_rots] - y_axis_rots = [radians(deg) for deg in y_axis_rots] - z_axis_rots = [radians(deg) for deg in z_axis_rots] - - translations = [(tvec + R_board[:, 2] * dist + R_board[:, 0] * d2) for dist in distances_z for d2 in distances_x] - R_base = R_board @ R_align_board_frame_with_camera - - def euler_to_rotation_matrix(roll: float, pitch: float, yaw: float) -> np.ndarray: - """ - Convert Euler angles to a rotation matrix using OpenCV, for the sake - of CLI convenience, conciseness, while not depending on an external transform lib. 
- """ - R_x = cv2.Rodrigues(np.array([roll, 0, 0]))[0] - R_y = cv2.Rodrigues(np.array([0, pitch, 0]))[0] - R_z = cv2.Rodrigues(np.array([0, 0, yaw]))[0] - return R_z @ R_y @ R_x - - poses = [] - for t in translations: - for x_axis_rot in x_axis_rots: - for y_axis_rot in y_axis_rots: - for z_axis_rot in z_axis_rots: - # Get the rotation matrix from Euler angles - R_rot = euler_to_rotation_matrix(x_axis_rot, y_axis_rot, z_axis_rot) - - # Combine with the base rotation - R_base_modified = R_base @ R_rot - - transform = np.eye(4) - transform[:-1, -1] = t.reshape((3,)) - transform[:-1, :-1] = R_base_modified - poses.append(transform) + images = load_images_from_path(pathdir) + hciyaml = os.path.join(pathdir, Path("parent"), Path("camera_info.yaml")) + eciyaml = os.path.join(pathdir, Path("child"), Path("camera_info.yaml")) + hand_cam_info = load_CameraInfo_from_file(Path(hciyaml)) + ext_cam_info = load_CameraInfo_from_file(Path(eciyaml)) - logger.info(f"Calculated {len(poses)} relative viewpoints to visit relative to the target.") - return poses + return images, hand_cam_info, ext_cam_info -def create_calibration_save_folders(path: str, num_folders: int) -> None: +def create_calibration_save_folders(path: Path) -> None: """ Create a folder hierarchy to record a calibration Args: - path (str): The parent path - num_folders (int): How many cameras, a.k.a how many folder to create + path (Path): The parent path Raises: ValueError: Not possible to create the folders, or no path specified """ if path is None: - raise ValueError("No path to save to :(") + raise ValueError("No path to save to. 
you can do better than this.") else: - for idx in range(num_folders): - cam_path = os.path.join(path, str(idx)) + for folder in directories: + cam_path = os.path.join(path, folder) logger.info(f"Creating image folder at {cam_path}") os.makedirs(cam_path, exist_ok=True) os.makedirs(os.path.join(path, "poses"), exist_ok=True) - logger.info(f"Done creating {num_folders} folders.") + logger.info("Done creating folders.") -def load_dataset_from_path(path: str) -> Tuple[np.ndarray, Optional[np.ndarray]]: +def save_dataset_to_dir( + path: Path, images_dict: dict[str, list[np.ndarray]], camera_info_dict: dict[str, CameraInfo] +) -> None: """ - Load image dataset from path in a way that's compatible with multistereo_calibration_charuco. + Save image dataset to path in a way that's compatible with multistereo_calibration_charuco. - Also, load the poses if they are available. + Also, save the camera infos. See Using the CLI Tool To Calibrate On an Existing Dataset section in the README to see the expected folder/data structure for this method to work Args: path (str): The parent path - - Raises: - ValueError: Not possible to load the images - - Returns: - np.ndarray: The image dataset + images_dict (dict[int, list[np.ndarray]]): The image dataset by camera index + camera_info_dict (dict[int, CameraInfo]): The camera info by camera index """ - def alpha_numeric(x): - return re.search("(\\d+)(?=\\D*$)", x).group() if re.search("(\\d+)(?=\\D*$)", x) else x - - # List all directories within the given path and sort them - dirs = [d for d in os.listdir(path) if os.path.isdir(os.path.join(path, d))] - if len(dirs) == 0: - raise ValueError("No sub-dirs found in datapath from which to load images.") - dirs = sorted(dirs, key=alpha_numeric) # Assuming dir names are integers like "0", "1", etc. 
+ create_calibration_save_folders(path) - # Initialize an empty list to store images - images = [] - poses = None - - for dir_name in dirs: - path_match = os.path.join(path, dir_name, "*") - files = sorted( - glob(path_match), - key=alpha_numeric, - ) - if dir_name != "poses": - # Read images and store them - images.append([cv2.imread(fn) for fn in files]) - else: - poses = np.array([np.load(fn) for fn in files]) - - # Convert the list of lists into a NumPy array - # The array shape will be (number_of_images, number_of_directories) - images = np.array(images, dtype=object) - # Transpose the array so that you can access it as images[:, axis] - images = np.transpose(images, (1, 0)) - - return images, poses + for cam_idx, images in images_dict.items(): + cam_dir = path / Path(str(cam_idx)) + for img_idx, img in enumerate(images): + img_path = cam_dir / Path(f"{img_idx}.png") + cv2.imwrite(str(img_path), img) + # np.save(cam_dir / Path("camera_info.npy"), camera_info_dict[cam_idx]) + save_CameraInfo_2_file(camera_info_dict[cam_idx], str(cam_idx), cam_dir / Path("camera_info.yaml")) +# moved from calibration_utils.py in spot_wrapper +# i wanted to change a few names of fields in accord with this: +# https://www.notion.so/theaiinstitute/SE3-Math-d81fc740b66c4a09bece9f47b62506f1 def save_calibration_parameters( data: Dict, output_path: str, num_images: int, tag: str, + parent_frame: str, + child_frame: str, parser_args: Optional[argparse.Namespace] = None, unsafe: bool = False, ) -> Dict: @@ -1095,11 +312,12 @@ def flatten_matrix(matrix: np.ndarray) -> List: def process_data_with_nested_dictionaries( data: Dict, ) -> Tuple[Dict, Dict]: - cameras = {} - relations = {} + cameras: Dict[int, Dict[str, List]] = {} + relations: Dict[int, Dict[int, Dict[str, List]]] = {} - for key, value in data.items(): - origin_cam, reference_cam = int(key.split("_")[0]), int(key.split("_")[1]) + for value in iter(data.values()): + origin_cam = 0 + reference_cam = 1 # Process origin camera 
data if origin_cam not in cameras: @@ -1126,11 +344,11 @@ def process_data_with_nested_dictionaries( } # Now add R_handeye and T_handeye if they exist in the data - if "R_handeye" in value and "T_handeye" in value: + """ if "R_handeye" in value and "T_handeye" in value: relations[origin_cam]["planning_frame"] = { "R": flatten_matrix(value["R_handeye"]), "T": flatten_matrix(value["T_handeye"]), - } + } """ return cameras, relations @@ -1180,28 +398,34 @@ def process_data_with_nested_dictionaries( # Process the new calibration data cameras, relations = process_data_with_nested_dictionaries(data) + xform = np.eye(4) + xform[:3, :3] = next(iter(data.values()))["R"] + xform[:3, 3] = next(iter(data.values()))["T"].flatten() + # Prepare the output data under the specified tag - tagged_data = { - "intrinsic": cameras, - "extrinsic": relations, - "run_param": {}, - } - tagged_data["run_param"]["num_images"] = num_images - tagged_data["run_param"]["timestamp"] = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + run_params: Dict[str, Any] = {} + run_params["num_images"] = num_images + run_params["timestamp"] = datetime.now().strftime("%Y-%m-%d %H:%M:%S") # Include parser parameters, excluding 'password' and 'username' if parser_args is not None: for arg in vars(parser_args): if arg not in ["password", "username", "result_path"]: - tagged_data["run_param"][arg] = getattr(parser_args, arg) + run_params[arg] = getattr(parser_args, arg) else: logger.warning("Saving calibration, but not the parameters used to obtain it.") - # Convert any tuples in run_param (like stereo_pairs) to lists - if "stereo_pairs" in tagged_data["run_param"]: - tagged_data["run_param"]["stereo_pairs"] = [list(pair) for pair in tagged_data["run_param"]["stereo_pairs"]] + # Convert any tuples in run_params (like stereo_pairs) to lists + if "stereo_pairs" in run_params: + run_params["stereo_pairs"] = [list(pair) for pair in run_params["stereo_pairs"]] # Save the updated data under the specified tag + tagged_data 
= { + "intrinsic": cameras, + "extrinsic": relations, + "run_params": run_params, + f"{child_frame}_t_{parent_frame}": flatten_matrix(xform), + } existing_data[tag] = tagged_data # Ensure the directory exists @@ -1219,80 +443,176 @@ def process_data_with_nested_dictionaries( return existing_data -def charuco_pose_sanity_check( - img: np.ndarray, charuco_board: cv2.aruco_CharucoBoard, aruco_dict: cv2.aruco_Dictionary -) -> np.ndarray: +def get_multiple_perspective_camera_calibration_dataset( + auto_cam_cal_robot: AutomaticCameraCalibrationRobot, + max_num_images: int = 10000, + distances_x: Optional[np.ndarray] = None, + distances_z: Optional[np.ndarray] = None, + x_axis_rots: Optional[np.ndarray] = None, + y_axis_rots: Optional[np.ndarray] = None, + z_axis_rots: Optional[np.ndarray] = None, + use_degrees: bool = True, + settle_time: float = 0.1, + data_path: str = os.path.expanduser("~"), + save_data: Optional[bool] = True, +) -> Tuple[np.ndarray, np.ndarray]: + """ + Move the robot to multiple viewpoints and capture time-synchronized images from all cameras + at each viewpoint for use in calibration. + + Args: + auto_cam_cal_robot (AutomaticCameraCalibrationRobot): The robot to automatically calibrate. + max_num_images (int, optional): Maximum number of captures before cutting off the + viewpoint sweep early. Defaults to 10000. + distances_x (np.ndarray, optional): X-axis distances from the board to sample. + Defaults to None (uses charuco_board_detection defaults). + distances_z (np.ndarray, optional): Z-axis distances from the board to sample. + Defaults to None (uses charuco_board_detection defaults). + x_axis_rots (np.ndarray, optional): X-axis rotations to sample. Defaults to None. + y_axis_rots (np.ndarray, optional): Y-axis rotations to sample. Defaults to None. + z_axis_rots (np.ndarray, optional): Z-axis rotations to sample. Defaults to None. + use_degrees (bool, optional): Whether rotation parameters are in degrees. Defaults to True. 
+ settle_time (float, optional): Seconds to wait after moving before capturing. + Defaults to 0.1. + data_path (str, optional): Directory to save the dataset if save_data is True. + Defaults to the user home directory. + save_data (Optional[bool], optional): Whether to save captured images and poses. + Defaults to True. + + Returns: + Tuple[np.ndarray, np.ndarray]: Array of captured image sets and array of robot poses. """ - This method: - 1. Defines the camera matrix as identity. - 2. Uses zero distortion coefficients. - 3. Estimates the Charuco board pose using `est_camera_t_charuco_board_center`. - 4. Transforms the pose into the camera frame. - 5. Visualizes the pose with 3D axes on the image using cv2 window. + primed_pose = auto_cam_cal_robot.move_cameras_to_see_calibration_target() + logger.info("Primed arm...") + sleep(settle_time) + images = auto_cam_cal_robot.capture_images() + + R_vision_to_target, tvec_vision_to_target = auto_cam_cal_robot.localize_target_to_principal_camera(images) + viewpoints = get_relative_viewpoints_from_board_pose_and_param( + R_vision_to_target, + tvec_vision_to_target, + distances_x=distances_x, + distances_z=distances_z, + x_axis_rots=x_axis_rots, + y_axis_rots=y_axis_rots, + z_axis_rots=z_axis_rots, + degree_offset_rotations=use_degrees, + ) + calibration_images = [] + logger.info("Beginning Calibration") + idx = 0 + poses = [] + while idx < max_num_images and idx < len(viewpoints): + logger.info(f"Visiting viewpoint {idx + 1} of {min(len(viewpoints), max_num_images)}") + viewpoint = viewpoints[idx] + _initial_pose, new_pose = auto_cam_cal_robot.offset_cameras_from_current_view( + transform_offset=viewpoint, + origin_t_planning_frame=primed_pose, + duration_sec=0.1, + ) + poses.append(new_pose) + logger.info("At viewpoint, waiting to settle") + sleep(settle_time) + images = auto_cam_cal_robot.capture_images() + logger.info("Snapped pics ;)") + calibration_images.append(images) + idx = len(calibration_images) + if save_data: + 
if idx == 1: + # Create numerical camera-index folders + for jdx in range(len(images)): + os.makedirs(os.path.join(data_path, str(jdx)), exist_ok=True) + os.makedirs(os.path.join(data_path, "poses"), exist_ok=True) + logger.info(f"Saving image batch {idx}") + for jdx, image in enumerate(images): + cv2.imwrite( + os.path.join(data_path, str(jdx), f"{idx}.png"), + image, + ) + np.save(os.path.join(data_path, "poses", f"{idx}.npy"), new_pose) + return (np.array(calibration_images, dtype=object), poses) + + +def camera_info_to_intrinsics(msg: CameraInfo) -> CameraIntrinsics: + """Construct a `CameraIntrinsics` instance from a `CameraInfo` message. Args: - img (np.ndarray): The input image containing the Charuco board. - charuco_board (cv2.aruco_CharucoBoard): The Charuco board configuration. - aruco_dict (cv2.aruco_Dictionary): The Aruco dictionary used to detect markers. + msg: The message to convert. The fields that are copied are the intrinsics (`k`), the distortion (`d`), width, + and height. All other fields are ignored. Returns: - img_with_axes (np.ndarray): The image with the pose axes drawn. + The converted camera intrinsics object. + + Raises: + ValueError if `k` contains non-zero skew values. 
""" + camera_matrix = np.array(msg.k).reshape((3, 3)) + distortion_coeffs = np.array(msg.d) + return CameraIntrinsics( + height=msg.height, width=msg.width, camera_matrix=camera_matrix, distortion_coeffs=distortion_coeffs + ) - def is_z_axis_out_of_board(tvec): - """Determine if the Z-axis points out of the Charuco board (towards the camera).""" - return tvec[2] > 0 # If Z is positive, it points out of the board - - def visualize_pose_with_axis(img, rmat, tvec, camera_matrix, dist_coeffs, axis_length=0.115): - """Draws the 3D pose axes on the image and displays if the Z-axis is out or into the board.""" - axis = np.float32([[axis_length, 0, 0], [0, axis_length, 0], [0, 0, axis_length], [0, 0, 0]]).reshape(-1, 3) - - rmat_camera, tvec_camera = rmat, tvec - imgpts, _ = cv2.projectPoints(axis, rmat_camera, tvec_camera, camera_matrix, dist_coeffs) - - z_out_of_board = is_z_axis_out_of_board(tvec) - img_with_axes = img.copy() - origin = tuple(imgpts[3].ravel().astype(int)) - img_with_axes = cv2.line(img_with_axes, origin, tuple(imgpts[0].ravel().astype(int)), (0, 0, 255), 3) # X - img_with_axes = cv2.line(img_with_axes, origin, tuple(imgpts[1].ravel().astype(int)), (0, 255, 0), 3) # Y - img_with_axes = cv2.line(img_with_axes, origin, tuple(imgpts[2].ravel().astype(int)), (255, 0, 0), 3) # Z - - if not z_out_of_board: - logger.warning("This tool assumes that Z axis is out of board, but it was detected as into board.") - window_name = f'Charuco Board Pose ({"Z-axis out of board" if z_out_of_board else "Z-axis into board"})' - cv2.imshow(window_name, img_with_axes) - elapsed_time = 0 - wait_interval = 100 # Wait 100 ms in each loop iteration - max_wait_time = 20 # 20 sec - while elapsed_time < max_wait_time * 1000: # Convert max_wait_time to milliseconds - if cv2.getWindowProperty(window_name, cv2.WND_PROP_VISIBLE) < 1: - break # Exit loop if window is closed - key = cv2.waitKey(wait_interval) # Check every 100ms - if key != -1: # If any key is pressed - break - 
elapsed_time += wait_interval - - cv2.destroyAllWindows() - - return img_with_axes - - # Camera matrix as identity and zero distortion coefficients - camera_matrix = np.eye(3, dtype=np.float32) - dist_coeffs = np.zeros(5, dtype=np.float32) - - # Estimate pose using the existing est_camera_t_charuco_board_center - rmat, tvec = est_camera_t_charuco_board_center(img, charuco_board, aruco_dict, camera_matrix, dist_coeffs) - - # Visualize the pose with 3D axes - return visualize_pose_with_axis(img, rmat, tvec, camera_matrix, dist_coeffs) - - -def create_ideal_charuco_image(charuco_board: cv2.aruco_CharucoBoard, dim=(500, 700), colorful=False): - if OPENCV_VERSION < OPENCV_CHARUCO_LIBRARY_CHANGE_VERSION: - img = charuco_board.draw(outSize=dim) - else: - img = charuco_board.generateImage(outSize=dim) - if colorful: - return cv2.cvtColor(img, cv2.COLOR_GRAY2BGR) - else: - return img + +def ros_image_to_image(ros_image: RosImage, cv_bridge: CvBridge | None = None, ros_encoding: str = "rgb8") -> Image: + """ + Converts from ros image to our generic image datatype + """ + if cv_bridge is None: + cv_bridge = CvBridge() + + img = cv_bridge.imgmsg_to_cv2(ros_image, desired_encoding=ros_encoding) + + return Image.from_numpy(img) + + +def calibration_helper( + images: Union[List[np.ndarray], np.ndarray], + args: argparse.Namespace, + charuco: cv2.aruco_CharucoBoard, + aruco_dict: cv2.aruco_Dictionary, + poses: np.ndarray, + result_path: str = None, + parent_frame: str = "body", + child_frame: str = "camera", +) -> dict: + logger.warning( + f"Calibrating from {len(images)} images.. 
for every " + f"{args.photo_utilization_ratio} recorded photos 1 is used to calibrate" + ) + if not args.allow_default_internal_corner_ordering: + logger.warning("Turning off corner swap (needed for localization) for calibration solution...") + logger.warning("Corner swap needed for initial localization, but breaks calibration.") + logger.warning("See https://github.com/opencv/opencv/issues/26126") + detect_charuco_corners( + create_ideal_charuco_image(charuco_board=charuco), + charuco_board=charuco, + aruco_dict=aruco_dict, + enforce_ascending_ids_from_bottom_left_corner=False, + ) + calibration = multistereo_calibration_charuco( + images[:: args.photo_utilization_ratio], + desired_stereo_pairs=args.stereo_pairs, + charuco_board=charuco, + aruco_dict=aruco_dict, + poses=poses, + ) + logger.info(f"Finished script, obtained {calibration}") + logger.info("Saving calibration param...") + + if result_path is None: + result_path = input("Please provide a path to save the calibration results (or type 'No' to skip): ") + + args.result_path = result_path + + # Save the calibration parameters if a valid result path is provided + calibration_dict = save_calibration_parameters( + data=calibration, + output_path=args.result_path, + num_images=len(images[:: args.photo_utilization_ratio]), + tag=args.tag, + parent_frame=parent_frame, + child_frame=child_frame, + parser_args=args, + unsafe=args.unsafe_tag_save, + ) + return calibration_dict diff --git a/spot_wrapper/calibration/charuco_board_detection.py b/spot_wrapper/calibration/charuco_board_detection.py new file mode 100644 index 00000000..65b206c5 --- /dev/null +++ b/spot_wrapper/calibration/charuco_board_detection.py @@ -0,0 +1,949 @@ +# Copyright (c) 2025-2026 Robotics and AI Institute LLC dba RAI Institute. All rights reserved. + +# Copyright (c) 2024 Boston Dynamics AI Institute LLC. All rights reserved.
+ +import logging +from copy import deepcopy +from math import radians +from typing import Dict, List, Optional, Tuple, Union + +import cv2 +import numpy as np + +logging.basicConfig( + level=logging.INFO, +) + +logger = logging.getLogger(__name__) + +SPOT_DEFAULT_ARUCO_DICT = cv2.aruco.getPredefinedDictionary(cv2.aruco.DICT_4X4_50) +OPENCV_VERSION = tuple(map(int, cv2.__version__.split("."))) +OPENCV_CHARUCO_LIBRARY_CHANGE_VERSION = (4, 7, 0) + + +def create_charuco_board( + num_checkers_width: int, + num_checkers_height: int, + checker_dim: float, + marker_dim: float, + aruco_dict: cv2.aruco_Dictionary, + legacy: bool = True, +) -> cv2.aruco_CharucoBoard: + """ + Create a Charuco board using the provided parameters and Aruco dictionary. + Issues a deprecation warning if using the older 'CharucoBoard_create' method. + + Args: + num_checkers_width (int): Number of checkers along the width of the board. + num_checkers_height (int): Number of checkers along the height of the board. + checker_dim (float): Size of the checker squares. + marker_dim (float): Size of the Aruco marker squares. + aruco_dict (cv2.aruco_Dictionary): The Aruco dictionary to use for marker generation. + + Returns: + charuco (cv2.aruco_CharucoBoard): The generated Charuco board. + """ + + opencv_version = tuple(map(int, cv2.__version__.split("."))) + + if opencv_version < (4, 7, 0): + logger.warning( + ( + "Creating Charuco Board..." + "You're using an older version of OpenCV requires the additional OpenCV Modules. " + "This will not work without the additional modules (opencv-contrib-python). " + "Consider upgrading to OpenCV >= 4.7.0 to enable " + "the use of this tool with base opencv (opencv-python) " + "without relying on additional modules." 
+ ), + ) + + # Create Charuco board using the older method + charuco = cv2.aruco.CharucoBoard_create( + num_checkers_width, + num_checkers_height, + checker_dim, + marker_dim, + aruco_dict, + ) + + else: + # Create Charuco board using the newer method + charuco = cv2.aruco_CharucoBoard( + (num_checkers_width, num_checkers_height), + checker_dim, + marker_dim, + aruco_dict, + ) + charuco.setLegacyPattern(legacy) + + return charuco + + +SPOT_DEFAULT_CHARUCO = create_charuco_board( + num_checkers_width=9, + num_checkers_height=4, + checker_dim=0.115, + marker_dim=0.09, + aruco_dict=SPOT_DEFAULT_ARUCO_DICT, + legacy=True, +) + +def multistereo_calibration_charuco( + images: np.ndarray, + desired_stereo_pairs: Optional[List[Tuple[int, int]]] = None, + charuco_board: cv2.aruco_CharucoBoard = SPOT_DEFAULT_CHARUCO, + aruco_dict: cv2.aruco_Dictionary = SPOT_DEFAULT_ARUCO_DICT, + camera_matrices: Optional[Dict] = {}, + dist_coeffs: Optional[Dict] = {}, + poses: Union[np.ndarray, None] = None, +) -> Dict: + """ + Calibrates the intrinsic and extrinsic parameters for multiple stereo camera pairs + using Charuco board markers. This function performs stereo calibration on a series of + time-synchronized images + captured by multiple cameras. It uses a Charuco board for calibration and returns + the calibrated intrinsic and extrinsic parameters for the specified stereo camera pairs. + + Args: + images (np.ndarray): A 2D array of time-synchronized camera captures. The array should + be of shape (n, m), where 'n' represents the number of time-synchronized captures + and 'm' represents the number of cameras. Each internal list corresponds to a set + of images captured at the same time across multiple cameras. + + desired_stereo_pairs (Optional[List[Tuple[int, int]]], optional): A list of tuples + specifying the pairs of camera indices to be calibrated. The indices refer to the + cameras based on their positions in the `images` array. 
For example, index `0` + corresponds to the first camera, index `1` to the second camera, and so forth, + up to `n-1` (where `n` is the number of cameras). + If None, all cameras are registered to the first camera (index `0`). Defaults to None. + + The relationship between `desired_stereo_pairs` and `images` is crucial. Each tuple in + `desired_stereo_pairs` should be in the form + `(origin_camera_idx, reference_camera_idx)`, + where `origin_camera_idx` and `reference_camera_idx` correspond to the + indices of the cameras in the `images` array. For each stereo pair, + the function attempts to calibrate the `origin_camera_idx` + relative to the `reference_camera_idx`. If no pairs are provided, + the function defaults to registering + all cameras to the first camera (index `0`). + + charuco_board (cv2.aruco_CharucoBoard, optional): The Charuco board configuration used + for calibration. Defaults to SPOT_DEFAULT_CHARUCO. + + aruco_dict (cv2.aruco_Dictionary, optional): The Aruco dictionary used + to detect the markers. Defaults to SPOT_DEFAULT_ARUCO_DICT. + + camera_matrices (Optional[Dict], optional): A dictionary mapping camera indices + to their existing camera matrices. If a matrix is not provided for a camera, + it will be computed during calibration + if that camera is part of a stereo pair. Defaults to an empty dictionary. + + dist_coeffs (Optional[Dict], optional): A dictionary mapping camera indices + to their distortion coefficients. + If coefficients are not provided for a camera, they will be computed during calibration. + Defaults to an empty dictionary. + poses (Union[np.ndarray, None]): Either a list of 4x4 homogenous transforms from which + pictures where taken, or None if unknown. Needs to be supplied for robot to camera cal. + (planning frame to base frame), or None + + Raises: + ValueError: Raised if fewer than two cameras are provided, as stereo calibration + requires at least two cameras. 
+ ValueError: Raised if a stereo pair includes the same camera twice. + ValueError: Raised if a camera in a stereo pair cannot be calibrated. + + Returns: + Dict: A dictionary containing the calibrated intrinsic and extrinsic parameters + for the specified stereo pairs. + The keys are formatted as "origin_camera_idx_reference_camera_idx", + and the values contain the calibration data. + + """ + calibration = {} + num_cams = images.shape[1] + if num_cams == 0 or num_cams == 1: + raise ValueError("Stereo Madness requires more than one camera. Use the monocular method instead.") + else: # More than one camera - Multi-Stereo Madness ;p + for idx in range(num_cams): + if idx not in camera_matrices: + camera_matrices[idx] = None + if idx not in dist_coeffs: + dist_coeffs[idx] = None + + if desired_stereo_pairs is None: + desired_stereo_pairs = [] + for idx in range(1, num_cams): + """ + Desired stereo pairs is a mapping, where the 0th index is the origin (base) + frame, where the reference frame is defined within the origin (base) frame's + coordinate system. The reference frame is the 1st index. + + By Default, if desired_stereo_pairs is None, + then each camera's intrinsic is defined in the 0th camera's frame, + so this is set as the origin for all stereo calibrations + """ + desired_stereo_pairs.append((0, idx)) # just register all in 0th cam frame + + for idx, pair in enumerate(desired_stereo_pairs): + for index in pair: + if index < 0 or index > num_cams: + raise ValueError( + f"Requested to stereo register non-existent camera index {index} out of {num_cams}" + ) + origin_camera_idx = pair[0] + reference_camera_idx = pair[1] + + if pair[0] == pair[1]: + logging.warning( + "You requested to register a camera to itself. Ignoring,and continuing to the next stereo pair." 
+ ) + continue + logging.info( + f"Attempting to register {origin_camera_idx}" + f"to {reference_camera_idx}, pair {idx} of" + f" {len(desired_stereo_pairs)}" + ) + try: + key = str(origin_camera_idx) + "_" + str(reference_camera_idx) + stereo_dict = stereo_calibration_charuco( + origin_images=images[:, origin_camera_idx], + reference_images=images[:, reference_camera_idx], + charuco_board=charuco_board, + aruco_dict=aruco_dict, + camera_matrix_origin=camera_matrices[origin_camera_idx], + dist_coeffs_origin=dist_coeffs[origin_camera_idx], + camera_matrix_reference=camera_matrices[reference_camera_idx], + dist_coeffs_reference=dist_coeffs[reference_camera_idx], + poses=poses, + ) + camera_matrices[origin_camera_idx] = stereo_dict["camera_matrix_origin"] + dist_coeffs[origin_camera_idx] = stereo_dict["dist_coeffs_origin"] + camera_matrices[reference_camera_idx] = stereo_dict["camera_matrix_reference"] + dist_coeffs[reference_camera_idx] = stereo_dict["dist_coeffs_reference"] + + calibration[key] = stereo_dict + except ValueError as ve: # maybe could keep going here instead for partial calibration? + raise ValueError(f"Failed to calibrate pair {pair} : {ve}") + + return calibration + + +def get_correlation_map_to_enforce_ascending_ids_from_bottom_left_corner( + all_charuco_corners: List, + all_charuco_ids: List, + charuco_board: cv2.aruco_CharucoBoard, +) -> List: + """ + This is needed only for OpenCV versions > 4.7.0. Basically, + this determines the needed correlation + to ensure that internal corners are numbered left to right bottom + up as opposed to left to right top down. + + For more info see https://github.com/opencv/opencv/issues/26126 + + Args: + all_charuco_corners (List): All charuco corners of the board. + all_charuco_ids (List): All charuco ids of the board, as returned by the Charuco detector + for an ideal board with a full view. 
Expected to be a list ascending + from 0 but could be otherwise + charuco_board (cv2.aruco_CharucoBoard): the charuco board to utilize + to construct the correlation map + + Returns: + List: the correlation map where the indicies represent to the original + ordering of corners, and the values at each index represent the new + ordering index of a corner. Can be used to ensure bottom up + left to right ordering of internal corners. + """ + + num_checker_width, num_checker_height = charuco_board.getChessboardSize() + num_interior_corners = (num_checker_width - 1) * (num_checker_height - 1) + correlation_map = np.array([i for i in range(num_interior_corners)]) + # Adjust indexing to account for nested arrays + first_corner_height = all_charuco_corners[all_charuco_ids[0][0]][0][1] + last_corner_height = all_charuco_corners[all_charuco_ids[-1][0]][0][1] + row_num_a = 0 + row_num_b = num_checker_height - 2 + + if first_corner_height < last_corner_height: + logger.warning( + "Detected Charuco Configuration " + "where internal corners (detected checker corners) are numbered top down, " + "(left to right) as opposed to bottom up (left to right). " + "Enforcing bottom up numbering instead. 
" + "This ensures that the Z axis points out of the board " + "As opposed to -180 degrees about the X axis " + "relative to the Z out of the board" + ) + else: + logger.warning( + "You have selected to enforce ascending ids from the bottom left corner " + "But it seems as if your ids are already in that form " + "Returning identity correlation map" + ) + return [int(mapping) for mapping in correlation_map] + + while row_num_a < row_num_b: + row_num_a_correlation_slice = slice( + row_num_a * (num_checker_width - 1), (row_num_a * (num_checker_width - 1) + (num_checker_width - 1)), 1 + ) + + row_num_b_correlation_slice = slice( + row_num_b * (num_checker_width - 1), ((row_num_b * (num_checker_width - 1)) + (num_checker_width - 1)), 1 + ) + # Record A + precopy_row_a = deepcopy(correlation_map[row_num_a_correlation_slice]) + # Copy B onto A + correlation_map[row_num_a_correlation_slice] = correlation_map[row_num_b_correlation_slice] + # copy old A into B + correlation_map[row_num_b_correlation_slice] = precopy_row_a + row_num_a += 1 + row_num_b -= 1 + return [int(mapping) for mapping in correlation_map] + + +def detect_charuco_corners( + img: np.ndarray, + charuco_board: cv2.aruco_CharucoBoard, + aruco_dict: cv2.aruco_Dictionary, + enforce_ascending_ids_from_bottom_left_corner: Union[bool, None] = None, +) -> Tuple[Optional[np.ndarray], Optional[np.ndarray]]: + """ + Detect the Charuco Corners and their IDs in an image, with support for OpenCV versions before and after 4.7.0. + + Args: + img (np.ndarray): The image to find Charuco corners in. + charuco_board (cv2.aruco_CharucoBoard, optional): The Charuco board to look for. + Defaults to SPOT_DEFAULT_CHARUCO. + aruco_dict (cv2.aruco_Dictionary, optional): The Aruco dictionary to use. + Defaults to SPOT_DEFAULT_ARUCO_DICT. 
+ enforce_ascending_ids_from_bottom_left_corner (Union[bool, None]): whether to + ensure that internal charuco corners are numbered left to right bottom up + (only ensures bottom up, assumes already left to right). + + Returns: + Tuple[Optional[np.ndarray], Optional[np.ndarray]]: The detected corners and their IDs, + or (None, None) if not found. + """ + # Convert the image to grayscale if it's not already + if len(img.shape) == 2: + gray = img + else: + gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) + + if OPENCV_VERSION < OPENCV_CHARUCO_LIBRARY_CHANGE_VERSION: + # Older OpenCV version + corners, ids, _ = cv2.aruco.detectMarkers(gray, aruco_dict) + if ids is not None: + _, charuco_corners, charuco_ids = cv2.aruco.interpolateCornersCharuco( + corners, ids, gray, charuco_board, minMarkers=0 + ) + return charuco_corners, charuco_ids + else: + return None, None + else: + # Newer OpenCV version with charuco_detector + detector_params = cv2.aruco.CharucoParameters() + detector_params.minMarkers = 0 + detector_params.tryRefineMarkers = True + charuco_detector = cv2.aruco.CharucoDetector(charuco_board, detector_params) + charuco_detector.setBoard(charuco_board) + charuco_corners, charuco_ids, _, _ = charuco_detector.detectBoard(gray) + + if charuco_ids is None: + return None, None + + enforce_ids = enforce_ascending_ids_from_bottom_left_corner + if enforce_ids is not None and hasattr(detect_charuco_corners, "enforce_ids"): + logger.warning( + "Previously, for detecting internal charuco corners, the enforce " + "ascending from bottom left corner id policy was set to: " + f"{detect_charuco_corners.enforce_ids}" + f"it will now be set to {enforce_ids}" + ) + detect_charuco_corners.enforce_ids = enforce_ids + elif enforce_ids is not None and not hasattr(detect_charuco_corners, "enforce_ids"): + logger.warning( + "For detecting charuco corners, the enforce " + "ascending from bottom left corner id policy is set to: " + f"{enforce_ids}. 
For this call, and future calls until set otherwise." + ) + detect_charuco_corners.enforce_ids = enforce_ids + + # Create the identity correlation map... + num_checker_width, num_checker_height = charuco_board.getChessboardSize() + num_interior_corners = (num_checker_width - 1) * (num_checker_height - 1) + correlation_map = np.array([i for i in range(num_interior_corners)]) + + if ( + hasattr(detect_charuco_corners, "enforce_ids") + and detect_charuco_corners.enforce_ids + and not hasattr(detect_charuco_corners, "corr_map") + ): # correlation map not computed + ideal_charuco = charuco_board.generateImage(outSize=(1000, 1000)) + all_charuco_corners, all_charuco_ids, _, _ = charuco_detector.detectBoard(ideal_charuco) + + detect_charuco_corners.corr_map = get_correlation_map_to_enforce_ascending_ids_from_bottom_left_corner( + all_charuco_corners, all_charuco_ids, charuco_board + ) + + if ( + hasattr(detect_charuco_corners, "enforce_ids") + and detect_charuco_corners.enforce_ids + and hasattr(detect_charuco_corners, "corr_map") + ): # correlation map computed + logger.warning("Using cached correlation map to order IDs") + correlation_map = detect_charuco_corners.corr_map # grab the map + + reworked_charuco_ids = [] + for charuco_id in charuco_ids: + reworked_charuco_ids.append([correlation_map[charuco_id[0]]]) + + return charuco_corners, reworked_charuco_ids + + +def get_charuco_board_object_points( + charuco_board: cv2.aruco_CharucoBoard, + corners_ids: Union[List, np.ndarray], +) -> np.ndarray: + """ + From a charuco board, and corner ids, generate the object points for use + in a calibration sequence to solve the perspective and point problem. 
+ + Args: + charuco_board (cv2.aruco_CharucoBoard): the charuco board + to generate object points for + corners_ids (Union[List, np.ndarray]): which corner ids to generate points for + + Returns: + np.ndarray: the object points + """ + if OPENCV_VERSION < OPENCV_CHARUCO_LIBRARY_CHANGE_VERSION: + corners = charuco_board.chessboardCorners + else: + corners = charuco_board.getChessboardCorners() + object_points = [] + for idx in corners_ids: + object_points.append(corners[idx]) + return np.array(object_points, dtype=np.float32) + + +def calibrate_single_camera_charuco( + images: List[np.ndarray], + charuco_board: cv2.aruco_CharucoBoard = SPOT_DEFAULT_CHARUCO, + aruco_dict: cv2.aruco_Dictionary = SPOT_DEFAULT_ARUCO_DICT, + debug_str: str = "", +) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """ + Calibrate a monocular camera from a charuco board. + + Args: + images (List[np.ndarray]): A list of images of the board + charuco_board (cv2.aruco_CharucoBoard, optional): which + board to look for in the images. Defaults to SPOT_DEFAULT_CHARUCO. + aruco_dict (cv2.aruco_Dictionary, optional): which + aruco dict to look for in the images. Defaults to SPOT_DEFAULT_ARUCO_DICT. + debug_str (str, optional): what to add to the logging + to differentiate this monocular calibration from others, for the sake + of stereo calibrations. Defaults to "". 
+ + Raises: + ValueError: Not enough data to calibrate + + Returns: + Tuple[np.ndarray, np.ndarray]: the camera matrix, distortion coefficients, + rotation matrices, tvecs + """ + all_corners = [] + all_ids = [] + img_size = None + for idx, img in enumerate(images): + if img_size is None: + img_size = img.shape[:2][::-1] + + charuco_corners, charuco_ids = detect_charuco_corners(img, charuco_board, aruco_dict) + + if charuco_corners is not None and len(charuco_corners) >= 8: + all_corners.append(charuco_corners) + all_ids.append(charuco_ids) + else: + logger.warning(f"Not enough corners detected in image index {idx} {debug_str}; ignoring") + + if len(all_corners) > 1: + obj_points_all = [] + img_points = [] + ids = [] + for corners, ids in zip(all_corners, all_ids): + obj_points = get_charuco_board_object_points(charuco_board, ids) + obj_points_all.append(obj_points) + img_points.append(corners) + logger.info(f"About to process {len(obj_points_all)} points for single camera cal | {debug_str}") + # Long term improvement: could pull tvec for use to localize camera relative to robot? 
+ _, camera_matrix, dist_coeffs, rvecs, tvecs = cv2.calibrateCamera( # use LU flag critical for speed + obj_points_all, + img_points, + img_size, + None, + None, + flags=cv2.CALIB_USE_LU, + ) + rmats = np.array([cv2.Rodrigues(rvec)[0] for rvec in rvecs]) + return camera_matrix, dist_coeffs, rmats, tvecs + else: + raise ValueError(f"Not enough valid points to individually calibrate {debug_str}") + + +def stereo_calibration_charuco( + origin_images: List[np.ndarray], + reference_images: List[np.ndarray], + charuco_board: cv2.aruco_CharucoBoard = SPOT_DEFAULT_CHARUCO, + aruco_dict: cv2.aruco_Dictionary = SPOT_DEFAULT_ARUCO_DICT, + camera_matrix_origin: Optional[np.ndarray] = None, + dist_coeffs_origin: Optional[np.ndarray] = None, + camera_matrix_reference: Optional[np.ndarray] = None, + dist_coeffs_reference: Optional[np.ndarray] = None, + poses: Union[np.ndarray, None] = None, +) -> Dict: + """ + Perform a stereo calibration from a set of synchronized stereo images of a charuco calibration + board. + + Args: + origin_images (List[np.ndarray]): A list of images synced to reference images from camera 0 + reference_images (List[np.ndarray]): A list of images synced to origin images from camera 1 + charuco_board (cv2.aruco_CharucoBoard, optional): What charuco board to + use for the cal. Defaults to SPOT_DEFAULT_CHARUCO. + aruco_dict (cv2.aruco_Dictionary, optional): What aruco board to use for the cal. + Defaults to SPOT_DEFAULT_ARUCO_DICT. + camera_matrix_origin (Optional[np.ndarray], optional): What camera + matrix to assign to camera 0. If none, is computed. Defaults to None. + dist_coeffs_origin (Optional[np.ndarray], optional): What distortion coefficients + to assign to camera 0. If None, is computed. Defaults to None. + camera_matrix_reference (Optional[np.ndarray], optional): What camera + matrix to assign to camera 1. If none, is computed. . Defaults to None. 
+ dist_coeffs_reference (Optional[np.ndarray], optional): What distortion coefficients + to assign to camera 1. If None, is computed. Defaults to None. + poses (Union[np.ndarray, None]): Either a list of 4x4 homogenous transforms from which + pictures where taken, or None if unknown. Needs to be supplied for robot to camera cal. + (planning frame to base frame), or None + Raises: + ValueError: Could not calibrate a camera individually + ValueError: Not enough points to stereo calibrate + ValueError: Could not stereo calibrate + + Returns: + Dict: _description_ + """ + if camera_matrix_origin is None or dist_coeffs_origin is None: + logger.info("Calibrating Origin Camera individually") + (camera_matrix_origin, dist_coeffs_origin, rmats_origin, tvecs_origin) = calibrate_single_camera_charuco( + images=origin_images, + charuco_board=charuco_board, + aruco_dict=aruco_dict, + debug_str="for origin camera", + ) + if camera_matrix_reference is None or dist_coeffs_origin is None: + logger.info("Calibrating reference Camera individually ") + (camera_matrix_reference, dist_coeffs_reference, rmats_reference, tvecs_reference) = ( + calibrate_single_camera_charuco( + images=reference_images, + charuco_board=charuco_board, + aruco_dict=aruco_dict, + debug_str="for reference camera", + ) + ) + + if camera_matrix_origin is None or camera_matrix_reference is None: + raise ValueError("Could not calibrate one of the cameras as desired") + + all_corners_origin = [] + all_corners_reference = [] + all_ids_origin = [] + all_ids_reference = [] + img_size = None + + no_poses = poses is None + if no_poses: + poses = [x for x in range(len(origin_images))] + else: + filtered_poses = [] + + no_poses = poses is None + if no_poses: # fill up poses with dummy values so that you can iterate over poses + # with images zip(origin_images, reference_images, poses) together regardless of if poses + # are actually supplied (for the sake of brevity) + poses = [x for x in range(len(origin_images))] + 
else: + filtered_poses = [] + + for origin_img, reference_img, pose in zip(origin_images, reference_images, poses): + if img_size is None: + img_size = origin_img.shape[:2][::-1] + + origin_charuco_corners, origin_charuco_ids = detect_charuco_corners(origin_img, charuco_board, aruco_dict) + reference_charuco_corners, reference_charuco_ids = detect_charuco_corners( + reference_img, charuco_board, aruco_dict + ) + + if not no_poses: + filtered_poses.append(pose) + if origin_charuco_corners is not None and reference_charuco_corners is not None: + all_corners_origin.append(origin_charuco_corners) + all_corners_reference.append(reference_charuco_corners) + all_ids_origin.append(origin_charuco_ids) + all_ids_reference.append(reference_charuco_ids) + + if len(all_corners_origin) > 0: + obj_points_all = [] + img_points_origin = [] + img_points_reference = [] + for ( + origin_corners, + reference_corners, + origin_ids, + reference_ids, + ) in zip( + all_corners_origin, + all_corners_reference, + all_ids_origin, + all_ids_reference, + ): + common_ids = np.intersect1d(origin_ids, reference_ids) + if len(common_ids) >= 6: # Ensure there are at least 6 points + obj_points = get_charuco_board_object_points(charuco_board, common_ids) + obj_points_all.append(obj_points) + img_points_origin.append(origin_corners[np.isin(origin_ids, common_ids)]) + img_points_reference.append(reference_corners[np.isin(reference_ids, common_ids)]) + + if len(obj_points_all) > 0: + logger.info(f"Collected {len(obj_points_all)} shared point sets for stereo calibration.") + _, _, _, _, _, R, T, _, _ = cv2.stereoCalibrate( + obj_points_all, + img_points_origin, + img_points_reference, + camera_matrix_origin, + dist_coeffs_origin, + camera_matrix_reference, + dist_coeffs_reference, + img_size, + criteria=( + cv2.TERM_CRITERIA_MAX_ITER + cv2.TERM_CRITERIA_EPS, + 100, + 1e-6, + ), + flags=cv2.CALIB_USE_LU, + ) + logger.info("Stereo calibration completed.") + result_dict = { + "dist_coeffs_origin": 
dist_coeffs_origin, + "camera_matrix_origin": camera_matrix_origin, + "image_dim_origin": np.array(origin_images[0].shape[:2]), + "dist_coeffs_reference": dist_coeffs_reference, + "camera_matrix_reference": camera_matrix_reference, + "image_dim_reference": np.array(reference_images[0].shape[:2]), + "R": R, + "T": T, + } + + if not no_poses: + logger.info("Attempting hand-eye calibation....") + # filtered_poses = np.array([np.linalg.inv(pose) for pose in filtered_poses]) + filtered_poses = np.array(filtered_poses) + # Use the filtered poses for the target-to-camera transformation + R_gripper2base = np.array([pose[:3, :3] for pose in filtered_poses]) + t_gripper2base = np.array([pose[:3, 3] for pose in filtered_poses]) + + R_handeye, T_handeye = cv2.calibrateHandEye( + R_gripper2base=R_gripper2base, + t_gripper2base=t_gripper2base, + R_target2cam=rmats_origin, + t_target2cam=tvecs_origin, + method=cv2.CALIB_HAND_EYE_DANIILIDIS, + ) + robot_to_cam = np.eye(4) # Initialize 4x4 identity matrix + robot_to_cam[:3, :3] = R_handeye # Populate rotation + robot_to_cam[:3, 3] = T_handeye.flatten() # Populate translation + + # Invert the homogeneous matrix + cam_to_robot = np.linalg.inv(robot_to_cam) + + # Extract rotation and translation from the inverted matrix + camera_to_robot_R = cam_to_robot[:3, :3] # Extract rotation + camera_to_robot_T = cam_to_robot[:3, 3] # Extract translation + logger.info("Hand-eye calibration completed.") + + # Add the hand-eye calibration results to the final result dictionary + result_dict["R_handeye"] = camera_to_robot_R + result_dict["T_handeye"] = camera_to_robot_T + return result_dict + else: + raise ValueError("Not enough valid points for stereo calibration.") + else: + raise ValueError("Not enough shared points for stereo calibration.") + + +def est_camera_t_charuco_board_center( + img: np.ndarray, + charuco_board: cv2.aruco_CharucoBoard, + aruco_dict: cv2.aruco_Dictionary, + camera_matrix: np.ndarray, + dist_coeffs: np.ndarray, +) -> 
Tuple[np.ndarray, np.ndarray]: + """ + Localizes the 6D pose of the checkerboard center using Charuco corners with OpenCV's solvePnP. + + The board pose's translation should be at the center of the board, with the orientation in OpenCV format, + where the +Z points out of the board with the other axes being parallel to the sides of the board. + + Args: + img (np.ndarray): The input image containing the checkerboard. + charuco_board (cv2.aruco_CharucoBoard): The Charuco board configuration. + aruco_dict (cv2.aruco_Dictionary): The Aruco dictionary used to detect markers. + camera_matrix (np.ndarray): The camera matrix from calibration. + dist_coeffs (np.ndarray): The distortion coefficients from calibration. + + Returns: + Optional[Tuple[np.ndarray, np.ndarray]]: The rotation vector and translation vector + representing the 6D pose of the checkerboard center if found, else None. + """ + charuco_corners, charuco_ids = detect_charuco_corners(img, charuco_board, aruco_dict) + if charuco_corners is not None and charuco_ids is not None: + object_points = get_charuco_board_object_points(charuco_board, charuco_ids) + image_points = charuco_corners + + # Use solvePnP to estimate the pose of the Charuco board + success, rvec, tvec = cv2.solvePnP(object_points, np.array(image_points), camera_matrix, dist_coeffs) + + if success: + # Convert to the camera frame: Adjust the translation vector to be relative to the center of the board + x_trans = (charuco_board.getSquareLength() * charuco_board.getChessboardSize()[0]) / 2.0 + y_trans = (charuco_board.getSquareLength() * charuco_board.getChessboardSize()[1]) / 2.0 + center_trans = np.array([x_trans, y_trans, 0.0]).reshape((3, 1)) + rmat, _ = cv2.Rodrigues(rvec) + + tvec = tvec + rmat.dot(center_trans) + tvec_to_camera = tvec + return np.array(rmat), np.array(tvec_to_camera).ravel() + else: + raise ValueError("Pose estimation failed. 
You likely primed the robot too close to the board.") + else: + raise ValueError( + "Couldn't detect any Charuco boards in the image, " + "localization failed. Ensure the board is visible from the" + " primed pose." + ) + + +def convert_camera_t_viewpoint_to_origin_t_planning_frame( + origin_t_planning_frame: np.ndarray = np.eye(4), + planning_frame_t_opencv_camera: np.ndarray = np.eye(4), + opencv_camera_t_viewpoint: np.ndarray = np.eye(4), +) -> np.ndarray: + """ + Convert a camera viewpoint given in a camera frame, to the planning frame. + + Camera frame should be in OpenCV/ ROS format, where + +x should point to the right in the image + +y should point down in the image + +z should point into to plane of the image. + + Assuming that origin_t_planning_frame isn't aligned with this format, align it + so that the viewpoint can be localized as a hand pose command. + + Args: + origin_t_planning_frame (np.ndarray, optional): 4x4 homogenous transform + from origin to planning frame. Defaults to np.eye(4). + planning_frame_t_opencv_camera (np.ndarray, optional): 4x4 homogenous transform + from planning frame to the camera. Defaults to np.eye(4). + opencv_camera_t_viewpoint (np.ndarray, optional): 4x4 homogenous transform + from camera to the future viewpoint. Defaults to np.eye(4). + + Returns: + np.ndarray: 4x4 homogenous transform of the planning frame of a future viewpoint + expressed in the origin frame. 
+ """ + origin_t_opencv_camera_pose = origin_t_planning_frame @ planning_frame_t_opencv_camera + origin_t_viewpoint = origin_t_opencv_camera_pose @ opencv_camera_t_viewpoint + origin_t_planning_frame_next = origin_t_viewpoint @ np.linalg.inv(planning_frame_t_opencv_camera) + return origin_t_planning_frame_next + + +def get_relative_viewpoints_from_board_pose_and_param( + R_board: np.ndarray, + tvec: np.ndarray, + distances_x: Optional[np.ndarray] = None, + distances_z: Optional[np.ndarray] = None, + x_axis_rots: Optional[np.ndarray] = None, + y_axis_rots: Optional[np.ndarray] = None, + z_axis_rots: Optional[np.ndarray] = None, + R_align_board_frame_with_camera: Optional[np.ndarray] = None, + degree_offset_rotations: bool = True, +) -> List[np.ndarray]: + """ + Given the position of charuco board, sample viewpoints facing the calibration board for + the robot's "principal" camera to visit. When the robot moves to these viewpoints, + it takes synchronized photos with all cameras to use for solving the calibration. + + ROS and OpenCV have the same camera format, where: + # +x should point to the right in the image + # +y should point down in the image + # +z should point into to plane of the image. + + All transforms to cameras are in this format. + + Args: + R_board (np.ndarray): the rotation of the calibration board in opencv notation relative + to the "principal" camera. + (See AutomaticCameraCalibration.localize_target_to_principal_camera) + tvec (np.ndarray): the translation of the calibration board in opencv notation relative to + the principal camera + distances (np.ndarray, optional): What distances + away from the calibration board's pose (along the Z axis) + to sample calibration viewpoints from. Defaults to np.arange(0.5, 0.8, 0.1). + x_axis_rots (np.ndarray, optional): What + x-axis rotations relative to the camera viewing the board orthogonally + to apply to sample viewpoints. Defaults to np.arange(-21, 21, 5). 
+ y_axis_rots (np.ndarray, optional): What + y-axis rotations relative to the camera viewing the board orthogonally + to apply to sample viewpoints. Defaults to np.arange(-21, 21, 5). + z_axis_rots (np.ndarray, optional): What + z-axis rotations relative to the camera viewing the board orthogonally + to apply to sample viewpoints. Defaults to np.array([-21, 21, 5]). + R_align_board_frame_with_camera (np.ndarray, optional): By default, + the charuco pose rotation Z-axis points out of the pattern, + and into the camera. However, to sample viewpoints, the camera must face the board, + so viewpoints can't be sampled directly from the board pose as they would face + away from the board. + This rotates the board frame such that the Z axis points away + from the pattern, so that camera frames facing the board may be sampled + after applying a distance and offset rotation. + Defaults to np.array([[1, 0, 0], [0, -1, 0], [0, 0, -1]]). + degree_offset_rotations (bool, optional): whether to use degree for the + sampling rotation parameters. Defaults to True. 
+ + Returns: + List[np.ndarray]: a list of 4x4 homogenous transforms to visit in the "principal" cameras + frame + """ + if distances_x is None: + distances_x = np.arange(0.3, 0.7, 0.1) + if distances_z is None: + distances_z = np.arange(-0.2, 0.3, 0.1) + if x_axis_rots is None: + x_axis_rots = np.arange(-20, 21, 5) + if y_axis_rots is None: + y_axis_rots = np.arange(-20, 21, 5) + if z_axis_rots is None: + z_axis_rots = np.arange(-20, 21, 5) + if R_align_board_frame_with_camera is None: + R_align_board_frame_with_camera = np.array([[1, 0, 0], [0, -1, 0], [0, 0, -1]]) + + if degree_offset_rotations: + x_axis_rots = [radians(deg) for deg in x_axis_rots] + y_axis_rots = [radians(deg) for deg in y_axis_rots] + z_axis_rots = [radians(deg) for deg in z_axis_rots] + + translations = [(tvec + R_board[:, 2] * dist + R_board[:, 0] * d2) for dist in distances_z for d2 in distances_x] + R_base = R_board @ R_align_board_frame_with_camera + + def euler_to_rotation_matrix(roll: float, pitch: float, yaw: float) -> np.ndarray: + """ + Convert Euler angles to a rotation matrix using OpenCV, for the sake + of CLI convenience, conciseness, while not depending on an external transform lib. 
+ """ + R_x = cv2.Rodrigues(np.array([roll, 0, 0]))[0] + R_y = cv2.Rodrigues(np.array([0, pitch, 0]))[0] + R_z = cv2.Rodrigues(np.array([0, 0, yaw]))[0] + return R_z @ R_y @ R_x + + poses = [] + for t in translations: + for x_axis_rot in x_axis_rots: + for y_axis_rot in y_axis_rots: + for z_axis_rot in z_axis_rots: + # Get the rotation matrix from Euler angles + R_rot = euler_to_rotation_matrix(x_axis_rot, y_axis_rot, z_axis_rot) + + # Combine with the base rotation + R_base_modified = R_base @ R_rot + + transform = np.eye(4) + transform[:-1, -1] = t.reshape((3,)) + transform[:-1, :-1] = R_base_modified + poses.append(transform) + + logger.info(f"Calculated {len(poses)} relative viewpoints to visit relative to the target.") + return poses + +def create_ideal_charuco_image(charuco_board: cv2.aruco_CharucoBoard, dim=(500, 700), colorful=False): + if OPENCV_VERSION < OPENCV_CHARUCO_LIBRARY_CHANGE_VERSION: + img = charuco_board.draw(outSize=dim) + else: + img = charuco_board.generateImage(outSize=dim) + if colorful: + return cv2.cvtColor(img, cv2.COLOR_GRAY2BGR) + else: + return img + + +def charuco_pose_sanity_check( + img: np.ndarray, charuco_board: cv2.aruco_CharucoBoard, aruco_dict: cv2.aruco_Dictionary +) -> np.ndarray: + """ + Visualize the Charuco board pose detection on an image for sanity-checking. + + Uses an identity camera matrix and zero distortion to estimate and display + the 6D pose of the board center, overlaying 3D axes on the image. + + Args: + img (np.ndarray): The input image containing the Charuco board. + charuco_board (cv2.aruco_CharucoBoard): The Charuco board configuration. + aruco_dict (cv2.aruco_Dictionary): The Aruco dictionary used to detect markers. + + Returns: + img_with_axes (np.ndarray): The image with the pose axes drawn. 
+ """ + + def is_z_axis_out_of_board(tvec: np.ndarray) -> bool: + """Determine if the Z-axis points out of the Charuco board (towards the camera).""" + return tvec[2] > 0 + + def visualize_pose_with_axis( + img: np.ndarray, + rmat: np.ndarray, + tvec: np.ndarray, + camera_matrix: np.ndarray, + dist_coeffs: np.ndarray, + axis_length: float = 0.115, + ) -> np.ndarray: + """Draws the 3D pose axes on the image and displays whether the Z-axis is out or into the board.""" + axis = np.float32([[axis_length, 0, 0], [0, axis_length, 0], [0, 0, axis_length], [0, 0, 0]]).reshape(-1, 3) + imgpts, _ = cv2.projectPoints(axis, rmat, tvec, camera_matrix, dist_coeffs) + z_out_of_board = is_z_axis_out_of_board(tvec) + img_with_axes = img.copy() + origin = tuple(imgpts[3].ravel().astype(int)) + img_with_axes = cv2.line(img_with_axes, origin, tuple(imgpts[0].ravel().astype(int)), (0, 0, 255), 3) # X + img_with_axes = cv2.line(img_with_axes, origin, tuple(imgpts[1].ravel().astype(int)), (0, 255, 0), 3) # Y + img_with_axes = cv2.line(img_with_axes, origin, tuple(imgpts[2].ravel().astype(int)), (255, 0, 0), 3) # Z + if not z_out_of_board: + logger.warning("This tool assumes that Z axis is out of board, but it was detected as into board.") + window_name = f'Charuco Board Pose ({"Z-axis out of board" if z_out_of_board else "Z-axis into board"})' + cv2.imshow(window_name, img_with_axes) + elapsed_time = 0 + wait_interval = 100 + max_wait_time = 20 + while elapsed_time < max_wait_time * 1000: + if cv2.getWindowProperty(window_name, cv2.WND_PROP_VISIBLE) < 1: + break + key = cv2.waitKey(wait_interval) + if key != -1: + break + elapsed_time += wait_interval + cv2.destroyAllWindows() + return img_with_axes + + camera_matrix = np.eye(3, dtype=np.float32) + dist_coeffs = np.zeros(5, dtype=np.float32) + rmat, tvec = est_camera_t_charuco_board_center(img, charuco_board, aruco_dict, camera_matrix, dist_coeffs) + return visualize_pose_with_axis(img, rmat, tvec, camera_matrix, dist_coeffs) \ No 
newline at end of file diff --git a/spot_wrapper/calibration/in_depth_calibration_doc.md b/spot_wrapper/calibration/in_depth_calibration_doc.md deleted file mode 100644 index ae8524ea..00000000 --- a/spot_wrapper/calibration/in_depth_calibration_doc.md +++ /dev/null @@ -1,324 +0,0 @@ -# Automatic Robotic Stereo Camera Calibration Utility with Charuco Target (a.k.a Multi-Stereo Madness) - -### Recommended Setup - -![spot eye in hand cal](spot_eye_in_hand_setup.jpg) - -### Reference Image - -![side by side comparison](registration_qualitative_example.jpg) - -# Table of Contents - -1. [***Overview***](#overview) -2. [***Adapting Automatic Collection and Calibration to Your Scenario***](#adapting-automatic-data-collection-and-calibration-to-your-scenario) -3. [***Check if you have a Legacy Charuco Board***](#check-if-you-have-a-legacy-charuco-board) -4. [***Calibrate Spot Manipulator Eye-In-Hand Cameras With the CLI Tool***](#calibrate-spot-manipulator-eye-in-hand-cameras-with-the-cli-tool) - - [Robot and Target Setup](#robot-and-target-setup) - - [Example Usage](#example-usage-aka-hand-specific-live-incantations) - - [Improving Calibration Quality](#improving-calibration-quality) - - [Using the Registered Information with Spot ROS 2](#using-the-registered-information-with-spot-ros-2) -5. [***Using the CLI Tool To Calibrate On an Existing Dataset***](#using-the-cli-tool-to-calibrate-on-an-existing-dataset) -6. [***Understanding the Output Calibration Config File from the CLI***](#understanding-the-output-calibration-config-file-from-the-cli) -7. 
[Recreate the Core Calibration CLI Tool Without Depending On Spot Wrapper](#recreate-the-core-calibration-cli-tool-without-depending-on-spot-wrapper) - -# Overview - -This utility streamlines automatic -camera calibration to **solve for the intrinsic and extrinsic parameters for two or more -cameras mounted in a fixed pose relative to each other on a robot** -based off of moving the robot to view a Charuco target from different poses. If you already -have an existing dataset of synchronized stereo (or multi-stereo) photos of a Charuco target from different viewpoints, -you can use the CLI tool to compute the intrinsic/extrinsic parameters. Additionally, if you have saved the poses the images are taken at (as homogenous 4x4 transforms from the "world" frame [most likely the robot base] to the robot planning frame [most likely the -robot end-effector]), you can also calibrate -the camera to robot extrinsic (eye-to-hand registration). If you don't have a dataset, -you can use this tool both to generate the dataset and calibrate the cameras. - -The CLI tool's saving capability allows to store multiple unique calibration runs in one configuration file -with calibration metadata, to document related runs with different setups or parameters. - -This was developed to calibrate the two cameras at the -end of Spot's optional manipulator payload, while being **as general as possible -to be easily adapted to new robots and camera configurations. The core calibration utilities depend only on NumPy and OpenCV.** - -This utility works by sampling viewpoints relative to the Charuco calibration board, -visiting those viewpoints, and snapping photos with all cameras at each viewpoint. Then, -these images are used to calibrate the desired cameras both individually and with respect to each -other. This utility is particularly good at handling partial views of the board -due to it's use of a charuco board. 
For more info, see ```calibration_util.py```, where the functions -```get_multiple_perspective_camera_calibration_dataset``` and ```multistereo_calibration_charuco``` -do most of the heavy lifting. - -# Adapting Automatic Data Collection and Calibration to Your Scenario - -Assuming that you have a charuco board you'd like to automatically calibrate your cameras/robot with: - -**To calibrate a new robot with new cameras using the utility**, implement the abstract class ```AutomaticCameraCalibrationRobot``` from ```automatic_camera_calibration_robot.py``` -(Only five methods, that are likely analogous to what's needed for automatic calibration even if this utility isn't used. -in Spot's case , excluding comments, it's under ~250 lines of code, see ```spot_in_hand_camera_calibration.py```), -and pass the implemented class as an argument to ```get_multiple_perspective_camera_calibration_dataset``` from ```calibration_util.py``` (see ```calibrate_spot_hand_camera_cli.py``` for an example). - -**Adding a new camera to register with Spot's existing hand cameras** is as easy as adding a call to append -the new camera image in ```SpotInHandCalibration.capture_images``` in ```spot_in_hand_camera_calibration.py``` to the existing -list of images obtained with the default cameras (assuming that the new camera -is fixed relative to the existing cameras.). - -# Check if you have a Legacy Charuco Board - -You only need to do this if using an opencv version after ```4.7```( -check with```python3 -c "import cv2; print(cv2.__version__)"```) - -Through using the CLI tool (```python3 calibrate_multistereo_cameras_with_charuco_cli.py -h```), you can check if you have a legacy board through visually comparing the generated drawn virtual board to your physical charuco board target. Some legacy boards have an aruco tag in the top -left corner, whether as some non-legacy boards have a checker in the top left corner. 
-Also, check to see that the aruco tags match between virtual and physical boards. -It is important that the virtual board matches the physical board, otherwise this calibration -will not work. - -``` -python3 calibrate_multistereo_cameras_with_charuco_cli.py --check_board_pattern --legacy_charuco_pattern t -``` - -There should be an axis at the center of the board, where the Y axis (green) -points upwards, the X axis (red) points to the right, and the figure should be labelled -as Z-axis out of board. If it isn't then try without legacy (```--legacy_charuco_pattern f```). - -If you are using the default Spot Calibation board, and there is an aruco marker -in the top left corner, then it legacy (so supply true argument to legacy.) - -# Calibrate Spot Manipulator Eye-In-Hand Cameras With the CLI tool - -There is an [existing method to calibrate the gripper cameras with the Spot Api](https://dev.bostondynamics.com/protos/bosdyn/api/proto_reference#bosdyn-api-spot-GripperCameraCalibrationCommandRequest). However, you don't have -as much control and extensibility in the existing method as with this custom procedure. - -In Spot's hand, there is an RGB camera, as well as a ToF camera, which ```calibrate_spot_hand_camera_cli.py``` -automatically co-registers using the calibration utility via the default calibration board included -with most Spots. You can also use a different board if you'd like, just set the right CLI parameters -(see example usage below) - -## Robot and Target Setup - -Have Spot sit on the ground with the arm stowed such that nothing is within a meter of the robot. -Spot should NOT be on its charging dock. - -No program should have robot control authority. Make sure Spot is sitting -with the arm stowed before releasing robot authority. -If you are using the ROS 2 Driver, disconnect from Spot. If you are using the tablet to control Spot, -select power icon (top) -> Advanced -> Release control. 
- -Place the Spot default calibration board on the ground leaning against something in front of Spot, so -that the calibration board's pattern is facing Spot front (where the stowed arm points). -The up arrow should point in the direction of the sky. The board should be tilted away from -Spot at a 45 degree angle, so that the bottom of the board is closer Spot than the top of the board. The ground beneath -the board should be at about a 45 degree angle from the board. The board's bottom should be about a meter away -from the front of Spot while sitting. Nothing should be within a meter of the robot. - -**See the first reference image at the top of this README to see good board placement relative to Spot.** - -When calibrating, Spot will stand up, ready its arm, lower its base slightly, and -lower its arm slightly. As soon as this happens, if Spot -can see a Charuco board it will start to move the arm around for the calibration. - -If the calibration board isn't placed at the right location, or there is more -than one board visible to the robot, this may lead to potentially unsafe behavior, so be ready to -E-Stop the robot at any moment. If the robot starts exhibiting unexpected behavior, stopping the program, -turning off the computer running the calibration, and hijacking control from the tablet can help stop robot movement. - -It is possible to calibrate at further distances -(see ```--dist_from_board_viewpoint_range``` arg), but the sampling of the viewpoint -distance from the board must be feasible to reach with where the base of the -robot is started relative to the board. The previously recommended Spot and Target setup is what worked well -in testing for the default distance viewpoint range. - -After the calibration is finished, Spot stows its arm and sits back down. At this point, -it is safe to take control of Spot from the tablet or ROS 2 , even if the calibration script is still -running. 
Just don't stop the script or it will stop calculating the parameters :)
-
-If Spot is shaking while moving the arm around, it is likely that
-your viewpoint range is too close or too far (most often, adjusting
-```--dist_from_board_viewpoint_range``` will help with that). You can
-also try to drive the Spot to a better location to start the calibration
-that fits the distance from viewpoint range better.
-
-## Example Usage (a.k.a Hand Specific Live Incantations)
-
-For all possible arguments to the Hand Specific CLI tool, run ```python3 calibrate_spot_hand_camera_cli.py -h```.
-Many parameters are customizable.
-
-If you'd like to calibrate depth to rgb, with rgb at default resolution, saving photos to ```~/my_collection/calibrated.yaml```,
-here is an example CLI command template, under the default tag (recommended for first time).
-Note that the default Spot Board is a legacy pattern for OpenCV > 4.7, so ensure to pass
-the --legacy_charuco_pattern flag
-
-```
-python3 calibrate_spot_hand_camera_cli.py --ip -u user -pw --data_path ~/my_collection/ \
---save_data --result_path ~/my_collection/calibrated.yaml --photo_utilization_ratio 1 --stereo_pairs "[(1,0)]" --legacy_charuco_pattern True \
---spot_rgb_photo_width=640 --spot_rgb_photo_height=480 --tag default
-```
-
-If you'd like to load photos, and run the calibration with slightly different parameters,
-while saving both the results and the parameters to the same config file as in the previous example. 
-
-Here is an example CLI command template (from recorded images, no data collection)
-
-```
-python3 calibrate_multistereo_cameras_with_charuco_cli.py --data_path ~/my_collection/
---result_path ~/my_collection/bottle_calibrated.yaml --photo_utilization_ratio 2 --stereo_pairs "[(1,0)]" --legacy_charuco_pattern True \
---spot_rgb_photo_width=640 --spot_rgb_photo_height=480 --tag less_photos_used_test_v1
-```
-
-If you'd like to calibrate depth to rgb, at a greater resolution, while sampling
-viewpoints at finer X-rotation steps relative to the board, and slightly further from the board
-with finer steps, here is an example CLI command template. Also,
-to demonstrate the stereo pairs argument, let's assume that you also want to find rgb to depth (redundant for
-demonstration purposes), while writing to the same config files as above.
-
-```
-python3 calibrate_spot_hand_camera_cli.py --ip -u user -pw --data_path ~/my_collection/ \
---save_data --result_path ~/my_collection/calibrated.yaml --photo_utilization_ratio 1 --stereo_pairs "[(1,0), (0,1)]" --legacy_charuco_pattern True\
---spot_rgb_photo_width=1920 --spot_rgb_photo_height=1080 --x_axis_rot_viewpoint_range -10 10 1 \
---dist_from_board_viewpoint_range .6 .9 .1
-```
-
-> ***NOTE:*** If you would like to save the new calibration parameters to the robot itself, just add the `-send` flag and they will be sent to the robot.
-
-## Improving Calibration Quality
-
-If you find that the calibration quality isn't high enough, try a longer calibration
-with a wider variety of viewpoints (decrease the step size, increase the bounds).
-The default calibration viewpoint parameters are meant to facilitate a quick calibration
-even on more inexpensive hardware, and as such uses a minimal amount of viewpoints.
-
-However, in calibration, less is more. It is better to collect fewer high quality
-viewpoints than many low quality ones. 
Play with the viewpoint sampling parameters
-to find what takes the most diverse high quality photos of the board.
-
-Also, [make sure you are checking if your board is legacy, and if you can
-allow default corner ordering](#check-if-you-have-a-legacy-charuco-board).
-
-If you are using a robot to collect your dataset, such as Spot, you can
-also try increasing the settle time prior to taking an image (see ```--settle_time```)
-
-## Using the Registered Information with Spot ROS 2
-
-If you have the [Spot ROS 2 Driver](https://github.com/bdaiinstitute/spot_ros2) installed,
-you can leverage the output of the automatic calibration to publish a depth image registered
-to the RGB image frame. For more info, see the ```Optional Automatic Eye-in-Hand Stereo Calibration Routine for Manipulator (Arm) Payload```
-section in the [spot_ros2 main README](https://github.com/bdaiinstitute/spot_ros2?tab=readme-ov-file#optional-automatic-eye-in-hand-stereo-calibration-routine-for-manipulator-arm-payload)
-
-# Using the CLI Tool To Calibrate On an Existing Dataset
-
-To use the CLI Tool, please ensure that you have one parent folder,
-where each camera has a folder under the parent (Numbered from 0 to N). Synchronized photos
-for each camera should appear in their respective directories, where matching photos have ids, ascending
-upwards, starting from 0. The file structure should appear something like the following:
-
-```
-existing_dataset/
-├── 0/
-│   ├── 0.png # taken at viewpoint 0
-│   ├── 1.png
-│   └── 2.png
-├── 1/
-│   ├── 0.png # taken at viewpoint 0
-│   ├── 1.png
-│   └── 2.png
-├── 2/
-│   ├── 0.png # taken at viewpoint 0
-│   ├── 1.png
-│   └── 2.png
-├── poses/ # optional, for camera to robot cal
-│   ├── 0.npy # base to planning frame 4x4 homogeneous transform at viewpoint 0
-│   ├── 1.npy # .npy files generated with np.save(FILENAME, 4x4_POSE)
-│   └── 2.npy
-```
-
-Optionally, you can also include pose information, to find the camera to robot extrinsic. 
-
-To see all possible arguments for calibration, please run ```python3 calibrate_multistereo_cameras_with_charuco_cli.py -h```.
- Many parameters such as board proportions and Aruco dictionary are customizable.
-
-If you'd like to register camera 1 to camera 0, and camera 2 to camera 0, you could do the following:
-
-```
-python3 calibrate_multistereo_cameras_with_charuco_cli.py --data_path ~/existing_dataset/ \
---result_path ~/existing_dataset/eye_in_hand_calib.yaml --photo_utilization_ratio 1 --stereo_pairs "[(1,0), (2, 0)]" \
---legacy_charuco_pattern=SUPPLY_CHECK_BOARD_FLAG_TO_SEE_IF_LEGACY_NEEDED \
---tag default --unsafe_tag_save
-```
-
-# Understanding the Output Calibration Config File from the CLI
-
-A calibration produced with ```multistereo_calibration_charuco``` from ```calibration_util```
-can be saved as a ```.yaml```file with ```save_calibration_parameters``` from ```calibration_util```.
-Here is a demonstration output calibration config file for example purposes:
-
-```
-default:
-  intrinsic:
-    1:
-      camera_matrix: flattened_3x3_camera_matrix_now_9x1
-      dist_coeffs: flat_5x1_opencv_distortion_coeffs
-      image_dim: [height, width]
-    0:
-      camera_matrix: flattened_3x3_camera_matrix_now_9x1
-      dist_coeffs: flat_5x1_opencv_distortion_coeffs
-      image_dim: [480, 640]
-  extrinsic:
-    1: # primary camera index (origin/base frame), first sublevel
-      0: # reference camera index, second sublevel
-        R: flattened_3x3_rotation_matrix_from_primary_to_reference_camera_now_9x1
-        T: 3x1_translation_matrix_from_primary_to_reference_camera
-      planning_frame: # the planning frame (in Spot's case, the hand)
-        R: flattened_3x3_rotation_matrix_from_primary_to_robot_planning_frame_now_9x1
-        T: 3x1_translation_matrix_from_primary_to_robot_planning_frame
-  run_param:
-    num_images: 729
-    timestamp: '2024-08-19 03:43:06'
-    stereo_pairs:
-    - [1, 0]
-    photo_utilization_ratio: 2
-    num_checkers_width: 9
-    num_checkers_height: 4
-    dict_size: DICT_4X4_50
-    checker_dim: 0.115
-    marker_dim: 
0.09
-```
-
-Each calibration run
-that creates or modifies a config file is tagged with a unique name, to allow for tracking of several experiments,
-which is the main title (in the above case, ```default```).
-Under the main title, there are the fields relevant to the calibration.
-Under ```intrinsic```, the intrinsic (camera matrix, distortion coefficients, and image height/width)
-for each camera are recorded as a flattened representation
-under the camera's index number as it appears in the list of images
-returned by ```capture_images```. For example, if ```capture_images``` produces a list that is
-```[image_by_primary_camera, image_by_reference_camera]```, then the intrinsic for the ```primary_camera```
-would be stored under ```0```, and the intrinsic for ```reference_camera``` would be stored under ```1```.
-
-Under ```extrinsic```, the first sublevel, again a camera index, corresponds to the camera index of which camera
-is the origin for the ```extrinsic``` transform between two cameras as the first
-field in a requested stereo pair. The second sublevel corresponds to the index of which camera
-the ```extrinsic``` maps to from the origin camera. See the comments in the example config file
-above for more information. 
-Additionally, arguments from an argparser can also be dumped into the yaml, which will be saved -under ```run_param``` (excluding args that are: ```password``` or ```username```) - -# Recreate the Core Calibration CLI Tool Without Depending On Spot Wrapper - -If you like the core tools of this utility, and you'd like a more portable version (```standalone_cli.py```) for -use independent of Spot that doesn't depend on Spot Wrapper, you could recreate -the CLI tool with no dependency on Spot Wrapper with the following command: - -``` -cat calibration_util.py <(tail -n +3 automatic_camera_calibration_robot.py) <(tail -n +26 calibrate_multistereo_cameras_with_charuco_cli.py) > standalone_cli.py -``` - -The core capability above depends primarily on NumPy, OpenCV and standard Python libraries. - -# Contributors - -Gary Lvov - -# Special Thanks To - -Michael Pickett, Katie Hughes, Tiffany Cappellari, Andrew Messing, -Emmanuel Panov, Eric Rosen, Brian Okorn, Joseph St Germain and Ken Baker. diff --git a/spot_wrapper/calibration/registration_qualitative_example.jpg b/spot_wrapper/calibration/registration_qualitative_example.jpg deleted file mode 100644 index d091dc88b56b3c4189613d7abc22837c1da83da5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 47508 zcmb5VWmKD8^Di7+ij^Y8p+K<$MbaWgiYIu`5~QV22<~o0Tbw|kc+n8tT?#E;+}$NO z#Y&K#-sk_Ev+n0TpWgYf*EQE(*Pgld+G}RdZ)X0?{#gZ3fR#W>04yxP|InWWfII*P z`ycve#Q96O__+TNAwE7HJ`o`?F%cmV5itol3GoB62Sh}q4@t?$DJULN5R*_oqNI5A z_nG3KMX>()5(k&??}ro*h#vfX`~Q>vd;>fr#QK4Cj)V0Cfc+2)=ONag9{^^+Urhq@cg>#=bt_mqeasD1(zUSk2uVWJ z)c#$b@vmSkynjmv;QkeWM}SZGclPx|05%p5HZBf64jw)(!9P3tkMxIlZ1|KyuODeA z{3H8gj&SD~HH}==8Uecq1b!l4J$}j|3e+)o$^A1AAi=@<`?EL?0Wts#(kNR;q98{? 
zs=&fFHgAu{E?KO+`g;u%td}x{$xt5l9R_LA#%aj3M{LKT@hfeZKnKUdWMAsckQ!E6 zvz&)jvc~;GqEnp(jr;1Hyo>o9JbwAH;_E@^wx^fTyDP@|#(8cvMlSj8>i<%|{ zuGs5tuLiBX2iy6(fm(+&Q7g4QrBlQ&u4>0C=HGa{Nhr7c^<^(|L+B02`dL`tDV8jZ zfYmBpBMsWT_(MAB`_zl}0(PkUUd+;PE-2ynK^*7Q!q#sHdnur^A#oL7&Z$cuTZ5<{ zXX(m!sJ_<@;ngzqfr{{1xf%*W#Q+P}f)_N_@nP`*C_#sY41zl68eb=$p>2WJO*7&z z^nnlfpAUu!K(kDHvdQdc<>KD8c!>9iSEOSkMaImCbn)(R z$@$iO4Qz<;4?tT6^$R-ok}Jt!f++<^)^FJMWQp_Q=qiPQ^w_>2Br!BdH)%%wlB@A| zrrv&A*6?zDNa3M@)T^P`sS=hiP071^4JaF>ey~m%w#I5$<_-8IR)VaDzJF<3*2*5s zrS6Ju8JA9aa2e=x-RL+HCxl9c`$Tp1bS>;?eQoze$Zq)@8CaN zE#W1j)Fmt@K$c}%w}Ym2*sV?T7gDF%L}s*(JxZcz=R!@Q_?~Y_J$?5K1XuC{D#Ged zDnpO;od`yG+Q&0#8sq{tUK&>}8jjhWF?ws85t1zF)nk903t2A#yidBXpGUu2+YZ}g z^q;>@nDob+_N`E_^GLZbvYktSjx%v8Hm{I9gRfgCdp~tC=ZHc1x8?olAyxtDiHd9eQ% zI1cl)7VZBQJt_V$uc?Gh0%a5mQ@WewxiyIdw~Z-QdWZpbJg~iRgPl#u_oXxT1Ty!N zR7}Lir9i>U5tH;9q`Br6W=Fx6F}#+-EB2EWi|KD2AG# z1kx=wX^*rTYC5jcXhNgy{CpEO_4$M3GVia6s$n8`7?))FDVeGl&TbbAINYg}d4*Ji zh6sn4?&5JpW+r>Bd)7DFlu|152AR&fWdA=*(g|x5I`YS ze`V%ly`0~Oep>O!!sHny96u%Fhp!s4^+EKs{Mcv#TXlT!9G=F`cV$Z+uE-I$GgR}- zVjpiKq=E^xQ+=*mi%XW!qSfsakpauiLnlr{)lU%DXy%P4tyTf(Y(ooom$Ra>`Ip+c z{3GS`#F)+SH0bMnV_(Qap^Me%>If{=)INfcwDD%4vEh6}V12|5OFsRT`rd3NsD0@I zKR;#trAHC#_Vk5gxem0A2WE~x z!HEtHrP=iI+Fh!{oUBesVMvaRV}P*j&QiqJ`5N$zu-jbJh+-XZMcW37$(m73b1>j} z&R{ub{v@*j$dkv`Rw5@44S_8pN-6Q5-%0M88I-}BiBNT#>h5WEQsKQN=d+Fd^25A? 
zlFN&Fh&e-dkb?x}dJ^qvo&y#ikp=+c&=9QTQE_KHT|th*$OUSZOYz4^O`z;wG|r?t z{j|c?CcUCr&>$f8;N!D@Z>`Nvz-ln8k$!G4?<1q;{!^XbZl>ASpby=A=;>kE=|L<= z`(237D1jecK_~J+PF5{B-M%^k4p#y&&CH1)iznL0jCu`WS<$v8czFU6PloW9mkaEF z@I&VOyD$(&_ilEEh%S|WV#CL#D$Jv&_#z?K`l-b}-4l1zZp{lolS7Xji>g4wBg!p# zZ;Fj?r@GS@poKNZW%w)B+@1Fgxb!C#dd<*7<$Rx(;)+axN;8LMPo%+Bb4&rdsIieC zygXJxq2%px{I^Hb6q1fAy$O|jFRMrNm4Z6l4$DM}E-2G?Up-YF@SmYiEe=EXkK_;p zP_t5(y?(VNsmy8~bak1v*Ap>CKYloI!@1<*n5^o?P`Z1+w}+!$i1&DZIX|?-Nv@p& z94?Ms)?6`dLOy2uKqA|;!_9p5rbi|Udq`kCpZf)p$mmJ^dVEk4H=+nP`fZk?m*ATl zakC2x=3<*np>Zjgovw5;EZmLJ{bmv5e3USLv^^f$sEZn+2Q&sz#WH<%o{2$}%1J+iU6VM=O)jI*T8 z5m?+hVE8x=ik0OvLxkNG^=K3taLfC6qao7^-%1VmlljZdOQ=u5c&2-CEq?l#YQvsC z5Au8a9rMM};Zu=n_WFHWXF{w=5|)oPxR~k2_9XXQflTy(RGx(8Wb9Y;Cwk#y(kSZS z_06cT!e5(Lj18$_$ao}^8s#8Qyl#GdE#vB~1$ltto_<2Yq_@;l#UU$Qg&JU>&%@YN zqTr1IOERx?55~qY(i~{Gu+no|7y3@nAMk?UMf zUXa8YVMFFM{jyPqvT@mB7bvu>?fky%?tLK)HqT{>?xWqO?&nb}dX#ob=V}uScixnP zBd`fsgI@QQda5_ePqfF+TfC8QE_gThL@nNOfCN$2FG%rdU1B3rCzj6|hgbU>fDF$; zoVoTZ2UIo)i=9AhggRtA?Gb3#Ya$0NNd<+{@+GCW)+*x?6snkBW$A138Z`kcb%nW! 
zUUA%>o;@v`eXax=8UCxdT6JP{;z>rVuTH0i>&S$2@0K2qtO^vw9Qd&t=dE}4FtPxP zE8n9EB;jV!2Lxu~ zz=jW8|JfGM8VBRq^<@za+~43fO zeYL+E%kb#z7F`7dauZvIGH>j<_7#O|0UjTo{ap>5#L4xf@c(g7U@U!4)eOm3Tt^0U zMCPJy$L`(Oa>^Cd-uh=b3XMuGjJv!35s7s@)4~XHIc^fs82{W{=`Ry?{>dVPvd;W6 zQiY2=?5u~7kDIRZ`O4a&I3cb+JDS{n<V zhOeYNW(jjr)%9%)UYtnShOQZH7qu{<)!*4_1n_^wX<@uP!$yu*oxUyH3Rbhvm=V>i zB`9jfCCPL~e!E_r{R2otkrle3!Q2|{6E<2$c+=uC$1~se!=b!qMIlLk z;>Yn~Kxc=_6Zvo0NT$GJABZXf$K95oF_c&40H}DuFP@#BBxn)1( zuxig!gF&0joj{%AexQ7;ljX1VfuO5)ZsaCd{s2;9{s5%>fC$ui_Z~F)@HHM10Ez!x zrbGw{I>_tM+|Yl?kFO9|`wTBCRKs;GhtnE^yr(XYQR)7*=o@$7Gk;4hQOh)(qGu2c zUGp8>VeT-ESgLPMpGa9$Rog#&u|74&)1u6AkL8v)8G}U;+CM|@o%@vvC*=jDiqxlp z7K5EbHX%m2arngNO)6Gw!gC;6x{a5*YcS_1SCVVlt@VkDJx3AJ1vD&PG#@rnsU%{{ zlh6naU3qB6$D8Q3FyhF*|I^<+kE8DR;e~&~bX8_w)_4$$R&lbjmK%o^jxez7Ba)Dq z(4;@9gkS{Eq-1S5bZ@!|9lZ%2!-W0-e|`|Y4|5U8y9DbkoK)yr$99|tmZ8ItJ{<*z z9#LIHYM$A0S`(4q`e9=+kz9RAw>1#%!5`69f?Y#2O$EAR<$Fdif3Dy8#pMp{Jq?iy zHYh74;_bm|dM=W+Qf9zqh*r3ooc`)BbWwpZVW3KN&vKJhDIHwgOwV;L{#`7H_FVw_+Zx#A24&At~I8A=p*6s#c z?@xtw4d>M;W~2ZtZ=&(jBAlRsm30MpWFHp^D7?>5iFw?;d?+*gH?ubyF}8V&vPM9Y zFZnV3Ph1UIuj8WXRNR*%RTkaDqDeOX0LDpT1E(vp?j$bLCTM5JO6$d^qdsa(exZ%S zUvc8W$t>czvPyatOa=EBf!d+kfKx9Op{by{#Q~iA-J)Vk-$2JsXV!#lQ3UbCqUlUS z7N;iP>d*5s)YL*c$(4KcpNHqyoUd8LXh@ zeSg09KDGuWw_@0f11#_+)su9~x(rURQDr>Q^)zRD?fGZeVpbvAo#e2uStJ_ce{_-O z`%W%0?!!^Ongibx`Fe9i(^|{`){nQ37UK|pU+NY%7q_Y$gSI)_*m$D}H+pb_0`h2x`s(`M`nT5mn7ni>~ zw`9JjDXDwAPMK1WU{rzrPEiLH{TBRM;159l@R!8aSc9>ux;qraZzD!T>p>**y1Bg2 z-Lv5GmN!tV_WUhKMbMZ*mcx@V`&r~~t`{@Tb)b_E%EsQLSzBW4qdV7;$1|!KQ7bk2 zQ3>K({Q*)vTTH!7Zy>R{a3Ze6ZskdV94kfr!pX$vyNrBN2D;(t&Rpt)_S4tbBKxJ% z$n&E$mK9c8=(9qZeZ%C4hxgSQyvxc4K4zq6lxA9P`{ZP*4vP$Oe*jNh&8S3`4S`o` z4%iyeQR81Mv}xwv6pXrKeTTX`+p6clm4F)YAJh9q}kPmm&Cc{pi`YEFD=) zYtu;vM?By-(G$K`NYmNQRny4hn z{=>L!8-Da8Lcr!m=Ho@Tlo2{Hx>(id{#&6|ZkU(=X%eSt=nmk{MrOW3y;VO|V!L-; zI`fcgMLXruURD6xc||Y=`o`9T7gIRz&QVnCFWWrqS$!RCMGhe`$D#3auEj`Lw6ff5y*4kc9_AX*iqi^}B=&(_Iw!tD}DrKA84!+qo=Y z4U|CH?=gRBS%lWkU)bjw_XccwSmtQ(?Z&&B@=y4CjhiTiukzHIKC$buk96)=&vK!C 
z2qUj2DE5q;$H+6Ns_%VBZig*2=huHgNiu@Cq;W#%wD36I$2$e+A;T^#pte}!v*!2TH4D*TpD^Cg5dLJDv-GE zr(};hIT)>`#p715mO1tmgX~cgQ=pWAB{AONEu?G?nq{ceQfgOxoQcx9N=1H)YGyTN`)DFbyJ5 zdX(F(C~=U+3LYaPrQ}peH_z~RkaMsQ+`G6!RTf-Nb@Xb}b)>+sjWa)3_a!%y0Ek%Z zDee+XcAe~5WCL&ZcQf}OX0 zID=OXFCSqq8aq(8WCU&Msnw99lwk?U+ZvJ2u{M@3*{2;vWD-h#DqK97_Sk37gr!S1 zPm@ktc+S0rK48FE4V5r~MEt0jBSD`De*G8q(7|U$pf{+_Y3pT>U%5i5EImcamW1p zgNSz32seXq*>A5OGgNDAX3)h8T>+J#6m`b_%JPP8db4d-$xzzSYd=F9ru zs`ILX0^QLlpiV(cZ-LADxcCW!ot%+IqLNM4Ga`1PF%Q8@LuzAZ~l_zpFFqO(iFVQgNzk z$bk;imP9*@Bt91Ay|h;BJ+xI-RSjNog=AW?dd#}e%pDKtN+W_3Y@ngxmrj{~0GgTa zr%3I&QInjq8F(Ci9qx8c#r$Kjy53j)m$VpZcZfSuE_;%&L6V@S9sK%Tzb%|1zo7NN z=$=1ym=e@$H&a;B&ocH2aA~7O+(#ej#dw=uVXP}{YLY;yN&-pBdHsK4IqejlPuhzq zoRgx1JbcdGMV#C!bi5oV007*YcoL;mxlXyR!Y0oek`bBqFZ4@`7pG0BsyTSPWvN`T zuBrTo&h9*mwV!=PNW!%iBXWV_58%4mohmwdZYm`1E|+i9hf4e}WCr5m0>o+9?!BzT zkMUH!5xxM z$VHZ`38Ncn;AYR$!IYnVWM}6Zo)G>eU!rNei3U zMNlGQofAfY6@7KQ;;N&@MvMfW-#bf7W+*CEcsLlHTy^izyh_Ak>>W?a5MINA3}4vv zJ$~tN_oD=2Jgy(To+;LK`)iq-UJ{~K#j3*cw#l7)3*5x#dzHTL+A&BqZ>~qZM6TFK zB&Nt~h(0g%MzS-MB>#+|zVvv@&FwklHk@q8ZxjXUz{ey{(F^G+5mQIhmr|By%O}JY zSOXu}8bWrwRrz?_B*@A3F~d#18uM|_O86)9Q-phan{ut8584!b&L{P=RT>l(3gHuq zmNo*o@TwJS4ZU6&()Z=lQRS(g!s)PWD&2gE#HN(`45p?{mW0mo0;K)-JWsx*V+EV* zhmPE;;0U7GwdsPY#ElnqK5=Y4{_hB%dmo+{G!Fhi&VuAq%zHWsWo!&? 
zy+NMu)4W>~5YmY~#QLQ~C9>jar*ANqhoreL+i!_Y&MsSn)s@d8*#zE#6NFW?h^&Zv zyto=(Ilqa`wN^J7^VdI$oYLN?|CLVueB|?nLef~@;yB`z!>aEWsTx9&&ZW2H)qzVv z%4oma&0I)Po3V3`8(ve>2gv~TqvoC9?*T!LIWcp;=*peTef&`vdsHEs31;iDukn4-dBG&kSRB@`7Q2g%PsXlHjkY1(qpd2MGc7u%^DDC z1n{*O;IAJK3jMF^JTC-mSdr#y0^Anb>Q1XX`$62XWjQ^DEf+f-LY)y$h=Y@{XyW$OJ%OSRUmWUjQ`IGWNFP57RSRO$HY8|{J+QbYp5r>ddcDkW;$S4*&nEVb ztP5uN2k@cV#Zo!3q(Ytwd8A2nK4-Yk;R#X~cC3qRuz<|9F>qlpaFwvE{&Zk!x(s()6Z62{&}Z zrmr}X>*vmn(X35M?aMHOJ;Rboe@(`vjy(B7;j7}_av&pixf~O}bi#E${g`0(ScIR_ z1$wze!HdG_bMhAZRgO~I-8$dn-Yp#hq5dKDf*+Uy{we)?OFwTn7>nSs*;xNfBeBef z#q*`^4%Sp#7#M@cbv>Q=GIx1g>bb@22lVdIi~c)Gb`y5~w~4d)MT`d14at4%0F3jZ z-~NuHp%1izb`m=I^uiABhi-RKImF}?f?&J^gmjk2biNc>Pdm#A)FI{*16ja zu9e)W9{x2@$jJAe_b%04e_S*vk4QAjFPe4+;ja4GWD#^WQju^G({F%QAuVJiq0~h&N<_nOl@J9ZEwnSeQSYf<4+t^}$itF7arVt?)PUwUTd*64u ztzml*n}a4-1xn-RqK~p0-%^Dy2r35^+()tGz^c4%jQbDx8}*bMs>LcAZ}I1)uK8YSl24a0ze!ZE)F18Ah|^tf zSI$@q^H)gXUDm()h$v8a>(`0V=M_MCZ&CHdE_{~L*sK`bN^1(gjjB@RZQy(umE(XQ z|7^`PPVV>6wD{HU?WozV0?yhL`905b6Nw1_)am-ECN7BabN_%XA581e9gFK>19O;U z4*ff}Fa3~{;t^)&q6i113vsg5gTCqOV)gfZ10?>|qW3ftEj4i+mr~8!ZmrK-nswkC zR9+_3=^K{zDk0@5uxl3XCRaoEk$!LZW+yzT9t|MKp3DNB0gaB`W?qsCxzVSq^)%!N zD3MaBJXnUGENYJ8W(00pz^TG0Aii0)Urx8Yizm@mOFhn~oD20|$IrOH;CM)R?_ise z=7a}BNdVKppkq?VlAw}Y=Pe=7FD#lR0%*X)QnHeDvBqYk?X8Zf-qE+s6JOey+s{q8 zTd_TFP;YAymtGCczLYJaJUie!nDgc}auY7w=MoPCPJhFYB$%8tW?j5cLFu|Pq-j1! 
zab8$bzd=r(-ko7YF;dT-&DFgqtbYU-#xN7fps~8WuZo%bo1ytB28S<(8wb>eK0EK0 z8+qb7&}m$7zO`bUnV8A=#_^tg#}sr~feue>w>3!L@|r($k= z3^rjR(6GS&;#@8|#l%py-{>>@IwX#lYjQ@-v>TAwDL?om&o%?(TwtSj$Q?ikDnNuV zbsw5Q0w&xAya!8bCV!)+k3UGGU}XxQ=~eSFaf=VwrXI)Z)>gOm25G|sln0tVlq>67 z^@VP?H`p|^4_PZ)dJ|eOMd;Nf^9#3(^AKt7}9AFOnF^3n~dC{r+!d<4J$vFqi5N6m~YmId{BJ7{rjSi z`@~=qZ|g3c&em*aAm|h7A9>sR`kl=*aXw8eJ}IsWxBeN=XXi=JsRtnJMrO(dHdHms+~An*+HJ2Atb*Cs)#cU1SBJ@Vgx8pYo*+MEN;59Hr5 zOEeEYiz_YbZv4gxX2k|we4|d`AiE%mnH|y&$xjaF@m%>{9VzewtE-N zQ!+(5#z_(8WtGcLU*k`3jxD0S4GMJSbfOazV-=mtD^gYbuamFLiP(4u1;6syG|^GF zuO0EE5Sx`uss2!ZPqbEJRthtUt+GA9STxyKG6;lrbcf`!sWc_Y#XaT;pRS47C0Nrs z6UX(pIaax#qfh<=kkl+(-lL}VE%IksN3aATT!+Ci-aGz1{rEhSwAq3F4&foOEwx^; zeATa4NwSv=Fpj3MaQDMxwC!XHmH(HG%FXJ*&pN_dYk-J{E3DDc9u+#UuB%o9NZTE^ z%T^OGE9N-FvHXS{A69=31RpMrJT@Hh|K*d=&3L9e_~i`v#vZ#gxGdj+fNop(vP z*^yK34-rPbB(j^Z9Zdk2Nqh!NG1J)@6LVNK43r8SG(6=iMX@|U@fY2G>MVulwvvxh5I4x2YHHjOO{HG-t)$@dzDGIpL&7* zW&AzkUEBVtYYo(lF$Fx2S6yDU7aL{xj7-+5w?^quXExu-_UMhb&$)eNo2cU#AkZ>@& zvHuuy!P%)yAX8=9QXvwkd{;jBVb?F+J;W<=3S;)I8a$et{3|hA$TYR$y+&>c9&qt& zLPHo92|;*U{|6AB!Gy*=lCydG2jDOvB$NK2EVvTWU3rw)_Cm=Mx*6?nq=#ic(fi5- zr!e!bMS(NG?nyoU6vnWFw$KN(xZ@wWTzdDu0jl^fAM{mLs#~d4(DMs@!2I3v?(YvA z7Dw(~L_`|0!+2Pq_5e7bpYrz_m4GCTG-o!l++2fBUn(}|M{$4X3lf?xb8QCK& z7A8F+4fn&MH&V=*w7fOFtt)l~7kfT!ZwyC<%chC_dGG8q5ZK=LKsI^9u8G9+a!oxk zQ1`3V5seMIFL!(FX~EZ`gB^m=jo&jR<@Dfwuh-@lO1e}|`P_cEAuG(vJ6nda znQotlL}r*oO87m+@#(iwf{NbIaq^bNc&(gUa@4Vjnda0Eq?B4Y%batomdaw?SBSl8 z1fdSeJBr5WP92aw|F6skZ;Qu}wNdcYR4Q%1<1CQBUaIOH^TA+g@H%17K_;>zrI5UW z;SMZ2@}vlZlRgl~y0<@GAQnN^=L^;jA_y;y%N7?eNvfq((cIEKdazJ(Os6lSNG`L- zTW-|V1n%HnTU1D&+?V-WTet{&@?n0XPIEOep+_2_2e2n#c7K7zv0dBx`9L|-*a7;R zITDCHpSbO8V6EZWt!TgbD6+5U70vG+VX(2PqKY=IL3Z=eugwL^o{vE1a-!EdHdf3} z511N9nC@)Hwt^vEy#>w=)o$$@PG647YdV^Kh=w+Hz7}JT`Oea+PoHtQ{{zYVp@kun zOD7YdsQ+MDIdt9n&RG(&VB!8qdyi;YbdvK~Yw*0$-$e3pbS&Sr6cF8>dS5Hw0UkSD zXo`64Bm0^V^7-cOG9v+mQa8hw$DtXw zCSl-GZ{1TPXMPT`EZO3)oCXeEDkRs4bVbF_Aq_BmGfn~O=RH71UiSR#6VwK<`7o_w 
zjrIDWCMmN*i2l%#X)$}Gt-2=Co_?MtEz`|hYIuu$`Mr|cp>Ak?Qc<2%pV&s5N$sE} z$4ky!=M#k01I_gS(^i9wj{L%&dVUOxUS&c3=9}@H6etHj100cGAYX+C+`&1tnx;BG z`VjO|Kff3(&%b0sd{zI2Yu(XlbbT^jrz0yq4%YM;dDx#! zC#QiE;$)QxhT8;9FP9^8z@ihw5A#tkAsT9sq|&Y6|AY%HMGx0!%SrDSl@qUX^x{jP zX>zM7uUWhc;x5&(O3%Fm1VL^(*>y(p*}WZj8QfnJaVoM+KKi(09PqNXLD{&G!LH2L@uP_Ni2!b| z_D(|&;I<^qxsDid1pDng+|x52x&s<=Q#O;AC7r`BoR{J=soD3J^sy7SYJ(zKe^E1o z8T)RDN*7%7iq;fj{q{SD1+pfhG}ohewMyn_*@&erwCY7hfVdL#9RoPOr*v@}m8P)$ z`JTbLe4l=r%kaq!PFEuH+yCr7XTWD;p3^ws!kGW;)h>vQ@p7YHIny(!>nuJvAh^__ z{!5BsEObYCc@RBYo3N;71Di@>KqAgLp(ex2Hn7!GM$>R;`SACI{G9>Hq+gcFle_vC z$n{RCZ+!>0O}nW6Bn8+_I}v+p!vE40W@K29WZ zznQPhNJqatWuiqrkV66hFo1!3q^mZ-m(!t zhvRF_zK zsc!gg!PML%J9wQWpYn~x+fxyQrYy? z9nCYdpMIzZ-HN3B{#vvLCIjKFV)arxP}UD5jVCQH%faO|XCgz)Zlr2`$4WTQ6eJKi z{97d1-t0@c(lJ94VO*E+ugcrJTc}fgIMvS9ju~+olgi%H3}@m>+mdtA4$p5{t2n#D zvbQDWZXFAOrOhu+XiI^LBieJ5aiq@6M)E`1TokP?$~mjswJEP?eQgOOM09!`qPq~S zn|TxKBbD~WB44o1T^F9_7VV1@5@}r<42pG(zn^;iQk?lkQnfLEvAbfZpsyzW*-01= z7oOpfvk8mZ9{^Y*LrV@QsD4cG`KVEN^jCl%AAU|7*`3bMT_34|X3?6XNvYd(A9m!q zGt{w3lHyUK_5lH|LGt;?`DZt5Owv<_^6U?g#_lf4Z!CITPnz3PFC)+b6+{xACgk#?AoL*-UTtemc@&E3G=)R|P2>HwZ1)OP=>_AqEqI?7hk zbP=T=!J;GstELe;TW{Cv4a;&3Eou0^W%4M*VYAFiQ6M_oRKV>$@R#(LK&3%xf|`~X zkM4U|FS2XNr@FQ6j;ZeiLx_24MSJ~4X1BpKr#ytnRZ#>g@E0jEHTnwH~F_L@ljTcwf2pK!+iFDFu|{~`C8L(77WuEY-FkGyYl8N%Roc% z-vrM8P5~7;FxL)S6Hbe0znKcG8K`}bG|PNG=eYc?k>V^+6i!CI_C`XBIbeSav7GEH zvp=LxdFIyAM2zDC|B1#eP;7vBYS^Yvc)>etQOQ<)OMmRz5`W_#do{sT=T}d zc^X=AN=Dz)UF-Py>NF7~$cwx_9py6+b*tfB4hCHiEZwsg{?(p8h;pIkmD~H9a-6iu z%!ikj!VGV?u&_YinCm{&HpKg{i4Th;ZJ?pVmoMrjc0$d@XeG+|h%zy~cdYlL(;PXE zcHTI0YXqP>2HTzzY3Rr7)|{#uBY{I<^inXAq)b#E{ldVCYk!=?fqOq)9m_3 z9z?0#{J5g5kKawj7DuoU|KbJpNn5;+T&ZoS@2I&f;m;+|v$Jnpdu-!!H)oOkOpE|{ z>Qz>$S9h71_3fE?n0nbeUic^n^FC7vUh?~ zo!>aaBa6L@HGbr6QYy$F*PoGiy^c16-CO5G57PS`=f~GSfCz#G9RVR6a*{+HPMCou z`Ea`~ED%k%-<|SZ{QzCE5BsJaAQ)6fo|dL{dpYBT_B@XPmQ|1%9y9Sn9gBjHWiz5; zNq3M+^5Y6g@9z?s#qQ!ds{IJ90=;!|*CjyW3bp!=f;WA@?hEodL6vj4w|r0=5p$P~ 
z_(8{egt6!1!xn31WL%~lz3cp=ym=Hmre%s>L>$~nC=x&a2f#^TJ2F|{?6i$|8TX17 zJY2xz;7x#0tZk$p1ldrD26mMl%R_28NOPk%%4eTv1 zVa^oxIMA6dKXk}7nXsw^s3nq~OC&>$l)DPBl|W22S$r8ckAJ_PS60k^$P39rT&eu6 z7E*GpbGxT1(RN+9`;{*hZ$u_c>T!F&^pZmGUoFgxXU^NnliL37WMIa1X|=@6!37;mg` zH#e{IF7&%-1X!d^4Q;;nJ1eKLC9b@16y;X}M867LR(k{^q8Ig;#)AMaLufuiVB@gY$dv z{2WCEKrR_kZ?dzIVuacSGt>67h2chpS^MmXUR7CIX~y@G%2;piU#0!nZE@Ux zWjuU*kp2b_=OlaYw&UK!|760+k;3jYcljvb4**yje-QMX#>Kgp*^IZZ&wi`d{o&Hd z5`>DPT9~rb$$2MaOCQAZ?0LEgd1#ua=8Yn3hDvlfb59MgCfI+RWb>d8*OE=_z@M-3 z{HCORv3~#ii;X)^kLR6lT3|EFvVhA)BZ7wE_Dr;i;AD%*{HqvH#hAyZQFF`i zyF30;9oRd5J9i`Fn5g0eX0*ch%)s{MJj~*I6}8bN(_xKswPs-8&6R4Y)e}6miE5~W1Wg8yLXwM< z2B3mQ3B9AGq5~T>%2c%oEqWlDf#m5M>Fjkjv#Jo<>8~G%u}wXKFTJxif6Wl0@$jU? zF^e3CM8^nCLjBiY4ZSWmYU~D6u~?Ge(k5vKRWu6)WA*sOp-|I|;*Nz#deWm`KV4c- z+(nn-R+Z|xXO_EBV}i-4(`eqd0Vkk<#M~PY7@!6ICblu&#`aecR#zQ;Ooll7fRdx& zlfN!k>p$H>Dm$zs8(h><;10o}ucD@*a37ick4NcHp4(~}gd<`EgZ~;$mfCqpg8b!d z-i-Gj42*^{(OGzdmoNIMm{HrN7=k*Hw7!orwNTF%YDW7KY*?2NC_b;y*pwlLiE=LB0RkAxt!nq?*sjC-;3=u z*m-ZaBoh=6LwxfC^wc}^xN2fZELLO*9@*#Nr^M&W-?XCoRvZ#*9UBRwE|&M{$Yl}> zACT2yMq5U%C*=(USBFGXdST2P2=5rsjCd*U+!|Dr&C=4Ux?kUZcW+wt{8lu@k(B&c z4DO>96<%mLxTAz|e}d!in>e{e*HY$aq564nV#u$ZL8jrC3g@bMjRZGO$1+@G&T%zl zj`qF&?%v^i*~wb;ySEN48MX#)0ezF(Z^rK^(Ld4yLsMv)^c#mtU(t{);@=+;IBAYv zmUSq(&8WP2^U?c$M6|5%_W+U3^o_1Z@_g*;`C4{%p5u^hC3S~cf<~%uB}wM>Akw~Z z#5-Z*Z7z9psX7Q<{2u(Ka}&7)r&Q4fZN1SV>hj*#Y^({TNJs3VKDKsb*pUft;5K=|ga+knhtjv=m1CWS`{q+2MyD~^-uIZZ4Z z4o*%pGQSvi(^Pj=u)B_0&!mr$&Ex_Kg zdC$%k_A>d>?SeSo2BB8eKEl9IAeRRou>FJ;8S}qOrzyUj7u$GqvvPk-j4UyluC7`Z ze9m-3y#2|5#>jOs6N1m+l@Lb#wulRmANmWT$+|kqK(9;NcTdfz&!Y0JbwTv79Y3e> zrNkDqFav_K@@s?%&N{>sUdodCv_=&BzlyT+oi-e=!Zab0bL_Wkx7K8Vqj(Nz{)pR2 z73D3?vD=cI8Hvk41|MxJ4e9+vooh8e1I2%VIkiplF2;01v&jcm%rI;Dp$%U!4=ca; z2J33{b>u{a2h)eUY)zxLD~!6k1=#+h;;RUIOBKkVyyU>ZXQCUF=@f-*x1&4Xr{&j& zk+ez<#s#7|QpxX!-}L{K+$O?fbc zO)JJS&9l!Ce&qa(cHNML$ibbwS@S~@dV$~%5#!ZtybR4QE31U%-w8=LIG(QuC(i<$ z;9sefQ|T%sUSGc5n~s_^O1qdCE*1V>lhgfS=IPEu!B<`BZYcGW=gHsS@vpt&=7wWY 
zTWoRXkH65beO8Vh>?v6r;;+egnJwL0tW_<$5%LQa`RUmB1(zUFtVzLYepk$x) zRIzKt$IQY42`I%a@j7`j6RX{1Y18tG0E_pG(oTJQVc$J!s~J_h)}^IZ28=lQ#g!*qv; z*>PA8+3Dup$;6+v;Z<_5CWq4B?EPiS#m>>d#}agoQ45|wTVs)e(a#j}ojI8gzlgiM z#(b}9A=+*(yV6f5em8va>Q1O=YPR92ai^WM&0SGz-Or_mdt^C#(8%a=@4wv;kF+Ab z9vSAYAK&JwcLf$&>r#3j(@Z!GEqApNLcO=;M{At2C1*_Xzr6SswGl;r=MiCptk3fU z`typ90M?&YX%P=SYAzXeMJl8>EJ1|Lt@3^(03q%FTX0lR@De6mAYugV(Esahx90jg z)6yrO$hYU^eDK_Oe`xog5t}g4llZO1sNnHlp-tWY=C{QoEI-f4pUD8yyXhOPFS&SL zUY~H_mMju!>3GUO07(i20k8{6%<_nVWY1RLq*KEmEF4sS*hs+;w%_oMc zcKK1$U_;<4NV^YqNvf-Z13ePZfAb~hhIx;-|8!I{xcNE%U_xo@P$`!~Q2F3Oz+RO7 zGi9}JN#Q*HnGbOkc{c)%B?F0qweU@DPyRJvHbWnn{d!2{edh`I@PbgiiG)V`~UzRga%%hIc}{h)px=IJWf zlD+l#{s-Ct^*+aMwT#AHTDhnZJ@AS>o~*=5EBm~fx#N3vliPoQNuyieH>%oG=e;yX zkmR-ZC$y`=`OyHz)RBikI<1}nwS_uU*{Ltcolo;zr1uwI$N7z&cErR7RRkD*PG?>R z-O&TSYfQ3Mjuy|`)+TEo93WC~j4GAijl2#+)iVkWA^*^}I{6sr>G#AQOGJjN4k* zhRWHmNJy{H{Z?F;T1il?mE4imqEON%w63J)0mCGROw599gIMVk_hf9*GgzGkxqGpj zDU#jpSQ9Z3)9p=)VJb*zr)s_fk0K$S-#fSAg9CLF>FPFsq9FYq;eV2!nu#jkOk~dJ+&UQdH z?FP4%z`^YCda1`)sLr;Z=B5XnH~7Crd2Ck7T9~Y)`%~p94K(cm@fotB3Eo1>U}JCd zL~oD-h1cuIE$iVw_;7$Vu`lbaVYXP>pvj_vDwt_zw|YKeAwD~RpA*UAL*NGNqk^yX zEBMu`f;sZ!t8}O*E&l;dD)fQMMc+k@Lr@+i1FeRRmv9|3-QSl|p*GB8OD{vNrgANi z#iI#C7ZT0R8an1aoIK9d`-4mxf?%yh{S)Ttg^G}|4#8%VLbcp7!$xUtcVFSUn*MSP z`IAsf%G3rG98>EK@9FyVc#2<%(<44^io#FI+Zuw?-As)Rnwd_lT~vMidriR%27Qc) zQZiD^cSw)MwE4hX4ZXvEdNMW*Nw={M#nC5jpwkd!_O)$!{(S-@kS&f#-=u^ z3@MKLzKm7Fzv%Z?{)JAr1H?wJ|f>UW-03Db4Q`+ zCx4`#hU_1|B}t(di~m5vkb8XHzZ~+62#^;PKLeH`o)+fT7S7iykanM>h1&e78?;hm z>!vmI;7CA2zzYlPQz0S^xZ+z)j<<|6MLBls()pe+D0XV^8S?8PKWP3jrg!Y852wk1 z)}S0CT-oES$6OxH+3O0?s&}K~F@ME+gIGyP`8iFUmaWvxU;eSy$QGy#1>IpE8c7$)gY=xT&+Ckab}5JNSCKw4 zc^TdEbk|;|Y)2>h@c6JNTU79ZWPldU_~cJ7eSF9NZdL=~+ZAIc19m7`nh$)a(gn5m zhF$P#V%3{SeUzNT{6ev`$nHbX^t=pBXOAS)bgR7L$LFrc5gBkIh#z`&MEr($oqlxg z>ubZ-0mMZ^vAUVIos|M|_=CMO@_p@`JkO`I=pRTiR)3db&*=yaD)W44bbrWFQ)N~4 za4Gn=w4&_?moHrzdnX9CEd?M@ zO=eA!Kk}*_9~vVA^`zEo$tRvLIY*1GrG@qs8g-?mz$^U$sq;-$0nTX%iMf*pCS9@J 
zGT2GvFf^}qr%kJ2ehJQ2L&LW70zyD+vd@_8-^uQOkUA>X6PR6+)p#%^O@%yynsC_e zI?;3}&mNtc+QxqK5U@?h=eT35rcMSAzF)BxA)dMFLxEgelMK4wvHW2k$z1|^DVxaV z3FV{0FLRlKC$qACl?VFSbnIr!#&v&g{OvgwLuV(UXR458z-tl*UD3^N#UVa0Fne_Q zekVznKM-rn*I+MsC6V>YsWSCcOA?uYGjpNK=?D_0`Ru63Y7H?nN@3)^}RezgO zXlJlM~IK_*AIkD!~Vs{CHXx<3I z9mBD&X74^Q@Su%FJ8&JPTVOf3v;0}D38VE|5{4L%gYoVX^YcyPg=LJravc$T3A^qY zgynhUZPn^*sDJwFQFzNfV_u{Z{jN>rP5+4lMqil@zSd72d@mSi-ba;|PQj5fW~R(~L$pdW|M16>rSVXFk}yb6iu{ zP<7~8mR==dNUhmNf<~xu@X3fO+~cD346GCb0we_W(&G^ufpJ+4OPs)e0I%Q=+L=#Z z@olKM*w{KpyVsxfOXY8X?scz3?fL}f<1NE>kr;?UTs#N0Gr)3VTU<(ic%?8EX>5E* zYP#Aa5(VV(;vldV8y&3t{a+U+&9i?1gFjuRDB7RFN;t*mCadFQudqk&I*LzQ1~7;9 z=82@O<{Ptr%}c%(pSFTAyZ_Dd@_$B+4=weGagLzSZnK4xo|)J7VEEdn=fVj5n*4F3 z$%n;i$&|2Q0*f*FD(y-aqk5&&4HU8MoA3UGRtiSY6}TK^It(l2aO$aj@nmypW65UO zqGdOr%ubPNUX8CmaoibYsNq|v2y-v+lwa{OUzoX-7_L13@s}^yx;NUxp?=qYJYC}U35h6lWko8^v!e_ zz1u}!J~?#wjMy&mb_$Dhj$m@@Ya?wTtA)!Et%mx??~85KNb~#-`MlFU?h)>!OUw0C zkC03BH}2X1+gsO0vBlJHqdmWJ=!=7YkGDHGH56+-UpxdhYvj5n4^Cx9m>HS4U{sE* z{n(@K*zrW3&7_r$Yh@1!w`-$}VYi4w7xp3b^IN}Ap?L4TN?q?r+FX^HLSFUGX4A!V z0(5MG_)95;w8H9SNS6GcUr!!rK7BfhI(OL}@T=*~v~D)ue0fUJ3Ejkd12Qvca^y(#i(f0K$fO(osT~rgRJ|;6t)`KNB8C&~3JY$F z38~hQwW`Fx5qDKqM(arZnJr3<$CHe+A8!d}{~RLs@J$SiEJbkl#04`Qyn1Zadlu6k z`3~0PE|rFp~77l#DBJJ@9CY1l%H2x*InajM9>6s9%+rO@KrsizfPA-{)B9+=w6#tepg z@UVdl*ZoGB_1RPBB8EWCtua%zHsM9LTBSfF;`cGQ2FRFjaIM1yaXK z(GAtjX2i#96(dQH{!gR2B3&iLf$f+{l8LtyXp%m9fxozhro%1m)jrVt7Y*@4hS7(_ z8q0o2k!6rhvWCMIDEp&2i*O)#zh?HZUY|QD*?90n!4}J#~2-7qbR9~ z(@*|Rd6aQM5 z`#{xw<#2$l(Q*u3*x1yx2h%(j;Q`Xa26%WhF}V+QUA0Z>0#GMh#b}*qlB~~B5}Zx5 zTLI6m_T6O58h4I;o|L7R9-O$IaqavJVelg&_v5wXP2SO&nk)pm*HQy`S!eO5>-eQ9 zrVoBzw6q!d^q``fU{k(Z=C0FTI>`v+TLhdknWm0-FczQw_HDJj>_{27>2-zxi1!7V zJENSrP?Gj3jSmFoFI?_FmnXOr+u&OMhl}$o8v5Sg{gE?dX3B!}ZO8jAXtXT|Xb6S8 z9}(*D#z>B~NmO&Sjhwy3Q8O@mODyh1s~EN`L2<>f);4}pB^W$Pw>x!@Ti?S6Sf4)UA> z#yo+69;m05>BR3Rdz``gBzpBrnBgZITr)kL=^AzrPnIaXcQyq#;t>zU`)GP^`zl1p zvg;Q2XFo2N$GW|4JopM@i&2=#RA~>|BT-QP2Y84(U=yA!YnT^IIS*g+<4!TvowqK| 
z`i`1;s?!=A2zyY_kO}YWP&H1U_74!4W!4Sq|&bv|Du;*48C!6zlK2O8AhhJJu|Sj$bCVNz|wia zE6l1;@e6v~KQ8GaF%06goxwRamo}Z#g%rhGnEZ)?=WJ;bVacf8F;7qrceDh6(lC(g28<*56SD~EZNrW%t(;z(eT<+uAlD6C_=fb$fGxCrY>OoGI}``vpOG{ zdF^3lo2eMb%xmL05hKUC;&;|#S0Ht_VEqCdx36OE2C}trb56O^5bM7y!_&P3Z#^UT zjnO&0clY!N?>j?}&$k?D`gj=(MKu0y9Cm##T70(T|?0ubxUx)@z$ zR9+V*z9j?F5Fv~Nt^F^l>|f`xy#N0G zS3l99>L~N)QN|N<%Bkh4!1cmhaCFhQCHC!AKi@3kpT(R|-`a>;6>3PtzM^sAm5kX( ziFYM&bsVO-J~i09*3Zc%%`$O)#Xm{9Au;~e!6OO#X61vG<=Di9ABY)m;6i;Ans;?l<0k=6}V=L*VbgN=fyH zwd`?*k~vaIQN$2Q6Gg({6E_V@l`ws%UVSjI`b;NTbH|j3Xi>$|ozU67WviANY5L!i zarL7U`yG9dN@x!p+rcXCBfl_~wKrG}eHT%+ZT6t$9{?;SF~CxTuC!=x#Gc^(-*WN) z`gQ*!tjOJDCGb3={;Nh$bH7l^6z-v$Dco9uc=fbWg2bAWUwn>OM((P|tn(vZERtVo zc!wwXDsJ#e6yjw(&ME?dt1K@rb!I42vSY5&zB2Z`x-}^vn^t!GofJ%WW3)kV#ulGI5_NO6Rfwo zYa&qWO+I+w7e+gq#>Hk94L@AA5Z`mY5qKa(3$&^qRN9 zYieUwUe?#^jQ!20;_P|u3q9LF+lXND76jx+O?eW?-_#M&t4=LJUzV>4SKh2(;vcHvjn!}xJDLb%(h8!svJ=y zh7?uhf=Gd2IxYP0-FN6uJLrR@HCN8dfxqI&&9hmH zm=oOll8j8<2S1Dwa7y#m1uMH*Zh2bnqfsGTLXKEXm{ZV)gS7~AJbr`j9zD{lhyS+v z?`_WluS5_|P>e?sMptr$qW_|7Do#gkKF#t~!6=m8sY zMwhp&+&#`HaiA(OWWxQSzAM;pA$8MKXLD)@+B7*dJ?|=sG4+1;D=3g*XMydZvMw8v z!C(kx!je@mn! 
zsGk46cbI#X1PM0|^5tsy9+U-{S^zJq`u0qrYi3zKj=KRdd!?Im`0`?ltVnBKB(}SV&*0MD|wo`{Lzix)Am7^t(k^q z1kxmoTl&wGe(z;%*2l>2f`6#z1^oIwD?5xPn;xc?D2Z12!yxp2e2}*;R5ji2G8^Qs z2%fcCi2;bG-hyL6_dk-C895D&m^zuzu8eDqce0@e=jHltGG~25HMSFKnIsHU6KPcn zO8=ER=p{ZXh|5EI9WrXs6GHrRE3+A0rYB{C1g5dF=vGp%`;3#oU8>L;bl%_Nx4i!V zG~UGjTfX@J1>`9{E7tmUj-?yMs9@wU?5)u_B8u!YpP3tC+jkwFm>^50fxIuwZ9u}o zNB{vRfeji&_*6p-e|xa(Q4bb!m5vU?g3BId#h4xcjH&K|9M(ST^)zjve4?S%I}MO}Nvzbj7(>l)(v<^9o%a zwdkq-z`)wy`bcvF`J0ClRD!;HsF!gL7bMJN~&%q=qHe{|(uG&~V`&scC-n zzUc*27-_-S%lN95vJd`^2IX{`4GgN^M5p22)m^9{At>natreAeULlB`hxGO zKM#1Qo2IXKbGvHIF>)L*e-hvBa7D2qEUzl&pJSGa66$_HwO!^>Yj&lk&jNMQo7n#g zY9wz~zHDje+&p(ZAt*Fo`EVFg@R0Q;9!h_{Wb7H?KL~2HYer2VMQS{8AIo}sdTi>w zGti@LIHc-b?J*sVTYMFfOg5dGghId6icefFYVIvH-;K6cUeo+6E2v<)Q1te=!1f2u z2q7%h-cyx(dz9ImpQ8dm#BBA^T>m&Sp5g+dSNh`&{^ec`E$RHsy&uO`mI$j9>+>Oh z@*uAr?|9+pc%~}ZFNmdQW7_?;7z0vx=pmH^DXnDhC=(`KCHMQ&WrezYDSndm_nE+N zKh_YRaerB@Qr-ZguiZ@+G=E77^C^Wg->j_6IK&$VS6^(l%q;$q*s+V`Q+%tYtT>$9 zI3NXt5b1ni*}65*-YH+a$22(jKuj(THjHOe(y6Q0k|Xzq-quR>v);@!%{Yo~8BUn# zg&{@HXV=(pfCR)O@;)G)q=J6tmNKE@^W32_G!>F6=BE4j#C@uKtU4QN zIAxq$Bx|*gWhn;?bYA7l`bB)bD$YuCWU~52dI8yAWokEZrn=t)S0?

-b`QR&tlPeuhysY0de?uIJdNS%i7aek}vbuDNZ@KY7K+ zQF!ky4!$R&FO@tEwK}rLQamXB#~*e7l_ZfZdo%2){tw`ULC|U>1sHCy2`+4J!ppON z1(y9Jfbt1nUGcwYx-%B>6XYp-HC|=$hao>yg2!WE*fFld2TX`XF+^( ztzc3j`ujq&km?_x_AAojNI0uFOUc?r+&e9Fn&xiKcJfWJt`Dm<-t9uNOe5Ow&Z)qF zZfriuP-!YyPFGqo1hP0)<}vlkCyoRr+RyIRU6@Bn7UF-38I~9_3Qgb9Ut(EmkvBN@ zEi0uRlXS$?ex0Vh$2(>ymg|!+lP(t&NXow6B;;DcfXtPCNL8wH`HjRk)-8)*AUt|SMJqU=vQm7H#ppz zUP4yxSbVh(d-C_4y12XQ-~A~~{d0xTJ~M<6_TSSq;KpJ$9Irp8^kvxOE@^BLWo)!Z zsLrr?;N;?jEyh=|_%jFM%2}v{^P#BIM<8L-wXu?Tijs@bKLFmj-2!B0aw;z$oq}=p zzZrE1rP6Jhl74vF*if<0EAyeHI`l;Mb3j@;am}5!)9>M!^bhuWJ9CVEUro&JH+n~p zMFJOpSDkieWXS$7)cQNMrIZ6dLIRPgeT@Z_CZ`I&(6^f9kQ3b;PoXlJse z|D0BXwDo{j&Eka+r&^@QpZZK&-d?l*0Z1}uEX^QISjkjMGCb@74$TU zG`KjYWfNhw`LunO;a#ynne@tJNM`4VDyG3uihlM5h2Ho3XS%l{UipexZe9HOFUqWr zl2+#*Q>6(XIW{N>=~(=)%6QyW&4WQK098DWBpH|$JSdU(bIas$gU7fHC?lb*Y9 zOC@sf(w+8ryD0K?2$fknrB#TyG_yTv>q;WBapjWSm90%jsh8bwjs6=%wp4-nQJltF2B2lOhrB(yGOn4~i~c$6lm z7g}xrIczAd>9+#UP!d9DU+}H-YaU&5a!=t|%3q0HIX{_|CsctR1RRXk?x!Nzzt7uN zC;4|e6s-ft`M(`U#V~2<#JfL^Dk@NgYEgQf(QI93#!7*F&|OlA`MiBO7M^oZl%vka zj1$z4x{dMXjG4N$sdC#MMqIB`^tUi-Y9cWjapOM+Ed0E=D#7E`33g4-P2R6nLy;_H zm?4dVXY-Rb8^x@5EeFPf&@E-G_04)Rjx&}rbdRXYq*MP!gjwYd{P+EpkFCyAJ4=8R zs%dJ>PlSGNt2u@c`4S{tz!t7vgq0g80 z=*MSpQUjzm;x?0O+;TJP=gCmxHTwSSp=`H)Tb6-_SSZ-Cu}b<%(fJ=>wW0%*gYK`A zY99_bz$;qbJm9pqm?2U52Ou@-nD~;TDjp>FllJdkQ0VE*0>rbgzXe*}3eJKBDosLr zXS_vnQ?PE6c#8W&(ERFzG;w3x+$4q#%ry3AVEr#m-14seqkjP6kAC(VhiNEvAHm+s zEoqS2bWZUf{_zjcjRj=WnbHg_)ZMo{)IQLqT2a&7h`q-3;&~`7scVziP~y@1X!7~y zr>7NxVR$$kaHlwnG4xw=JuE=!R4B-$&)nCET~3jR$X*ZDtCH+`{SR=)RkR#)JmtG* zW+?+}Lxmo3%>1B4gx7OFLyB`thO);c9MA|P_Kde<^1j;M(l5~azVi!nv)Jus&C~37 z+5Y%EON5~(A~;fa$3>IZ$ejNNmq!y{3o4|g0MNA30BpB2$ek!T5_3iFil>4+vSER0(nlz;z)+1+ms6fv+z6N zp)b5%jij5~OVYeq%ud{=BTowv<;HMUcQT!t8Y_f4g1~eSnmUB+?Z)k*>1I;adft+v z{09htL}Jo+Xpug$KjvtTe}L~!pZ~tzLSZ>WL5rg|?6^B~w}M1Eo{wqdqOS;(-ssVk zJ-9&kzG8R`Q9(jCN{>L+ZtNlr#3$Mg!@tk!2Hw>_+(rVlq$S$JJnzJcq zRZASF_|_y0o0*bpU&XZ?Y0Z8HiL6I5m#vxncoiRUiW?(UXU10;g@5}9`jIUo%jP+s 
zWi&S@OAsARdv!qxyfBAcau&P-RDstXrOACkt(Daxfso%Z&Hu4>C5FJ*)H-O0#F>LC zK8YW8ihNeQxX`r~Up86{%41fSmIq4ef!SIAD(OJ|DC$!#D$-A%I{pI$0HP|qlGqIq z9la0SrH0*uHF}MMrw%q3O8c`3M`{gl>ce}*F3L%Cbo&4|cEv49hD7*!NQS!PoVZ!r zOIzzYo`H;?ab>#H(JIOC`2}NKc!7T>VTAa>_Iyi8<)nMXrBj+TyNIa$g00h)*xvCt zJ7M}p{uePkSw8?7Z;|Pm#2!Z>ei^RHEY0^9^LEVD*@16**JCli3PW@c#TehdVB|pp zuT2dAE>Qo|GfZW!kCCZ@N2)6W;@0I0(U&2TR9#}VWMN?3aOFGytnfZ)cNs=-aiX@KfPfMgF6eb)?c)iY zH{YS}@r^BT9?BM_`T{V-`Smb0qE-x~dd{q}Y4%Ei|K^#shuK8J{M7sS`h9P_!<{ZU z5STR}Veu^0>`7jeeWhfn&-<3|4rj>LfPa9H7&58UNf+k|=X~Ki4ZDd!a0OFeczAW9 zRFB-@sYZytS#pCYaf}9`Fdv$`!?QQfIAWoOUdSG_-ii**%}=Jsez!b>Q=UR8ctq zsSz0Z<6>$Z;C8KIvn=KbGOUu9leavhK|hcIB8e@xM6la_HiCzg*Bs|G=0t*Tdz>-_ z-dw}oSnx<+8*8UgrdLT1KtfL8;0-(UTrx@UG&XUfh%!nBr&FV%gEe7?x0(lp^>-`{=^s#u;jA==Z2rt4J}Y98n%fNt z2cj7qYiqX+(}Mcvg{8Zi>QQM~Q6}F_M6V2YZSiXp{D1c+-?I*rG4O0!4Y^qb(1x^8SB zXb=l%?&!%UzfWZ!Mfg#T%P=|8_Qu4^nJ0NV^1X8X`$5d0uCqt%lyDiyz5Z(RsOn|8 z%z}E**xrf7*Z`48O&sZ|45QwlgG>Kth=(rsS|_T%k7gJ+{#6^yU7SpC|_BNE?YW zB5VZ-Y?~7J1Fl8?1H`z8f_`6&3V&LdJ_sINPO!1~JGqVx2LFGfX#H1Oj9^F@YrK~} zRxyf6(@WJX5aCTFj=PGkTm3U2eu{o4R`(5)u?c`Ag~@QdQ>d0_%!z=LJ*?#IHs+ml zGmg08Y`q%*6?H0hYRNuPy`7?^<8=YEM{dCOS8lY}IRc^S#sf9$Zz~wx%Cr2ms`qFm z`8sZR)SRx1Q#zX7#kHYbe`!jbInBG3(--S9IN?rG4)LiaaT&!#a`0FKqLMv}JVpU{ zeIiG;jY#UW?PHo)C%K(sKib}kH_u=i@8oiDtLB0Gnm-bGgORYTnWSM7+E2r8ODA_u zxO6*`By#mOBjxJ6<;&ifm}UkA4`^HYYw)omOU<+xa*of!{QD+a)k}V6+bFL^r>Ak? 
z)(Yf6m3sAN`FY4-5gUT35xb!heLlgV(75`RvPJ7w&7x2|eO?&siNryessx>(|qpxF^M-( z|FuoR;(lD8%W~Q(K+dJP0%t^&xHbjJ-5SkF8--L6*S7YX9BIrgp7=y#vD6{-^!R@N zi-P`)oIyM(_oFRS#9)Z+i}iGJ5wRf;d$ z)i@X;`B{sZqazHD+htHe%7sboamuhvI-VC(ZhPos`)S1{ttP}G1L(A2^)go?SBXyo zHa|4O8h8L^JPNuemMl__w&^OSzbfv0^rBd=b98ZxPfi0~05<_-0XiSJ^M!Vd0(7LP zfqc}cgE(V~X54nN@3OKIeLHB(#F1^1;Dx>Cq_}15n%FKQk1Qu^vOHr_x_W8p{KOsg z+L!1=#4o5VE`zgYT9}NSrQWwv*2k;n&X*B;c7;11)g961UnjK>)iG4a4xa1DREBha zp1808qn}YIQ>NvHsaENb>3KB7*hl}6n&-g$DHZW+KkI(wjpIyui$%k4i-gSSxjONI z&lc;7CekX91phr`lH$EVuvgHpSabR=(EkJc15|{9e(lLuC*<1C5RLw-nWM$_UB$3x zW&iPG|M7uHwZDR9;#pQuw_~kVC74;c?g9j->55T;`M7}SF^?a(J+3 zT#Qi3Rq6807=AJu{H~w6-*j{EOmOL3sui&8kks(SsRbnmmu86sMu~I=r~VdACW=T? z$xaROa~npFPGQ#}z6t5WmD-%g(RJ}G7<PvrvED4iQJ38wS za>Kai13a2#Wh-P|#gP6Ca%v=~;Qyq%xYXD|C!ah!SM@B$tVH-v&HNV&%*+MEd`1_f zV9d{Qrtp4PpGMo$KW?;t7?)JJtp3`=P<5SdSpPteZ%Il}7zxqC=`7R{ryiw4`4uGN zY+P1o6o0KN#q4ICIcaee67ZqqUJx=gMtGzoPAi|Z0aN**DT#M?Wz-KxPpgDS_8!-B z7@;g3rP&1#K$F)dDQZ<=#Zgh>@l0*S@9?cf*5PsJu#AuPND3ulIHXR{9uYMC04vmC+-@JXP& z$tQLmhJID{NKyfj`JYMnt6g4{d=Eq`MbF7aXvq4|4v%~K7zd*xXEdENuIH>wA zG=5-f_T@!%2ndfFA4b;$F;mc>_P98Zt1q()%YIg@69M-x+olE_gHqwR5A@`iZp6w< zYidlop7bE6b<*M&e0lDBW1W_*X!;QkOJ8VQVsPA&npa zYH%L(+c3j+K{#LkHqy>~6IF{&^<;4KTkayJO6bE+F6=*AM#PT&awNq+Q%@}F-X#co zISFh0GSBuSzTTZgLyS6;D&B@q-t0z1e7k0q<=X|!wpRI_-KdNj4Hd9Jm7kscYqwYx z+Y40da&KF7vFW+sKY%lA2HBb!#X?bf^;WjXS7rrqr`U{tmeOOF>AmaPJh{Ik^Wmpy zlT>5!k6}f>xaM)px76nPSlw30@k0W3z)IFfXoJK3Lz{tzK-Cz1taRF92-=oUWnx%^FpP5nGHRwHxi}Bnvgd`2V=3&=mQ)DiN{@CV3u+rY)d%- zNc5D3OT4e$nhUAQ2`94Bd3(OZY{Knn?8uvJDor6~I+Tu#FOd`SLtJkn;a~eQSH$;$s_L2ggSv zFj7tTxW!bTX4R;14>td3CZ#k{oWDAV<|0YTTBl&cen$HgV~aQB%Hi)zI(5>{r`D;LP?9qUEf`)BIHD8tjl~y-jlpLMD^zgw-=4NeoFiP$e&N@b(>gn5h`YZUJ`%+hu zUL9Yhvs34Ao0vE&y}z_Xv}L4sC^z>xhA<%0Z5rF{VX@vhj95%pFor)X&+H&ucQwCm zMbNI#0TInFw+Z5~yh)?T>n+~L@5O|eSG1dzn9sTd2n=zleJ~9mqb$%PqFWA2ejrts z56X0Op}!4kO{-p}w|M;GbKcfR?^TcOx|^H7p?R7PRNv6Pn|*1sAL$h}N6oLiZPA9QE|1AsHfQ%&LW|1+8D3GX44nA?00;vA7s$T`vzcGQb{96AwA$QSo~<49 
z_^JMWjxEF@*A5(Pu1Ej@Qw&-6!;h)xo`~|_m&Ve@fGYy84$ugRD&EpdpM^>d(-$SP z&amKIw!(GbbRW)CT0}lU#~im9SghiePGI3d3*tiL*oRP4C7*a=(Fe8g#&01i!wlkS z;z3M=mB9cZ;$Ru8%8o3F-oIbbR+@Suh>t_S4V<{eQA z+i&`?7^<8uhB(!Y&{G+Ma#_?3$EcSJ%ar->>MYrfdNfrMcz)MHyx-x~4Ix+V&FbUG z!M*{2Je?yYoRh>YIM7 z;B=qTT8Jk8%cXrKkI(E*#MCx(IaPKTVG=+>#L0opBUL;uZVV`9Li){wrv+?8oSJu~ zMfVQ56bzi&q+%9=x!Ha_%tczXZJNlcNDv_c#a*wQ)FxeD5x#pp(4r9l36#?rJgtwc z^VM7iU|Cj(EW_}(li|d4gwM*9$48uu(*Ht=QoCy1PyiQ`sO zA_(uK8+4%-y=de3Rf1+&eS1vF%a7_Mj`<=*Ooy;r%`@(s2agpsr0v=}cd{Ai{6$;EKJlLtbw+KPbOXtPESSYQ_jL zUzOUk@@X$yZLA-#y=E$C+|dGpSy#yYd!5o?7M9Fajz9M`$IMJ(1Cu+!8Xoe zHe828dL^S$PKRHkCCfsdqF%)OxD=faC4y0}vpAv@O8)^kkSuvW|Kwe{T;{p(*Y27s z+(1(MW#6wgC)EpGHcK?gX=XP|?0-0+Xy9Ixx8*ekGlkFPYe>!}W6q?@S{XuIzY7kP zud6ii2u>WRdfL$Ht@Mj^D)cQMY$r~47?%7|_L2C5@WHOt^eZhiVo6)4oOHetL{xCE z1>?1gHqZNS(~A4Ca4;2maaR6IrdhZHWGbJGwKo z_8d1CaC%KY7t?ir|A!@cB>omYos_!Ff>5&a$gv3}@;}L&`x5^uDS|n9H zSu~Z6C}IHKglH!@7;~zqp?eMgvP+s-!l9JClbX-KTZyh@Lt}{%Z`uJ zLBZb=4D1^BhB$9MIqMug9Y0p_l`(diVUp`NCT~EvE7SCgG&?K>Og8yIBPX8l7yG!W zNcGGMvDk2iI`rgyWh7rXDtyOEopA-As=hqM)Vn&icy4>a+l(v85LWXRSowqX?*h#q zXJ>_S*sZeU8i4hD$cu=_BCH8HslJSQWM1apyM6Uv^#Uz?6Eiw;sw5IeN(Y%gJGF{7 z%_*;?WDw&l`m?(BN~V}3X-Jn!7)(K3)K%JAtlP}ZJ*7;fcZ#2wBsK~nIJtq^-?&$~ z*|YlAzDu<%wsay>A5XsuF5Penx7w*-5}ZkE?tBi~15O^jHEQoZ`5*Uv z+|T_S$MYgD^X7VSUFYw2e!rg&p7(*_=hu?kxx&2(V?*kb`T|X;;uL^-?SRgFuQsf}acN?T-V=Sdo63VQ@ZwwPtsFgJ0{}K6CDAu1p)Uri6xFJ(>zUz zBR-60v5rOxqRIGvtx5n8mKjD)G3BWjYIux6lsQw7&FJ(+=Iu7L6g;M!@4{xSG_?8g z3gz@(BXP&m${P!=^;jOnm=E)>!UVPpYn}54!GW_On*joSZ&@hT@y~UB1O6n;RAOx> zCq4q*5A!Gn%!yIB9o;Y;;xd)#)plz7c1(;%)B_&(Z3^1WIS!^H2dYO4-V^5>*?lZz zDtLH$Yxn=MdRUy?!?LL;=yU-&h*#?uC}7h%Kpt2AqYv%inl~Hu zLHXa*M&{+dkB)tl>VM$yzNMmNNG0#MR9f#J9^yqU*g0~*vXGILiwmolH@5U_T#&C! zPoH4VCH8y5IYy6Utr~6~rJzYRemR+hTr{r#hlfHvWJ^5a5_656Nkapv*b8q|2l(r@ zZiL=u*%}xmo(hder2wjU<=W(gQhl{;3@}d|Tb0ODYQOB*-!;LeAC(SS$fP|GjZ$7! 
z!t7gKbL1jXg~<)@k0g|2AIB0gR5W)E!1Kza11-f+x|o;812#p4jUs7dxMnU~uOI59 zn@h3e9E8g(VAS2ihQ^r8&SLZGdn2ryW-}v?=A7{nEk8p$nTm-G)MF`RO;Qk{rv-Ic zW#!CXL(7&{}ftbiG_0=ILkI@n@*5PYpqTGG2V+eicde z#~ZR>xdyNS624DLnoShEr=lhDldqn^^1RZg=iN5t-pp?&7Y0-KQkaqe?YU%HvdX5e zQomDGG-7t9dJ$I4V^j3&PhB%S%J#_p{c6nlEYFhH!rE#6L(G9sM9e5_3#PD57sL4F z!ns5GX&=5kK#5)aptrBe{F?Pz!`e^a7mxdQ-TYMoMsF85rX}1#%^tJDsepO9u$1P4 zq1O393Svh)G3Q^4ohpl6TGv`@sGWE}Qg@+KoHYz0NJVuLo8f~YtWKIzdV7ETxD4FL z=GbgyQ+GUo+^ zaQl9$pEgy}4t@{Da1PPk2ZjQMhSy|!_#K5sZ&a1Sfgs$Wz4UYy4_T z&CpamsB8VpIc(_bl3<85blgmdwZzg^pDB_`!!b*|!BF4!TJ@@mRkG^xZh6Val&Zyy4??(2dN4fjPHBdH&*UlGqDy=!JtAXpw53*myPR#R;+n;3^!Hx7y9{x0YC~C~9 z4u!lXCK;`JR@$f50XFO97a{*qoP%pq|b61tXEd4u2?|qa~CX7QsA#$O&pmhR@beuytH+HkS@j}T=>;Ke|3Te z03l+YpdU5_{6||T&&^DXFY-TEk`f0Ft-Oz>#<+3wsWZ;z+eG8~8?U%Z+mCS!M~bYK z$obF6N|y2o_C#-d{eFTzYi^ZkuiCy=ZiTz8N;DIwg|kt&w)$AD@VXO!|aP6}k-#=q!AA!69X#&(X?e15`I-ZzN0_7H3SyZrGYsG;*?7+01j# zxj%0C?i624C4ERW4H|K{@!Uk)-fDiH*w3?hun=v#Sg`kNe+Ve}&c&j5a@IvE1uFX!rv{T?Z^uyeJ zE^ivnP6kI56`Ydl`3W$;yKwJ#^OZaN{i~1^K|SZc;ybhUYaWZoJu)~7GouGS12w@fjvnG&TKVHqM^H-rc=EzFvSF^Xxg#{1sxKImHyo&z>`+E%DVMf8L zc+rKtv~0ppnAoQKbwJK!%NSBtE@=uiuCcZlH(U{Os$4S#)4e0jC*<1n$8fEY=2UgI z@paxe0y3wBkhLe7dVVhG?9a=?D9Ztn^)@w;z4g`MO!HX1Yg}fvN27V0Tc4IWIZgMo z@w$9wVGh#Uw`yUCzP5rseZ+nYl2HTf^RU&;_|yF@AY{)?yhHV3OgAf%@$rVUaoJ{t zPyY!4uaAzJO0sUUet@w(JhxYnjV1_W8qR!#QKx}`? 
z@O(J*Y2JD}*9h9XtJ6?bOf0l}Lj6c+l=mPy7q0{qq5bRP8dd1qy2jTA`W}S~7EzaSDrKj4Hrz@ zjj~M6`k%^n<kGCf9248Mo4P+lYv6hn>Elt&ikYVm-=gE+0yZ&q-pBY{k-OL6qM@R0Ik zW%`0}a>QAdBVuuhj*qp8@+dx(yq#N{#2H`S`?%X~6QkF}Q{k?!_rjnqrQUBs3{cUA zG4GaQ_wDahoD-jU3F7K?Jr+3u|NgW6t66%A|D_vwqkwo8F0(V|xWD&IF+%Oa>luwr zrN~*-mW7vs4`PJOrA4@CA@#gIdG&xmfg9h*H9FH%(OWYUQNYk(>spdY;M1BH3-@fO0!VrK)$8aWF*1z6L*eF?p>Z znEV?uy+fc2%}bbLCeyTgyCa}3sch3Iev8Y+&+_T>{B>@4&`@QnUZR+Jp_h1{FcBAJ zT9sHS>i@LkhEa`gLKVJ;KpU#JA3}a`X{(gvYrJ#{b_E5}}%IXEN9;wM(PySw+c5jVX z#CPWR*fwF8;QmW*0FH#sxrP$Rk@YnhwBH~43{hF~l8H;WvF%?Qcjz%>7C`bZxBfaO z!P6!T%2t(iAK1_0Znp7?os)C~YSi1=klFG|*uL?M^qwxtwcql*_PXKDhL)fTAygfd~z=P-%DY;xX{V{Ks!k!IOWz)UJ~&pX^Pd2JIPY{B@5 zZ5=#69fsA@W=f`#JQRCre~ z-@-+2UuzS%!jEUi?Bu^Z0!Y7njb^0Ab$*wcyy6FFZN&l-x85pi)8_eSfNBV{|U6+STxi4UfF)@{p?Dkk37`xtny@%9=y zW3pGYm)<7txhHy%dMX4+gc@kIv2j^T^8{7u-rqzcgcX&^ZL-caytW8VHZDtaf%#B&01=t&N=dxaOXO~4wJK0NK&j;jnOo* z8-Ip)DjYN;G9 zLff$YMFVHz#rsxqax50uwiRtJ25o@L9f7rVaHy+j$8#WPoB@x3|=1EJ&`S#c<+C_oCkGtb9(eVGMg7 ziH=|vx1h}3HT9ctAPV|K`DE8*ZESp|%_hTrK{BW_e2W;Q`NKJS$(wBbM)~K6Rsqk| zBjL<%Eq4r`iffu08^-syx6kWV>1%>A&dsAgj_n@4a$qj1Q;bLPa0doxwF>1dlqEFY zs)K`!zio$_u3dNn*l#!NEM>X9h=Lfn*AOFJ*%DW}_n}k;p$_e<|M291M`Y@Ur$Rhm ztoR>)`ko}x<0zv9TD;Y$YtkaIJk0&1oStA9Y=kH6kQ1bOtGu1^53eP8a@b&|ptUo; z0`Y9^(&A1;`{mL7ekx~-#PP@taI0=niK`Bpw2+Ro^jxJ^op#)1vMppR-e0;4dMzg1 z*lnV9So*zE2Y-=EVdukHG~$g1Tu~Hv zH#x;ec&sB2b9c8Y2B>VhyWR6Y-{JFbA$E9nnI0%Fvzz-~>-+SSD}2>!;8y)ENjq-B zBm*pEZr4m0;{C!`HRrVaMfbNc$6{Z}?6K-Q`^mUFU>^w6nJ6kplct(K3;rHnVSdG< zHbAl{jLJIly@;pR|EmFW6^PKGeaQ6qJrB6x>YHK1j_8jgM{O;&n-+VR=buOyh=1p;Txo-& zrqD1k!RW_wT^Wg>gz&2Zn9%L)?9M;Dk7XqV#c}nX7%ZQu#HxGW1HwZcm6FWnJfNHN z{)n8`j)4YLdn!tMgu;gI<{zGio7QD{?#+t^^ayxyW>+~;+97vAaSG`WrGD^=W}V{& z;yVBj9YmoZZ50^A*I6I(&!<5J97 zJMHyS%QoU4UiqnkiN-9AS<z8jwLR@!unli`Pp z_*j&drn820OeLqu*%>r%P4&xZCC%iJViv4CfkYcV6B(`sH79-)fjt8*Zcu2^ESxa( z!z#JEwJ7Z5_Oi{x-J5(K$r$#v{>9`C_`DCDzlt5IF)F2a{vp@woYDmHKoiTuMUt%` zJyoBUnE&ZD6()2pOX&EzJ9 z3f?~}EzLI+@lfw}rkgVDMVTKfjTkM-Vi)XKfqd&vwBXZU%zvLkocYB`b^?FgYZB~X 
zrH)rSuGE1hZ?}L<7xd~&t+6~^UEf9GHZ;8%OYpaVkzuFgjdHO-n4@$^w)7UYuAr)I z$8U@GHLnAHb9F+vMzO_RJiX3?=D1s`^9jM~7G2On1#AbZ-62@Q+4u}Y>R~{Z-a!Q^ z+%u%PnmfOmQj9%%XTzTxM2)}FIMa_?)luBv+%$-Jh@o7YZjK8J2+g$$%#4)-p3N~- z=oTBl9q}5Fd*h(eilk4zflc@g^%p<`@K?ujVL{67N}5`bm6ArzR45XKE#zwCz>`H+ z8KKXzg2u9K)sK^(8b0iZUfrj8B%g5I*E9T0b0bEehocRtYjMT?L7N%9{@pHyu-wv! zZs%Tlt!7i*;43n9${C#zu}~ULv+6|cq~N+I*{3s=e)27x*pr=Khs7`l zhdQZU)YE;jI@1i}9F-iMz-vLb;xf@&XLh)8-AOaG2%IU7bT`-5S22YcCjI{Rr z9P%(JJQ>il%aUy*f1oZl=^%Sd+bPTa#_@ihJ&9;6wW6-zeUj+Q$scpA#^nwemYv&M zQFY5E`S1wx4hN+q4$RNai?3ifICk(+cq#BAfKb}EOa_9b*V0p~eJ4BhRr1aA{?_iB zC`7A$(0ylAkFSH(=a;&LL86J&uOwf_kb#BYudgL=8MZa9@bsDJX-9vnDacB^ygh%Z zFK2n{UGm<`^HvNQM0Qqc3s*4x8%m6n0DE-{WgZ(l=J|+S(|j{Q(I*)gW<_%5>u9`i zejAdw*tysfgb(3xP8!l4lZ3x^Cm2Ue;9>)w0L{9wqrIWN61(?K@zh|bJYn!Ri{r%? z7(L*n4wg!_s#!_eZ7vHj;*+1!w|#3MAX5_UPEtm}#8|V#~Kohl-%1^5lTzn1W{# zKNtxihe4lNO?}Q;a3HRg*$BD)m4vSl|bWa2(uTDia8tC4DXkd+& zY12znLrNueZ$M2{vouoqLAQQO_F*9br5qnVT^vUA^Tj|pdmHVhie z+(|CC)sqE19nY4NPR?eK=`lC{hi9;5ZP+BFW|Z{Q0;*48ZVgWD=vR{fH$d-;L$%zl zDDRK)WpQA+8!%2-sHwVNSpSvk zqu{jO%fzJER~ODh76zl*>_RI=I4tSFr{DG!GiDzpoq9?idh<6+<0d zwo_+2jtm?hEWK5GomHyN4OSzYLnej%ZU1r5Tl(@H!)W&juLFr3x#i1z74e?_+(Kq2 zCqLrXPEH@(_3u|d z8BIx0y#?f~BYqfkUwZ8l=5Z7RhV`9@2r33XAUi%TPa6Eh-o>L}+O%rP7b?0y!qsXY zQ^-NdLvCl;mBQ@_DS?IpUi&7lnC?*+|xxYncsVU)e-H6s-uGIi=W8s zQrshPhU@e$gcjl$5EZ-X2r%L;F;T2tigQ;I(wzY};ZZGaefX^SF2GY7tH#t$6d6QH z%8}CvK>#@(TAfdY{eAewKE;>7rA9*f2{bGNNQXzM;(d8m+8@+L^YGU8>kKI|s|Czl z=EyQQ(YMxU8!>87_cMOsExBsLoFPOS0ZHCw`TV@GyQZlF`=sr68+?WKi-NjC9_Irv zh;@|-H?%Cisb6ffEmtor6!P_60$rE-k+(b==Yg=yixk+d&zd3EL9t*7PJ9VqPcHL! 
z;O7cE!#aRYeV$lv_Ox#euEbBS9&X#eAlTvcsVHhZDmz;XDge#1=^P9+R~O{h;V9>h z?Kj^;5R$AoJD5`3$ID!$L3liIr*0H0U$f{c*d-Id-<6 zYCBNpeHO;`j^8B`vfVcwQ-LJY6njX`ySkaaqJP)mwlU!JVRZiYZ^x$ZL}l#CYl5B# zPJ?c>ZjW<`l_r_kG?$94ao5d~NfVRp?-Bdr=9=hbjH^!CoFk=YVU8vTP32Pu#HXmWmraj-qj>S|3=Ru zS@guJO@fbrOh5XDOG0ywdE&QQ8`&5qQev(PToC(-PS$r-U{4O-Jc;ujXc_`P?d(r& z`8*zys$^S}pL(yx#lH7IiJzFO*TEWnLs}-~lS9W*Hd|E599SyQtS%MqZt0*~N>!p8^RowWyU)U1NDMgMz^+eVJy=5z ziG4i!X?%Nc>v6Z46g?DlipK}+!G0)-&E4L*yw=%cFdzh;?^=XGbIQ)B{n7B>CQAxx zH_A$#+n%zaq5iQs1$WUCd^64J#?+4;vGv9McRSjzT5+izx6Hep`MMy{ zMo#k;VgSQuhmCKwe|&tlsQK$3*ymP{#L-Yt$&f-M%0wN~w~CpZd!K$rl*=>74u3h3 z&%E5TvC-P#@$I-UMDfEm|Kai6KGYYn)-5`>`*rsjw+MS3a!#WOmHwc&aFm!g$N1Rr z%cz)m2yh6?r%r7p#UYKw{=-|WN8+DAN*MHY^Fr^>WVgEzt@-Yu>`NZKsoRiq_ptr- zmvv=IN4HerfUc@i|KvJNIV6R0*UW%RVpG-#yKPfbLIC_LASiQrM1sYI@f6X zi)%kiy+h#niLs!ddW}hpE4z88uV2TtteCxk%02RgV*Mv7WqdV}^hZeT{Tvs=`m#yW z@k^1bwV9qU3A&NV(w0Ku*+eRpjWIo;9f>ULI^R|zmy8r?1WrhlxWJ) z4?N$qZB}yE8>piLL8;?QM)H1T`V^+e(0<;}kH?ggMQHLluJuW()U}S6KgY!X>E%vG zTV)-ruY9uNXX$Top8OPV`wXLwn$_F+%ROs(-P7C=j^R+VPt*&kasq=a)g>g_e;hqK z{SHnIJNb~N*l>tQSgjhcowa9;`%yTVHx*@E`Pb#Ob(NPZw*PfNU3`kDP!NT;bV3}` zz3uS?&;;UURL!5mavn^+H^IceKGonA>7UIq-REXzfkB%1lYv@aZ6l-d~d<2*7)NdE3?+}AbMa7if*k0@O5p+c`-v`Z71Ail3L4*Ae^@kBsE0OyJX#)BeE9s!6p&^SW?thK5HJ;9vxgd0sNt zPc_%B2ax_uE++sTO;|CRZnU%J_p*0+jxnIqh@Ho7Vaqu7ILQcW+|5myqkmjOVQA1j z=jf@JO}Cz^s~0XPsu#O`1k2?0h$F{!U6Jgs4v4l|xgnP?Em#~k!KF+`<1Qn!pGP*dvlxC==Zg7) zTc_1?uy?`*sSB6l9|2aydr)c(wUb~)tQr8%s@}D|_|J8Nf~$@4Mul~FJmzgmJQcrz!hOEOU)Dnq{hX;ny8j=%)NrrC&J>rh@*_%@%Fl zKfG${e|W&H{}{HOap-5+r8QLM>qiS!cLvC!YvOsBe9Rnr!0|%W@EGr5?r-`{Zc|?S zQ3d=uaCJ=%tK*HL!$}nmP!MkL2oCbh^J0)FCZ2b5b z=4VUX!ng+GR8;Gu|7tzTh4g64|M>caY;JuBf!Dn^P_L`htdU(&YJisSbvOi~=#qS)K_y06Q4*h=^I4*`ode04$(*MCJ%o!4 zwVGDC>gSg)qWZh~7E8yC(MP{=mdbhJ+Vg{C75+hHM>TlAaoYq6C_L zJyXK>;$)?8#eurWR@uReOba|S8lr~r7< zUs6P0OhjiHF1%7Xg6Racq&Q*N~NI9le*OkuI~vudkMK#HN-i{S;tMj&APM z>xf)ahuI|PzMv{lVWMJ#EP7bHZKR_irXQ|u&MWqoLNpD|Ck6J zzi8q5yZ%p-Pa=tCrDdE;B8}8zjIz;+)>%Jr!ph`j@y(TC*KRGJE*Xv@m(g&+jj(4P 
zN`ZMRe0=pnnfTeDRhn`+l=~Wkxu~D;X={|_oyBZaNjy^cPk<3LQK=aga$%!N3exv+ z>w(o}dD3i+uRh-toPMv`fGj3sNTyu{e2;skn88-#5hQ;kvR}8|AzY%l;~lx6aqj=r z$7hnIdB(U)-3TC3FNut{s^93!lPq>^-D!3&8x4z3#U}ffo3a7GDSxRlZ)A&OPq(Q# zLM&JVQi(4;8JEms*EX8X?i(X;v<4Tpq!(i0Q1xnKKFOE4UXV4b zXR^5o`qm)Q8?98_`I(ZHd8MM#DLaGZN1Wtq5fX;C&hd%)RJr*%;g3V{gS(wHfmeC* zZ!ey7wdc-EQ%chyJE_Y%|GH!~YvRbWwcW0OwYUQQUpah zl$+QS--~13y4x3`kSJm@sHR{d#3NAxu!>gPEUv(m*kTqCFN;;+;ofi5%=kr(z(I-m zDFmcsc)@d&|z$=bvHIM8@~*t9+{7C;!pMy3@Q1Wtp77x z@(E|%9YzE=3z81-U)H~GB$9dYWplXvy3Rcld2;7oQ(V;aYmlQ={pDnLZAl%fSqGkl znft9&;w+^A#fJ(lk~CDBRtAa?m^&({3#QBWY-jQ%%6biXaw^D`Ej`a=6W7G2D?!%* zuwKr6&r=4seH%n}i6ca#e4VIg6ya)MXm%g-*K*{H848Y#&dCu;)uj#S7PJk@a3*=6 zaPJ#zj8ob7usvo@qh+oBwJ5_6MFROF8sqzTpM<-m(k&uuUPk+{GbE+y;hwr^f;Jbk z?)*`O17LpuL`J2lR*@BLHWinOja#r|Fpy$-;42$^F?kS{pIQ#8iaX*T=ziGmi=6=C ze!t%zMO&2Noznz0*BK@>a^bf}zpp)wYZAgyobG{T*$nxg*PaakI47m@FQkPCk_BoS zXyk$yIaYDESAn}aOUdVzFwJ1UH?c(zWzO0OA#mopIm-O;2EC0*Nd06|GK!2534)Lj zd9dnbxsJ)mw#;5lhuA zC2z=im`hdL4j`q&VPcw1gj=)EWU3wlr1 zNQzT66tIZO{bhRX43Nwn_YhB-u^bg#wj{R|M**u&ZkUgnCmekQ>|-nZ1NPl}wo$A`nCVeoQJ1dg;`&+77NA{#7%Ww#<0N%u@p{yeqVIKr zbfA9h{Z02iNkhq4GpX6b)$i23GUcjM5FeD}8wZaPBbo98>@{Qve~YW& zbRB>@92|Kg2D4XskW881&!Vkv z`3WAW!0LxD2TUB!sNt(l`sg85TmJ~B)Pw$dmp$s;lO{b%`~P+x|EB=CIWz4)mZquy z5AQ%s_mh*Z|KXGZiUc0*&R)#3hpXIGa^LKYu&iH~fBJ7^1c9<>ij|83jk(~+JtzI} zG0##?sUH`g$EvV`NwKF9!4^qm$Flp)dt*ATV`JLH>-FZH(=XNKS-ELE_11Z^r%O&z z+*5DMiS9zp0jdQ6wQOOL;u%r0fL?J)p{$9J+38a`kxq22@vka_%a-?8j_b z7$%R1G3807ldV0Wp>HhRxP#uw-y0@jOzUdc;ilcZh&0-VO5K1*zR?QEB40+f4DuL z7g5o%v-vknnr_=UZ~89o#$D*DWCYzK$o_}-*1sdCiN5O7yLVgWp9uX1RbziG@#1_= zg(!zTJttz-t6IXdY&qDY3)tENBVT82^H2KGYm?(D%gxseTl%P2*xy$JNzB_m*{32( z1O>))yu8msb)4~SxMj_0pt;IWC!MIMt@cXssgO$rnU<0uK|4+q=z#n5#PG9wB3rHr)Eqs(FGXiFJiEo_6)bk)_*P1C(9>t;e`(aoS8se&S0SX4 zNodF7;{(0#=5m}Nt@PgVfM37+d5Cvd=_9$ z?<9!HjMt2t_qj~NRqfr z(!e8LZfo5XYGaj1`A+creC3AN0)uowzHD+u8L!6)m$lV-I!w;9%72JO`*tcJ2?;PJ zid9E*HISso;_ZR|ml^s$pHyJLF&@)mjnU85;_;u okC6grLWrWzxd^>AKlifSpa#(WTZi=?!Vvv2faBU<^TWy diff --git 
a/spot_wrapper/calibration/spot_eye_in_hand_setup.jpg b/spot_wrapper/calibration/spot_eye_in_hand_setup.jpg deleted file mode 100644 index 737fd7cd29a3cd71be218a4f0e74e2830e9dbffa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 39733 zcmb4pRa6w*7w^#Br8F|c(B0h(J#>h4cS%TtbPWs*QbRWkB?8i^Ff;>5E1jZ*dcXg= zPxtZu&RVDT+VOJsIy?TY{o4VMg4HzD0BC4v0FD0!;2#R048TDDAO1IE{0B^I%>Mxw z8ygE74;LRF4;K#)pOAzQpMaPE508k9h?sHmbF{f`p^6Zbzx zQUW}J|8D<3@oxw~h6{K^H^V@C1wbc5!yrTZHws_`0021uS&R1H`hS6rfr*8UgNp{h z!~c)2Lkd7c$HYR%#K6MB#>BwE#6UyG0AON~VdIdq2r3%jQoOPi@{OisRmv)-g4nfv zrUnZ8#Vp_{L$BCG3~5CDV=LNs?H%^60|Jr%iK9mYpkw?$>i!e|e--{mRUiYPqhb7a z#{Ewd2G;-V`5%>xoCQlz5!=Al7l-0ibXNK21xlf|T~^^MY8oYo9rT*2qTSCwW*GTz z13-xJp8_%rGJrgwz;^J(x}?QWlVHC8kqKPrvy)#=fjFaKG6Gf>T~@B#OZ_R;{xR;5 zyurLy*R}wZWMNTlC6qd#RP0%yo2hZILF-Wa*B0NmVJ!ylhn*g`o{JcHCffJJTa!(( zwBkc4^ZPz$hk(=9y8Vz3igb7oahyN9rgg4unA?L(}jPmJ88a0{eFl2erwR^fph(X!Cn~GD=!Oy)(Y*BO+TzXYi8N~!= z#|%=+6;;Ima^_b$Dk&+7rB`$#_GtqAJj5~danW5<(bf4Mt$?+|-ppc{Xg@P3&x6C9 za^M7+ED+a3Bsx+~za7&RL|;;FNNBai{BCz^I!ptIi7sbwpb4j;MyIe5$xW2qFK0*L z`Wv+?BFeZNh#}0Aahjeg)+ohV#S<0_$d|RVP&^9oOvaqo~Vqj>Q(6JLn=|Y(m*@?AO?qVTBpT5jHU>+;(VdgH`nnJS} zlpt8U2k7J*V~vC_a;-Rg@gaK(y@De`Q4w^1m?!a4IKqaV%{2VyIBCyoOfCIg%=SmG ziWCW!8j2J%$s|(9r9b0SZ_KxiFNOMIfLqPt0Xi(A18u?DH6yVL^!UC~_4nwf+3#n56~xwfqv`Y&{h>Ze2|cJ^LNm|3={AUfwA$S;=;qP@9?)PnqwvHnJ$v8%B8M37H6 zzMFc+I^)qk_7a{%JCg6D4+*Xi4=J!Zg}CyIBn*p3wHexo`v+b=7SCE5N2^yqfJ9^G69!bWkS)(YfoxP%9dd>P3ADaCQt@oA zY+T^@MAl4oWz4*#7)qEItDbYZ@^Zav@o3HzogE%PN7uaVAD~o$>EZp*bI_K;JLL%P zcf)rbwHy$SFy$Hp9jY%yu_X4SMsP}4o79uzV1jZz^@G{ydu7QE#vJkjdV51p5H-eE zE~c!WME6u~|1c%T^|pSUF(JC#=Zxh3MR;66VQT6l#bhB&b8uNq(Cj;-OO3c7#S3Vq zA}q7S&Vao6Wni5~05cS3Zbe6D!D{%WZpd(6S-CORkC5}T>OrC3&;mrxFQdv_y_*zy z05uAKH4gOFibwm5x`}Jn>VC?7z(oj2S-#LN#|g$3Ne}o3{S+&7WhHph&~niB%WftgTPq*Zfpnci% z>5}nD@&}8dCMr?Cz!&j3HWQCF7kC}4?*J|LvxU$i5@R#hWmyg|aiI#En;(j~RO&@g z8vR^afD4%d;SL9hSvs@kmwwPx27xub9j;waNbED 
zw`P_yK~cq2MaTuf%t@d3!|ILs=^1=tq$dCfKH4fRk~8?mu*ef_%=z~x~A3jL!g-gqKWlIc}so=s$EF8Kg6U_M@mn>Zf&_LTypZEUPZbqWj&ukf8otUBp)dSn=`wC?bJMB7f5 zSEd8znD~JL0q`!~P0!LA9T0_C`bJBEa#h-wfHF!`z&d&ZK*~FiS>k!=`&>S!9i~9o z!+v#x0p92*-B()el*(8vK@6LHJ@?uKzbwp~4pS?0i>xspx ze`aJ^?oH%gf&UN?|((2YrRcOa{Illh5mPL>iLtOQ%h2{ zm6m@;t@;MNtvl;nxJ?o7)^umKLzE~(6KUPLj4f;#=5)lE`9TD)pruWRyh-m|81kHC1`7ognWH3jV0@QalMPzq3a?0Dp`RdCn zRiYzjRA!U$;ok*aSRtd~MfEDOs$}s@k0WK=_!=Y2a~AK+Gxqqe(dAX{X?0vn9=y{?94dhGLGzVeH0%tBm zyqqzkstnmtv_B8g-Gvm(3LaHI8OMb*pF(%P{sgWh9_OCk$@zY%oL-K|oDk~plN4-y zwhVT8p~LT_hLg3dRA^ct%QNi-G-(;llyFtg*ka$b5Ij3|Q%VGFiyGF}&{xWo*93^k zc=x~3@7<;x&;SO-jc$$=yo!0PleWv<^es#|T7%UVZ-pFhsg)cJSPD4BR#R;YcYWcP zdUoAj`esWYd)c{V(w{PP1<}(@@2h2zV+3cI;_$OmPj8bPh1C#XBmEA#`95sx88i7s zYN_~Z%4>WB)3Oc<^L(#6!%-x_V%;XzVlKMgMo%xDZ;KM&dEPZDMd#)L@t~p^IlEBL z<>t)jI%Ro$3a^Cgx@PWxq@EFCmm5T?JuzQ~M;_D-?Lr>6>2uYoq80RavXD|0Sj`do z#XNUC8&YsL@!rR=U?3it<`RpF^Jg^Lqk;kuO8nhjnUz`WaiVglXdCt7PNu!Xi4Bm| zNskIzQ%vg^8+uTVi|fzI{ZRY$mB-W~9sav%7H)HM#vFhihj*rU@=RQscn?PQygva7 z_Kq987KoDpp%gzS0@tj%wYx=>{ix80!Y{H%wq{S{-*n>~y6E@CxKKWvv8j*3II6!x zF_yW~6y5W3s8N%>SZ?A69xV3EQoMsaakU_zXHO>)QHqttg0a@pEjA27x&G`Z7uEW% zH}kRDu~<)uzkYisB_kMGKbS8O!WbBlI0n(KV(Oe)ZGf5 zA7Nw|>(o}ZWf4=}@Tz8@rDxI|)W~$@7zNZY|zeTdH_C*{*7?=iFZAsLf9#l%9w41`d9B^x3-$JYuH9A3%pe+g*Ko;s4P z>8GpM8>#ZCLKTHGPtq%C(%}j#MFVcn(KMLRJj*lybU%*>qkzEeabon%3BYMEDMlkR zcG}KSe0M*2v}f5p(iY(It@+AM9aGWDH<;WcxHk%8F#@Y>!HXZ0xpc7j1r-4MO>g=i zW{lt$bY5?NFazeo)C1|P@iDFP$;8e9*l?Ah^u0f2T}wq#6$&Q`{j{i456(^ms|7xl z{j?tQ5I`v*_?nc!O?Jmb0KoeS#b{q@7R|ANokQirJZaA?)2dEtZeK(+%c~`J@K;}8 z*z?QZD{`c0y|LS~cvQq!NcSEiy23FdgdbbI%2{N|Mz@=t{2(5I%j{pc1BKc~6^(XNqHsV!bOmbR3vRCWZ~@y0(u zUe}D0Pqme;YR;S%(j{QLQ&tQBAnVSZf7asgZ1xH**>%JrF>L)w3K4jYL1pH7z(P#4 zI+F2zLxR4ioX^EqQXU<7*lABRQ5vC);-wVFL^;5U?QD%6fh40xsO%eRGA~N%P-tr4 zSuctCrxKXVWFci=D1FWDlyPJbbonG8ojy3-c+E%l%hhq_%M$+gZXGY=DJeg_Z^M0( zK{9L+@hWS){z$iOy(0Un5Smw9t0T(6#&>)*FT5psD=>yva}A$F~Jv2sOOQP=NCHSLM+gu<*98{Vw)( zc3=)MGSiv%BxuCFA4> 
zMRoJ)2Y*PR@prI9=Dt^F`H3b|5|F}o)P{uoC3Zh$=YAtA`{lh!^~DFiF>v)P!!wMr zR{DpjLEENU$JO`zz)*ygGv110wUgtqM@Zum?njR<`9rCWR@5#Bd_s|$iq9XzeuEbr zf23*rLpa4x*RhN*uJ$JOECihJ`tu%F?UwxBqP6>pNuk#A?C%G0K1FFS7WC*rvjs-> ze#zwfURaswUvx@VRaMuAWJx~rcJ z`-xc3#bg*i5y#%^Rlx)>=Zd}rJQ!g=pnv$Lip$9iWA;T^oEZVRJx_KDf7~M7(pWe4 z6%&xCY0!%$Si@u`s;UOlnLr^>z2r4#h;U0qS?&*n8P8*nPn-`rJ*pnQ^OaD5<520?ZW_%%?MipF(&E$j0>MT4>l(~4lw!Cn@Rjjf_k+>u>UE3#>nlZz}d>hGk9b>3d=`f$-i|%+q>@oufm3db*_Uq*|=- zRk6DrYwuI5EZSI1|Jgl@+*C=le#(!XwSMs>f0x-We80QvippwZfj;%x;GACiukW^) zSUS1*HYpm!2B#A@YZgrnAnECbdlysrJ1yZmW^KAgH0S0eR#86SXk4O9k2U`XwX}K; zUX%d#keAMY?;+XT{sv{!+&=uHf-KKb_S}ww1tvwe;8M50jd)b3Zn^Z6KQ>%kjBF=H z>=lWtwDmj*Smr%a0-G3jC+ll}FffqRjqaYw%$$*wNbHr2<<95wjulJ`Sxla!Q4pwm z^)4D+^?Ws7l#pWb6*k)nQH*9(dMv&FROCXUL znwyt>lfAurIEDTJ=Ce0$ZzO%OTD9T86`$viHc6Ueu9$2yUI+@`u9LeevlpLK)g4s5 zelgq*d`emSyfph?xFeoGh)}a|i&Z3!Iasuh6x=I1@f+n-;i@tu~hgO5L%*af0 zlX%}(ON)6ef&vf!#R6A{Ba6vias4V={IlMon(!|yV??#?d6*24&bx+yGfFC zGI*SH_6`4e=d>XluG1Ip2sIow@u(2nln_fCi-5=Z*)_--zUIo)pP=xrw@p^*o9XPA z&>N=uR1(5`CZX3UKOLWDeWRyXP4jf>fF0^u_H0#&{3z(Uh)9i~6(E8^xd8}|<*y6CfRrGVyc1E|L!=g*?z z91i4D_@^AH7x64JCBNf!I0@(o(%Yb(-MOzTB#>S>Kmlu?9mfT?LntoM|h=uu+MOBG$%Vp zJ+5~1i(2toVbOq|r`QB@Jy6%`8yv5o6b@mbjjW;S}a{K*BKr65l?94~Ve zT5$@MqG?-m>yhXF2aqTPrd%@Q+gba)Ue~OSP3_p3iqRa1*JsrzJbf?b(vC^F?y{d`O4I=YzN z&sGwyBe9NdeDg7~sx-D!{qK_EI*^z(L-MGqDk{8!xKK4&2MWHo)A_Q5WlGB8FZwb- z3>9IWn;nECj|YJm&{ZdoSS9F0PJgp9@qJ{6=?F?xRabH$)`dar2j42iH{d1>vK7d|DW8qfH@&TQLLRG!FF4;}BisxD|ht>`?3GwUa>Cv|O)=jYZ*vRY6FB*z5HI+^_r63k}&gjlX}5`SBkOZykE`AGRs;vC-@{| znsL`&oTyy^W80hF%l;&0*w_;l zL<=>H{33i;9qN=!uO-y;7r~QfYJZ@UOX2sHnL`5u6-{e7@;YR8a>7qSw>Qm~*A~%r zHj8Ac?d!7g?A$EZFun&y&;3d*HcaHBa5s4zkbN!DE4%HJYrXb;^NqKvNnv$It|&Gd z-P#LWTT__`-xfhx0&KyQt1pDP!<`2R6JZKNEjP_7^P~Wb!;`M^IGo&kDv{=P}kHn zv+*CUtA@-CR`6sv?{5DCwD@M$t+VCd)>$SG8?!^_f$^Q>#%lnLela7V)0Ybn#8PXC z#cQG%c#~fPrv`%;Rui54eP!G?_fV$X*9BiNC%N|+EZ8i(lot|&T|&&GkTiZkDht6 zaM)EA%%au$3JAJ-Dg>-g1KaiV>y8xv0WOjn^O9uvEou!u42Q(WS93N5XbtLWFFJ*E 
z`WQj*kU42`4VPhPZ7C&GNi;@)k{rvF9UM|P_hUY4qeWw@^=z|FItA>LW^|C(&E9&o zR0>(@`XaVPwQ)70mHOE0OoHuhns7F*3=Z7zl}WA%G2ewL{w zivZ^Bqt)+;>%r|Fk&X>4HTwVMJe;+U^%d?ufGq#89(_cRu=hNs`D(B&vz|9?61ZW^ z>LI!_Tz^b-$tZW3{Gw{9e9`9xa@DK%e%EEQw_KvoTML0JSD~X}W<<~DnQ(eE64!?Lty`VvwT89pv^DPFi)3p=Buvr2j zUtFrGs*Xv4S#_N%o`F82VJc@GkVtbA#D0V-QYtoxs$X*vmulZU7rb@iBCo1TkWD(o z|4U^yhF16>T(Hd3G&F39yS!>KjP#zG<>~4&GHB1-cZe&hDhC3;EmLPC43aKutE(7O zcU5&KI?$2-1ij2n$df~cvn-1=!6GOn!99pk9|<15ALeQS^`}rj9k4|L-6j3c}A1@Dr25iY*e@FX%|E&Aj*(&eJavy&M! z%d@Ey*Oy+SYb82L51m?JFGxV}oF1caz?zlG^KxpTz!Z`yXp=r4XfuQ ztQp0?a;8G>`tejBf81>O4`8})^T&uXpsaFl$u*!_PsUu0oVch0M>Aq z0^Uu}6szt&5gOe(!Q>%zJCCuQk5(P75G(_<)~e2KPn!+3v>@@jD9L0FG)Dw&Uc%um zw|GebbmS81YE(^J7$5E`Pr&>S@P6#xaMZHG%kF5Xr}rO#EF?VTramZ*MZ2_Gpzwk? z3+f;;gJ(n8%k#~6OS~_M4a~(6-2K7(*1hhqD1TS8&qRea&dqnT@gQ znmazF!DKk~zR%K7_|{`;YNqe@vXP~$NE5PB+4|qBf`%#xI1!Dmo%F9TjEHnQeYfh% zm7^4jzLq%oGFL8Z)D-9X+&M(a!vKzGP#A@@Ly+Gm8=WwR<<8+0j$kBdd0<0M-g)F! zqCH#Pj!Ab%5vX5w5Ck&I0w2~}8$J2W+&a6YNku8U=>x^!AWKw$+joDD>D~l-UtL~D zumKQ82h{_XUN@wXXJ_?x-jg**+-+Ms8GXov&q=*TA_%BJ&M+*l$6n-)?5nhvgHqw@ zqquu>WL=Bi7lB<%z7#eDxMcouC7e^OR8tJ|g~k6_ZvG#Vfy}AOg*RX3mW{q{IJ7q4v+SiSchocOAfDyP?&EWg@jMzwmcd^v z7Ii@_5D10gSgBfHIIuSEM>hWY1e)|md)prB6d4G^+mYMfA2~DYPR5zh>hI1KSsvrQ zbd(<<;U2`5t>~i+-P_~|!CUk!@+ERXQg6&Jts=CDuUrU{ho->>`jlHToN$r-Wt?)UpL^9w zSgQPeZ6Kc04<5|9KTTQ$ey`)R;T3CAIXi6_Q3}{kqmDGZkJ<663`I9Lph7J2wj?f( zg<7NU-)o$x;9cfwaHdGx&5}5a^Tk#^?Wlg`dl}R~h% zvsw3JBI7gSE(7YD{G|shczsAF*rTdtJ~(F>Y5`Op1L)GKhCosQaF>bL@T94>G2h|oEhD& zudaCeJ$OaJkitM%no!gN-e3LGOlr+^8!U%?EYGKS@=8`|`mAg^l}T7WiQSPMRh-MK zv*heiaYHFb4dJ&5#oR1O+iRbk#s=k$cl6|8g4W-W(_65!pZo*(p^!tHp5ZH{$#ldu zek`0|nO|RpdVA*F($`+fSRa%KOV_#$n;q7LXx@{WDm0fj9d36M$mHg=LPGt-$*vcf zc4UlmCECkleZxi9xnQD5RHDhQ@>LW5u7>_5rypVXeuGo4@&2<)W4G0V|5q%bWm#se zqz}V!wHMysVn?0$DF0_zYo6Gz-&s5d6*(#%cCkne6>b&)PPKWBBJ8t5wNJJ~aQ2T#arBMV7ATfb$EgaS2>&fO`MF1QWv$3|$~6~xs#^n|Z}U+x6-`z(M3XyLgGS4$ z=J&C~8TzuT?##~DUrUW8=WssDH{!N6;d}1jQ;O7D?8K6%b|y6zM3U~p5x@(MD&bJy 
za1BE=i((kfT}bJuPjKRlaH78hC+9tm?_J6_M#%RoPgz5eP?`QY-es6JTpWu_n#kVI zz#fqZxJzzba{f*e%5nC969C;K?7=MhOD>t_1FdTpo>|*UyUBOzHbtK0K%tVvJ?~oO zY9f)01@ zwz?j)?v*N$!ugdwp=Ibfl|$eBH?Bd-^b>e>7fF^qM{uL&8J#V7z41^rGSdiUBHr~} z^fL)=NxS6x!nW|7w_qst&(=;w0$tx*Me!|~wr(ZaS+HA(daMtIXsC&7G5=mta+t8l zaUtcjb&l(vS7(39YgMHcq(U1oAW<3`UmYt9z`^a&kjs=-bLO16$=%K4(>8YZ7kNl_ zpY&J+DtN-NQo6oN0Y0564!X4UvJ2~?jrmJxk!j6=R%PNrytAEABpWXWj@wXXbXC7L z?%Db4yTTo|SVk*}cg_qFvj=MG4QCgj5jtIYlydcCF?)^2nrf7XV!zh;i#*w`H@nDm zoFH1WnQYRSS)(MHh^R!%QufxlOOEV->L$byS9ZpSrrhX9)!p3>jE>ZA3(h2yg|3s? zLXEStig^5;+TOMo>9(1VC!hKF;QXqmOLKj_ z$Wi+<(^|`yvNApuuF`AX=z2(iri^1&XW{;DKci`I#C+l3GiU@G9GdM{Zl@{Ft$LdF zP7b1EEFM#{r4^f`86S`J#P%-sw*TE!%b#|&8d7ZjGM_*_}?Ie5bP zDN=dGaJE>=NxwLd{llT_MSGX0C~VvEt$WBYqomtuYE6c)twwO;%&}-U-{t$pPb&Fh zX14E<_*i%f!XOM8jPYmV<& z#VRPgr^o)jw}}27y#szq-w$ff6iNCpT6{RK)tyo4Ut%G@6zd(tK8=XA(^CKH+T%ju z+Dd^!lMUgNs>$mOY0-FO_0dKMwsHNoMV@+k4kW)KGiT zoP8mN?C^>9w|{^>ISKI$r*CialU(Cb?o^zgkH`H_j0f$td9o+L8Fzv@{E9}bofp98 zioAaSC%q@g>S6T&|59kR6e0v(J=UM;_e^d_@Uvi&Y1Zg$>@c) zU4PwZFl~XTVNw2v9Cl6jJS57Wx0V-9{?UUH>wfP#-IwRWLT0*TM8rdpO&1ol-r+^p8#^~ecTHSh{2suaX{SbOgwdp3 z4)j7er&=a=CPkQ6rW%?x#U};>$OUzqJ$fF(>82^l&7FjFd2;qntR{XfdI8U9qw`h4|xDUM}5gpcwtoX)kR*6BF5XZ zT*hcKjNvv|BwqxG`9>q81jcOp zc+UR#xsH|bNb6L02ux2Wt@E*bk|waeI*s4DuDC;T)b}O`Dk;+aT`A04^!^8!08{Gy zc_jm5QvcPK+~Pdys)~SB_<25XPRL`L*Ff*9 zVsG>XwT^hDoC;DnK)1$u^~rh4E?SrGY-tt2BoB(+)ANMF`Q{U93V)INjck@kK812^ z0R-b`4Kt?K0w|Mu9g`D7D%W$lcgW|lxHlSNJ8x=S9yuy6Xg1uXPoVpvTVR89))Moj zjV;M>p>c|@y6PmU#vL#!y1 z+BxwYZA@1`(|j-NH0>Kr6R8rigq2)Yuq;}@1bU??uy$i1b%QA1BG*qi-u8iq>dvNc zqAxaG=|}|g?LZ}e!i&l&_BRB(Ck_pMw=fkmSore5Ijjy0S!oWW1MvEHfV-pKi5YX> zshU!x)UdnjnbZc${Fj#|f=m3s)Ygsu`|2~}`}E7K2%Un6g z=58x#Hu-p5jdfgl%oH>_pF$S)-0{90D_qaSgEY5i1zYYzq|xXLu9W24Y?>utE0LtTQ!Lo^tB_mOt~r;Ran`^|4;SJA zeI(fB%DI4C!kzJBj3EAeJ+ZK@(Ny)8T!u&t9s3i(md4<lV8@jLb2 zdf>of;1nXHJ-}qC{avrv9MbB~VOmT0S298pwyc7l`ULeFq+7^@o=Rg0Z)XF+;n~NH zZ+LjfgqrQmDp!ofNY7buhseab{>c1g8Q}suZRGX4*6P+p&p1(T8)XmRZMmX~mtOt2 
z5St;XG54}$@|attpscLeZs0@0Cb#bxBV3KR#)t_PURU63f@Eh8WqTl$BtAC^f5#1p?C4i;e0Ksx z#rGD}IQA-aEePQJ1AsrDel<&;wgbO@i>8fqft}e~&k|bQ5Z>}il*PH24!6p3dBg;N z8_49v@8@?KEruSof29HO8*u6LRU2I`p>Q<$*j^Vwyz~?zd}#j$?ertE3r1ezsCvTs z!A`!I(}DdvgG6pLNGS^2wZNo9(Yh?K*82Eq%Vx;maa-*+xM6+4ZQ@pw5ilPe7N4Jg ztE{;h_2~@jVORcWk2`hZE1_|IPz`6dW?6^B46^DE5gTh-P*=xe5Sg`}F~`Z`SEKsu z7M;1-7R#1_c0|+bDd~J&D?x2I4DOL)Pw36NRxX@KjJ}aWncb%MO617IiSoUd0sJb6 z+ZQw@Qsq`N*K6Z!p$FplHS~Q@cAqys`cwR_qgb7!PKjM;=@ax8GDF;c>NKqsy%tL) zS~JN(WZCSPwQv7fhg+^SYoa&u6-wf70&n?IvJ#b{D@M?Nf1;C`aIVt{Ow0X4rwn=3 z5Bb$g2V1a$Twm?JnOcygyJj0EBd*{h!8FgLiS6#)o;Z+t%T58CixZ4*Q3&wZciT5b z-0z&*zZCz=B1Z@pX3rBWEzr`?Xck%z>R=vL5I(QT%gp)*kQDpLac$YN{bZSBsR^n5 zlQ;DTzaC<$<(S0(Az|$SSP56b=%-ygb@vJS6))`Po*VLCbqNghP^4x(#-oO+M(s&! z9vpNoQg;g^gmblfqx-4koH{Q_E87q(;Q>$|T+Df;x~SZtQi0LG(VcTfl5|#}gw$P= z*iX2@US1FvS5x@N5c_Jhq~;1fs_oR>o4_=p zq?il+R3_5wmAp?%IuwJ-kKBCbHbtaG2~Zy#7EpV}5$AH!FfN4e6_?l8o8)JY%z_Q; zkB#B4`QW)mdb>WQ&*Z3CGNB5)1 z2CnRn$sEN&ZiP;D(GyG_56Mapl5%F)o7zq-p|)p9lNPP%y(H2nL%f@S#yU)Xce?aX zxTnFfkb`06?MhibimH7XO?ziNvXeEvzIIH+ zJzT+D4JxCe77F^e1Vk3DC6Ab74W;9xj)rClEN!dzhe933k|1-}B+V8f1HU=y6$Gn+ z!KbFfcy!67tlt%?3Bvrui&ZGQa${FMF~HilA7RSYGIRB+1_uUJAU~K}7?u!RTc!DYVT0zLR+bOs`ORLjx>*(+ zDWEF!+L5RLE-Q3eYFP1jx3QgB1{nCIToPy7I94mS%~khhU^#W^d8Xcb7m#P9aXFP1 z4PCQMTJH^ww<}ODpUME~@|oA*H#tP%aTyplg`xETO0^YbbPU?FagSz_hE7d5YUU={ z{c4w<>q)!V>CJ&6R14}$ih$@aIx>1blJ(M;>(l90V#)7Y=H8=Ro#|1B*{`>LPLL)) ztK6o)K?>PZcp1gssCu+AIJ)xnePqMvmBnOYjTM*?xB3}80OL9@uW~ieyY{y~W7*at zoL*|Ut$@5<50Q-8dy%VGw}U${vN3wwdP>VNg7MorrshX}3O{wybYZsWt3zF*MwzHg z^dfH6WXbU;>op~&~F{MRxKn@FW=S$`oW>z5dE#8}&;_WG%flkK z_D>0Ku~>}sttI)w-fmvRp|Z-x9oyPiUnHd=Vv?M&bjhRy7ux47tPwY>1@F>Yy6Y=D zK5BooRa=_MGlccAEn9KJmf0EHuw*mOM024zY~*Gf3lA>e3WO zCeWblfRyj8G%QR~ZP^JiY<;;oE%b0}0X0dYRmE!XwpGrPpA%>rBz=g}`=xeh(blI$ ziH&pPC2~P~4ca}bUaD*OWL7?_&B>-={4zm*;ZrlbD}pIpFcj$^U~FM5+)1ckpW22f z$uR`$?(IDWqsmjC^^l^!2+aF-H%!BFzcCQKn`!ze>;2~8?aN;g-@hJ{kghMGl8j=r zlOVx2wK}L9B9$IQItr|)SNbY5oQkrYl3}+hIK=(fG=8G 
zzumcBI_Mq818#$if@1_mB(ESlXJ{%@{bH5Y>|BphiT5RiA5nsmJq_+%o$md83I0kw zm!H9O?psg3!#2lnPB2>XVlMHx?bDCjHhp2Zwj5{YYH|yF4=*pcW zafc8Xw8{cdDawz&OC`Lz;7Qwc^>yq?;d+a_+b0Cj>W}@1i2l;bn%etQmbV&^H{edx zE>Lpf^peT#L+4Ef6;8T6k>&>Zp|yT>bBPE~j(BYooUsgUS#wMVk__2-x(#^9GtXCA z4AKWHLA@x$WxG&qDdLoSGQ25ujRMOczeP5{ImnEL zeu2d#O&^}j6_5JBf(pFcHW!{N@%M9Whqgr8qs+2WDI@uX4x97*N092P|9Ujw1fW6B z-zhbp(*y0^(WTV15@Zj$4vVtCE-}h}YRcowYJz=3R%H7@HKf#Wf(>H1gQ-KqH<=O- zi>+xJKk=E0AeFs^PDJCHxL)mc9C31b`r0G;8%cI&*aTQbaU!M+>NSi@ROjz*G94Fc zGjoli!&!~bD!l3WBpux&?IH1p{N^|VVqZ23JsU+A4jJgAvMY_jIjuu=U%6aY*%-~x z#(tRf8QTTPq13gX9}Jj0^QFz8CxhF3zFBKTb!vxo*+0!yI@dgDw@K*G5YzFqivAdU ztFd&j(%jmY>*lb!_KGwvrKo}Zwo69)`1_C1*F}kz>JJ$XaEANilGWMcO&`=pGpUTc z*jE)^0hQCji&7+@TUgZ}eoO|CEI`&qVKXS5l0^mNwh!;h@C3u8B!Bv9$A`7&|c-_*Ub(h#^HQ9zL z#9B$G-&bq4EB2nK&C9Q+%QbOFO!$9}ZzV?W=4vugMBfULKgX=EEh2wE-88-$V_E&) z!1K$~HYK}J&7MXlfb`&CqHLQ1^-F+VNRmb<5h%Xgc6ve#K)Ssd>x zdx^A$M+mG@;n4_vvpCpd5;0AyUSl7;3J%>XV&QyJ=6kcplYjLm#bs%H9XcU;D}gee zUZAbJTgeIN*(}ui-9YnGDroIE`PZnLC4JMJR1>zo8(fFe2&BsqP;Y>$12FB1>mhmR zzfDJyNODIxto*5iFyLoL_IUh+GbGgq$6mZb4V#&Y_Dg<>zb)+siyWKRee3v>wi@F^?@)b0 zEItkC++N)z<-=YOx+cq7cH5SQ+iN$M>loDqKcM^OI<7Of36)W{HYaN0yq=4aDwkd0 z)Z|E*{K_K%@zAym@rM3Rc(J&g?4Aw%?T)YY_@Qx0_x)HPP}lY!mWN^{SOIpH zXnuy4y|&x`0jWS%zt9D|JtpnsWLIsz(&yt){n6Wwu{@~c_HO6?5u;`V#xQA=aBEj(?jq3O9&MVL|b zHnDXCB%j2S>QASzWluTRglOvFp+vF4O3HFLKyx_W1QRY^3 z8U=3?)S9E=`drOIapPn%!U~o1J%_p4E}1;2%gm*gMPoZFjn7odLyY07a`m{Q)6Q5T z!@mRE!+m2usd#Fhyd#m6Bw?y*y${wOKwSLD;H;g>2VSSOqH9j3WHwP#WpSo5#G@SI zN`pz9mqHJmR8%e&5TbPd0GZQ%!YomVRHY})eg6PrU6WboLdMgtVFgg6@bPZul_OLv{Lc0Gnu72a~{g1F`BlA7Ub@!Zl*) z&vG(Oi&Ke${{Z3@;E9T2Cv(p!>49=CpR`u#^2xYpoy^N%eiO=oy3GNH(RmX9iqvWO&+?%y*k!D6V2hitAU(pwL2<;wA%dK-?S*)$2gY6rBA9L zcH~>>9)}saW76cIXi{z5N25ZTWmsn5D(GM5jH z9Q%ds0)+KS~!Ow^CuEG#ag7*8Tn5o zxsfo#>TSTwjfJI3Q2<|kHUyjOBC4(76!|RFBp*wXlNbBe+i}LyK_J|O+#8M}Dt42o zs<$0+%FfwY#!~CP7JP>)Rq4kKJUz!eF4hud#d+9@T^h*L6bdyTWf5RLv0|c5Me28z zDWP)iB%~>Yg>pFx7v3n*PoS`+Ayt1ZY4nr^)NDsLzT@Q&ROaZKM8!U(JuasCQd}r1 zJ;i`OP2%||HI*Wy 
z*A|3*=CYuo1&Al8+6fhU8XZf_s&Okwx81XUsnibe1XZcp)~O10PGV)tk*I8?A7Ve+ z9cHRWN+V7#GRT^Ss^T$3jEfS8Dp*}=pohpkfPR7`gnrz$k3053wDhVc7xSe64 z>MK&s&ej&wxU_66BZOF0Ego#T5L`~_ZLK?6J}Kz~w{^QOaY~)Ci&%M0XFoL8)b@NM zgHpPn>fAQFnt$?q(ZzVr7gj6t!x1q!q)KW^(n8kQ&~%d8io4%|ZN5x+mcm; z(JQ4Mcb@7Vl3J2dENl&~qORLU>6P%4m$d4lYL6O(+emy&31^)OXpnA@p{G)SJqh2e zTJlsHY`RKJgMF{%N1ftco2>1j81T9dYa8d;!|+>^q1n|e&b3c0Z~EZ*uQ`!2Gp#B{ zRTajkrhrpZE>qN*X4X}5hcxO}8wCMt^B3oX>>G?Q%@Vsvtx+c>#Kl1lGV9DK0HH1E zxYPy2YsqLgV8v(}(5JvVZ{{VRT{{Uz`DP8gV8Cc5NF;d3q zzKuRdQqPCN%D1hHZ_J7}g(uZx<{ke4fWcKv0@YrZH%O}wK2CXs%gi#QY_`@2Q2dNb z&LomR(q^TA_jNz-4RiRqoXA$HgoySpqy6LOv~PHens84mkQ7x>^6vLq*P^QORO!VAxk6=I9Pjo#EFTyYI{g_yHQ-D zM!*!bq6z;1J*^*DloLGEWYIU7siBQ5uyss3>1nj{euvmb+rYHx`PpncPMKSv&8}Ro z_ZK!vQMb^>Fv>dqT0f3yO-;|s&8^9n(!{dVq?g_*0Vz_H00Q>f;28QrfiU(Ws^VE1 zl}`-KQspVZOD1I@1g}W}DIA^6@5O|8&e`$XUIW8xRj*Ppo{2c7(sFp&CC%@SM_Yzl z&aFh!+bbo8B%47CC{q4$TxX=PKh`lQ%wv zXd7Gl#C;VBg#c27pdPlq{{S-`bu;f96*;48-0Uy%kJ*ttB%XF2ou5N~mmX~<*XNi@ zs;f%4LKfw{tpqVlQmu7b;`@$}a%5140a3W(+(fdG6tW?jdTM1l0*W{3)(U2v;)mY2 ztDd)qg|d)sViV5W!HqDGJxzhXUSZ`8L_o}slnGzM-0upTmTd81@`Ih13K!M_dqC6W zAtY_9pzR1xU@bC)@aF#TaZ(!uY6oaeNK)*P%z3~Cwkd2K!zKTd4z1(ItR5@hb%&SRKHGi*Hy^u{IDVUnL|Cr?)VfZYxE?x@~g| z3-iHtGiHxpui#0SG@!bQ#Goqw0F>pLm`EPW^4Ik+vpl79k0~?Wc6TFb%b0&N5U)b ze><})EvU3AE(&?7bcK3d=3cC`yw;>z?IBh#YpM zO_)RSEw|{Gl+dk7Lrsm6oveDEq{Uk7)3Xn)_uX5LIHIMkY9&f1a!EUr2Qu-#*Nx64 zIXB*P*AY;sWzURk3o3=ZE95a+WdIusl#s60SD4v70+SEMdj8(Q9rRbRU^xB_J9=$?uBkSxlR3vR{ZpYIHK6b_`?BZ zEnQfnHSr2FPc)J)+=5m=Wzp5hPM?J~JuFr1If}-E;=3{4&1!C7Vrs*a60**DOQx~t zmPD%T7TOydUfbN<+zsPAiG{3{DP=`OsYIKSPb9*Lchzt~;KW}dYsA=oFmmESPg8Th z2L!!F#7JyUEurZTDK~BHzkNF&;!nI-{vWaCP36uPqhMz_G}QBwoN6h4@Ft^o-EJb)OHI+=p9oS;bSe?n^4&NKf%Lt1UlX(9KEkpFzMs zYQ-7*jwGoR2|0zLURqt-%v2JUdQWkvSm{VA-=rM6nzGQ&w!tgLIR@ z*#0Hhb_O9h7#KkWb=QO2n8{uj+q@(S9f=X-23wlJAT?$i* z=H%F$3tww*2>f2<{vf5|rm?!G=PDFxotr^Nah^usz}`TDFj%@ZP1!LE*pWPf={LQiugv<|Ggmex=&#VJxGBb-6qgkq_cC$)^~9K0G8$=^NlzfT3eK#k$I|x;p@Yz1z3sXV%$;? 
zN7>3!>X1m>2JfMf?y^-79H+}{%EXw-IOhOSSA}UQnp4ux&aFym43qv6K><%D+i(ZG zO@0zorG3L!DM$I7B>P2Snf@xV{yc5kT^ZqrDC!j~x?X^NBREhDJK2`Fiz#V@_kt?aPb=@@bW-%X75_5Qn8CQ|76=?4rHwkT=+y z`$S7LDbfcFs-n!Z75pDsV@s`&9Bi`S-6sD4gpLnGv5ZWY*Q~8eB&ghyZb|PKw}))* zmBUY~FLAoi{u!$%wuL>qWZYkS{{YKT=ds_sQ#i(^(kB$?dr{xK`Pbr~!+g$ASN{OC zm{_?zf~yV{Q|3tZN-W%&XL}LQo=3Qh_~Jk!_vdK)L7urmBZX5?p;sxYtY92Ul7*o9 zjJlQZsVAb-$Rm(D+CEq3-bGh(XA_!y%FOK!pi9(xk~c)lJpm(yjjlei)3&VT{5qeT z^)X!0U(AOhsa3>q>}*fv1}JH`CJQ(J0BF)Hwl38&*cEcvQrTSs(oO-eDEB^kJ~_;#5=#Lan(6s5vF zTMNgcpQGYg+H;{zOsFJp&oSuB>Z-xwFJnTh2hAI2SpNXA14DxR!elHc*g=4A%ordj z1ar)HS7;<1>^Flzxc7*$r-O#)l6Qh!Pq~E(ZOd$v3n=(w5#mR_%|N2gR2|r9VUoUk zk+6zuTvE4#sai@gT-$hG7F`I+ZxLBzLA9)6%E~-DfvGYT4BO2)`Dt7$KyWk>Fg1O$ zY^LdDDCNUq4v~A@Uu&3xPT~5!KCbl=ZcRdVU0NJvwFe4S+UO(`@`(GM7Bk;6>I+=O zGOTIlWyMP}5LjUiYyyFBfE1yzu(`J8){j5h3yVvR9V&G$)fi1I%}FPv^U~YGZEU5! zGnKv#c95wWx}yzEbjM#4N`LJNYd5mr5#x%*GFvUO!jC4uv%2*;r)pIRZ6p=6mHpdj zKM2y5T$=;Qj1?`L_^M|X)EdeZ;)W<;7Dz#NV%TAtZMJ3hBQI4rM&AB{u$0N~P+1%ZzxV=5??EX#t z3_nWEoYTb2uQJFxa~rLiZFE+LlDxC#N!X5 zn3+cB*wjH+lC4M!LdCTwkaz?b*s^$I4q)nHX*gnoPozp9ST`dsmXr0;H~Pg*I@bBpWc;#1kvmU-hfTBFgh?FNqJDnSXTG(@wLsrx{1`bk<; zl6sM(0tJo5gjLkP3;D#<+tZF=wM;Hxm6W?q)RdBLbcG8fT%Ck!oClng%u6b{X^Pan zKC37KXCzlLoouea@q3h;?07#rh?`KeR}D;5UZc!a{MnWp4=km%=t^&G+)xO=uvPt{ z@9z_|+$xK+t7{p!erD9S@!M~qI=lwf>R*V2-u$p+IWGC8H>!zOY#r8h4jrLeo9!6d^p^hy0nzq8bFyruCny)VNq ze#=(QBwi$~nu^r5mz6vdVzYIfC05j;0Vk|kv|PB)$py>5P{w$bq>j*LW-^O1`($iP^)bsW2H3QJDs*jw&cVi+pafRy)mH|{{Xj-t1kZlqfk$IOXAAh zcwNU+8_nhn#s2{R0BG%?wRlUeJXB{f@`xLM=bcrDN0Aj-a?qS(3PB zzNh@^rYv5NG=}AvN9A$;Egrd;y8h*c0ZNyk5y$tPvB#RhfaoZ75N(9iW8HWD=;r&@iAGWcLHcc1! 
zC1sZBNcJiz7x#~4!8=F5_+?eaxb^gE-7Ka4B^Cad!8DuSEv)q0uL}C!y+rM;VWyks z(=W#+H=*T*2A7wXtKuq*&LC5g=v(saIHp~eeC|?1iO{s9azc&CupE&kb=*BRnR;Cl z4^7LfqU>dAf9b_-I!;<-c?b&mDelvy2Fj6B}r4LWwoBFDMEQa zIJdBtQss6B0n9OkB&FsX_$EtXZw~&oT#umzZ> z=N48=zmwiBCU9#_ykw@FdW^dQL2Z7fTsHnBq~F}f0*@a4^EegVzY?V%UqO1vHU3r2 zZ@zJ}@5gzD%qQ%$`}!Y5+00f3?#a3>@**f75H-n~jb5D0$aN1HV>Rj) z1-EUXL)%Jr_Ow`&6Lg6-()D(G>~!?qb8m#-%V8qxPy?=zex@@eS29jL#dS*jz$i@A z!i=&H{H8~xroiYDgrt2Ueu;Y}MprZAX{GAhido+-JHISHd-5*r4R}za>L9n5KP1I+ zD_)RDRn4~dhli#n63coPH(kg!CL5caXf#&ZLJ1q#SQuL*pE(-Tq}@^a4kw4|JGAHt zLS8mfyB?5s+P>_a&49n<^M>%#jF7aXpXmz5rKdS zV`u}8ask_+?H*6g+#B8|xe2_7gryd-s5qbRd#VpuKiFfgtxCV8DZ} zP)NT1`^T2)3Da(z!eu<5ej{Q5bK2UBdY;dQQY&mgzp`#+u9hL5Y-!QYEkJ`@ajrW$i?$B zH1~)P33GJ0o6O=&B`x;k5_LGIq<-!2I^A;RD)+F8CGZMJBcAbNcvi*=!a1EaMVl#9 zl~J)ehEEG9PnR%{^)jpf0HPkzYEPiEjf}r7Ed1N3W>uBCMtpE7!F z2XI_-aLNe$DY^duEN^nk$yUp8R!De&!Z`l`7QK;6!4#Y?m$;Ii{US<5goQjNR4#`f zdzHAMwfY-#%w;UX!If;g#?_n?SwRzYiM7iuR@uaZ=~(tsN>n~^dU(RcbW9zOXjx~M zSYm-ik-+5>vQxCjb<8Q}0!d4%94RV5B`UhFVovbq;+B&sn;CV2{7d1o#k0*aHY81(~{#(={@ay_sO{NgNaO1}_}CRcfg&<`wXRmw&M7jvpd_A6J^Z z!@z8!*--qF0qN;XMWRXUH{@4`U;Uk9JUd#=9Ke>t75TT)X%sm}(7D$fAluD5r3pM~ zu?LIqXr=r>r!Z;xqgkWoHEM3BQj&azpLqHf!o5wYwEQVaNgE^p4aZ1g%&Jy7%dFC> z)3IG%YG)8rXG5l<?+eK#JL5?=JQB?Gk(!i>nwn z5XjQ%AU>pY)i+{Mz2 zHQ~=7J$SaJGY=LsZ~Q(~kFiU+&@b@w`^2B&5giSU&0`$Pdlu%ehs;%kaNc5O44k~w z@)}#Rbk#Oom@aU%XxJ$MLg1^@)+pS{@o$ZBQ!Aoi#u0_6$w6%il_TvdM;0y%1P^2Z z^NTvL%?HJvPOg6s(=XP~sLW zP@rr<9SAVy+DihZ7-PkJgWjBR=N9v>DQ-AQmbEC7qI!~fFiCXtXd_=p;Z#)1@*bzH z#?T+M5O**|$&UjFGHQPhIl)QC6jfw~WjL9Z=3DY z>jC?T$l9^(|f0u2g<-wcJM|E(&FtM$zyN(E3;6*E4kZ zVvPuQ%MzcSKPvL}yw6)_z$nEhQ8~YY`<1pD@YR86+GZc6{?L36j&(&t{()ioMT;$k z=`vD(U_%n^OO%urSwKQcZDKZrGZ?02!(euX=j)RSxGrpCm{>9H$!>3HYYyRCC3@%J>-w1jz*SL$gux`Y(fRlDg@h#t`ony)a}D^q|aBIQJj z0Tuo$@dcH*M3mO?)hUMs{^_}Z^M~6*dW=_T!0xZ*qTZaRC)}HVBKgjZ75qk$@nykU zstqLxRsOLJo~S4~G+d~ifadX>F|UqXA5WEE_Dc=cYmZyfr&1*u`I4)v{{W^lZe@6> z&J28qB*jsjJxp{Is!o;)7PR(p;!4y#fdkq;k=gG>TyFKJS_+gq=gIFl6NDVb^Jrlvv`%)H8UxYAZNHgp>%!&Qmg5q`0%^6xe9 
z#%SSIYgC$voRxG0y35Ay%QKZSqrTk3-U`LI&jQp}RKe9* zs(i}RrD|yGwDwX|3RFH~KHJB=9^3I-Dp98_SEm$|-}sVAdtVKj<>GlRAKDq()t050 zNw<#d_a9K0qG4YtY_&_14Y+kUl*=Fmo_$+-zPE)&Lk_l-1xQQjX*y5~39<*WZDI5g z^R)Pq@e1P8VN>gN{b`kUC+|ls?K6Ja7 zb#kQ}0EHx6_5mr0_FCm%ifq-zGPDUiYgTEeN>?{D?Ee7W$+P&Jb4j@N9Fw$0x1&nd zbY*dScL>Hiw)J6rf*qoTX{b`B+kQsEWykRo$)dAQ{iEmY_IH>|sUx`x2E*PYG;a*~ zV=q44a-o7Luhi4IN(;YdN$=$qloRi#=@VSA%d9JuIB~iaK6+xSJ){}hq}t1m&AHT- z9uwIkVfw_O%Hv2sbkMhW_{IMKAGxJkbU0k)8hfm!6F$irKeGaKWUR>dK*akL{Zwag8u+|VKbFQ0Blr#P`?FIqxPOi=+8Dx4NVuYAa=BQ zRUi{`yTyGyiz#&E7b+E5SyYZo&!wLMZ4&k-3*hmx<_xsgI>rCNnDOdb-WVI+MGBpZ~Rbyaf zr3zAhN^qz7MQ4aV;u0NNnZp<)TZFkgG*12WpbzIBuE)`bXfIc@Y~r8!u^-i^@-S86 z{5p!mntogUhS|im$vJ5^=BB1wX^D2!>yE8jQiP;jsYL7m8{RUW5~}5;UpQ-7&sxk1 z0ueNXHBL*@?I0m2N0yVMo8L~OxZ6}_yun4xNxeJWbW zx&Bcdqsg@_(pzN-LT`H{SVWdhs}XID?H@_(-)F7+IcfMycG}KceA|9rNZ?=JRKKh1 zYHi?I6j{VN-iHpPk!64Qhsk%tXIeH*?agLK2XA2>gH8;C!Y2y< z0K`$Z5OY&YUYl@z;h~kKM2$8BeatFpR@exR5DMHOCsNiGbAMwT^OB)(2pmFXsbt?( zhiEI-PWJb-G=MnFYBwp{jf@Y?+kpuzK?DomZeV(d+jH#$0DfV=uz)Ron{NS9h}`U8 z9$~RbKKmFjBg-HdB~AhV0QVem&X@LrO71z{7_td&8*Bxy36$pM;^XH7QnokWXiD>f z_8h}P0`m5@*D#hszUC3cG+bcQVQQk!wKlZh{6;~usQDBX53C7F-*e0+nr$vJw5d9Q z1QG2GtgH}k4Ek}8@QoijGDfY0>7pEykqj(#}{m6l@xAO~#;g0tneW=>Gr%`HZ|tm9p4&dDSj&3%a^P zqm>q|u2}t0bdPZqMsE1flESqb?i%p-f~9b?lk<-*sa}3rB6el+qpYO{`aM^%RmkWB zU1vQ|()q22J-6r?Oz`QH`57eMGVxK28k(y-48*<@roHCA`nmhn1F2U${3!0D4b?v! 
z6}g;ELXg6^N|lE2B^G&yzhW^i+RVc!SNz3V*S6p!3n&9}MdK^g@daBDRG+KX>T^@` z(@NI##LKF2#}s<0MBKqlxoTh; z)`t*_**efoz~rF!i^m1!9%E22wqMb5+c@>%NiY0LdDo>tPFRKmyq01e+s{uKcYxfS9sHL$)5UPs}~ zwVuRy!ihNaoU)uJ$dqI#-7f*S(7ixhq*~kbirQ>xym<&bePTQTTgO!C3L0kr0JSu9 zDpIVYFQJmj0c|Hy-Ae?U5G|!#!+(MNQHiOz{$mWwP-kkme!j_;s-dO}jZ5rwIdsEQHuh-k@P8QinDvhR%J;FgXm6*M2dnnB{$O0&^0dCwv-Q=q7#lhYqv$EoEhQs z2T9>7D?>sjR>D5evBspfJt!yllvJ;jHx}6QnBfiB{{X@+Vl$hy93g>dF3r*1P@I#U zaX}L7OE^uC;?BWIAxXC6fCwAJk*o*d&%^0MnU7oWSZuJT z8t4K=mge2`Bib#@1>uV+uoX4Qbx@OBotSA%zd2G!acT+{R)qX5>`wyz@eRui^Ul0q zBU{Y6?pq|_wrTD^AuS~0e$chHZ*&!%PV5vEYyh^vor{$Z#Vn+@Urpu)Vc@F85?s#x zo}AlK%W*TwQb|ZdiU40@a%?+ah%np97UcF^yb2ZZU+|Tk!Rd6q{RV96NF>^Bo~3S+ z)68uCsU6|}0L6w?Q>qeDx$S@gCTR7fHswdi_)@hGL*m^P@T;=EW+r*-kMdVHvN~H3 z&SGggZCPks%_d!H^JdzaPRn6UqUTDjVy%BsRpls1{7XYHKRNcSY(@0PYS8$~dHP4U<5dh5wyiQ}xB{PpP?7dNpI?~ci zpaNN>AzUm=gQ%cwbtoQ0NmE{8%%6c`D;4-(@cs8VRz+Cf5;M2jijwjhSnK8y<)eOx z3O?~+WLJqPEZv@*px31(X_b_zPf4=U$wTZ8QVO>#Pf^rvI5CSbR#)Tv!r#Z2z&A=1q6Z^`!Q4XjpCT?X zmfcNxoq0tgnfP^RZ3$+vD1xaui&)>CB=$d~eBJR8k}qbRI%&701fF1F#4QR+lc)BB zC>JSF{qf(e^>tKzBgnkvre_8dMB2QzD{=uXRHXs6mu;L#{l#)S8;FT`h{<;`M6dSS z3Nl)+)5n;vC|OeoPX7S)4xPosX`@q;>(lIKYd&Yd>V%xcjN6iva_%zo3sX+EwAn~d zHYp(RL4>yiTnO4eCbB4FdVJ1iV9H=-yC5xGrnZ6$$shBZPgA!Z#Bv4ki-`d3)3j_- zQ&~n?M;w9_Epksiqk`t!z=-MI0#pZGy1*m^k`2hTdIoGo>?6l%|x zsstg|QZMB#H`1~PDcpnk#odi_a|lpyyv=q>vnf&X*;3h+Vf6X4E&=I6MeZ(ba(EYy z%s!TFyF7IKD~+o9UD*_=v9Iuz1C_urlSh}FZr2Fh`a@9vs!nv8jW@^9!uw`$=Q z)hK*l)Au)BSH;FfNR=XA9y3u|KMaE|J^ujJV4v0@ScAsq2b4m;V&UqX@fWdvVVP8) z%&A|bWyvhH!IEkUMJOu9>DIL>-%bsvnd zzwJr<&Q)&H-qe4?#C_j z(*sI30zfv}lAhNjpR6~TDMcSdiWGbKLJM+|mE8W$Xde0iB0Vj)Vf#B|*QvCI-RoEW zSuszH-&OqeAG>eRM_gLdV@Pt1?xVSJijN_?dz=GFnlY6|uQ@P2m@=;>O}w~mS_MRz2U_$(Nu zR&^d_zr2KE$7UuaOA@l~IF)R4q@U15eK{#Gg#^1S>wDYQf<+AGe>R6`-j`g+y&g)o8zoV|sHNd1F$x@YYhR{~ETGj?0rdkxGttd!ZQR?#)FQ|V=T}_!Y7P0^X zs$sgCmPViytTL%(B`*N5q~BHTe)b%Xt)YCpgG_)DkO}PyP&VS%{NY1LHwr1dQv6E; zDOzMFWoJV{W?kgFr=3%xNL`!a9i`#Xwy>z8 z)Ux8<6t?R3<XD?_ISdSA_nUj%|P2F|FuqT9oH&Th= 
zBXb#^t^WWJTq1`Snid1CR1lSEOO7cG1&`hd8q82)i5mrq~y3rxExJT}SW6Jhx(H(`gx|bPVva9H%)K`X98-4HoUrUH~ zp0w&|wAwx^c514Rk34%eR=OrxQy67CYnz{5vD)aeKb%G?zBY4)Y+beCigo%Svgse0 zRA+{%6Z0F1MSy=2lWW4Ju$2416_fLib#K@n&f9MbNPF`y{F^xSINkkDbsF;6v+8~- zvx=q)AIy<1lgN~kE!*T$kYXuP%}iN{#+uGz)L#RTsrlBg^8k?%(Ss)xKvRJu*g=TF zE4_n?UjG0n?%VKA9c%oG5Ra51zJ@v8+*JJaB>n7h6bv(3(&Wx93vqmxXTpvD0QsB3 zhVqJ&IIk{G5-Dot%^?o${*n*r4NAdF0_6%n>VnmTCS{#V(&}}Ur__}o=}NeM|U*v$S2uTUo2YH@@(heo70(YR0s+#3<~i7hglI)o)kZrb>{wW3i~#^2j{ zVD(qcYP(N$QgiGtK~0b>g{4UrxUoqa@3`g~sP3u9Ju50r&Aw3vn%YE`m%1KlQc6Gy zbzp!v5g?NZN`X((@YySMSdMM9ILg>Mo}XAqnJc9f01`$20Nx+lBp@67;xSHH8cp}O zF(N~UNIl^)Sr;^+eL(yC;ZjxEM$mzmEN#8LpzGNBZ@eLj%bVNjXyJ5q2Z$$HfZ1P> z>jF0-_rH5U0F<=qHvFx`1gs5{ZCxT1YDBPOZCI0w5M`*Q{|w4S@#W9gX=Hg4VBocY@U5+-^yRkR5kz?grgqE0*Vd;MJ0D zN&3QdsGY7Y+`~%%6tjLngw82jfd{xq+7p|bg8^H4WwGHG9JzzDmApRu+NI<%b01_U zqGuVqj3|4XEkEjruZ^6#P6f`n%FKkZYL|3$3UvKbaQ##<}rBElAIg4U!q3hucfDxO6dDvUgCEnE?-{{R;q| z)-dS5S6e+L`mflV#Nm$P0gd>D4P5)g$1L(&GBawbDR>hLPE?9i66@(snRb2Lbox{I z-`X?;zE1dX@b20s^Vb=t8)BA*v4yA$yDhJxUvl8vzM_%*COUb^opU1o3)sy09Nwj3 zEDMDy6J71*u zsqpbXmsp+j8ucNR%PHj>tt&wO?J_#_(yx*4A354lNd_&y4%xXqlo^pt#JbjLj;rjo zHi6m6WsnuObnKz-J>m{H-3O4Rd7YfOWlpN)6L^|_jVUQU>L%qGvKE&|rD+aZP(iiq z2nP4-7c{Oe@iXFU1W8~TeiM{#hTBZDJ5_y;EfY#?i#VrT*(YFZ(7o@z_KTM`s~!?C z#wr<~j(=v>Ytu5!xXha$-j?VIyKWW%(iBohsmp8xP}!B^&m318T^lUu^4LJGH3UJd^swagIaVo7~XZSK^->P%|-{Q;snl^5{?qHq(V|%*m)~^AMgjBpdCy7wsQE(D57-8$-cP zdABs*{>L#~;5verMRC$BM=O>$`jn5D?G>elJz8G05#Xy6W#Xr1@TE%*(^|zBRK$%* z#=@SNZ}B#gtLajC8f|gzB5&g3Fmqo6Q>EI&t;scqDyTBkGlEz=_n1MuOV@OzDjX0u z+(jwkV=1cHBbXAn`vfW3T14#7nv->qf4I$qsI~r6xlgepq)l;mjJ&_a`CKsFa|BM+ zYV|2kCZQ~)1|^wvaN-=@M{pD@HsF|+G`tgAO+IOzlkvwZL%@{kO&2fPO?9RNQv&H? 
zo`sFIwub#i{M*xRFA&OG9wB^LzcR<|##2ON}))NxIg0f51kfr>ioB3UT)g-CAB{JcOWpHeXNvHM$9h>*`+Hmr-R`s3vKq&Q!i1LHr9(D%G{)Y zV{zbYb3n{&DFS_ zjLIp>1xZ<&sE^$}dmA2+^GC%_L(0(HR%+lGRT>3J!fF!8NCrXs zjYNU4Nhf}QkVeYe?rh|v6LRiK%(j&+L5yLZ8Agn}tW7DJX`jM1@TQ-Z zDAbnqUg_U!i<@7Oan?3bp1(EoUvinPvV3Sz^$;u|7O)_57hj1Sgq_L^MBM!;3Y)_f zl3k}wChALf-^o$#IZskO!HQUb1;8fTaT<2UQJ!AO&Mh8+`F8-3?;eCKo46>aec-o( z7j;-(40UT=-BG_nVGFLP-$^69WpT+Q&pl?rDz&}O0ve|u4g!#%FVt@Wo(plc5Knk( zDk<;z#F7T`Eo`*d)U^d4;xK9|GU!kNln_5yXi}7}vA~_RazrMR<}}CiRFz6WEdlfp znd;1{R6%6z)OChsU1rz$L1mSaciX&EC?riyj;_|F)GhXNzq}=OsL4*BvXUVR?YdLM z1f(RSs9anU0kvCv|X<-+nqn{We4F?4wX2R<}10_$3=`XhISb`=D5UQQ+EBzyi@uj27w=HrNWf zhVXCjgZjbR2OvP3Yf;ok=?TP=c?m;jasltW07xa}A2CE+^oK+?ked|ldl)eit}N~e z9U?kLGa59q2vUNSPg}vFS)>gpX#jf-;o(&GoZVIy+vfxsH6-b<;tU`V{ge~wH@pZ+ zZ_z(qVR4pIXKmmq2KNMuZ_G#pYLrPJm2Cq|vI4A?8v(Y^sJ4|0Aslrv)iq)?jt1f) z0XjzoBEzQ;6-%_CxUshIpt7JxHc-{aFd)$z695eJbaYIO1t5YuLp6GXS*$fLF1e)? z%Muc#k6MRI_qDb+;wEWe5L9|>ec<+`ljW#^?Rb=Af+;{{{X0Qm#3|1Lcy|c`NQOlgt)MEH#Xdp`b1?dTd~Ds zWH&H`ga8ssxY%67y*iIH`n7pwDfc#q2}as%Sq%YiZjj)I+erx^YzIy~q6MMSl;-8r z>rmRr*vd)lOazb|G1rEt{D0^xVp4DPE<8F{VA6a_} zWBP4PnT0VgQ9rY06gJbWg3|v0(&$0@m}@I^7*A$wKOPf!`zi1i9H!-UMB%I(G^Z)^ zvXq8XEK=>YHVcRWK`Ka4up;F<-tmI4dvr>cE~QH1*~z9UGgGZ1eqn+OsA)<8YZ|sB z1AXJ_t~a4E;zz?ASVqS#@&mwx-=}MGHaYDjN`e zYuMP<1MJI+3lX6)?YrI)X_8!o=+q zK1(UT5gr)vK1NNt+{PrqpZJvo>bX(@Q9@FCYS!P$#Kv!un$KMNa+!nW<}n^CMJbk@Z;fw5qNs7Gn^Tvn6!*C)UysW^niIPLyavvq1RMG zM&J+;sN4HpymSbgOLu^D|S;#tWQfR$J9ZwQBU}mzg4LT_K80M+ zB>w=KAy{fx8u*3Ixs0{(8<^F4G}MorRUn{*G9GMdQz%GKX|ty43bL*TSgLcg1>tN1 znRR?Of$=o8c84ycs#0m0guGCd4cc1MvX+x@NZW{7a!Hy4#=ct8=aEOx%nXFPM8wRr zCre2IDSb)1PMdM9BT4=w`^HWLj!(`uPlLQ;N6hTMulQ@nj(Lns%R2Qetoo9Jy3(8e zBreV;{ZW-O0|eG`=NMIRU3rp;%3R8rS+>q6@VKAhQb}(W1qA}~f7xRHw)}e!GxLRgt$Ua*$ z#?8v92XwE@RfUMLu(|e$KZOb|a?r87o+iRLN~1AHe$w?K@bV?zmnVN(>$O1Y3&1lW-4#SqX10cQP?Sm|n^WB|M>X{*XR#qVRTGU`zon zCGdtAk*6D3Pa(ZeFZ|9b>OY)F@4O+b)Y+v{rl;MiQ=|h(I%X%G31JJi+Ret-=Em_9 zJh!mDS1?+D8+D9tG2_1&NYo}Ip1^o+Z8GmIuQ~zp84c!Mb^Q)$Bx*^yx=pSRQ5r`n 
zF{Uug94yu<#W+e=54AV~q?kj?amQQ#0Mt-W8d7==NZumfKyh*AOq350%Zc#iI}}mo zD3w`o?7YefPTQpPQ1=k{s|Y1UB-?Aj7UJ@RVSLrfT3bxjGPfQ`srn^ND*H6R8?vHq zhmcRgL9zBe#NXxiZ)FxE$_!;UgmD8ZDM2ntO#~2$wh201PtSrn5JY|R%*+KLkEn~M z>9q48u2s~g_E+woV;KWI{4~|`M+CiuaYYH~O06_4$~8@G>q~B0-ddEK-3r(d+Q*dd zsy7ge-KCzNQnyL9j)%!zD;pj7k3-fb!*z@$Tb;wz`31>TDbK1}VTJOAq>x9;vFZ$Z zHST45JTqiF=^O0`T_O}Xr5>Q zL@Q$n=4IBU6(Iy)eS{p>8(8(@cyg6&x{q1(j$tquQ!ce8#V2k^5K85hdxR5fbct`5 zEJ+AJ`Ns+d`(CqYtEN(lzD|fGPQ8(ZOW5vfw`M^ec<=>Q+j9~_cxAe4YD?F1=-dsqS? zfPoAZC~bQk;bSuCKqpAw^Ds8iqSrP(EeE9=5&^ZjfDW_i+mQ;JSsb5!VJa`i!}-C; z3Lsc>ye6^(gAX`>q@?^@{I3d|bbvJjbe0iRv@$CnqY0W9ueO5n|HGog5XwiR z**ic;aBa^p+V+D|=?CJuFx=Lpt56~ox9aC}wV(lWZj0Q2E$zvJLx@)b+w*uviw(H2 zgVGhI<88Zh00t-mFYm+_uu10DPTO$;301aG=ZNtKat6ZPAOJS!i~B z%j9edTQ>TWv`I3`tA}Y*^R8E)X_YOchF0r((hdDw17pVj063jaW`+-v1uaZYPAjL7 z!3p{mSVyVhd>e;cTPvH(r3~lU+dF&u`5X)WC3{A@qiaFS{art=c}!L*GgNwm^vS~C zpDsdN4XaQ}tO*zHFJtd`NK{Wx%)+UsRQu07qO}(krCny~H$0Qn_k_wDeB;d2(&qGX z>Ew4|sYNMs3nJ#{^n%<@_WYph2n~kg{_PMONr_8W3%tH1JmTH)nyY$YT>>F-dY}@6NC%)Y;*=q#r6}1LmZKePTD5 z-Wo}W=?TqbfR9TIJxOuJco1}uhgXZZQa)aR;yb`-q(!j;cGBn%dExwGUjwk z%grLFmdUVNDzj5a{{W<+e?t+!7FZr{0q3S7kyG!?NYtE7R+{^+DZ=Ed<61ycw;?A| zf72W7qNTFfg;{kj%w^-=n}#W;33znNdTt!0vpz6c1noMFNZ%jkmmQRN}cA+}kv_f$ZHmcbn4mN|eru zPAUnfOlm%s-@Vv#{=Z4~R9blbx4bHNwUoyaO)Z=$j8>B@}4k@=AP+C-zVv-HP9^l8(EL_W8EW9I1;%PR~F^aI;W?@Md)@ImEpGW@y zF~3mvA}O3SymVxtEbK!60OALf?4oLuow$9F?5#l!$W+wOx>T(gI{c*# zgJP*!TTD!m*q?zvnT;uc{4-?MUIyQ$VK!jkMoDspTO z6Qv;u03{;HNEQHg0t-<*+B&5Z1*t|N#s$QO5~M2jP$@PxHal!b5g(nwQWdIf_8i2C zVy8l#Y#~yvw_*q&gK=vSZbszqw^!F+YF$2K?j;EwRBR!&+({sSp)RF7Q^`oMA5##O zSkv9S5{zXp%0R0=8FGTDT%gt&!t%8CsCum#&U$8AU9gq7 zhLC^&1b_hPXolBuyuCJEQ*{cQtcwX3DqDdz`KZMqjyzmtZW&XFma4s6oItjTQLJD5 z=WobHN!_`;Q;TKl-M!}UXxHm<>IdxLxCi1p56r_Ia~M+SY$|HER+g1z15%2X5&-`I zF|>U8&957|t2s-WUS3e7WR%z?A;MjFr>O^v`wK=M%slnYJj!)$;@n$5G8BDGhg62| z*U}I3i)mX|xLKUEuK~K2^f%^xWz8@6iR9K@u=IsCwu+qdev-_Jb&_}htNPk8Zf<|X z?=>-4n5W>02BsO56rQYQ_?aqR&U-;& z4p^HKS21m3XAojKlauus#p{@ucG{d(xj#SzdKz^bSPl9TW6`-7x6WC&I5q(N1O-Ux 
zYy9BCf=%tQ>l`M-ej_v3m`Px7Xz*YS`7miB(bA+^+d>)>mhNnjK9CTeNbgz97@4WNZKpEAkq$%T#*u6P2_Rkyvj>lHN607GMvAi&T!I(CkN z4Tn7-#EtAM4Yq^DQf{GR)*{ITc|aS1>@eh zcOvI_Jll@ko5zi?ayCik$ne|S##N|R)ZTf8`}Hx?sqZQ2njuvU7GU<4p3 z+i?O)6Ky1)$`m-yUeFXZ>gLwo6GYUqb+DaQ2{!KvQK$d~`MeU5Wm*)4Y&zIlB1ke= z`iMMn3IV>v-@m*kY}nhI!iN3#x8)IFbSC>Nj$lYg-(j%)ph-wp_JdN81&>$&O60Ls zjjkZ2%U_WW$x0NF={>eRU<3r40l!aZSplR;IBYcW*3e*+aU)8FyILIEMzT}}!VXG+ zAOyn54JnZa)Nl2J{?3bW^6w6yrrU(v4zNnvy2^<=bF?53h6o10+?}l+Lrdb;J#1le z8{Wz1tO*E6xjg=`4RaD9g(REa_w5SM#;uPhV+IrhxP=Wiked|lU@Q?eDRZet;B>Se zxpWUm9QF{c2u+l9@37qQ9k8&lx!>ggatQJY2AdG8*n_vy&^Ft(jrR~`yYmOM$kTNQ z0OyxsVZUfK8oaAGtB^SmU}-u%N1e7XKP&<*U^|FSfyUaqYq=M5GDZ%Nlnf6 z=KMiUr_D$wo;HPLC0>%F#rxdBAv>MniL8vqbr@?9(y>z0si`THqG%28EWbm5{{WClCoo{b|&no|u{C*Zex! zXf->T_{&k*N|B_e{{W>KW=QP1minO-h!T`S41jVS!Y|AoBF)q$MdGfEx(uRmp&od&X%?earOsf+~ROJ>!whLh%MXBy^K+lmwn(H1$eCoXVe{ zoMu_E%AIf^ez6Ix=Pnbcl{FxzsxA&Zl<7tP0L-uG3^f@`k`t0#**jA21WirK$g*Z< zW|^00a<^6xpUlM#i}TjD!wYxq)Y=5Kt`_Rk=3C~!=$Ov<*ToiI$UNh<98EciXHrP= z*;167{{Wd>50HVupi)nfvFq8_f@vC?l$v&?@Y_tPeHZ@#I;=k{#d}`QY*94Ri<6g> zp~pQBYEu!nBDxLuTPFsXlAg{{S+$`dTWCLyqx&D4H`> z+(A1wPWD@*q zr^`|$)p4yu%yQpdhvgZew?cuw<^)-}^nWO))Kku;viwQ<>GDLfznD0~7`$dNCL^7l zQm^T8(xi*_90+C$BEsenG?g?i5Box<2vO2{h@T=Yxo*WxEFD0b9)rAB*@u?@0JM_r zdV>c7BC^|+>KwkA)D2ItivaNb;rw3ResM`TbQL8@AwceuEJSqaR81QDj&^LU9ghV)C^ zoBnanv)xw*Q0Ohk&K!dcCdWx6j-!vP2g|nMJZ*l^1BieytDEg;5M%&Ay}De&(#n@} z*Sv5`*;dnYz`Oya2+X?9!;hSF%B?B9IHijZhnPJQ=~8X+6UNXF2ySjsKmrB*;N-Jy zh__#mh0142t*36sqy=dzR>J-p+QI-Z)3E3Nz;D4?Gnfd zRN5Aj6rH<>d&r9H))CEBO3OLr3Yt+nCIPVfWU;i9&=$_&JY zDx{@GdW|&P^LFJy2I^1MHy>EGjPy-1s?l?UNcwe1$-JAa*PYE|V2XV0TM%fUa$;>h za6;682|^X=Nx0nF_qmG^IvPS*Z7EA=7D`eql#gIIinqhIFO$T$FYL0@0mj%$Cgl}- z>Xm$JO}<2t^@~Je~ zDRlQAweI7`%Rv6W$}XjAUcU6={)8)DP@%O+B>rUdkRcU9u}r0-T|%J5xc>n476CsY z$&B5Yz9=xx7HPwYvHXQJ0Drs6xC+m<#GlkgWye490KyVZy50|<&Q@kPOvJ*95Pqj~ z7ybei_pdeNw`PCF)T^}`Z_VtD{{U3YN<6NlQ)Qo>Sm9RCkJo?FD*SKa1NJLUIatK> z+7k&IfZ+2j^EW?`5%X&q_`J?+WL#!atcpCU{{ZDlqe-`IQBzc^)w<*6VzX44opDCx 
zPr6oj`NMO!q_IjFeediF73T9+e(3GqH{{UE^gs!2wZ}Nq5%#@IlEz5 z7Cl!mS(Z~}F#yFL!v(H}90z$VNxV4Vm z%z2w5;@bT$0%^eQ({X*^E+*Uo01H}i1O*iWIr+epu%v5hL?yaELwR3RKp1t4~uS7{9n%9;>1hp7XS-EZY=gUy~Iir zaCt;zl$IHlT}G9ZDK}D0jmF|V7Y1bI*sC_W_8yl4e%hX4I|u#8@qa&@dOC2c8LXou zsm?D5(1dpuya_e~ad_$f01y381^)n16p{o;z2F5sL>P_8zj)#65gGtM0Nx4#I)nB3PCK zFJb@&-^+LsU2t7iy|&z1$GmYJesHN=rXt2C1#T+l zKp4*@H@4huybS;n<-ph*eozClk_Q|A07yV|Bnxa!zgQRr zM3oWOj*;R0KdcEmC|k5J1E+g-*p89qB-~!&`&tik;t|wE1kBuok!@ zpym{ylgaJ;pp+6_wa&&E1gQil>EHT6Gpu>cDW^@sl0gG&`$FeBe4w|!7k#0o1=ryB zF($Cx26mk@H!8!JVQ$E|_P0V1WgYv*4apbMbtHe5CHarzD=%>BTxzYDB5~y089GoZ zH|^>F0CO0>6g1Lj1Bqz?N=dqcMb6z!MU^hXbriDLCwqj5?AzaZxGX_AcfHPCTOU~_ zbOrhMbwP1Uhnm%+rsaF zZh({R%uLtG`9L^3MsHfn{aPc8VbvsQQQzOJDMbLH$4I|(4~Y1W{q}(?beqIn2-_lps=cDjd2(SHBth>1)GqCj9jf Yr39WyfDWld3j=FMpdEiW^Z Tuple[AutomaticCameraCalibrationRobot, argparse.Namespace]: + # Replace with your AutomaticCameraCalibrationRobot + in_hand_bot = SpotInHandCalibration(args.ip, args.username, args.password) + in_hand_bot._set_localization_param( + charuco_board=charuco, + aruco_dict=aruco_dict, + resolution=( + args.spot_rgb_photo_width, + args.spot_rgb_photo_height, + ), + ) + try: + args.robot_name = in_hand_bot.robot.get_cached_robot_id().nickname + except Exception: + logger.warning("Could not determine cached robot nickname, saving name as unknown") + args.robot_name = "unknown" + return in_hand_bot, args + + +def create_robot_parser() -> argparse.ArgumentParser: + parser = calibrate_robot_cli() + return spot_cli(parser=parser) # Replace with robot specific parsing + + +def spot_main() -> None: + parser = create_robot_parser() + args, aruco_dict, charuco = setup_calibration_param(parser) + in_hand_bot, args = create_robot(args, charuco=charuco, aruco_dict=aruco_dict) + + # Collect new data and calibrate + if not args.from_data: + logger.warning("This script moves the robot around. !!! 
USE AT YOUR OWN RISK !!!") + logger.warning("HOLD Ctrl + C NOW TO CANCEL") + logger.warning("The calibration board should be about a meter away with nothing within a meter of the robot.") + logger.warning("The robot should NOT be docked, and nobody should have robot control") + input("Press Enter to continue...") + # sleep(5) + + images, poses = get_multiple_perspective_camera_calibration_dataset( + auto_cam_cal_robot=in_hand_bot, + max_num_images=args.max_num_images, + distances_z=np.arange(*args.dist_from_board_viewpoint_range), + distances_x=np.arange(*args.dist_along_board_width), + x_axis_rots=np.arange(*args.x_axis_rot_viewpoint_range), + y_axis_rots=np.arange(*args.y_axis_rot_viewpoint_range), + z_axis_rots=np.arange(*args.z_axis_rot_viewpoint_range), + use_degrees=args.use_degrees, + settle_time=args.settle_time, + data_path=args.data_path, + save_data=args.save_data, + ) + calibration = calibration_helper( + images=images, args=args, charuco=charuco, aruco_dict=aruco_dict, poses=poses, result_path=args.result_path + ) + if args.save_to_robot: + logger.info("Saving calibration to robot...") + in_hand_bot.write_calibration_to_robot(calibration) + in_hand_bot.shutdown() + # Send previously computed and saved calibration data to the robot + elif args.from_yaml: + try: + with open(args.data_path, "r") as file: + calibration = yaml.safe_load(file) + logger.info(f"Loaded calibration data:\n{calibration}") + if args.save_to_robot: + logger.info("Saving calibration to robot...") + in_hand_bot.write_calibration_to_robot(calibration) + except Exception as e: + raise ValueError(f"Failed to load calibration from {args.data_path}: {e}\nIs it a calibration yaml file?") + # Load previously collected data and compute calibration + else: + logger.info(f"Loading images from {args.data_path}") + images, poses = load_dataset_from_path(args.data_path) + calibration = calibration_helper( + images=images, args=args, charuco=charuco, aruco_dict=aruco_dict, poses=poses, 
result_path=args.result_path + ) + if args.save_to_robot: + logger.info("Saving calibration to robot...") + in_hand_bot.write_calibration_to_robot(calibration) + + logger.info("Calibration complete!") + + +@ros_process.main(ext_cli()) +def main(args: argparse.Namespace) -> None: + spot_main() diff --git a/spot_wrapper/calibration/spot_in_hand_camera_calibration.py b/spot_wrapper/calibration/spot_in_hand_camera_calibration.py index 94facd6a..6bd7d0d5 100644 --- a/spot_wrapper/calibration/spot_in_hand_camera_calibration.py +++ b/spot_wrapper/calibration/spot_in_hand_camera_calibration.py @@ -45,6 +45,10 @@ AutomaticCameraCalibrationRobot, ) from spot_wrapper.calibration.calibration_util import ( + camera_info_to_intrinsics, + ros_image_to_image, +) +from spot_wrapper.calibration.charuco_board_detection import ( convert_camera_t_viewpoint_to_origin_t_planning_frame, est_camera_t_charuco_board_center, ) @@ -230,21 +234,31 @@ def capture_images( ) -> Union[List, np.ndarray]: if encodings is None: encodings = [cv2.IMREAD_COLOR, cv2.IMREAD_GRAYSCALE] - images = [] - image_responses = self.image_client.get_image(self.image_requests) - - if image_responses: - if len(encodings) != len(image_responses): - raise ValueError("Need to specify an encoding for each image request") - for idx, (response, encoding) in enumerate(zip(image_responses, encodings)): - image_data = response.shot.image.data - image_data = np.frombuffer(image_data, np.uint8) - image_data = cv2.imdecode(image_data, encoding) - images.append(image_data) - else: - raise ValueError(f"Could not obtain desired images {self.image_requests}") - return np.array(images, dtype=object) + self.clear_latest_image() + images: List[np.ndarray] = [] + image_data = None + logger.info("Waiting to acquire images from cameras...") + while image_data is None: + image_data = self.get_latest_image() + + spot_img = ( + ros_image_to_image(image_data.spot_cam_msg, ros_encoding="bgr8").to_numpy().astype(np.uint8) + ) # type: ignore + 
self.estimated_camera_matrix = camera_info_to_intrinsics(image_data.spot_cam_info) + self.hand_cam_info = image_data.spot_cam_info + ext_cam_img = ros_image_to_image(image_data.external_cam_msg, ros_encoding="bgr8").to_numpy().astype(np.uint8) + self.ext_cam_info = image_data.external_cam_info + + if (ext_cam_img.shape[0] > spot_img.shape[0]) or (ext_cam_img.shape[1] > spot_img.shape[1]): + logger.info("had to resize external camera image.") + target_height, target_width, _ = spot_img.shape + ext_cam_img = cv2.resize(ext_cam_img, (target_width, target_height), interpolation=cv2.INTER_AREA) + + images.append(spot_img) + images.append(ext_cam_img) + + return np.array(images, dtype=np.uint8) def localize_target_to_principal_camera(self, images: Union[List, np.ndarray]) -> Tuple[np.ndarray, np.ndarray]: try: From 7ced88c840ba629e36849475fd045fb9b027fc5a Mon Sep 17 00:00:00 2001 From: Tiffany Cappellari Date: Fri, 27 Feb 2026 15:12:42 -0500 Subject: [PATCH 02/12] untested but maybe ready to test --- .../calibration/calibration_helpers.py | 34 ++++++++++++ spot_wrapper/calibration/calibration_util.py | 52 ++++++++++++------- .../spot_hand_camera_calibration.py | 4 +- 3 files changed, 68 insertions(+), 22 deletions(-) diff --git a/spot_wrapper/calibration/calibration_helpers.py b/spot_wrapper/calibration/calibration_helpers.py index 2eb17858..02a45f53 100644 --- a/spot_wrapper/calibration/calibration_helpers.py +++ b/spot_wrapper/calibration/calibration_helpers.py @@ -2,19 +2,53 @@ from __future__ import annotations +import dataclasses import logging from dataclasses import dataclass +from typing import NamedTuple, Optional, Type, TypedDict import numpy as np import numpy.typing as npt import torch from jaxtyping import Float, Shaped +from rclpy.qos import QoSProfile +from sensor_msgs.msg import CameraInfo +from sensor_msgs.msg import Image as ROSImage logger = logging.getLogger(__name__) DISTORTION_COEFFICIENT_SIZES = [0, 4, 5, 8, 12, 14] +@dataclasses.dataclass +class 
CameraCalCapture: + capture_time: float + external_cam_msg: ROSImage + external_cam_info: CameraInfo + spot_cam_msg: ROSImage + spot_cam_info: CameraInfo + + +class CalibrationResults(TypedDict): + dist_coeffs_origin: np.ndarray + camera_matrix_origin: np.ndarray + image_dim_origin: np.ndarray + dist_coeffs_reference: np.ndarray + camera_matrix_reference: np.ndarray + image_dim_reference: np.ndarray + R: np.ndarray + T: np.ndarray + R_handeye: Optional[np.ndarray] + T_handeye: Optional[np.ndarray] + average_reprojection_error: float + + +class TopicMsgPair(NamedTuple): + topic_name: str + msg_type: Type + qos_profile: QoSProfile | int = 10 + + @dataclass class Image: # Note: this is a jaxtyping-style annotation; diff --git a/spot_wrapper/calibration/calibration_util.py b/spot_wrapper/calibration/calibration_util.py index 0ac37605..6df20691 100644 --- a/spot_wrapper/calibration/calibration_util.py +++ b/spot_wrapper/calibration/calibration_util.py @@ -10,19 +10,27 @@ from glob import glob from pathlib import Path from time import sleep -from typing import Any, Dict, List, Optional, Tuple, TypedDict, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import cv2 import numpy as np import yaml from cv_bridge import CvBridge +from message_filters import ApproximateTimeSynchronizer, Subscriber +from rclpy.callback_groups import CallbackGroup +from rclpy.node import Node from sensor_msgs.msg import CameraInfo from sensor_msgs.msg import Image as RosImage from spot_wrapper.calibration.automatic_camera_calibration_robot import ( AutomaticCameraCalibrationRobot, ) -from spot_wrapper.calibration.calibration_helpers import CameraIntrinsics, Image +from spot_wrapper.calibration.calibration_helpers import ( + CalibrationResults, + CameraIntrinsics, + Image, + TopicMsgPair, +) from spot_wrapper.calibration.charuco_board_detection import ( create_ideal_charuco_image, detect_charuco_corners, @@ -35,20 +43,6 @@ directories = ["parent", "child", 
"poses", "depth"] -class CalibrationResults(TypedDict): - dist_coeffs_origin: np.ndarray - camera_matrix_origin: np.ndarray - image_dim_origin: np.ndarray - dist_coeffs_reference: np.ndarray - camera_matrix_reference: np.ndarray - image_dim_reference: np.ndarray - R: np.ndarray - T: np.ndarray - R_handeye: Optional[np.ndarray] - T_handeye: Optional[np.ndarray] - average_reprojection_error: float - - def camera_info_to_dict(camera_info: CameraInfo, camera_name: str) -> dict[str, Any]: return { "image_width": camera_info.width, @@ -278,9 +272,6 @@ def save_dataset_to_dir( save_CameraInfo_2_file(camera_info_dict[cam_idx], str(cam_idx), cam_dir / Path("camera_info.yaml")) -# moved from calibration_utils.py in spot_wrapper -# i wanted to change a few names of fields in accord with this: -# https://www.notion.so/theaiinstitute/SE3-Math-d81fc740b66c4a09bece9f47b62506f1 def save_calibration_parameters( data: Dict, output_path: str, @@ -616,3 +607,26 @@ def calibration_helper( unsafe=args.unsafe_tag_save, ) return calibration_dict + + +def create_time_synchronizer( + node: Node, + topic_msg_type_pairs: Sequence[TopicMsgPair], + callback: Callable[..., None], + callback_group: Optional[CallbackGroup] = None, + queue_size: int = 30, + slop_sec: float = 0.3, +) -> ApproximateTimeSynchronizer: + """Creates an `ApproximateTimeSynchronizer` for a list of topic names and msg types + + See `$BDAI/projects/watch_understand_do/ws/src/wud_ros/wud_ros/look_at_that/lang_to_pcd_server.py` for an example + Also see: https://github.com/ros2/message_filters/blob/humble/src/message_filters/__init__.py#L242 + """ + subscribers = [ + Subscriber(node, msg_type, topic_name, qos_profile=qos_profile, callback_group=callback_group) + for topic_name, msg_type, qos_profile in topic_msg_type_pairs + ] + time_synchronizer = ApproximateTimeSynchronizer(subscribers, queue_size, slop_sec) + time_synchronizer.registerCallback(callback) + + return time_synchronizer diff --git 
a/spot_wrapper/calibration/spot_hand_camera_calibration.py b/spot_wrapper/calibration/spot_hand_camera_calibration.py index 5eb830a4..907b3416 100644 --- a/spot_wrapper/calibration/spot_hand_camera_calibration.py +++ b/spot_wrapper/calibration/spot_hand_camera_calibration.py @@ -8,13 +8,11 @@ import cv2 import numpy as np -import synchros2.process as ros_process import yaml from spot_wrapper.calibration.automatic_camera_calibration_robot import AutomaticCameraCalibrationRobot from spot_wrapper.calibration.calibration_clis import ( calibrate_robot_cli, - ext_cli, setup_calibration_param, spot_cli, ) @@ -71,6 +69,7 @@ def spot_main() -> None: logger.warning("HOLD Ctrl + C NOW TO CANCEL") logger.warning("The calibration board should be about a meter away with nothing within a meter of the robot.") logger.warning("The robot should NOT be docked, and nobody should have robot control") + logger.warning(f"the ip is: {args.ip}") input("Press Enter to continue...") # sleep(5) @@ -119,6 +118,5 @@ def spot_main() -> None: logger.info("Calibration complete!") -@ros_process.main(ext_cli()) def main(args: argparse.Namespace) -> None: spot_main() From 73c1d0ce2ee4edfec8e4aa28c55f3ce8f7a861f2 Mon Sep 17 00:00:00 2001 From: Tiffany Cappellari Date: Mon, 2 Mar 2026 12:57:56 -0500 Subject: [PATCH 03/12] removed some unused helpers --- ..._calibration.py => calibrate_spot_hand.py} | 5 +- .../calibration/calibration_helpers.py | 64 +++---- spot_wrapper/calibration/calibration_util.py | 179 +++++++++--------- 3 files changed, 124 insertions(+), 124 deletions(-) rename spot_wrapper/calibration/{spot_hand_camera_calibration.py => calibrate_spot_hand.py} (98%) diff --git a/spot_wrapper/calibration/spot_hand_camera_calibration.py b/spot_wrapper/calibration/calibrate_spot_hand.py similarity index 98% rename from spot_wrapper/calibration/spot_hand_camera_calibration.py rename to spot_wrapper/calibration/calibrate_spot_hand.py index 907b3416..31e3973a 100644 --- 
a/spot_wrapper/calibration/spot_hand_camera_calibration.py +++ b/spot_wrapper/calibration/calibrate_spot_hand.py @@ -9,12 +9,13 @@ import cv2 import numpy as np import yaml - +import synchros2.process as ros_process from spot_wrapper.calibration.automatic_camera_calibration_robot import AutomaticCameraCalibrationRobot from spot_wrapper.calibration.calibration_clis import ( calibrate_robot_cli, setup_calibration_param, spot_cli, + ext_cli, ) from spot_wrapper.calibration.calibration_util import ( calibration_helper, @@ -117,6 +118,6 @@ def spot_main() -> None: logger.info("Calibration complete!") - +@ros_process.main(ext_cli()) def main(args: argparse.Namespace) -> None: spot_main() diff --git a/spot_wrapper/calibration/calibration_helpers.py b/spot_wrapper/calibration/calibration_helpers.py index 02a45f53..8ed51cfd 100644 --- a/spot_wrapper/calibration/calibration_helpers.py +++ b/spot_wrapper/calibration/calibration_helpers.py @@ -2,51 +2,51 @@ from __future__ import annotations -import dataclasses +# import dataclasses import logging from dataclasses import dataclass -from typing import NamedTuple, Optional, Type, TypedDict +# from typing import NamedTuple, Optional, Type, TypedDict import numpy as np import numpy.typing as npt import torch from jaxtyping import Float, Shaped -from rclpy.qos import QoSProfile -from sensor_msgs.msg import CameraInfo -from sensor_msgs.msg import Image as ROSImage +# from rclpy.qos import QoSProfile +# from sensor_msgs.msg import CameraInfo +# from sensor_msgs.msg import Image as ROSImage logger = logging.getLogger(__name__) DISTORTION_COEFFICIENT_SIZES = [0, 4, 5, 8, 12, 14] -@dataclasses.dataclass -class CameraCalCapture: - capture_time: float - external_cam_msg: ROSImage - external_cam_info: CameraInfo - spot_cam_msg: ROSImage - spot_cam_info: CameraInfo - - -class CalibrationResults(TypedDict): - dist_coeffs_origin: np.ndarray - camera_matrix_origin: np.ndarray - image_dim_origin: np.ndarray - dist_coeffs_reference: np.ndarray - 
camera_matrix_reference: np.ndarray - image_dim_reference: np.ndarray - R: np.ndarray - T: np.ndarray - R_handeye: Optional[np.ndarray] - T_handeye: Optional[np.ndarray] - average_reprojection_error: float - - -class TopicMsgPair(NamedTuple): - topic_name: str - msg_type: Type - qos_profile: QoSProfile | int = 10 +# @dataclasses.dataclass +# class CameraCalCapture: +# capture_time: float +# external_cam_msg: ROSImage +# external_cam_info: CameraInfo +# spot_cam_msg: ROSImage +# spot_cam_info: CameraInfo + + +# class CalibrationResults(TypedDict): +# dist_coeffs_origin: np.ndarray +# camera_matrix_origin: np.ndarray +# image_dim_origin: np.ndarray +# dist_coeffs_reference: np.ndarray +# camera_matrix_reference: np.ndarray +# image_dim_reference: np.ndarray +# R: np.ndarray +# T: np.ndarray +# R_handeye: Optional[np.ndarray] +# T_handeye: Optional[np.ndarray] +# average_reprojection_error: float + + +# class TopicMsgPair(NamedTuple): +# topic_name: str +# msg_type: Type +# qos_profile: QoSProfile | int = 10 @dataclass diff --git a/spot_wrapper/calibration/calibration_util.py b/spot_wrapper/calibration/calibration_util.py index 6df20691..36304bbe 100644 --- a/spot_wrapper/calibration/calibration_util.py +++ b/spot_wrapper/calibration/calibration_util.py @@ -16,9 +16,9 @@ import numpy as np import yaml from cv_bridge import CvBridge -from message_filters import ApproximateTimeSynchronizer, Subscriber -from rclpy.callback_groups import CallbackGroup -from rclpy.node import Node +# from message_filters import ApproximateTimeSynchronizer, Subscriber +# from rclpy.callback_groups import CallbackGroup +# from rclpy.node import Node from sensor_msgs.msg import CameraInfo from sensor_msgs.msg import Image as RosImage @@ -157,50 +157,49 @@ def load_images_from_dir(path: Path) -> Dict[str, np.ndarray]: return images - -def load_calibration_parameters(input_path: Path) -> CalibrationResults: - """ - Load calibration parameters from a YAML file. 
- - Args: - input_path (Path): The path to the YAML file containing calibration parameters. - Returns: - CalibrationResults: The loaded calibration parameters. - Throws: - FileNotFoundError: If the specified file does not exist. - KeyError: If required keys are missing in the YAML file. - """ - with open(input_path, "r") as file: - calib_data = yaml.safe_load(file) - - parent_camera = np.array(calib_data["default"]["intrinsic"][0]["camera_matrix"]).reshape((3, 3)) - parent_dist_coeffs = np.array(calib_data["default"]["intrinsic"][0]["dist_coeffs"]).reshape((-1, 1)) - parent_image_dim = np.array(calib_data["default"]["intrinsic"][0]["image_dim"]) - child_camera = np.array(calib_data["default"]["intrinsic"][1]["camera_matrix"]).reshape((3, 3)) - child_dist_coeffs = np.array(calib_data["default"]["intrinsic"][1]["dist_coeffs"]).reshape((-1, 1)) - child_image_dim = np.array(calib_data["default"]["intrinsic"][1]["image_dim"]) - R = np.array(calib_data["default"]["extrinsic"][0][1]["R"]).reshape((3, 3)) - T = np.array(calib_data["default"]["extrinsic"][0][1]["T"]).reshape((-1, 3)) - - # saving out reproj err not supported, currently. - # save_calibration_parameters in calibration_utils.py in spot_wrapper - # does not save out reproj err. - # So we set it to 0 here. - calib_results: CalibrationResults = { - "camera_matrix_origin": parent_camera, - "dist_coeffs_origin": parent_dist_coeffs, - "image_dim_origin": parent_image_dim, - "camera_matrix_reference": child_camera, - "dist_coeffs_reference": child_dist_coeffs, - "image_dim_reference": child_image_dim, - "R": R, - "T": T, - "R_handeye": np.eye(3), - "T_handeye": np.zeros((3, 1)), - "average_reprojection_error": 0, - } - - return calib_results +# TODO +# def load_calibration_parameters(input_path: Path) -> CalibrationResults: +# """ +# Load calibration parameters from a YAML file. + +# Args: +# input_path (Path): The path to the YAML file containing calibration parameters. 
+# Returns: +# CalibrationResults: The loaded calibration parameters. +# Throws: +# FileNotFoundError: If the specified file does not exist. +# KeyError: If required keys are missing in the YAML file. +# """ +# with open(input_path, "r") as file: +# calib_data = yaml.safe_load(file) + +# parent_camera = np.array(calib_data["default"]["intrinsic"][0]["camera_matrix"]).reshape((3, 3)) +# parent_dist_coeffs = np.array(calib_data["default"]["intrinsic"][0]["dist_coeffs"]).reshape((-1, 1)) +# parent_image_dim = np.array(calib_data["default"]["intrinsic"][0]["image_dim"]) +# child_camera = np.array(calib_data["default"]["intrinsic"][1]["camera_matrix"]).reshape((3, 3)) +# child_dist_coeffs = np.array(calib_data["default"]["intrinsic"][1]["dist_coeffs"]).reshape((-1, 1)) +# child_image_dim = np.array(calib_data["default"]["intrinsic"][1]["image_dim"]) +# R = np.array(calib_data["default"]["extrinsic"][0][1]["R"]).reshape((3, 3)) +# T = np.array(calib_data["default"]["extrinsic"][0][1]["T"]).reshape((-1, 3)) + +# # saving out reproj err not supported, currently. +# # does not save out reproj err. +# # So we set it to 0 here. 
+# calib_results: CalibrationResults = { +# "camera_matrix_origin": parent_camera, +# "dist_coeffs_origin": parent_dist_coeffs, +# "image_dim_origin": parent_image_dim, +# "camera_matrix_reference": child_camera, +# "dist_coeffs_reference": child_dist_coeffs, +# "image_dim_reference": child_image_dim, +# "R": R, +# "T": T, +# "R_handeye": np.eye(3), +# "T_handeye": np.zeros((3, 1)), +# "average_reprojection_error": 0, +# } + +# return calib_results def load_dataset_from_path(pathdir: Path) -> Tuple[Dict[str, Dict[str, np.ndarray]], CameraInfo, CameraInfo]: @@ -243,33 +242,33 @@ def create_calibration_save_folders(path: Path) -> None: os.makedirs(os.path.join(path, "poses"), exist_ok=True) logger.info("Done creating folders.") +# TODO +# def save_dataset_to_dir( +# path: Path, images_dict: dict[str, list[np.ndarray]], camera_info_dict: dict[str, CameraInfo] +# ) -> None: +# """ +# Save image dataset to path in a way that's compatible with multistereo_calibration_charuco. -def save_dataset_to_dir( - path: Path, images_dict: dict[str, list[np.ndarray]], camera_info_dict: dict[str, CameraInfo] -) -> None: - """ - Save image dataset to path in a way that's compatible with multistereo_calibration_charuco. +# Also, save the camera infos. - Also, save the camera infos. 
+# See Using the CLI Tool To Calibrate On an Existing Dataset section in the README +# to see the expected folder/data structure for this method to work - See Using the CLI Tool To Calibrate On an Existing Dataset section in the README - to see the expected folder/data structure for this method to work - - Args: - path (str): The parent path - images_dict (dict[int, list[np.ndarray]]): The image dataset by camera index - camera_info_dict (dict[int, CameraInfo]): The camera info by camera index - """ +# Args: +# path (str): The parent path +# images_dict (dict[int, list[np.ndarray]]): The image dataset by camera index +# camera_info_dict (dict[int, CameraInfo]): The camera info by camera index +# """ - create_calibration_save_folders(path) +# create_calibration_save_folders(path) - for cam_idx, images in images_dict.items(): - cam_dir = path / Path(str(cam_idx)) - for img_idx, img in enumerate(images): - img_path = cam_dir / Path(f"{img_idx}.png") - cv2.imwrite(str(img_path), img) - # np.save(cam_dir / Path("camera_info.npy"), camera_info_dict[cam_idx]) - save_CameraInfo_2_file(camera_info_dict[cam_idx], str(cam_idx), cam_dir / Path("camera_info.yaml")) +# for cam_idx, images in images_dict.items(): +# cam_dir = path / Path(str(cam_idx)) +# for img_idx, img in enumerate(images): +# img_path = cam_dir / Path(f"{img_idx}.png") +# cv2.imwrite(str(img_path), img) +# # np.save(cam_dir / Path("camera_info.npy"), camera_info_dict[cam_idx]) +# save_CameraInfo_2_file(camera_info_dict[cam_idx], str(cam_idx), cam_dir / Path("camera_info.yaml")) def save_calibration_parameters( @@ -609,24 +608,24 @@ def calibration_helper( return calibration_dict -def create_time_synchronizer( - node: Node, - topic_msg_type_pairs: Sequence[TopicMsgPair], - callback: Callable[..., None], - callback_group: Optional[CallbackGroup] = None, - queue_size: int = 30, - slop_sec: float = 0.3, -) -> ApproximateTimeSynchronizer: - """Creates an `ApproximateTimeSynchronizer` for a list of topic names and 
msg types - - See `$BDAI/projects/watch_understand_do/ws/src/wud_ros/wud_ros/look_at_that/lang_to_pcd_server.py` for an example - Also see: https://github.com/ros2/message_filters/blob/humble/src/message_filters/__init__.py#L242 - """ - subscribers = [ - Subscriber(node, msg_type, topic_name, qos_profile=qos_profile, callback_group=callback_group) - for topic_name, msg_type, qos_profile in topic_msg_type_pairs - ] - time_synchronizer = ApproximateTimeSynchronizer(subscribers, queue_size, slop_sec) - time_synchronizer.registerCallback(callback) - - return time_synchronizer +# def create_time_synchronizer( +# node: Node, +# topic_msg_type_pairs: Sequence[TopicMsgPair], +# callback: Callable[..., None], +# callback_group: Optional[CallbackGroup] = None, +# queue_size: int = 30, +# slop_sec: float = 0.3, +# ) -> ApproximateTimeSynchronizer: +# """Creates an `ApproximateTimeSynchronizer` for a list of topic names and msg types + +# See `$BDAI/projects/watch_understand_do/ws/src/wud_ros/wud_ros/look_at_that/lang_to_pcd_server.py` for an example +# Also see: https://github.com/ros2/message_filters/blob/humble/src/message_filters/__init__.py#L242 +# """ +# subscribers = [ +# Subscriber(node, msg_type, topic_name, qos_profile=qos_profile, callback_group=callback_group) +# for topic_name, msg_type, qos_profile in topic_msg_type_pairs +# ] +# time_synchronizer = ApproximateTimeSynchronizer(subscribers, queue_size, slop_sec) +# time_synchronizer.registerCallback(callback) + +# return time_synchronizer From 0fad9624d20ee82463edaa9b06034b0e71f3d218 Mon Sep 17 00:00:00 2001 From: Tiffany Cappellari Date: Mon, 2 Mar 2026 15:18:42 -0500 Subject: [PATCH 04/12] fixed some minor bugs. 
now i can test it --- .../calibration/calibrate_spot_hand.py | 9 +- spot_wrapper/calibration/calibration_clis.py | 82 +++++++------------ spot_wrapper/calibration/calibration_util.py | 10 ++- .../spot_in_hand_camera_calibration.py | 4 +- 4 files changed, 43 insertions(+), 62 deletions(-) diff --git a/spot_wrapper/calibration/calibrate_spot_hand.py b/spot_wrapper/calibration/calibrate_spot_hand.py index 31e3973a..058b0fbd 100644 --- a/spot_wrapper/calibration/calibrate_spot_hand.py +++ b/spot_wrapper/calibration/calibrate_spot_hand.py @@ -9,13 +9,12 @@ import cv2 import numpy as np import yaml -import synchros2.process as ros_process + from spot_wrapper.calibration.automatic_camera_calibration_robot import AutomaticCameraCalibrationRobot from spot_wrapper.calibration.calibration_clis import ( calibrate_robot_cli, setup_calibration_param, spot_cli, - ext_cli, ) from spot_wrapper.calibration.calibration_util import ( calibration_helper, @@ -118,6 +117,10 @@ def spot_main() -> None: logger.info("Calibration complete!") -@ros_process.main(ext_cli()) + def main(args: argparse.Namespace) -> None: spot_main() + + +if __name__ == "__main__": + spot_main() diff --git a/spot_wrapper/calibration/calibration_clis.py b/spot_wrapper/calibration/calibration_clis.py index c2c0f900..c05a5789 100644 --- a/spot_wrapper/calibration/calibration_clis.py +++ b/spot_wrapper/calibration/calibration_clis.py @@ -24,7 +24,7 @@ def setup_calibration_param( - args: argparse.Namespace, + parser: argparse.ArgumentParser, ) -> Tuple[argparse.Namespace, cv2.aruco_Dictionary, cv2.aruco_CharucoBoard]: """Set up calibration parameters from command line arguments. @@ -38,6 +38,7 @@ def setup_calibration_param( Tuple[argparse.Namespace, cv2.aruco_Dictionary, cv2.aruco_CharucoBoard]: The parsed arguments, ArUco dictionary, and Charuco board. 
""" + args = parser.parse_args() if hasattr(cv2.aruco, args.dict_size): aruco_dict = cv2.aruco.getPredefinedDictionary(getattr(cv2.aruco, args.dict_size)) else: @@ -232,7 +233,7 @@ def calibrator_cli() -> argparse.ArgumentParser: return parser -def calibrate_robot_cli(parser: argparse.ArgumentParser | None) -> argparse.ArgumentParser: +def calibrate_robot_cli(parser: argparse.ArgumentParser | None = None) -> argparse.ArgumentParser: if parser is None: parser = calibrator_cli() @@ -308,10 +309,31 @@ def calibrate_robot_cli(parser: argparse.ArgumentParser | None) -> argparse.Argu help="Whether to only calibrate from recorded dataset on file.", ) + parser.add_argument( + "--dist_along_board_width", + "-dabw", + nargs="+", + type=float, + dest="dist_along_board_width", + default=[-0.2, 0.3, 0.1], + help=( + "What distances to conduct calibrations at relative to the board (along the board width / X axis). " + "Three value array arg defines the [Start, Stop), step for the viewpoint sweep." + ), + ) + + parser.add_argument( + "--save_data", + action="store_true", + dest="save_data", + default=False, + help="Whether to save the collected image dataset to data_path.", + ) + return parser -def spot_cli(parser: argparse.ArgumentParser | None) -> argparse.ArgumentParser: +def spot_cli(parser: argparse.ArgumentParser | None = None) -> argparse.ArgumentParser: if parser is None: parser = calibrate_robot_cli(None) @@ -320,8 +342,9 @@ def spot_cli(parser: argparse.ArgumentParser | None) -> argparse.ArgumentParser: "-rn", dest="robot_name", type=str, - help="The name of the Robot to calibrate", - required=True, + help="The name of the Robot to calibrate (auto-detected from robot if not supplied)", + default=None, + required=False, ) parser.add_argument( @@ -369,14 +392,6 @@ def spot_cli(parser: argparse.ArgumentParser | None) -> argparse.ArgumentParser: help="What resolution use for Spot's RGB Hand Camera (width). 
Currently, only 480 and 1080 are supported", ) - parser.add_argument("--ext_cam_topic", help="External Camera topic", required=True) - - parser.add_argument("--ext_cam_info_topic", help="External camera info topic", required=True) - - parser.add_argument( - "--ext_cam_tf_name", help="name of the camera link of the RS camera", default="pole_camera_link" - ) - parser.add_argument( "--save_to_robot", "-save", @@ -386,44 +401,3 @@ def spot_cli(parser: argparse.ArgumentParser | None) -> argparse.ArgumentParser: ) return parser - - -def ext_cli() -> argparse.ArgumentParser: - """Command-line interface.""" - parser = calibrator_cli() - parser.add_argument("--robot", help="Name of the robot ROS2 namespce.", default="Ethernet0") - parser.add_argument( - "--ext_cam_topic", help="External Camera topic", default="/pole_rs/camera/pole_camera/color/image_raw" - ) - parser.add_argument( - "--ext_cam_info_topic", - help="External camera info topic", - default="/pole_rs/camera/pole_camera/color/camera_info", - ) - parser.add_argument("--start_dist", help="Starting distance from the board", default=1.5, type=float) - parser.add_argument( - "--dist_step", help="Step size of the distance to the board till max_dist", default=0.5, type=float - ) - parser.add_argument( - "--max_dist", help="Step size of the distance to the board till max_dist", default=2.5, type=float - ) - - parser.add_argument( - "--max_circle_angle", help="Max angle of the circle around the board", default=0.872665, type=float - ) - parser.add_argument("--circle_step_size", help="Step size around the circle", default=0.174533, type=float) - parser.add_argument( - "--ext_cam_tf_name", help="name of the camera link of the RS camera", default="pole_camera_link" - ) - parser.add_argument("--parent_frame", help="name of frame the camera link is rigidly attached to", default="body") - parser.add_argument("--camera_name", help="name of the camera", default="front_realsense") - parser.add_argument("--spot_name", help="name of 
spot", default="quinn") - parser.add_argument("--open_gripper", help="Whether to open the gripper during calibration", action="store_true") - - parser.add_argument( - "--collect_only", - action="store_true", - default=False, - help="Whether to only collect data, saved off to data_path.", - ) - return parser diff --git a/spot_wrapper/calibration/calibration_util.py b/spot_wrapper/calibration/calibration_util.py index 36304bbe..bb02aa06 100644 --- a/spot_wrapper/calibration/calibration_util.py +++ b/spot_wrapper/calibration/calibration_util.py @@ -10,12 +10,13 @@ from glob import glob from pathlib import Path from time import sleep -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Dict, List, Optional, Tuple, Union import cv2 import numpy as np import yaml from cv_bridge import CvBridge + # from message_filters import ApproximateTimeSynchronizer, Subscriber # from rclpy.callback_groups import CallbackGroup # from rclpy.node import Node @@ -26,11 +27,12 @@ AutomaticCameraCalibrationRobot, ) from spot_wrapper.calibration.calibration_helpers import ( - CalibrationResults, + # CalibrationResults, CameraIntrinsics, Image, - TopicMsgPair, ) + +# TopicMsgPair, from spot_wrapper.calibration.charuco_board_detection import ( create_ideal_charuco_image, detect_charuco_corners, @@ -157,6 +159,7 @@ def load_images_from_dir(path: Path) -> Dict[str, np.ndarray]: return images + # TODO # def load_calibration_parameters(input_path: Path) -> CalibrationResults: # """ @@ -242,6 +245,7 @@ def create_calibration_save_folders(path: Path) -> None: os.makedirs(os.path.join(path, "poses"), exist_ok=True) logger.info("Done creating folders.") + # TODO # def save_dataset_to_dir( # path: Path, images_dict: dict[str, list[np.ndarray]], camera_info_dict: dict[str, CameraInfo] diff --git a/spot_wrapper/calibration/spot_in_hand_camera_calibration.py b/spot_wrapper/calibration/spot_in_hand_camera_calibration.py index 6bd7d0d5..b1200e3e 100644 
--- a/spot_wrapper/calibration/spot_in_hand_camera_calibration.py +++ b/spot_wrapper/calibration/spot_in_hand_camera_calibration.py @@ -197,7 +197,7 @@ def convert_pinhole_intrinsic_to_proto(intrinsic_matrix: np.ndarray) -> ImageSou planning_t_depth_frame_proto = SE3Pose.from_matrix(planning_t_depth).to_proto() planning_t_rgb_frame_proto = SE3Pose.from_matrix(planning_t_rgb).to_proto() - set_req = gripper_camera_param_pb2.SetGripperCameraCalibrationRequest( + gripper_camera_param_pb2.SetGripperCameraCalibrationRequest( gripper_cam_cal=gripper_camera_param_pb2.GripperCameraCalibrationProto( depth=gripper_camera_param_pb2.GripperDepthCameraCalibrationParams( wr1_tform_sensor=planning_t_depth_frame_proto, @@ -217,7 +217,7 @@ def convert_pinhole_intrinsic_to_proto(intrinsic_matrix: np.ndarray) -> ImageSou ) # Send the request to the robot try: - result = self.gripper_camera_client.set_camera_calib(set_req) + # result = self.gripper_camera_client.set_camera_calib(set_req) logger.info(f" Set Parameters: \n{result}") except Exception as e: raise ValueError(f"Failed to set calibration parameters on the robot: {e}") From 8d4cfb47d2d55a2c2fa8f45a7e0a6a6fa9cea077 Mon Sep 17 00:00:00 2001 From: Tiffany Cappellari Date: Tue, 3 Mar 2026 14:55:43 -0500 Subject: [PATCH 05/12] fixed a bunch of bugs (sorry this is a mess rn) --- .../automatic_camera_calibration_robot.py | 4 +- .../calibration/calibrate_spot_hand.py | 19 +- spot_wrapper/calibration/calibration_clis.py | 27 +- .../calibration/calibration_helpers.py | 27 +- spot_wrapper/calibration/calibration_util.py | 277 ++++++--- .../calibration/charuco_board_detection.py | 569 +++++++++++++----- .../spot_in_hand_camera_calibration.py | 46 +- 7 files changed, 679 insertions(+), 290 deletions(-) diff --git a/spot_wrapper/calibration/automatic_camera_calibration_robot.py b/spot_wrapper/calibration/automatic_camera_calibration_robot.py index f329a680..2900993a 100644 --- a/spot_wrapper/calibration/automatic_camera_calibration_robot.py 
+++ b/spot_wrapper/calibration/automatic_camera_calibration_robot.py @@ -1,6 +1,4 @@ -# Copyright (c) 2025 Robotics and AI Institute LLC dba RAI Institute. All rights reserved. - -# Copyreference (c) 2024 Boston Dynamics AI Institute LLC. All references reserved. +# Copyright (c) 2025-2026 Robotics and AI Institute LLC dba RAI Institute. All rights reserved. # What's below can be used in standalone tool from abc import ABC, abstractmethod diff --git a/spot_wrapper/calibration/calibrate_spot_hand.py b/spot_wrapper/calibration/calibrate_spot_hand.py index 058b0fbd..c8229858 100644 --- a/spot_wrapper/calibration/calibrate_spot_hand.py +++ b/spot_wrapper/calibration/calibrate_spot_hand.py @@ -1,6 +1,4 @@ -# Copyright (c) 2025 Robotics and AI Institute LLC dba RAI Institute. All rights reserved. - -# Copyreference (c) 2024 Boston Dynamics AI Institute LLC. All references reserved. +# Copyright (c) 2025-2026 Robotics and AI Institute LLC dba RAI Institute. All rights reserved. import argparse import logging @@ -86,6 +84,16 @@ def spot_main() -> None: data_path=args.data_path, save_data=args.save_data, ) + + # calibration = calibrate_2_cameras( + # images=images, + # args=args, + # charuco=charuco, + # aruco_dict=aruco_dict, + # camera_matrix_dict=in_hand_bot.camera_matrix_dict, + # camera_distortion_dict=in_hand_bot.camera_distortion_dict, + # ) + # calibration, num_images, parent_frame, child_frame = run_calibration_process(args) calibration = calibration_helper( images=images, args=args, charuco=charuco, aruco_dict=aruco_dict, poses=poses, result_path=args.result_path ) @@ -108,6 +116,7 @@ def spot_main() -> None: else: logger.info(f"Loading images from {args.data_path}") images, poses = load_dataset_from_path(args.data_path) + # calibration, num_images, parent_frame, child_frame = run_calibration_process(args) calibration = calibration_helper( images=images, args=args, charuco=charuco, aruco_dict=aruco_dict, poses=poses, result_path=args.result_path ) @@ -118,9 +127,5 
@@ def spot_main() -> None: logger.info("Calibration complete!") -def main(args: argparse.Namespace) -> None: - spot_main() - - if __name__ == "__main__": spot_main() diff --git a/spot_wrapper/calibration/calibration_clis.py b/spot_wrapper/calibration/calibration_clis.py index c05a5789..ec88cd82 100644 --- a/spot_wrapper/calibration/calibration_clis.py +++ b/spot_wrapper/calibration/calibration_clis.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright (c) 2025 Robotics and AI Institute LLC dba RAI Institute. All rights reserved. +# Copyright (c) 2025-2026 Robotics and AI Institute LLC dba RAI Institute. All rights reserved. import argparse @@ -223,6 +223,20 @@ def calibrator_cli() -> argparse.ArgumentParser: help="If set, skips safety checks for tagging calibration.", ) + parser.add_argument( + "--stereo_pairs", + "-sp", + dest="stereo_pairs", + nargs="+", + type=lambda s: tuple(int(x) for x in s.split(",")), + default=[(0, 1)], + help=( + "Stereo camera pairs to calibrate, as comma-separated index pairs. " + "E.g. '0,1' for a single stereo pair between camera 0 and camera 1. " + "Defaults to [(0, 1)]." + ), + ) + parser.add_argument( "--use_kabsch", action="store_true", @@ -330,6 +344,17 @@ def calibrate_robot_cli(parser: argparse.ArgumentParser | None = None) -> argpar help="Whether to save the collected image dataset to data_path.", ) + parser.add_argument( + "--from_yaml", + "-yaml", + dest="from_yaml", + action="store_true", + help=( + "Whether the data is from a yaml file. 
Use this and the '--from_data' and '--send' args to send a" + " previously saved calibration yaml to the robot" + ), + ) + return parser diff --git a/spot_wrapper/calibration/calibration_helpers.py b/spot_wrapper/calibration/calibration_helpers.py index 8ed51cfd..515f6829 100644 --- a/spot_wrapper/calibration/calibration_helpers.py +++ b/spot_wrapper/calibration/calibration_helpers.py @@ -5,12 +5,13 @@ # import dataclasses import logging from dataclasses import dataclass -# from typing import NamedTuple, Optional, Type, TypedDict +# from typing import NamedTuple, Optional, Type, TypedDict import numpy as np import numpy.typing as npt import torch from jaxtyping import Float, Shaped + # from rclpy.qos import QoSProfile # from sensor_msgs.msg import CameraInfo # from sensor_msgs.msg import Image as ROSImage @@ -29,18 +30,18 @@ # spot_cam_info: CameraInfo -# class CalibrationResults(TypedDict): -# dist_coeffs_origin: np.ndarray -# camera_matrix_origin: np.ndarray -# image_dim_origin: np.ndarray -# dist_coeffs_reference: np.ndarray -# camera_matrix_reference: np.ndarray -# image_dim_reference: np.ndarray -# R: np.ndarray -# T: np.ndarray -# R_handeye: Optional[np.ndarray] -# T_handeye: Optional[np.ndarray] -# average_reprojection_error: float +class CalibrationResults(TypedDict): + dist_coeffs_origin: np.ndarray + camera_matrix_origin: np.ndarray + image_dim_origin: np.ndarray + dist_coeffs_reference: np.ndarray + camera_matrix_reference: np.ndarray + image_dim_reference: np.ndarray + R: np.ndarray + T: np.ndarray + R_handeye: Optional[np.ndarray] + T_handeye: Optional[np.ndarray] + average_reprojection_error: float # class TopicMsgPair(NamedTuple): diff --git a/spot_wrapper/calibration/calibration_util.py b/spot_wrapper/calibration/calibration_util.py index bb02aa06..cf91862b 100644 --- a/spot_wrapper/calibration/calibration_util.py +++ b/spot_wrapper/calibration/calibration_util.py @@ -1,7 +1,5 @@ # Copyright (c) 2025-2026 Robotics and AI Institute LLC dba RAI 
Institute. All rights reserved. -# Copyreference (c) 2024 Robotics and AI Institute LLC dba RAI Institute. All references reserved. - import argparse import logging import os @@ -27,13 +25,15 @@ AutomaticCameraCalibrationRobot, ) from spot_wrapper.calibration.calibration_helpers import ( - # CalibrationResults, + CalibrationResults, CameraIntrinsics, Image, ) # TopicMsgPair, from spot_wrapper.calibration.charuco_board_detection import ( + charuco_pose_sanity_check, + create_charuco_board, create_ideal_charuco_image, detect_charuco_corners, get_relative_viewpoints_from_board_pose_and_param, @@ -67,6 +67,7 @@ def camera_info_to_dict(camera_info: CameraInfo, camera_name: str) -> dict[str, } +# TODO def save_CameraInfo_2_file(msg: CameraInfo, camera_name: str, file_path: Path) -> None: """Saves a CameraInfo message to a YAML file.""" cam_info_msg_dict = camera_info_to_dict(camera_info=msg, camera_name=camera_name) @@ -75,6 +76,7 @@ def save_CameraInfo_2_file(msg: CameraInfo, camera_name: str, file_path: Path) - yaml.dump(cam_info_msg_dict, f, default_flow_style=False) +# TODO def load_CameraInfo_from_file(file_path: Path) -> CameraInfo: """Loads a CameraInfo message from a YAML file.""" logging.info(f"Loading CameraInfo from {file_path}") @@ -130,20 +132,20 @@ def alpha_numeric(x: str) -> Any: return int(matcha.group()) return x - def load_images_from_dir(path: Path) -> Dict[str, np.ndarray]: + def load_images_from_dir(path: Path, suffix: str) -> Dict[str, np.ndarray]: print(f"-----------------------Loading images from {path}") files = sorted( - glob(os.path.join(path, "*.png")), + glob(os.path.join(path, f"*{suffix}")), key=alpha_numeric, ) try: return { Path(fn).name: cv2.imread(fn, cv2.IMREAD_GRAYSCALE).astype(np.uint8) for fn in files - if fn.lower().endswith(".png") + if fn.lower().endswith(suffix) } except Exception as e: - logging.error(f"Error loading images from {files}: {e}") + logging.error(f"Error loading images and poses from {files}: {e}") return {} # 
Initialize an empty dict to store images @@ -152,60 +154,63 @@ def load_images_from_dir(path: Path) -> Dict[str, np.ndarray]: # directories we care about here parent_path = os.path.join(path, "parent") child_path = os.path.join(path, "child") + poses = os.path.join(path, "poses") # load images from both directories - images["parent"] = load_images_from_dir(Path(parent_path)) - images["child"] = load_images_from_dir(Path(child_path)) + images["parent"] = load_images_from_dir(Path(parent_path), ".png") + images["child"] = load_images_from_dir(Path(child_path), ".png") + images["poses"] = load_images_from_dir(Path(poses), ".npy") return images # TODO -# def load_calibration_parameters(input_path: Path) -> CalibrationResults: -# """ -# Load calibration parameters from a YAML file. - -# Args: -# input_path (Path): The path to the YAML file containing calibration parameters. -# Returns: -# CalibrationResults: The loaded calibration parameters. -# Throws: -# FileNotFoundError: If the specified file does not exist. -# KeyError: If required keys are missing in the YAML file. -# """ -# with open(input_path, "r") as file: -# calib_data = yaml.safe_load(file) - -# parent_camera = np.array(calib_data["default"]["intrinsic"][0]["camera_matrix"]).reshape((3, 3)) -# parent_dist_coeffs = np.array(calib_data["default"]["intrinsic"][0]["dist_coeffs"]).reshape((-1, 1)) -# parent_image_dim = np.array(calib_data["default"]["intrinsic"][0]["image_dim"]) -# child_camera = np.array(calib_data["default"]["intrinsic"][1]["camera_matrix"]).reshape((3, 3)) -# child_dist_coeffs = np.array(calib_data["default"]["intrinsic"][1]["dist_coeffs"]).reshape((-1, 1)) -# child_image_dim = np.array(calib_data["default"]["intrinsic"][1]["image_dim"]) -# R = np.array(calib_data["default"]["extrinsic"][0][1]["R"]).reshape((3, 3)) -# T = np.array(calib_data["default"]["extrinsic"][0][1]["T"]).reshape((-1, 3)) - -# # saving out reproj err not supported, currently. -# # does not save out reproj err. 
-# # So we set it to 0 here. -# calib_results: CalibrationResults = { -# "camera_matrix_origin": parent_camera, -# "dist_coeffs_origin": parent_dist_coeffs, -# "image_dim_origin": parent_image_dim, -# "camera_matrix_reference": child_camera, -# "dist_coeffs_reference": child_dist_coeffs, -# "image_dim_reference": child_image_dim, -# "R": R, -# "T": T, -# "R_handeye": np.eye(3), -# "T_handeye": np.zeros((3, 1)), -# "average_reprojection_error": 0, -# } +def load_calibration_parameters(input_path: Path) -> CalibrationResults: + """ + Load calibration parameters from a YAML file. -# return calib_results + Args: + input_path (Path): The path to the YAML file containing calibration parameters. + Returns: + CalibrationResults: The loaded calibration parameters. + Throws: + FileNotFoundError: If the specified file does not exist. + KeyError: If required keys are missing in the YAML file. + """ + with open(input_path, "r") as file: + calib_data = yaml.safe_load(file) + + parent_camera = np.array(calib_data["default"]["intrinsic"][0]["camera_matrix"]).reshape((3, 3)) + parent_dist_coeffs = np.array(calib_data["default"]["intrinsic"][0]["dist_coeffs"]).reshape((-1, 1)) + parent_image_dim = np.array(calib_data["default"]["intrinsic"][0]["image_dim"]) + child_camera = np.array(calib_data["default"]["intrinsic"][1]["camera_matrix"]).reshape((3, 3)) + child_dist_coeffs = np.array(calib_data["default"]["intrinsic"][1]["dist_coeffs"]).reshape((-1, 1)) + child_image_dim = np.array(calib_data["default"]["intrinsic"][1]["image_dim"]) + R = np.array(calib_data["default"]["extrinsic"][0][1]["R"]).reshape((3, 3)) + T = np.array(calib_data["default"]["extrinsic"][0][1]["T"]).reshape((-1, 3)) + + # saving out reproj err not supported, currently. + # does not save out reproj err. + # So we set it to 0 here. 
+ calib_results: CalibrationResults = { + "camera_matrix_origin": parent_camera, + "dist_coeffs_origin": parent_dist_coeffs, + "image_dim_origin": parent_image_dim, + "camera_matrix_reference": child_camera, + "dist_coeffs_reference": child_dist_coeffs, + "image_dim_reference": child_image_dim, + "R": R, + "T": T, + "R_handeye": np.eye(3), + "T_handeye": np.zeros((3, 1)), + "average_reprojection_error": 0, + } + return calib_results -def load_dataset_from_path(pathdir: Path) -> Tuple[Dict[str, Dict[str, np.ndarray]], CameraInfo, CameraInfo]: + +# TODO +def load_dataset_from_path(pathdir: Path) -> Tuple[Dict[str, Dict[str, np.ndarray]], CameraInfo]: """ load the data for images, hand_cam_info, ext_cam_info @@ -247,32 +252,31 @@ def create_calibration_save_folders(path: Path) -> None: # TODO -# def save_dataset_to_dir( -# path: Path, images_dict: dict[str, list[np.ndarray]], camera_info_dict: dict[str, CameraInfo] -# ) -> None: -# """ -# Save image dataset to path in a way that's compatible with multistereo_calibration_charuco. +def save_dataset_to_dir( + path: Path, images_dict: dict[str, list[np.ndarray]], camera_info_dict: dict[str, CameraInfo] +) -> None: + """ + Save image dataset to path in a way that's compatible with multistereo_calibration_charuco. -# Also, save the camera infos. + Also, save the camera infos. 
-# See Using the CLI Tool To Calibrate On an Existing Dataset section in the README -# to see the expected folder/data structure for this method to work + See Using the CLI Tool To Calibrate On an Existing Dataset section in the README + to see the expected folder/data structure for this method to work -# Args: -# path (str): The parent path -# images_dict (dict[int, list[np.ndarray]]): The image dataset by camera index -# camera_info_dict (dict[int, CameraInfo]): The camera info by camera index -# """ + Args: + path (str): The parent path + images_dict (dict[int, list[np.ndarray]]): The image dataset by camera index + camera_info_dict (dict[int, CameraInfo]): The camera info by camera index + """ -# create_calibration_save_folders(path) + create_calibration_save_folders(path) -# for cam_idx, images in images_dict.items(): -# cam_dir = path / Path(str(cam_idx)) -# for img_idx, img in enumerate(images): -# img_path = cam_dir / Path(f"{img_idx}.png") -# cv2.imwrite(str(img_path), img) -# # np.save(cam_dir / Path("camera_info.npy"), camera_info_dict[cam_idx]) -# save_CameraInfo_2_file(camera_info_dict[cam_idx], str(cam_idx), cam_dir / Path("camera_info.yaml")) + for cam_idx, images in images_dict.items(): + cam_dir = path / Path(str(cam_idx)) + for img_idx, img in enumerate(images): + img_path = cam_dir / Path(f"{img_idx}.png") + cv2.imwrite(str(img_path), img) + save_CameraInfo_2_file(camera_info_dict[cam_idx], str(cam_idx), cam_dir / Path("camera_info.yaml")) def save_calibration_parameters( @@ -338,11 +342,11 @@ def process_data_with_nested_dictionaries( } # Now add R_handeye and T_handeye if they exist in the data - """ if "R_handeye" in value and "T_handeye" in value: + if "R_handeye" in value and "T_handeye" in value: relations[origin_cam]["planning_frame"] = { "R": flatten_matrix(value["R_handeye"]), "T": flatten_matrix(value["T_handeye"]), - } """ + } return cameras, relations @@ -392,9 +396,14 @@ def process_data_with_nested_dictionaries( # Process the new 
calibration data cameras, relations = process_data_with_nested_dictionaries(data) + first_result = next(iter(data.values())) xform = np.eye(4) - xform[:3, :3] = next(iter(data.values()))["R"] - xform[:3, 3] = next(iter(data.values()))["T"].flatten() + if "R_handeye" in first_result and "T_handeye" in first_result: + xform[:3, :3] = first_result["R_handeye"] + xform[:3, 3] = np.array(first_result["T_handeye"]).flatten() + else: + xform[:3, :3] = first_result["R"] + xform[:3, 3] = first_result["T"].flatten() # Prepare the output data under the specified tag run_params: Dict[str, Any] = {} @@ -527,6 +536,7 @@ def get_multiple_perspective_camera_calibration_dataset( return (np.array(calibration_images, dtype=object), poses) +# TODO def camera_info_to_intrinsics(msg: CameraInfo) -> CameraIntrinsics: """Construct a `CameraIntrinsics` instance from a `CameraInfo` message. @@ -612,6 +622,121 @@ def calibration_helper( return calibration_dict +def setup_calibration_param( + args: argparse.Namespace, +) -> Tuple[argparse.Namespace, cv2.aruco_Dictionary, cv2.aruco_CharucoBoard]: + """Set up calibration parameters from command line arguments. + + Args: + parser (argparse.ArgumentParser): The argument parser to set up from command line. + + Raises: + ValueError: If the provided ArUco dictionary is invalid. + + Returns: + Tuple[argparse.Namespace, cv2.aruco_Dictionary, cv2.aruco_CharucoBoard]: + The parsed arguments, ArUco dictionary, and Charuco board. 
+ """ + if hasattr(cv2.aruco, args.dict_size): + aruco_dict = cv2.aruco.getPredefinedDictionary(getattr(cv2.aruco, args.dict_size)) + else: + raise ValueError(f"Invalid ArUco dictionary: {args.dict_size}") + charuco = create_charuco_board( + num_checkers_width=args.num_checkers_width, + num_checkers_height=args.num_checkers_height, + checker_dim=args.checker_dim, + marker_dim=args.marker_dim, + aruco_dict=aruco_dict, + legacy=args.legacy_charuco_pattern, + ) + + if not args.allow_default_internal_corner_ordering: + logger.warning("Enforcing bottom up charuco ordering. Pre-computing correlation now...") + detect_charuco_corners( + create_ideal_charuco_image(charuco_board=charuco), + charuco_board=charuco, + aruco_dict=aruco_dict, + enforce_ascending_ids_from_bottom_left_corner=True, + ) + if args.show_board_pattern: + logger.warning("Checking board, you'll need to close a window in a sec (press any key)") + charuco_pose_sanity_check( + create_ideal_charuco_image(charuco_board=charuco, colorful=True), + charuco_board=charuco, + aruco_dict=aruco_dict, + ) + return args, aruco_dict, charuco + + +# def run_calibration_process(args: argparse.Namespace, in_hand_bot) -> tuple[CalibrationResults, int, str, str]: +# """ +# This runs the calibration process for either of the two calibration modes. +# Just returns the calibration result. 
+ +# :param args: Description +# :type args: argparse.Namespace +# :return: Description +# :rtype: tuple[CalibrationResults, int, Namespace] + +# Raises: +# FileNotFoundError: Could not find body_t_spot_camera.npy +# ValueError: No data path supplied to load images from +# """ +# args, aruco_dict, charuco = setup_calibration_param(args) +# logger.info(f"Loading images from {args.data_path}") + +# if args.from_data and args.data_path is None: +# logger.warning("Could not load any images to calibrate from, supply --data_path") +# raise ValueError("No data path supplied to load images from") + +# if args.from_data: +# images, hand_cam_info, ext_cam_info = load_dataset_from_path(args.data_path) +# else: +# images, poses = get_multiple_perspective_camera_calibration_dataset( +# auto_cam_cal_robot=in_hand_bot, +# max_num_images=args.max_num_images, +# distances_z=np.arange(*args.dist_from_board_viewpoint_range), +# distances_x=np.arange(*args.dist_along_board_width), +# x_axis_rots=np.arange(*args.x_axis_rot_viewpoint_range), +# y_axis_rots=np.arange(*args.y_axis_rot_viewpoint_range), +# z_axis_rots=np.arange(*args.z_axis_rot_viewpoint_range), +# use_degrees=args.use_degrees, +# settle_time=args.settle_time, +# data_path=args.data_path, +# save_data=args.save_data, +# ) +# hand_camera_matrix = np.array(hand_cam_info.k).reshape((3, 3)) +# hand_camera_distortion_coefficients = np.array(hand_cam_info.d) +# ext_camera_matrix = np.array(ext_cam_info.k).reshape((3, 3)) +# ext_camera_distortion_coefficients = np.array(ext_cam_info.d) + +# camera_matrix_dict = {"parent": hand_camera_matrix, "child": ext_camera_matrix} +# camera_distortion_dict = { +# "parent": hand_camera_distortion_coefficients, +# "child": ext_camera_distortion_coefficients, +# } + +# try: +# calibration_result = calibrate_2_cameras( +# images=images, +# args=args, +# charuco=charuco, +# aruco_dict=aruco_dict, +# camera_matrix_dict=camera_matrix_dict, +# camera_distortion_dict=camera_distortion_dict, +# ) +# 
except Exception as ex: +# logger.error(f"Could not calibrate the two cameras: {ex}") +# raise ex + +# return ( +# calibration_result, +# len(images["parent"]) + len(images["child"]), +# hand_cam_info.header.frame_id, +# ext_cam_info.header.frame_id, +# ) + + # def create_time_synchronizer( # node: Node, # topic_msg_type_pairs: Sequence[TopicMsgPair], diff --git a/spot_wrapper/calibration/charuco_board_detection.py b/spot_wrapper/calibration/charuco_board_detection.py index 65b206c5..714f59e8 100644 --- a/spot_wrapper/calibration/charuco_board_detection.py +++ b/spot_wrapper/calibration/charuco_board_detection.py @@ -1,14 +1,14 @@ # Copyright (c) 2025-2026 Robotics and AI Institute LLC dba RAI Institute. All rights reserved. -# Copyreference (c) 2024 Boston Dynamics AI Institute LLC. All references reserved. - import logging +import time from copy import deepcopy from math import radians from typing import Dict, List, Optional, Tuple, Union import cv2 import numpy as np +from calibration_helpers import CalibrationResults logging.basicConfig( level=logging.INFO, @@ -89,6 +89,7 @@ def create_charuco_board( legacy=True, ) + def multistereo_calibration_charuco( images: np.ndarray, desired_stereo_pairs: Optional[List[Tuple[int, int]]] = None, @@ -200,21 +201,21 @@ def multistereo_calibration_charuco( ) continue logging.info( - f"Attempting to register {origin_camera_idx}" - f"to {reference_camera_idx}, pair {idx} of" + f"Attempting to register {origin_camera_idx} " + f"to {reference_camera_idx}, pair {idx} of " f" {len(desired_stereo_pairs)}" ) try: key = str(origin_camera_idx) + "_" + str(reference_camera_idx) stereo_dict = stereo_calibration_charuco( - origin_images=images[:, origin_camera_idx], - reference_images=images[:, reference_camera_idx], + parent_images={str(i): img for i, img in enumerate(images[:, origin_camera_idx])}, + child_images={str(i): img for i, img in enumerate(images[:, reference_camera_idx])}, charuco_board=charuco_board, aruco_dict=aruco_dict, - 
camera_matrix_origin=camera_matrices[origin_camera_idx], - dist_coeffs_origin=dist_coeffs[origin_camera_idx], - camera_matrix_reference=camera_matrices[reference_camera_idx], - dist_coeffs_reference=dist_coeffs[reference_camera_idx], + camera_matrix_parent=camera_matrices[origin_camera_idx], + dist_coeffs_parent=dist_coeffs[origin_camera_idx], + camera_matrix_child=camera_matrices[reference_camera_idx], + dist_coeffs_child=dist_coeffs[reference_camera_idx], poses=poses, ) camera_matrices[origin_camera_idx] = stereo_dict["camera_matrix_origin"] @@ -494,35 +495,226 @@ def calibrate_single_camera_charuco( raise ValueError(f"Not enough valid points to individually calibrate {debug_str}") +# def stereo_calibration_charuco( +# origin_images: List[np.ndarray], +# reference_images: List[np.ndarray], +# charuco_board: cv2.aruco_CharucoBoard = SPOT_DEFAULT_CHARUCO, +# aruco_dict: cv2.aruco_Dictionary = SPOT_DEFAULT_ARUCO_DICT, +# camera_matrix_origin: Optional[np.ndarray] = None, +# dist_coeffs_origin: Optional[np.ndarray] = None, +# camera_matrix_reference: Optional[np.ndarray] = None, +# dist_coeffs_reference: Optional[np.ndarray] = None, +# poses: Union[np.ndarray, None] = None, +# ) -> Dict: +# """ +# Perform a stereo calibration from a set of synchronized stereo images of a charuco calibration +# board. + +# Args: +# origin_images (List[np.ndarray]): A list of images synced to reference images from camera 0 +# reference_images (List[np.ndarray]): A list of images synced to origin images from camera 1 +# charuco_board (cv2.aruco_CharucoBoard, optional): What charuco board to +# use for the cal. Defaults to SPOT_DEFAULT_CHARUCO. +# aruco_dict (cv2.aruco_Dictionary, optional): What aruco board to use for the cal. +# Defaults to SPOT_DEFAULT_ARUCO_DICT. +# camera_matrix_origin (Optional[np.ndarray], optional): What camera +# matrix to assign to camera 0. If none, is computed. Defaults to None. 
+# dist_coeffs_origin (Optional[np.ndarray], optional): What distortion coefficients +# to assign to camera 0. If None, is computed. Defaults to None. +# camera_matrix_reference (Optional[np.ndarray], optional): What camera +# matrix to assign to camera 1. If none, is computed. . Defaults to None. +# dist_coeffs_reference (Optional[np.ndarray], optional): What distortion coefficients +# to assign to camera 1. If None, is computed. Defaults to None. +# poses (Union[np.ndarray, None]): Either a list of 4x4 homogenous transforms from which +# pictures where taken, or None if unknown. Needs to be supplied for robot to camera cal. +# (planning frame to base frame), or None +# Raises: +# ValueError: Could not calibrate a camera individually +# ValueError: Not enough points to stereo calibrate +# ValueError: Could not stereo calibrate + +# Returns: +# Dict: _description_ +# """ +# if camera_matrix_origin is None or dist_coeffs_origin is None: +# logger.info("Calibrating Origin Camera individually") +# (camera_matrix_origin, dist_coeffs_origin, rmats_origin, tvecs_origin) = calibrate_single_camera_charuco( +# images=origin_images, +# charuco_board=charuco_board, +# aruco_dict=aruco_dict, +# debug_str="for origin camera", +# ) +# if camera_matrix_reference is None or dist_coeffs_origin is None: +# logger.info("Calibrating reference Camera individually ") +# (camera_matrix_reference, dist_coeffs_reference, rmats_reference, tvecs_reference) = ( +# calibrate_single_camera_charuco( +# images=reference_images, +# charuco_board=charuco_board, +# aruco_dict=aruco_dict, +# debug_str="for reference camera", +# ) +# ) + +# if camera_matrix_origin is None or camera_matrix_reference is None: +# raise ValueError("Could not calibrate one of the cameras as desired") + +# all_corners_origin = [] +# all_corners_reference = [] +# all_ids_origin = [] +# all_ids_reference = [] +# img_size = None + +# no_poses = poses is None +# if no_poses: +# poses = [x for x in range(len(origin_images))] +# 
else: +# filtered_poses = [] + +# no_poses = poses is None +# if no_poses: # fill up poses with dummy values so that you can iterate over poses +# # with images zip(origin_images, reference_images, poses) together regardless of if poses +# # are actually supplied (for the sake of brevity) +# poses = [x for x in range(len(origin_images))] +# else: +# filtered_poses = [] + +# for origin_img, reference_img, pose in zip(origin_images, reference_images, poses): +# if img_size is None: +# img_size = origin_img.shape[:2][::-1] + +# origin_charuco_corners, origin_charuco_ids = detect_charuco_corners(origin_img, charuco_board, aruco_dict) +# reference_charuco_corners, reference_charuco_ids = detect_charuco_corners( +# reference_img, charuco_board, aruco_dict +# ) + +# if not no_poses: +# filtered_poses.append(pose) +# if origin_charuco_corners is not None and reference_charuco_corners is not None: +# all_corners_origin.append(origin_charuco_corners) +# all_corners_reference.append(reference_charuco_corners) +# all_ids_origin.append(origin_charuco_ids) +# all_ids_reference.append(reference_charuco_ids) + +# if len(all_corners_origin) > 0: +# obj_points_all = [] +# img_points_origin = [] +# img_points_reference = [] +# for ( +# origin_corners, +# reference_corners, +# origin_ids, +# reference_ids, +# ) in zip( +# all_corners_origin, +# all_corners_reference, +# all_ids_origin, +# all_ids_reference, +# ): +# common_ids = np.intersect1d(origin_ids, reference_ids) +# if len(common_ids) >= 6: # Ensure there are at least 6 points +# obj_points = get_charuco_board_object_points(charuco_board, common_ids) +# obj_points_all.append(obj_points) +# img_points_origin.append(origin_corners[np.isin(origin_ids, common_ids)]) +# img_points_reference.append(reference_corners[np.isin(reference_ids, common_ids)]) + +# if len(obj_points_all) > 0: +# logger.info(f"Collected {len(obj_points_all)} shared point sets for stereo calibration.") +# _, _, _, _, _, R, T, _, _ = cv2.stereoCalibrate( +# 
obj_points_all, +# img_points_origin, +# img_points_reference, +# camera_matrix_origin, +# dist_coeffs_origin, +# camera_matrix_reference, +# dist_coeffs_reference, +# img_size, +# criteria=( +# cv2.TERM_CRITERIA_MAX_ITER + cv2.TERM_CRITERIA_EPS, +# 100, +# 1e-6, +# ), +# flags=cv2.CALIB_FIX_INTRINSIC | cv2.CALIB_USE_LU, +# ) +# logger.info("Stereo calibration completed.") +# result_dict = { +# "dist_coeffs_origin": dist_coeffs_origin, +# "camera_matrix_origin": camera_matrix_origin, +# "image_dim_origin": np.array(origin_images[0].shape[:2]), +# "dist_coeffs_reference": dist_coeffs_reference, +# "camera_matrix_reference": camera_matrix_reference, +# "image_dim_reference": np.array(reference_images[0].shape[:2]), +# "R": R, +# "T": T, +# } + +# if not no_poses: +# logger.info("Attempting hand-eye calibation....") +# # filtered_poses = np.array([np.linalg.inv(pose) for pose in filtered_poses]) +# filtered_poses = np.array(filtered_poses) +# # Use the filtered poses for the target-to-camera transformation +# R_gripper2base = np.array([pose[:3, :3] for pose in filtered_poses]) +# t_gripper2base = np.array([pose[:3, 3] for pose in filtered_poses]) + +# R_handeye, T_handeye = cv2.calibrateHandEye( +# R_gripper2base=R_gripper2base, +# t_gripper2base=t_gripper2base, +# R_target2cam=rmats_origin, +# t_target2cam=tvecs_origin, +# method=cv2.CALIB_HAND_EYE_DANIILIDIS, +# ) +# robot_to_cam = np.eye(4) # Initialize 4x4 identity matrix +# robot_to_cam[:3, :3] = R_handeye # Populate rotation +# robot_to_cam[:3, 3] = T_handeye.flatten() # Populate translation + +# # Invert the homogeneous matrix +# cam_to_robot = np.linalg.inv(robot_to_cam) + +# # Extract rotation and translation from the inverted matrix +# camera_to_robot_R = cam_to_robot[:3, :3] # Extract rotation +# camera_to_robot_T = cam_to_robot[:3, 3] # Extract translation +# logger.info("Hand-eye calibration completed.") + +# # Add the hand-eye calibration results to the final result dictionary +# result_dict["R_handeye"] 
= camera_to_robot_R +# result_dict["T_handeye"] = camera_to_robot_T +# return result_dict +# else: +# raise ValueError("Not enough valid points for stereo calibration.") +# else: +# raise ValueError("Not enough shared points for stereo calibration.") + + def stereo_calibration_charuco( - origin_images: List[np.ndarray], - reference_images: List[np.ndarray], + parent_images: Dict[str, np.ndarray], + child_images: Dict[str, np.ndarray], charuco_board: cv2.aruco_CharucoBoard = SPOT_DEFAULT_CHARUCO, aruco_dict: cv2.aruco_Dictionary = SPOT_DEFAULT_ARUCO_DICT, - camera_matrix_origin: Optional[np.ndarray] = None, - dist_coeffs_origin: Optional[np.ndarray] = None, - camera_matrix_reference: Optional[np.ndarray] = None, - dist_coeffs_reference: Optional[np.ndarray] = None, + camera_matrix_parent: Optional[np.ndarray] = None, + dist_coeffs_parent: Optional[np.ndarray] = None, + camera_matrix_child: Optional[np.ndarray] = None, + dist_coeffs_child: Optional[np.ndarray] = None, poses: Union[np.ndarray, None] = None, -) -> Dict: +) -> CalibrationResults: """ Perform a stereo calibration from a set of synchronized stereo images of a charuco calibration board. Args: - origin_images (List[np.ndarray]): A list of images synced to reference images from camera 0 - reference_images (List[np.ndarray]): A list of images synced to origin images from camera 1 + parent_images (List[np.ndarray]): A list of images synced to reference images from camera 0 + child_images (List[np.ndarray]): A list of images synced to origin images from camera 1 charuco_board (cv2.aruco_CharucoBoard, optional): What charuco board to use for the cal. Defaults to SPOT_DEFAULT_CHARUCO. aruco_dict (cv2.aruco_Dictionary, optional): What aruco board to use for the cal. Defaults to SPOT_DEFAULT_ARUCO_DICT. - camera_matrix_origin (Optional[np.ndarray], optional): What camera + camera_matrix_parent (Optional[np.ndarray], optional): What camera matrix to assign to camera 0. If none, is computed. Defaults to None. 
- dist_coeffs_origin (Optional[np.ndarray], optional): What distortion coefficients + dist_coeffs_parent (Optional[np.ndarray], optional): What distortion coefficients to assign to camera 0. If None, is computed. Defaults to None. - camera_matrix_reference (Optional[np.ndarray], optional): What camera + projection_matrix_origin (Optional[np.ndarray], optional): What projection + matrix to assign to camera 0. If None, is computed. Defaults to None. + camera_matrix_child (Optional[np.ndarray], optional): What camera matrix to assign to camera 1. If none, is computed. . Defaults to None. - dist_coeffs_reference (Optional[np.ndarray], optional): What distortion coefficients + dist_coeffs_child (Optional[np.ndarray], optional): What distortion coefficients to assign to camera 1. If None, is computed. Defaults to None. poses (Union[np.ndarray, None]): Either a list of 4x4 homogenous transforms from which pictures where taken, or None if unknown. Needs to be supplied for robot to camera cal. @@ -535,152 +727,210 @@ def stereo_calibration_charuco( Returns: Dict: _description_ """ - if camera_matrix_origin is None or dist_coeffs_origin is None: - logger.info("Calibrating Origin Camera individually") - (camera_matrix_origin, dist_coeffs_origin, rmats_origin, tvecs_origin) = calibrate_single_camera_charuco( - images=origin_images, - charuco_board=charuco_board, - aruco_dict=aruco_dict, - debug_str="for origin camera", - ) - if camera_matrix_reference is None or dist_coeffs_origin is None: - logger.info("Calibrating reference Camera individually ") - (camera_matrix_reference, dist_coeffs_reference, rmats_reference, tvecs_reference) = ( - calibrate_single_camera_charuco( - images=reference_images, - charuco_board=charuco_board, - aruco_dict=aruco_dict, - debug_str="for reference camera", - ) - ) - - if camera_matrix_origin is None or camera_matrix_reference is None: - raise ValueError("Could not calibrate one of the cameras as desired") - - all_corners_origin = [] - 
all_corners_reference = [] - all_ids_origin = [] - all_ids_reference = [] - img_size = None - - no_poses = poses is None - if no_poses: - poses = [x for x in range(len(origin_images))] + camera_child: np.ndarray = np.eye(3) + coeffs_child: np.ndarray = np.zeros((5,)) + camera_parent: np.ndarray = np.eye(3) + coeffs_parent: np.ndarray = np.zeros((5,)) + + # Pre-filter parent images to frames with enough corners, and simultaneously + # filter poses so they remain aligned with the rmats/tvecs returned by + # calibrate_single_camera_charuco (which silently drops frames with < 8 corners). + valid_parent_imgs: Dict[str, np.ndarray] = {} + poses_aligned_with_parent: Optional[List] = [] if poses is not None else None + for i, (fname, img) in enumerate(parent_images.items()): + corners, _ = detect_charuco_corners(img, charuco_board, aruco_dict) + if corners is not None and len(corners) >= 8: + valid_parent_imgs[fname] = img + if poses is not None: + poses_aligned_with_parent.append(poses[i]) + + # first do single camera cal for parent. 
rotation and tvecs are used, and camera matrix, dist coeffs + logger.info("Calibrating Parent Camera individually") + (camera_matrix_parent_new, dist_coeffs_parent_new, rmats_origin, tvecs_origin) = calibrate_single_camera_charuco( + images=valid_parent_imgs.values(), + charuco_board=charuco_board, + aruco_dict=aruco_dict, + debug_str="for parent camera", + ) + + if camera_matrix_parent_new is None: + raise ValueError("Could not obtain cam matrix parent camera to charuco board, needed for pose") + if dist_coeffs_parent_new is None: + raise ValueError("Could not obtain dist coeffs of parent camera to charuco board, needed for pose") + + # if None passed in, use these newly computed values + if camera_matrix_parent is None: + camera_parent = camera_matrix_parent_new else: - filtered_poses = [] - - no_poses = poses is None - if no_poses: # fill up poses with dummy values so that you can iterate over poses - # with images zip(origin_images, reference_images, poses) together regardless of if poses - # are actually supplied (for the sake of brevity) - poses = [x for x in range(len(origin_images))] + camera_parent = camera_matrix_parent + if dist_coeffs_parent is None: + coeffs_parent = dist_coeffs_parent_new else: - filtered_poses = [] - - for origin_img, reference_img, pose in zip(origin_images, reference_images, poses): - if img_size is None: - img_size = origin_img.shape[:2][::-1] + coeffs_parent = dist_coeffs_parent + + # next do single camera cal for child. 
rotation and tvecs are used, and camera matrix, dist coeffs + start_time = time.perf_counter() + logger.info("Calibrating Child Camera individually") + (camera_matrix_child_new, dist_coeffs_child_new, _, _) = calibrate_single_camera_charuco( + images=child_images.values(), + charuco_board=charuco_board, + aruco_dict=aruco_dict, + debug_str="for child camera", + ) + elapsed_time = time.perf_counter() - start_time + logger.info(f"Finished calibration child camera in {elapsed_time:.4f} seconds") + + if camera_matrix_child_new is None: + raise ValueError("Could not obtain cam matrix child camera to charuco board, needed for pose") + if dist_coeffs_child_new is None: + raise ValueError("Could not obtain dist coeffs of child camera to charuco board, needed for pose") + + logger.info("Finished individual camera calibrations, starting stereo calibration") + + # if None passed in, use these newly computed values + if camera_matrix_child is None: + camera_child = camera_matrix_child_new + else: + camera_child = camera_matrix_child + if dist_coeffs_child is None: + coeffs_child = dist_coeffs_child_new + else: + coeffs_child = dist_coeffs_child + + all_corners_parent = [] + all_corners_child = [] + all_ids_parent = [] + all_ids_child = [] + parent_img_size = None + child_img_size = None + + # let gets corresponding images + parent_keys = set(parent_images.keys()) + child_keys = set(child_images.keys()) + common_keys = parent_keys.intersection(child_keys) + + logger.info("Collecting shared points for stereo calibration") + if len(common_keys) == 0: + raise ValueError("No common images between parent and child cameras for stereo calibration") + + for fname in list(common_keys): + origin_img = parent_images[fname] + reference_img = child_images[fname] + if parent_img_size is None: + parent_img_size = origin_img.shape[:2][::-1] + if child_img_size is None: + child_img_size = reference_img.shape[:2][::-1] origin_charuco_corners, origin_charuco_ids = 
detect_charuco_corners(origin_img, charuco_board, aruco_dict) reference_charuco_corners, reference_charuco_ids = detect_charuco_corners( reference_img, charuco_board, aruco_dict ) - if not no_poses: - filtered_poses.append(pose) if origin_charuco_corners is not None and reference_charuco_corners is not None: - all_corners_origin.append(origin_charuco_corners) - all_corners_reference.append(reference_charuco_corners) - all_ids_origin.append(origin_charuco_ids) - all_ids_reference.append(reference_charuco_ids) + all_corners_parent.append(origin_charuco_corners) + all_corners_child.append(reference_charuco_corners) + all_ids_parent.append(origin_charuco_ids) + all_ids_child.append(reference_charuco_ids) - if len(all_corners_origin) > 0: - obj_points_all = [] - img_points_origin = [] - img_points_reference = [] - for ( - origin_corners, - reference_corners, - origin_ids, - reference_ids, - ) in zip( - all_corners_origin, - all_corners_reference, - all_ids_origin, - all_ids_reference, - ): - common_ids = np.intersect1d(origin_ids, reference_ids) - if len(common_ids) >= 6: # Ensure there are at least 6 points - obj_points = get_charuco_board_object_points(charuco_board, common_ids) - obj_points_all.append(obj_points) - img_points_origin.append(origin_corners[np.isin(origin_ids, common_ids)]) - img_points_reference.append(reference_corners[np.isin(reference_ids, common_ids)]) - - if len(obj_points_all) > 0: - logger.info(f"Collected {len(obj_points_all)} shared point sets for stereo calibration.") - _, _, _, _, _, R, T, _, _ = cv2.stereoCalibrate( - obj_points_all, - img_points_origin, - img_points_reference, - camera_matrix_origin, - dist_coeffs_origin, - camera_matrix_reference, - dist_coeffs_reference, - img_size, - criteria=( - cv2.TERM_CRITERIA_MAX_ITER + cv2.TERM_CRITERIA_EPS, - 100, - 1e-6, - ), - flags=cv2.CALIB_USE_LU, - ) - logger.info("Stereo calibration completed.") - result_dict = { - "dist_coeffs_origin": dist_coeffs_origin, - "camera_matrix_origin": 
camera_matrix_origin, - "image_dim_origin": np.array(origin_images[0].shape[:2]), - "dist_coeffs_reference": dist_coeffs_reference, - "camera_matrix_reference": camera_matrix_reference, - "image_dim_reference": np.array(reference_images[0].shape[:2]), - "R": R, - "T": T, - } - - if not no_poses: - logger.info("Attempting hand-eye calibation....") - # filtered_poses = np.array([np.linalg.inv(pose) for pose in filtered_poses]) - filtered_poses = np.array(filtered_poses) - # Use the filtered poses for the target-to-camera transformation - R_gripper2base = np.array([pose[:3, :3] for pose in filtered_poses]) - t_gripper2base = np.array([pose[:3, 3] for pose in filtered_poses]) - - R_handeye, T_handeye = cv2.calibrateHandEye( - R_gripper2base=R_gripper2base, - t_gripper2base=t_gripper2base, - R_target2cam=rmats_origin, - t_target2cam=tvecs_origin, - method=cv2.CALIB_HAND_EYE_DANIILIDIS, - ) - robot_to_cam = np.eye(4) # Initialize 4x4 identity matrix - robot_to_cam[:3, :3] = R_handeye # Populate rotation - robot_to_cam[:3, 3] = T_handeye.flatten() # Populate translation - - # Invert the homogeneous matrix - cam_to_robot = np.linalg.inv(robot_to_cam) - - # Extract rotation and translation from the inverted matrix - camera_to_robot_R = cam_to_robot[:3, :3] # Extract rotation - camera_to_robot_T = cam_to_robot[:3, 3] # Extract translation - logger.info("Hand-eye calibration completed.") - - # Add the hand-eye calibration results to the final result dictionary - result_dict["R_handeye"] = camera_to_robot_R - result_dict["T_handeye"] = camera_to_robot_T - return result_dict - else: - raise ValueError("Not enough valid points for stereo calibration.") - else: + if len(all_corners_parent) == 0: raise ValueError("Not enough shared points for stereo calibration.") + logger.info("about to find common ids...") + obj_points_all = [] + img_points_origin = [] + img_points_reference = [] + for ( + origin_corners, + reference_corners, + origin_ids, + reference_ids, + ) in zip( + 
all_corners_parent, + all_corners_child, + all_ids_parent, + all_ids_child, + strict=True, + ): + common_ids = np.intersect1d(origin_ids, reference_ids) + if len(common_ids) >= 6: # Ensure there are at least 6 points + obj_points = get_charuco_board_object_points(charuco_board, common_ids) + obj_points_all.append(obj_points) + img_points_origin.append(origin_corners[np.isin(origin_ids, common_ids)]) + img_points_reference.append(reference_corners[np.isin(reference_ids, common_ids)]) + + # sanity check + if len(obj_points_all) == 0: + raise ValueError("Not enough valid points for stereo calibration.") + + logger.info( + f"Collected {len(obj_points_all)} shared point sets for stereo calibration. Starting stereo calibration..." + ) + start_time = time.perf_counter() + err, _, _, _, _, R, T, _, _ = cv2.stereoCalibrate( + obj_points_all, + img_points_origin, + img_points_reference, + camera_parent, + coeffs_parent, + camera_child, + coeffs_child, + parent_img_size, + criteria=( + cv2.TERM_CRITERIA_MAX_ITER + cv2.TERM_CRITERIA_EPS, + 100, + 1e-6, + ), + flags=cv2.CALIB_USE_LU, + ) + elapsed_time = time.perf_counter() - start_time + logger.info(f"Stereo calibration completed in {elapsed_time:.4f} seconds.") + # unfortunately, have to use origin/reference terminology to use existing code structure downstream + + # now we will estimate these + camera_to_robot_R = np.eye(3) # Extract rotation + camera_to_robot_T = np.eye(3) # Extract translation + + if poses is not None: + logger.info("Attempting hand-eye calibation....") + # poses_aligned_with_parent was pre-filtered above to match rmats_origin/tvecs_origin + filtered_poses = np.array(poses_aligned_with_parent) + # Use the filtered poses for the target-to-camera transformation + R_gripper2base = np.array([pose[:3, :3] for pose in filtered_poses]) + t_gripper2base = np.array([pose[:3, 3] for pose in filtered_poses]) + + R_handeye, T_handeye = cv2.calibrateHandEye( + R_gripper2base=R_gripper2base, + 
t_gripper2base=t_gripper2base, + R_target2cam=rmats_origin, + t_target2cam=tvecs_origin, + method=cv2.CALIB_HAND_EYE_DANIILIDIS, + ) + robot_to_cam = np.eye(4) # Initialize 4x4 identity matrix + robot_to_cam[:3, :3] = R_handeye # Populate rotation + robot_to_cam[:3, 3] = T_handeye.flatten() # Populate translation + + # Invert the homogeneous matrix + cam_to_robot = np.linalg.inv(robot_to_cam) + + # Extract rotation and translation from the inverted matrix + camera_to_robot_R = cam_to_robot[:3, :3] # Extract rotation + camera_to_robot_T = cam_to_robot[:3, 3] # Extract translation + logger.info("Hand-eye calibration completed.") + + # save off our work + calibration_results: CalibrationResults = { + "camera_matrix_reference": camera_child, + "dist_coeffs_reference": coeffs_child, + "dist_coeffs_origin": coeffs_parent, + "camera_matrix_origin": camera_parent, + "image_dim_origin": np.array(parent_img_size), + "image_dim_reference": np.array(child_img_size), + "R": R, + "T": T, + "R_handeye": camera_to_robot_R, + "T_handeye": camera_to_robot_T, + "average_reprojection_error": float(np.linalg.norm(err)), + } + return calibration_results def est_camera_t_charuco_board_center( @@ -876,6 +1126,7 @@ def euler_to_rotation_matrix(roll: float, pitch: float, yaw: float) -> np.ndarra logger.info(f"Calculated {len(poses)} relative viewpoints to visit relative to the target.") return poses + def create_ideal_charuco_image(charuco_board: cv2.aruco_CharucoBoard, dim=(500, 700), colorful=False): if OPENCV_VERSION < OPENCV_CHARUCO_LIBRARY_CHANGE_VERSION: img = charuco_board.draw(outSize=dim) @@ -946,4 +1197,4 @@ def visualize_pose_with_axis( camera_matrix = np.eye(3, dtype=np.float32) dist_coeffs = np.zeros(5, dtype=np.float32) rmat, tvec = est_camera_t_charuco_board_center(img, charuco_board, aruco_dict, camera_matrix, dist_coeffs) - return visualize_pose_with_axis(img, rmat, tvec, camera_matrix, dist_coeffs) \ No newline at end of file + return visualize_pose_with_axis(img, rmat, 
tvec, camera_matrix, dist_coeffs) diff --git a/spot_wrapper/calibration/spot_in_hand_camera_calibration.py b/spot_wrapper/calibration/spot_in_hand_camera_calibration.py index b1200e3e..c133096d 100644 --- a/spot_wrapper/calibration/spot_in_hand_camera_calibration.py +++ b/spot_wrapper/calibration/spot_in_hand_camera_calibration.py @@ -1,6 +1,4 @@ -# Copyright (c) 2025 Robotics and AI Institute LLC dba RAI Institute. All rights reserved. - -# Copy reference (c) 2024 Boston Dynamics AI Institute LLC. All references reserved. +# Copyright (c) 2025-2026 Robotics and AI Institute LLC dba RAI Institute. All rights reserved. import logging from time import sleep @@ -44,10 +42,6 @@ from spot_wrapper.calibration.automatic_camera_calibration_robot import ( AutomaticCameraCalibrationRobot, ) -from spot_wrapper.calibration.calibration_util import ( - camera_info_to_intrinsics, - ros_image_to_image, -) from spot_wrapper.calibration.charuco_board_detection import ( convert_camera_t_viewpoint_to_origin_t_planning_frame, est_camera_t_charuco_board_center, @@ -234,31 +228,21 @@ def capture_images( ) -> Union[List, np.ndarray]: if encodings is None: encodings = [cv2.IMREAD_COLOR, cv2.IMREAD_GRAYSCALE] + images = [] + image_responses = self.image_client.get_image(self.image_requests) + + if image_responses: + if len(encodings) != len(image_responses): + raise ValueError("Need to specify an encoding for each image request") + for response, encoding in zip(image_responses, encodings): + image_data = response.shot.image.data + image_data = np.frombuffer(image_data, np.uint8) + image_data = cv2.imdecode(image_data, encoding) + images.append(image_data) + else: + raise ValueError(f"Could not obtain desired images {self.image_requests}") - self.clear_latest_image() - images: List[np.ndarray] = [] - image_data = None - logger.info("Waiting to acquire images from cameras...") - while image_data is None: - image_data = self.get_latest_image() - - spot_img = ( - 
ros_image_to_image(image_data.spot_cam_msg, ros_encoding="bgr8").to_numpy().astype(np.uint8) - ) # type: ignore - self.estimated_camera_matrix = camera_info_to_intrinsics(image_data.spot_cam_info) - self.hand_cam_info = image_data.spot_cam_info - ext_cam_img = ros_image_to_image(image_data.external_cam_msg, ros_encoding="bgr8").to_numpy().astype(np.uint8) - self.ext_cam_info = image_data.external_cam_info - - if (ext_cam_img.shape[0] > spot_img.shape[0]) or (ext_cam_img.shape[1] > spot_img.shape[1]): - logger.info("had to resize external camera image.") - target_height, target_width, _ = spot_img.shape - ext_cam_img = cv2.resize(ext_cam_img, (target_width, target_height), interpolation=cv2.INTER_AREA) - - images.append(spot_img) - images.append(ext_cam_img) - - return np.array(images, dtype=np.uint8) + return np.array(images, dtype=object) def localize_target_to_principal_camera(self, images: Union[List, np.ndarray]) -> Tuple[np.ndarray, np.ndarray]: try: From 0d4eac1db35ca24789daafe3e588ec84cf0e49a5 Mon Sep 17 00:00:00 2001 From: Tiffany Cappellari Date: Tue, 3 Mar 2026 16:50:03 -0500 Subject: [PATCH 06/12] calibration results after taking pics look good --- spot_wrapper/calibration/calibration_clis.py | 7 -- .../calibration/calibration_helpers.py | 2 +- .../calibration/charuco_board_detection.py | 104 ++++++++++++++---- 3 files changed, 84 insertions(+), 29 deletions(-) diff --git a/spot_wrapper/calibration/calibration_clis.py b/spot_wrapper/calibration/calibration_clis.py index ec88cd82..9decd5e1 100644 --- a/spot_wrapper/calibration/calibration_clis.py +++ b/spot_wrapper/calibration/calibration_clis.py @@ -237,13 +237,6 @@ def calibrator_cli() -> argparse.ArgumentParser: ), ) - parser.add_argument( - "--use_kabsch", - action="store_true", - default=False, - help="Whether to use the Kabsch algorithm for rotation estimation.", - ) - return parser diff --git a/spot_wrapper/calibration/calibration_helpers.py b/spot_wrapper/calibration/calibration_helpers.py index 
515f6829..837678a4 100644 --- a/spot_wrapper/calibration/calibration_helpers.py +++ b/spot_wrapper/calibration/calibration_helpers.py @@ -5,8 +5,8 @@ # import dataclasses import logging from dataclasses import dataclass +from typing import Optional, TypedDict -# from typing import NamedTuple, Optional, Type, TypedDict import numpy as np import numpy.typing as npt import torch diff --git a/spot_wrapper/calibration/charuco_board_detection.py b/spot_wrapper/calibration/charuco_board_detection.py index 714f59e8..a4276a3d 100644 --- a/spot_wrapper/calibration/charuco_board_detection.py +++ b/spot_wrapper/calibration/charuco_board_detection.py @@ -850,12 +850,24 @@ def stereo_calibration_charuco( all_ids_child, strict=True, ): - common_ids = np.intersect1d(origin_ids, reference_ids) + common_ids = np.intersect1d(origin_ids, reference_ids) # sorted flat array if len(common_ids) >= 6: # Ensure there are at least 6 points obj_points = get_charuco_board_object_points(charuco_board, common_ids) + # Build id->corner-index maps so we can look up corners in the same + # sorted order as common_ids / obj_points. Using np.isin preserves + # the original detection order, which may differ from intersect1d's + # sorted order, causing a 2D-3D correspondence mismatch → NaN R/T. 
+ origin_id_to_idx = {int(np.ravel(id_)[0]): i for i, id_ in enumerate(origin_ids)} + reference_id_to_idx = {int(np.ravel(id_)[0]): i for i, id_ in enumerate(reference_ids)} + selected_origin = np.array( + [origin_corners[origin_id_to_idx[int(cid)]] for cid in common_ids], dtype=np.float32 + ) + selected_reference = np.array( + [reference_corners[reference_id_to_idx[int(cid)]] for cid in common_ids], dtype=np.float32 + ) obj_points_all.append(obj_points) - img_points_origin.append(origin_corners[np.isin(origin_ids, common_ids)]) - img_points_reference.append(reference_corners[np.isin(reference_ids, common_ids)]) + img_points_origin.append(selected_origin) + img_points_reference.append(selected_reference) # sanity check if len(obj_points_all) == 0: @@ -864,26 +876,76 @@ def stereo_calibration_charuco( logger.info( f"Collected {len(obj_points_all)} shared point sets for stereo calibration. Starting stereo calibration..." ) + # Compute the stereo extrinsic (parent-to-child) via per-view PnP rather than + # cv2.stereoCalibrate, which is prone to NaN when the two cameras have very + # different resolutions or when the IR/depth images produce noisy detections + # that destabilise the Essential-matrix initialisation used internally by + # stereoCalibrate. Per-view PnP + SVD averaging is more robust. 
start_time = time.perf_counter() - err, _, _, _, _, R, T, _, _ = cv2.stereoCalibrate( - obj_points_all, - img_points_origin, - img_points_reference, - camera_parent, - coeffs_parent, - camera_child, - coeffs_child, - parent_img_size, - criteria=( - cv2.TERM_CRITERIA_MAX_ITER + cv2.TERM_CRITERIA_EPS, - 100, - 1e-6, - ), - flags=cv2.CALIB_USE_LU, - ) + R_rel_list: List[np.ndarray] = [] + T_rel_list: List[np.ndarray] = [] + reproj_errors: List[float] = [] + for op, ip_parent, ip_child in zip(obj_points_all, img_points_origin, img_points_reference): + ok_p, rvec_p, tvec_p = cv2.solvePnP(op, ip_parent, camera_parent, coeffs_parent, flags=cv2.SOLVEPNP_ITERATIVE) + ok_c, rvec_c, tvec_c = cv2.solvePnP(op, ip_child, camera_child, coeffs_child, flags=cv2.SOLVEPNP_ITERATIVE) + if not ok_p or not ok_c: + logger.warning("solvePnP failed for a view; skipping.") + continue + R_p, _ = cv2.Rodrigues(rvec_p) + R_c, _ = cv2.Rodrigues(rvec_c) + # child_R_parent, child_T_parent (X_child = R @ X_parent + T) + R_i = R_c @ R_p.T + T_i = tvec_c.flatten() - R_i @ tvec_p.flatten() + R_rel_list.append(R_i) + T_rel_list.append(T_i) + # per-view reprojection error in parent camera for a rough quality metric + proj, _ = cv2.projectPoints(op, rvec_p, tvec_p, camera_parent, coeffs_parent) + reproj_errors.append(float(np.mean(np.linalg.norm(proj.reshape(-1, 2) - ip_parent.reshape(-1, 2), axis=1)))) + + if len(R_rel_list) < 3: + raise ValueError( + f"Only {len(R_rel_list)} views produced valid PnP solutions; need at least 3 for stereo extrinsic." + ) + + T_arr = np.array(T_rel_list) # (N, 3) + R_arr = np.array(R_rel_list) # (N, 3, 3) + + # --- outlier rejection on T --- + # Each T_i = t_child - R_i @ t_parent. Individual tvec values can be O(1 m), + # so a few noisy IR-camera PnP solutions cause catastrophic cancellation and + # produce wildly large T_i. Use the per-component MAD to find and discard them. 
+ T_median = np.median(T_arr, axis=0) + abs_dev = np.abs(T_arr - T_median) # (N, 3) + mad = np.median(abs_dev, axis=0) # (3,) + mad = np.where(mad < 1e-9, 1e-9, mad) # avoid division by zero + # A view is an inlier only if every component is within 5 MADs of the median + inlier_mask = np.all(abs_dev / mad < 5.0, axis=1) + n_inliers = int(inlier_mask.sum()) + if n_inliers < 3: + logger.warning(f"Outlier rejection left only {n_inliers} T inliers; using all {len(T_arr)} views.") + inlier_mask = np.ones(len(T_arr), dtype=bool) + + T_inliers = T_arr[inlier_mask] + R_inliers = R_arr[inlier_mask] + reproj_inliers = [e for e, ok in zip(reproj_errors, inlier_mask) if ok] + logger.info(f"Stereo extrinsic: {n_inliers}/{len(T_arr)} views kept after outlier rejection.") + + # Robust translation: median of inliers + T = np.median(T_inliers, axis=0).reshape(3, 1) + + # Average rotation matrices over inliers and project back to SO(3) via SVD + R_mean = np.mean(R_inliers, axis=0) + U, _, Vt = np.linalg.svd(R_mean) + R = U @ Vt + if np.linalg.det(R) < 0: # fix improper rotation (reflection) + R = U @ np.diag([1.0, 1.0, -1.0]) @ Vt + + err = float(np.mean(reproj_inliers)) # mean per-view reprojection error (parent camera) elapsed_time = time.perf_counter() - start_time - logger.info(f"Stereo calibration completed in {elapsed_time:.4f} seconds.") - # unfortunately, have to use origin/reference terminology to use existing code structure downstream + logger.info( + f"Stereo extrinsic estimated from {n_inliers} inlier views in {elapsed_time:.4f} s, " + f"mean parent reproj error: {err:.3f} px." 
+ ) # now we will estimate these camera_to_robot_R = np.eye(3) # Extract rotation From aac82b641087bb99867df5f3d04a14758dc5ccfe Mon Sep 17 00:00:00 2001 From: Tiffany Cappellari Date: Tue, 3 Mar 2026 17:17:17 -0500 Subject: [PATCH 07/12] doing calibration math with pre-saved images works --- .../calibration/calibrate_spot_hand.py | 29 ++- spot_wrapper/calibration/calibration_clis.py | 15 +- spot_wrapper/calibration/calibration_util.py | 230 +++++------------- .../calibration/charuco_board_detection.py | 189 -------------- 4 files changed, 90 insertions(+), 373 deletions(-) diff --git a/spot_wrapper/calibration/calibrate_spot_hand.py b/spot_wrapper/calibration/calibrate_spot_hand.py index c8229858..c55ecd20 100644 --- a/spot_wrapper/calibration/calibrate_spot_hand.py +++ b/spot_wrapper/calibration/calibrate_spot_hand.py @@ -30,9 +30,19 @@ logger = logging.getLogger(__name__) +def _require_robot_credentials(args: argparse.Namespace) -> None: + """Raise a clear error when robot credentials are needed but were not supplied.""" + missing = [ + flag for flag, val in (("--ip", args.ip), ("--user", args.username), ("--pass", args.password)) if not val + ] + if missing: + raise ValueError(f"Connecting to the robot requires {', '.join(missing)} to be specified.") + + def create_robot( args: argparse.ArgumentParser, charuco: cv2.aruco_CharucoBoard, aruco_dict: cv2.aruco_Dictionary ) -> Tuple[AutomaticCameraCalibrationRobot, argparse.Namespace]: + _require_robot_credentials(args) # Replace with your AutomaticCameraCalibrationRobot in_hand_bot = SpotInHandCalibration(args.ip, args.username, args.password) in_hand_bot._set_localization_param( @@ -59,10 +69,11 @@ def create_robot_parser() -> argparse.ArgumentParser: def spot_main() -> None: parser = create_robot_parser() args, aruco_dict, charuco = setup_calibration_param(parser) - in_hand_bot, args = create_robot(args, charuco=charuco, aruco_dict=aruco_dict) # Collect new data and calibrate if not args.from_data: + in_hand_bot, 
args = create_robot(args, charuco=charuco, aruco_dict=aruco_dict) + logger.warning("This script moves the robot around. !!! USE AT YOUR OWN RISK !!!") logger.warning("HOLD Ctrl + C NOW TO CANCEL") logger.warning("The calibration board should be about a meter away with nothing within a meter of the robot.") @@ -85,15 +96,6 @@ def spot_main() -> None: save_data=args.save_data, ) - # calibration = calibrate_2_cameras( - # images=images, - # args=args, - # charuco=charuco, - # aruco_dict=aruco_dict, - # camera_matrix_dict=in_hand_bot.camera_matrix_dict, - # camera_distortion_dict=in_hand_bot.camera_distortion_dict, - # ) - # calibration, num_images, parent_frame, child_frame = run_calibration_process(args) calibration = calibration_helper( images=images, args=args, charuco=charuco, aruco_dict=aruco_dict, poses=poses, result_path=args.result_path ) @@ -101,9 +103,11 @@ def spot_main() -> None: logger.info("Saving calibration to robot...") in_hand_bot.write_calibration_to_robot(calibration) in_hand_bot.shutdown() + # Send previously computed and saved calibration data to the robot elif args.from_yaml: try: + in_hand_bot, args = create_robot(args, charuco=charuco, aruco_dict=aruco_dict) with open(args.data_path, "r") as file: calibration = yaml.safe_load(file) logger.info(f"Loaded calibration data:\n{calibration}") @@ -112,17 +116,20 @@ def spot_main() -> None: in_hand_bot.write_calibration_to_robot(calibration) except Exception as e: raise ValueError(f"Failed to load calibration from {args.data_path}: {e}\nIs it a calibration yaml file?") + in_hand_bot.shutdown() # Load previously collected data and compute calibration else: logger.info(f"Loading images from {args.data_path}") images, poses = load_dataset_from_path(args.data_path) - # calibration, num_images, parent_frame, child_frame = run_calibration_process(args) calibration = calibration_helper( images=images, args=args, charuco=charuco, aruco_dict=aruco_dict, poses=poses, result_path=args.result_path ) if 
args.save_to_robot: + logger.info("Connecting to robot...") + in_hand_bot, args = create_robot(args, charuco=charuco, aruco_dict=aruco_dict) logger.info("Saving calibration to robot...") in_hand_bot.write_calibration_to_robot(calibration) + in_hand_bot.shutdown() logger.info("Calibration complete!") diff --git a/spot_wrapper/calibration/calibration_clis.py b/spot_wrapper/calibration/calibration_clis.py index 9decd5e1..cb8c4980 100644 --- a/spot_wrapper/calibration/calibration_clis.py +++ b/spot_wrapper/calibration/calibration_clis.py @@ -371,8 +371,9 @@ def spot_cli(parser: argparse.ArgumentParser | None = None) -> argparse.Argument "-ip", dest="ip", type=str, - help="The IP address of the Robot to calibrate", - required=True, + help="The IP address of the Robot to calibrate (required when connecting to the robot)", + default=None, + required=False, ) parser.add_argument( "--user", @@ -380,8 +381,9 @@ def spot_cli(parser: argparse.ArgumentParser | None = None) -> argparse.Argument "--username", dest="username", type=str, - help="Robot Username", - required=True, + help="Robot Username (required when connecting to the robot)", + default=None, + required=False, ) parser.add_argument( "--pass", @@ -389,8 +391,9 @@ def spot_cli(parser: argparse.ArgumentParser | None = None) -> argparse.Argument "--password", dest="password", type=str, - help="Robot Password", - required=True, + help="Robot Password (required when connecting to the robot)", + default=None, + required=False, ) parser.add_argument( diff --git a/spot_wrapper/calibration/calibration_util.py b/spot_wrapper/calibration/calibration_util.py index cf91862b..8f3358ec 100644 --- a/spot_wrapper/calibration/calibration_util.py +++ b/spot_wrapper/calibration/calibration_util.py @@ -14,10 +14,6 @@ import numpy as np import yaml from cv_bridge import CvBridge - -# from message_filters import ApproximateTimeSynchronizer, Subscriber -# from rclpy.callback_groups import CallbackGroup -# from rclpy.node import Node from 
sensor_msgs.msg import CameraInfo from sensor_msgs.msg import Image as RosImage @@ -26,11 +22,8 @@ ) from spot_wrapper.calibration.calibration_helpers import ( CalibrationResults, - CameraIntrinsics, Image, ) - -# TopicMsgPair, from spot_wrapper.calibration.charuco_board_detection import ( charuco_pose_sanity_check, create_charuco_board, @@ -67,15 +60,6 @@ def camera_info_to_dict(camera_info: CameraInfo, camera_name: str) -> dict[str, } -# TODO -def save_CameraInfo_2_file(msg: CameraInfo, camera_name: str, file_path: Path) -> None: - """Saves a CameraInfo message to a YAML file.""" - cam_info_msg_dict = camera_info_to_dict(camera_info=msg, camera_name=camera_name) - - with open(file_path, "w") as f: - yaml.dump(cam_info_msg_dict, f, default_flow_style=False) - - # TODO def load_CameraInfo_from_file(file_path: Path) -> CameraInfo: """Loads a CameraInfo message from a YAML file.""" @@ -210,23 +194,76 @@ def load_calibration_parameters(input_path: Path) -> CalibrationResults: # TODO -def load_dataset_from_path(pathdir: Path) -> Tuple[Dict[str, Dict[str, np.ndarray]], CameraInfo]: +# def load_dataset_from_path(pathdir: Path) -> Tuple[Dict[str, Dict[str, np.ndarray]], CameraInfo]: +# """ +# load the data for images, hand_cam_info, ext_cam_info + +# Args: +# pathdir (Path): The absolute pathname to directory containing the dataset. + +# Returns: +# Tuple[np.ndarray, CameraInfo, CameraInfo]: The loaded images, hand camera info, and external camera info. 
+# """ +# images = load_images_from_path(pathdir) +# hciyaml = os.path.join(pathdir, Path("parent"), Path("camera_info.yaml")) +# eciyaml = os.path.join(pathdir, Path("child"), Path("camera_info.yaml")) +# hand_cam_info = load_CameraInfo_from_file(Path(hciyaml)) +# ext_cam_info = load_CameraInfo_from_file(Path(eciyaml)) + +# return images, hand_cam_info, ext_cam_info + + +def load_dataset_from_path(path: str) -> Tuple[np.ndarray, Optional[np.ndarray]]: """ - load the data for images, hand_cam_info, ext_cam_info + Load image dataset from path in a way that's compatible with multistereo_calibration_charuco. + + Also, load the poses if they are available. + + See Using the CLI Tool To Calibrate On an Existing Dataset section in the README + to see the expected folder/data structure for this method to work Args: - pathdir (Path): The absolute pathname to directory containing the dataset. + path (str): The parent path + + Raises: + ValueError: Not possible to load the images Returns: - Tuple[np.ndarray, CameraInfo, CameraInfo]: The loaded images, hand camera info, and external camera info. + np.ndarray: The image dataset """ - images = load_images_from_path(pathdir) - hciyaml = os.path.join(pathdir, Path("parent"), Path("camera_info.yaml")) - eciyaml = os.path.join(pathdir, Path("child"), Path("camera_info.yaml")) - hand_cam_info = load_CameraInfo_from_file(Path(hciyaml)) - ext_cam_info = load_CameraInfo_from_file(Path(eciyaml)) - return images, hand_cam_info, ext_cam_info + def alpha_numeric(x): + return re.search("(\\d+)(?=\\D*$)", x).group() if re.search("(\\d+)(?=\\D*$)", x) else x + + # List all directories within the given path and sort them + dirs = [d for d in os.listdir(path) if os.path.isdir(os.path.join(path, d))] + if len(dirs) == 0: + raise ValueError("No sub-dirs found in datapath from which to load images.") + dirs = sorted(dirs, key=alpha_numeric) # Assuming dir names are integers like "0", "1", etc. 
+ + # Initialize an empty list to store images + images = [] + poses = None + + for dir_name in dirs: + path_match = os.path.join(path, dir_name, "*") + files = sorted( + glob(path_match), + key=alpha_numeric, + ) + if dir_name != "poses": + # Read images and store them + images.append([cv2.imread(fn) for fn in files]) + else: + poses = np.array([np.load(fn) for fn in files]) + + # Convert the list of lists into a NumPy array + # The array shape will be (number_of_images, number_of_directories) + images = np.array(images, dtype=object) + # Transpose the array so that you can access it as images[:, axis] + images = np.transpose(images, (1, 0)) + + return images, poses def create_calibration_save_folders(path: Path) -> None: @@ -251,34 +288,6 @@ def create_calibration_save_folders(path: Path) -> None: logger.info("Done creating folders.") -# TODO -def save_dataset_to_dir( - path: Path, images_dict: dict[str, list[np.ndarray]], camera_info_dict: dict[str, CameraInfo] -) -> None: - """ - Save image dataset to path in a way that's compatible with multistereo_calibration_charuco. - - Also, save the camera infos. 
- - See Using the CLI Tool To Calibrate On an Existing Dataset section in the README - to see the expected folder/data structure for this method to work - - Args: - path (str): The parent path - images_dict (dict[int, list[np.ndarray]]): The image dataset by camera index - camera_info_dict (dict[int, CameraInfo]): The camera info by camera index - """ - - create_calibration_save_folders(path) - - for cam_idx, images in images_dict.items(): - cam_dir = path / Path(str(cam_idx)) - for img_idx, img in enumerate(images): - img_path = cam_dir / Path(f"{img_idx}.png") - cv2.imwrite(str(img_path), img) - save_CameraInfo_2_file(camera_info_dict[cam_idx], str(cam_idx), cam_dir / Path("camera_info.yaml")) - - def save_calibration_parameters( data: Dict, output_path: str, @@ -536,27 +545,6 @@ def get_multiple_perspective_camera_calibration_dataset( return (np.array(calibration_images, dtype=object), poses) -# TODO -def camera_info_to_intrinsics(msg: CameraInfo) -> CameraIntrinsics: - """Construct a `CameraIntrinsics` instance from a `CameraInfo` message. - - Args: - msg: The message to convert. The fields that are copied are the intrinsics (`k`), the distortion (`d`), width, - and height. All other fields are ignored. - - Returns: - The converted camera intrinsics object. - - Raises: - ValueError if `k` contains non-zero skew values. 
- """ - camera_matrix = np.array(msg.k).reshape((3, 3)) - distortion_coeffs = np.array(msg.d) - return CameraIntrinsics( - height=msg.height, width=msg.width, camera_matrix=camera_matrix, distortion_coeffs=distortion_coeffs - ) - - def ros_image_to_image(ros_image: RosImage, cv_bridge: CvBridge | None = None, ros_encoding: str = "rgb8") -> Image: """ Converts from ros image to our generic image datatype @@ -666,95 +654,3 @@ def setup_calibration_param( aruco_dict=aruco_dict, ) return args, aruco_dict, charuco - - -# def run_calibration_process(args: argparse.Namespace, in_hand_bot) -> tuple[CalibrationResults, int, str, str]: -# """ -# This runs the calibration process for either of the two calibration modes. -# Just returns the calibration result. - -# :param args: Description -# :type args: argparse.Namespace -# :return: Description -# :rtype: tuple[CalibrationResults, int, Namespace] - -# Raises: -# FileNotFoundError: Could not find body_t_spot_camera.npy -# ValueError: No data path supplied to load images from -# """ -# args, aruco_dict, charuco = setup_calibration_param(args) -# logger.info(f"Loading images from {args.data_path}") - -# if args.from_data and args.data_path is None: -# logger.warning("Could not load any images to calibrate from, supply --data_path") -# raise ValueError("No data path supplied to load images from") - -# if args.from_data: -# images, hand_cam_info, ext_cam_info = load_dataset_from_path(args.data_path) -# else: -# images, poses = get_multiple_perspective_camera_calibration_dataset( -# auto_cam_cal_robot=in_hand_bot, -# max_num_images=args.max_num_images, -# distances_z=np.arange(*args.dist_from_board_viewpoint_range), -# distances_x=np.arange(*args.dist_along_board_width), -# x_axis_rots=np.arange(*args.x_axis_rot_viewpoint_range), -# y_axis_rots=np.arange(*args.y_axis_rot_viewpoint_range), -# z_axis_rots=np.arange(*args.z_axis_rot_viewpoint_range), -# use_degrees=args.use_degrees, -# settle_time=args.settle_time, -# 
data_path=args.data_path, -# save_data=args.save_data, -# ) -# hand_camera_matrix = np.array(hand_cam_info.k).reshape((3, 3)) -# hand_camera_distortion_coefficients = np.array(hand_cam_info.d) -# ext_camera_matrix = np.array(ext_cam_info.k).reshape((3, 3)) -# ext_camera_distortion_coefficients = np.array(ext_cam_info.d) - -# camera_matrix_dict = {"parent": hand_camera_matrix, "child": ext_camera_matrix} -# camera_distortion_dict = { -# "parent": hand_camera_distortion_coefficients, -# "child": ext_camera_distortion_coefficients, -# } - -# try: -# calibration_result = calibrate_2_cameras( -# images=images, -# args=args, -# charuco=charuco, -# aruco_dict=aruco_dict, -# camera_matrix_dict=camera_matrix_dict, -# camera_distortion_dict=camera_distortion_dict, -# ) -# except Exception as ex: -# logger.error(f"Could not calibrate the two cameras: {ex}") -# raise ex - -# return ( -# calibration_result, -# len(images["parent"]) + len(images["child"]), -# hand_cam_info.header.frame_id, -# ext_cam_info.header.frame_id, -# ) - - -# def create_time_synchronizer( -# node: Node, -# topic_msg_type_pairs: Sequence[TopicMsgPair], -# callback: Callable[..., None], -# callback_group: Optional[CallbackGroup] = None, -# queue_size: int = 30, -# slop_sec: float = 0.3, -# ) -> ApproximateTimeSynchronizer: -# """Creates an `ApproximateTimeSynchronizer` for a list of topic names and msg types - -# See `$BDAI/projects/watch_understand_do/ws/src/wud_ros/wud_ros/look_at_that/lang_to_pcd_server.py` for an example -# Also see: https://github.com/ros2/message_filters/blob/humble/src/message_filters/__init__.py#L242 -# """ -# subscribers = [ -# Subscriber(node, msg_type, topic_name, qos_profile=qos_profile, callback_group=callback_group) -# for topic_name, msg_type, qos_profile in topic_msg_type_pairs -# ] -# time_synchronizer = ApproximateTimeSynchronizer(subscribers, queue_size, slop_sec) -# time_synchronizer.registerCallback(callback) - -# return time_synchronizer diff --git 
a/spot_wrapper/calibration/charuco_board_detection.py b/spot_wrapper/calibration/charuco_board_detection.py index a4276a3d..d2856e4f 100644 --- a/spot_wrapper/calibration/charuco_board_detection.py +++ b/spot_wrapper/calibration/charuco_board_detection.py @@ -495,195 +495,6 @@ def calibrate_single_camera_charuco( raise ValueError(f"Not enough valid points to individually calibrate {debug_str}") -# def stereo_calibration_charuco( -# origin_images: List[np.ndarray], -# reference_images: List[np.ndarray], -# charuco_board: cv2.aruco_CharucoBoard = SPOT_DEFAULT_CHARUCO, -# aruco_dict: cv2.aruco_Dictionary = SPOT_DEFAULT_ARUCO_DICT, -# camera_matrix_origin: Optional[np.ndarray] = None, -# dist_coeffs_origin: Optional[np.ndarray] = None, -# camera_matrix_reference: Optional[np.ndarray] = None, -# dist_coeffs_reference: Optional[np.ndarray] = None, -# poses: Union[np.ndarray, None] = None, -# ) -> Dict: -# """ -# Perform a stereo calibration from a set of synchronized stereo images of a charuco calibration -# board. - -# Args: -# origin_images (List[np.ndarray]): A list of images synced to reference images from camera 0 -# reference_images (List[np.ndarray]): A list of images synced to origin images from camera 1 -# charuco_board (cv2.aruco_CharucoBoard, optional): What charuco board to -# use for the cal. Defaults to SPOT_DEFAULT_CHARUCO. -# aruco_dict (cv2.aruco_Dictionary, optional): What aruco board to use for the cal. -# Defaults to SPOT_DEFAULT_ARUCO_DICT. -# camera_matrix_origin (Optional[np.ndarray], optional): What camera -# matrix to assign to camera 0. If none, is computed. Defaults to None. -# dist_coeffs_origin (Optional[np.ndarray], optional): What distortion coefficients -# to assign to camera 0. If None, is computed. Defaults to None. -# camera_matrix_reference (Optional[np.ndarray], optional): What camera -# matrix to assign to camera 1. If none, is computed. . Defaults to None. 
-# dist_coeffs_reference (Optional[np.ndarray], optional): What distortion coefficients -# to assign to camera 1. If None, is computed. Defaults to None. -# poses (Union[np.ndarray, None]): Either a list of 4x4 homogenous transforms from which -# pictures where taken, or None if unknown. Needs to be supplied for robot to camera cal. -# (planning frame to base frame), or None -# Raises: -# ValueError: Could not calibrate a camera individually -# ValueError: Not enough points to stereo calibrate -# ValueError: Could not stereo calibrate - -# Returns: -# Dict: _description_ -# """ -# if camera_matrix_origin is None or dist_coeffs_origin is None: -# logger.info("Calibrating Origin Camera individually") -# (camera_matrix_origin, dist_coeffs_origin, rmats_origin, tvecs_origin) = calibrate_single_camera_charuco( -# images=origin_images, -# charuco_board=charuco_board, -# aruco_dict=aruco_dict, -# debug_str="for origin camera", -# ) -# if camera_matrix_reference is None or dist_coeffs_origin is None: -# logger.info("Calibrating reference Camera individually ") -# (camera_matrix_reference, dist_coeffs_reference, rmats_reference, tvecs_reference) = ( -# calibrate_single_camera_charuco( -# images=reference_images, -# charuco_board=charuco_board, -# aruco_dict=aruco_dict, -# debug_str="for reference camera", -# ) -# ) - -# if camera_matrix_origin is None or camera_matrix_reference is None: -# raise ValueError("Could not calibrate one of the cameras as desired") - -# all_corners_origin = [] -# all_corners_reference = [] -# all_ids_origin = [] -# all_ids_reference = [] -# img_size = None - -# no_poses = poses is None -# if no_poses: -# poses = [x for x in range(len(origin_images))] -# else: -# filtered_poses = [] - -# no_poses = poses is None -# if no_poses: # fill up poses with dummy values so that you can iterate over poses -# # with images zip(origin_images, reference_images, poses) together regardless of if poses -# # are actually supplied (for the sake of brevity) -# poses 
= [x for x in range(len(origin_images))] -# else: -# filtered_poses = [] - -# for origin_img, reference_img, pose in zip(origin_images, reference_images, poses): -# if img_size is None: -# img_size = origin_img.shape[:2][::-1] - -# origin_charuco_corners, origin_charuco_ids = detect_charuco_corners(origin_img, charuco_board, aruco_dict) -# reference_charuco_corners, reference_charuco_ids = detect_charuco_corners( -# reference_img, charuco_board, aruco_dict -# ) - -# if not no_poses: -# filtered_poses.append(pose) -# if origin_charuco_corners is not None and reference_charuco_corners is not None: -# all_corners_origin.append(origin_charuco_corners) -# all_corners_reference.append(reference_charuco_corners) -# all_ids_origin.append(origin_charuco_ids) -# all_ids_reference.append(reference_charuco_ids) - -# if len(all_corners_origin) > 0: -# obj_points_all = [] -# img_points_origin = [] -# img_points_reference = [] -# for ( -# origin_corners, -# reference_corners, -# origin_ids, -# reference_ids, -# ) in zip( -# all_corners_origin, -# all_corners_reference, -# all_ids_origin, -# all_ids_reference, -# ): -# common_ids = np.intersect1d(origin_ids, reference_ids) -# if len(common_ids) >= 6: # Ensure there are at least 6 points -# obj_points = get_charuco_board_object_points(charuco_board, common_ids) -# obj_points_all.append(obj_points) -# img_points_origin.append(origin_corners[np.isin(origin_ids, common_ids)]) -# img_points_reference.append(reference_corners[np.isin(reference_ids, common_ids)]) - -# if len(obj_points_all) > 0: -# logger.info(f"Collected {len(obj_points_all)} shared point sets for stereo calibration.") -# _, _, _, _, _, R, T, _, _ = cv2.stereoCalibrate( -# obj_points_all, -# img_points_origin, -# img_points_reference, -# camera_matrix_origin, -# dist_coeffs_origin, -# camera_matrix_reference, -# dist_coeffs_reference, -# img_size, -# criteria=( -# cv2.TERM_CRITERIA_MAX_ITER + cv2.TERM_CRITERIA_EPS, -# 100, -# 1e-6, -# ), -# flags=cv2.CALIB_FIX_INTRINSIC 
| cv2.CALIB_USE_LU, -# ) -# logger.info("Stereo calibration completed.") -# result_dict = { -# "dist_coeffs_origin": dist_coeffs_origin, -# "camera_matrix_origin": camera_matrix_origin, -# "image_dim_origin": np.array(origin_images[0].shape[:2]), -# "dist_coeffs_reference": dist_coeffs_reference, -# "camera_matrix_reference": camera_matrix_reference, -# "image_dim_reference": np.array(reference_images[0].shape[:2]), -# "R": R, -# "T": T, -# } - -# if not no_poses: -# logger.info("Attempting hand-eye calibation....") -# # filtered_poses = np.array([np.linalg.inv(pose) for pose in filtered_poses]) -# filtered_poses = np.array(filtered_poses) -# # Use the filtered poses for the target-to-camera transformation -# R_gripper2base = np.array([pose[:3, :3] for pose in filtered_poses]) -# t_gripper2base = np.array([pose[:3, 3] for pose in filtered_poses]) - -# R_handeye, T_handeye = cv2.calibrateHandEye( -# R_gripper2base=R_gripper2base, -# t_gripper2base=t_gripper2base, -# R_target2cam=rmats_origin, -# t_target2cam=tvecs_origin, -# method=cv2.CALIB_HAND_EYE_DANIILIDIS, -# ) -# robot_to_cam = np.eye(4) # Initialize 4x4 identity matrix -# robot_to_cam[:3, :3] = R_handeye # Populate rotation -# robot_to_cam[:3, 3] = T_handeye.flatten() # Populate translation - -# # Invert the homogeneous matrix -# cam_to_robot = np.linalg.inv(robot_to_cam) - -# # Extract rotation and translation from the inverted matrix -# camera_to_robot_R = cam_to_robot[:3, :3] # Extract rotation -# camera_to_robot_T = cam_to_robot[:3, 3] # Extract translation -# logger.info("Hand-eye calibration completed.") - -# # Add the hand-eye calibration results to the final result dictionary -# result_dict["R_handeye"] = camera_to_robot_R -# result_dict["T_handeye"] = camera_to_robot_T -# return result_dict -# else: -# raise ValueError("Not enough valid points for stereo calibration.") -# else: -# raise ValueError("Not enough shared points for stereo calibration.") - - def stereo_calibration_charuco( parent_images: 
Dict[str, np.ndarray], child_images: Dict[str, np.ndarray], From db2144a044e8c54775dc17003b4d650df5cae334 Mon Sep 17 00:00:00 2001 From: Tiffany Cappellari Date: Wed, 4 Mar 2026 12:07:52 -0500 Subject: [PATCH 08/12] ithink this works but Lu disagrees :( --- .../calibration/calibrate_spot_hand.py | 4 +-- .../calibration/calibration_helpers.py | 19 ----------- spot_wrapper/calibration/calibration_util.py | 22 +------------ .../spot_in_hand_camera_calibration.py | 33 +++++++++++-------- 4 files changed, 22 insertions(+), 56 deletions(-) diff --git a/spot_wrapper/calibration/calibrate_spot_hand.py b/spot_wrapper/calibration/calibrate_spot_hand.py index c55ecd20..bf623052 100644 --- a/spot_wrapper/calibration/calibrate_spot_hand.py +++ b/spot_wrapper/calibration/calibrate_spot_hand.py @@ -66,7 +66,7 @@ def create_robot_parser() -> argparse.ArgumentParser: return spot_cli(parser=parser) # Replace with robot specific parsing -def spot_main() -> None: +def calibrate_spot_hand() -> None: parser = create_robot_parser() args, aruco_dict, charuco = setup_calibration_param(parser) @@ -135,4 +135,4 @@ def spot_main() -> None: if __name__ == "__main__": - spot_main() + calibrate_spot_hand() diff --git a/spot_wrapper/calibration/calibration_helpers.py b/spot_wrapper/calibration/calibration_helpers.py index 837678a4..41c3ab78 100644 --- a/spot_wrapper/calibration/calibration_helpers.py +++ b/spot_wrapper/calibration/calibration_helpers.py @@ -12,24 +12,11 @@ import torch from jaxtyping import Float, Shaped -# from rclpy.qos import QoSProfile -# from sensor_msgs.msg import CameraInfo -# from sensor_msgs.msg import Image as ROSImage - logger = logging.getLogger(__name__) DISTORTION_COEFFICIENT_SIZES = [0, 4, 5, 8, 12, 14] -# @dataclasses.dataclass -# class CameraCalCapture: -# capture_time: float -# external_cam_msg: ROSImage -# external_cam_info: CameraInfo -# spot_cam_msg: ROSImage -# spot_cam_info: CameraInfo - - class CalibrationResults(TypedDict): dist_coeffs_origin: np.ndarray 
camera_matrix_origin: np.ndarray @@ -44,12 +31,6 @@ class CalibrationResults(TypedDict): average_reprojection_error: float -# class TopicMsgPair(NamedTuple): -# topic_name: str -# msg_type: Type -# qos_profile: QoSProfile | int = 10 - - @dataclass class Image: # Note: this is a jaxtyping-style annotation; diff --git a/spot_wrapper/calibration/calibration_util.py b/spot_wrapper/calibration/calibration_util.py index 8f3358ec..05c256d1 100644 --- a/spot_wrapper/calibration/calibration_util.py +++ b/spot_wrapper/calibration/calibration_util.py @@ -193,26 +193,6 @@ def load_calibration_parameters(input_path: Path) -> CalibrationResults: return calib_results -# TODO -# def load_dataset_from_path(pathdir: Path) -> Tuple[Dict[str, Dict[str, np.ndarray]], CameraInfo]: -# """ -# load the data for images, hand_cam_info, ext_cam_info - -# Args: -# pathdir (Path): The absolute pathname to directory containing the dataset. - -# Returns: -# Tuple[np.ndarray, CameraInfo, CameraInfo]: The loaded images, hand camera info, and external camera info. -# """ -# images = load_images_from_path(pathdir) -# hciyaml = os.path.join(pathdir, Path("parent"), Path("camera_info.yaml")) -# eciyaml = os.path.join(pathdir, Path("child"), Path("camera_info.yaml")) -# hand_cam_info = load_CameraInfo_from_file(Path(hciyaml)) -# ext_cam_info = load_CameraInfo_from_file(Path(eciyaml)) - -# return images, hand_cam_info, ext_cam_info - - def load_dataset_from_path(path: str) -> Tuple[np.ndarray, Optional[np.ndarray]]: """ Load image dataset from path in a way that's compatible with multistereo_calibration_charuco. 
@@ -592,7 +572,7 @@ def calibration_helper( logger.info("Saving calibration param...") if result_path is None: - result_path = input("Please provide a path to save the calibration results (or type 'No' to skip): ") + result_path = "calibration_result.yaml" # default file name if none provided args.result_path = result_path diff --git a/spot_wrapper/calibration/spot_in_hand_camera_calibration.py b/spot_wrapper/calibration/spot_in_hand_camera_calibration.py index c133096d..fc7a7322 100644 --- a/spot_wrapper/calibration/spot_in_hand_camera_calibration.py +++ b/spot_wrapper/calibration/spot_in_hand_camera_calibration.py @@ -122,13 +122,13 @@ def extract_calibration_parameters(self, calibration_dict: dict, tag: str) -> di calibration["rgb_intrinsic"] = np.asarray(calibration_dict[tag]["intrinsic"][0]["camera_matrix"]).reshape( (3, 3) ) - rgb_to_depth_T = np.array(calibration_dict[tag]["extrinsic"][1][0]["T"]).reshape((3, 1)) - rgb_to_depth_R = np.array(calibration_dict[tag]["extrinsic"][1][0]["R"]).reshape((3, 3)) + rgb_to_depth_T = np.array(calibration_dict[tag]["extrinsic"][0][1]["T"]).reshape((3, 1)) + rgb_to_depth_R = np.array(calibration_dict[tag]["extrinsic"][0][1]["R"]).reshape((3, 3)) calibration["rgb_to_depth"] = np.vstack( (np.hstack((rgb_to_depth_R, rgb_to_depth_T)), np.array([0, 0, 0, 1])) ) - depth_to_hand_T = np.array(calibration_dict[tag]["extrinsic"][1]["planning_frame"]["T"]).reshape((3, 1)) - depth_to_hand_R = np.array(calibration_dict[tag]["extrinsic"][1]["planning_frame"]["R"]).reshape((3, 3)) + depth_to_hand_T = np.array(calibration_dict[tag]["extrinsic"][0]["planning_frame"]["T"]).reshape((3, 1)) + depth_to_hand_R = np.array(calibration_dict[tag]["extrinsic"][0]["planning_frame"]["R"]).reshape((3, 3)) calibration["depth_to_hand"] = np.vstack( (np.hstack((depth_to_hand_R, depth_to_hand_T)), np.array([0, 0, 0, 1])) ) @@ -155,8 +155,10 @@ def write_calibration_to_robot(self, cal_dict: dict, tag: str = "default") -> No def 
convert_pinhole_intrinsic_to_proto(intrinsic_matrix: np.ndarray) -> ImageSource.PinholeModel: """Converts a 3x3 intrinsic matrix to a PinholeModel protobuf.""" pinhole_model = ImageSource.PinholeModel() - pinhole_model.CameraIntrinsics.focal_length = intrinsic_matrix[0, :1] - pinhole_model.CameraIntrinsics.principal_point = (intrinsic_matrix[0, 2], intrinsic_matrix[1, 2]) + pinhole_model.intrinsics.focal_length.x = float(intrinsic_matrix[0, 0]) + pinhole_model.intrinsics.focal_length.y = float(intrinsic_matrix[1, 1]) + pinhole_model.intrinsics.principal_point.x = float(intrinsic_matrix[0, 2]) + pinhole_model.intrinsics.principal_point.y = float(intrinsic_matrix[1, 2]) return pinhole_model hand_t_wr1_pose = get_a_tform_b( @@ -179,11 +181,13 @@ def convert_pinhole_intrinsic_to_proto(intrinsic_matrix: np.ndarray) -> ImageSou rgb_intrinsics = cal["rgb_intrinsic"] rgb_t_depth = cal["rgb_to_depth"] - depth_to_planning = cal["depth_to_hand"] @ hand_t_planning - rgb_to_planning = rgb_t_depth @ depth_to_planning - - planning_t_depth = np.linalg.inv(depth_to_planning) - planning_t_rgb = np.linalg.inv(rgb_to_planning) + # hand_t_depth: depth camera expressed in hand frame (from hand-eye calibration) + # hand_t_planning: wr1 expressed in hand frame + # wr1_t_depth = wr1_t_hand @ hand_t_depth = inv(hand_t_planning) @ hand_t_depth + wr1_t_hand = np.linalg.inv(hand_t_planning) + planning_t_depth = wr1_t_hand @ cal["depth_to_hand"] + # rgb_t_depth maps depth->rgb; wr1_t_rgb = wr1_t_depth @ depth_t_rgb = wr1_t_depth @ inv(rgb_t_depth) + planning_t_rgb = planning_t_depth @ np.linalg.inv(rgb_t_depth) # Converting calibration data to protobuf format depth_intrinsics_proto = convert_pinhole_intrinsic_to_proto(depth_intrinsics) @@ -191,7 +195,7 @@ def convert_pinhole_intrinsic_to_proto(intrinsic_matrix: np.ndarray) -> ImageSou planning_t_depth_frame_proto = SE3Pose.from_matrix(planning_t_depth).to_proto() planning_t_rgb_frame_proto = SE3Pose.from_matrix(planning_t_rgb).to_proto() - 
gripper_camera_param_pb2.SetGripperCameraCalibrationRequest( + set_req = gripper_camera_param_pb2.SetGripperCameraCalibrationRequest( gripper_cam_cal=gripper_camera_param_pb2.GripperCameraCalibrationProto( depth=gripper_camera_param_pb2.GripperDepthCameraCalibrationParams( wr1_tform_sensor=planning_t_depth_frame_proto, @@ -209,10 +213,11 @@ def convert_pinhole_intrinsic_to_proto(intrinsic_matrix: np.ndarray) -> ImageSou ), ) ) + logger.info(f" Request to send: \n{set_req}\n") # Send the request to the robot try: - # result = self.gripper_camera_client.set_camera_calib(set_req) - logger.info(f" Set Parameters: \n{result}") + result = self.gripper_camera_client.set_camera_calib(set_req) + logger.info(f" Set Parameters: \n{result}\n") except Exception as e: raise ValueError(f"Failed to set calibration parameters on the robot: {e}") From 32980542c0ee5cb7dea7012d17bb23521db09d88 Mon Sep 17 00:00:00 2001 From: Tiffany Cappellari Date: Thu, 5 Mar 2026 14:11:10 -0500 Subject: [PATCH 09/12] i think it actually works now (i tested it and images look pretty good) Signed-off-by: Tiffany Cappellari --- spot_wrapper/calibration/README.md | 216 +++++++++++++++ .../calibration/calibrate_spot_hand.py | 17 +- .../calibration/calibration_helpers.py | 227 +--------------- spot_wrapper/calibration/calibration_util.py | 245 ------------------ .../spot_in_hand_camera_calibration.py | 32 ++- 5 files changed, 252 insertions(+), 485 deletions(-) create mode 100644 spot_wrapper/calibration/README.md diff --git a/spot_wrapper/calibration/README.md b/spot_wrapper/calibration/README.md new file mode 100644 index 00000000..418a28e1 --- /dev/null +++ b/spot_wrapper/calibration/README.md @@ -0,0 +1,216 @@ +# Automatic Robotic Stereo Camera Calibration with Charuco Target + +# Table of Contents + +1. [***Overview***](#overview) +2. [***Quick Start***](#quick-start) +3. [***Check if you have a Legacy Charuco Board***](#check-if-you-have-a-legacy-charuco-board) +4. 
[***All Options***](#all-options) + +# Overview + +![side by side comparison](registration_qualitative_example.jpg) + +This utility streamlines automatic +camera calibration to **solve for the intrinsic and extrinsic parameters for two or more +cameras mounted in a fixed pose relative to each other on a robot** +based off of moving the robot to view a Charuco target from different poses. You can also calculate the calibration from an existing dataset of photos or make a new dataset from a pre-defined list of poses (if available). Additionally, the new calibrations can be directly written to the robot. + +The CLI tool's saving capability allows to store multiple unique calibration runs in one configuration file +with calibration metadata, to document related runs with different setups or parameters. + +For more info, see ```calibration_util.py```, where the functions +```get_multiple_perspective_camera_calibration_dataset``` and ```multistereo_calibration_charuco``` +do most of the heavy lifting. + +> [!NOTE] +> For a more in-depth explanation of this calibration tool, see ![this document](in_depth_calibration_doc.md). + +# Quick Start + +## Full Calibration + +To run through the full calibration script to calibrate your Spot's hand camera (depth to rgb, with rgb at default resolution), first set up your robot and charuco board like below + +![spot eye in hand cal](spot_eye_in_hand_setup.jpg) + +You can then run the following command: + +> [!WARNING] +> The robot will move. Be sure the robot is sitting (not docked) in front of the charuco board and that no one else is holding the lease before beginning (you will have to release control from the tablet). 
+ +```bash +python3 calibrate_spot_hand_camera_cli.py --ip -u -pw +``` + +To save the collected images, add the ```--save_data``` and ```--data_path``` flags: + +```bash +python3 calibrate_spot_hand_camera_cli.py --ip -u -pw \ +--data_path --save_data +``` + +To also save the calculated calibrations, add the ```--result_path``` flag: + +```bash +python3 calibrate_spot_hand_camera_cli.py --ip -u -pw \ +--data_path --save_data --result_path +``` + +To overwrite the robot's internal calibration with your newly calculated one, also add the ```--save_to_robot``` flag: + +```bash +python3 calibrate_spot_hand_camera_cli.py --ip -u -pw \ +--data_path --save_data --result_path --save_to_robot +``` + +> [!NOTE] +> If you are using a legacy charuco board, add ```--legacy_charuco_pattern True```. If you're not sure, go to [Check if you have a Legacy Charuco Board](#check-if-you-have-a-legacy-charuco-board). + +Other options are listed in [All Options](#all-options) below. + +## Calibration with Existing Dataset + +If you already have a folder of images to calibrate on, then add the ```--from_data``` flag and remove the ```--save_data``` flag. This will *not* move the robot: + +```bash +python3 calibrate_spot_hand_camera_cli.py --ip -u -pw \ +--data_path --from_data +``` + +You can still optionally add the ```--result_path ``` and/or ```--save_to_robot``` flags to save the calculated calibration to a yaml file and/or overwrite the robot's internally saved calibrations, respectively. 
+ +## Overwrite Robot Calibration with Existing Yaml + +If you have previously run the calibration script and saved the results, you can send those directly to the robot to be saved on its internal hardware without needing to run the entire calibration script by including the ```--from_yaml``` and ```--data_path``` flags and ensuring the `````` goes to the correct calibration yaml file: + +```bash +python3 calibrate_spot_hand_camera_cli.py --ip -u -pw \ +--from_yaml --data_path --save_to_robot +``` + +## All Options + +Below are all the various flags available to you to further fine-tune your calibration. You can also see them all in your terminal by running: + +```bash +python3 calibrate_spot_hand_camera_cli.py -h +``` + +```bash +options: + -h, --help show this help message and exit + --stereo_pairs STEREO_PAIRS, -sp STEREO_PAIRS + Capture images returns a list of images. Stereo pairs correspond towhat + index in the list of images' corresponding camerato calibrate to what + camera. Say capture_images returns [rgb_img, depth_img]and you want to + register depth to rgb, then the desired stereo pairis "[(1,0)]". If you + want to register more than one pair, you can do it like "[(1, 0), + (2,0)]."Make sure to put the stereo pairs in quotes so bash doesn't + complain + --legacy_charuco_pattern LEGACY_CHARUCO_PATTERN, -l LEGACY_CHARUCO_PATTERN + Whether to use the legacy charuco pattern. For Spot Default board, this + should be True.If you aren't sure if your board is legacy, try supplying + the --check_board_patternarg to verify that the cv2 board matches your + board. + --check_board_pattern + Whether to visually verify the board pattern (to check legacy and internal + corner ordering. + --allow_default_internal_corner_ordering + Whether to allow default internal corner ordering. For new versions of + OpenCV, it is recommended to NOT set this parameter to make sure that + corners are ordered in a known pattern. 
Try supplying the + --check_board_pattern flag to check whether you should enable this flag + When checking, Z-Axis should point out of board. + --photo_utilization_ratio PHOTO_UTILIZATION_RATIO, -pur PHOTO_UTILIZATION_RATIO + Photos that are collected/loaded vs. used for calibration are in a 1 + tophoto utilization ratio. For getting a rough guess on cheaper + hardwarewithout losing collection fidelity. For example, set to 2 to only + use half the photos. + --num_checkers_width NUM_CHECKERS_WIDTH, -ncw NUM_CHECKERS_WIDTH + How many checkers wide is your board + --num_checkers_height NUM_CHECKERS_HEIGHT, -nch NUM_CHECKERS_HEIGHT + How many checkers tall is your board + --dict_size {DICT_4X4_50,DICT_4X4_100,DICT_4X4_250,DICT_4X4_1000,DICT_5X5_50,DICT_5X5_100,DICT_5X5_250,DICT_5X5_1000,DICT_6X6_50,DICT_6X6_100,DICT_6X6_250,DICT_6X6_1000,DICT_7X7_50,DICT_7X7_100,DICT_7X7_250,DICT_7X7_1000,DICT_ARUCO_ORIGINAL} + Choose the ArUco dictionary size. + --checker_dim CHECKER_DIM, -cd CHECKER_DIM + Checker size in meters + --marker_dim MARKER_DIM, -md MARKER_DIM + Aruco Marker size in meters + --data_path DATA_PATH, -dp DATA_PATH + The path in which to save images + --result_path RESULT_PATH, -rp RESULT_PATH + Where to store calibration result as yaml file + --tag TAG, -t TAG What heading to put for the calibration in the config file.If this is your + first time running, the tag should be set to default for the sake of + interoperability with other functionality.If this is a shared config file + with other people, perhaps puta unique identifier, or default, if you'd + like to overridefor everyone. + --unsafe_tag_save If set, skips safety checks for tagging calibration. + --dist_from_board_viewpoint_range DIST_FROM_BOARD_VIEWPOINT_RANGE [DIST_FROM_BOARD_VIEWPOINT_RANGE ...], -dfbvr DIST_FROM_BOARD_VIEWPOINT_RANGE [DIST_FROM_BOARD_VIEWPOINT_RANGE ...] + What distances to conduct calibrations at relative to the board. 
(along the + normal vector) Three value array arg defines the [Start, Stop), step. for + the viewpoint sweep. These are used to sample viewpoints for automatic + collection. + --degrees, -d Use degrees for rotation ranges (default) + --radians, -r Use radians for rotation ranges + --x_axis_rot_viewpoint_range X_AXIS_ROT_VIEWPOINT_RANGE [X_AXIS_ROT_VIEWPOINT_RANGE ...], -xarvr X_AXIS_ROT_VIEWPOINT_RANGE [X_AXIS_ROT_VIEWPOINT_RANGE ...] + What range of viewpoints around x-axis to sample relative to boards normal + vector. Three value array arg defines the [Start, Stop), step. for the + viewpoint sweep These are used to sample viewpoints for automatic + collection. Assuming that the camera pose is in opencv/ROS format. + --y_axis_rot_viewpoint_range Y_AXIS_ROT_VIEWPOINT_RANGE [Y_AXIS_ROT_VIEWPOINT_RANGE ...], -yarvr Y_AXIS_ROT_VIEWPOINT_RANGE [Y_AXIS_ROT_VIEWPOINT_RANGE ...] + What range of viewpoints around y-axis to sample relative to boards normal + vector. Three value array arg defines the [Start, Stop), step. for the + viewpoint sweep These are used to sample viewpoints for automatic + collection. Assuming that the camera pose is in opencv/ROS format. + --z_axis_rot_viewpoint_range Z_AXIS_ROT_VIEWPOINT_RANGE [Z_AXIS_ROT_VIEWPOINT_RANGE ...], -zarvr Z_AXIS_ROT_VIEWPOINT_RANGE [Z_AXIS_ROT_VIEWPOINT_RANGE ...] + What range of viewpoints around z-axis to sample relative to boards normal + vector. Three value array arg defines the [Start, Stop), step. for the + viewpoint sweep These are used to sample viewpoints for automatic + collection. Assuming that the camera pose is in opencv/ROS format. + --max_num_images MAX_NUM_IMAGES + The maximum number of images + --settle_time SETTLE_TIME, -st SETTLE_TIME + How long to wait after movement to take a picture; don't want motion blur + --save_data, -sd whether to save the images to file + --from_data, -fd Whether to only calibrate from recorded dataset on file. 
+ --save_to_robot, -send + Whether to save the calibration to the robot. + --from_yaml, -yaml Whether the data is from a yaml file. Use this and the '--from_data' and ' + --send' args to send a previously saved calibration yaml to the robot + --ip IP, -i IP, -ip IP + The IP address of the Robot to calibrate + --user USERNAME, -u USERNAME, --username USERNAME + Robot Username + --pass PASSWORD, -pw PASSWORD, --password PASSWORD + Robot Password + --spot_rgb_photo_width SPOT_RGB_PHOTO_WIDTH, -dpw SPOT_RGB_PHOTO_WIDTH + What resolution use for Spot's RGB Hand Camera (width). Currently, only 640 + and 1920 are supported + --spot_rgb_photo_height SPOT_RGB_PHOTO_HEIGHT, -dph SPOT_RGB_PHOTO_HEIGHT + What resolution use for Spot's RGB Hand Camera (width). Currently, only 480 + and 1080 are supported +``` + +# Check if you have a Legacy Charuco Board + +You only need to do this if using an opencv version after ```4.7```( +check with```python3 -c "import cv2; print(cv2.__version__)"```) + +Through using the CLI tool (```python3 calibrate_multistereo_cameras_with_charuco_cli.py -h```), you can check if you have a legacy board through visually comparing the generated drawn virtual board to your physical charuco board target. Some legacy boards have an aruco tag in the top +left corner, whether as some non-legacy boards have a checker in the top left corner. +Also, check to see that the aruco tags match between virtual and physical boards. +It is important that the virtual board matches the physical board, otherwise this calibration +will not work. + +``` +python3 calibrate_multistereo_cameras_with_charuco_cli.py --check_board_pattern --legacy_charuco_pattern t +``` + +There should be an axis at the center of the board, where the Y axis (green) +points upwards, the X axis (red) points to the right, and the figure should be labelled +as Z-axis out of board. If it isn't then try without legacy (```--legacy_charuco_pattern f```). 
+ +If you are using the default Spot Calibation board, and there is an aruco marker +in the top left corner, then it legacy (so supply true argument to legacy.) diff --git a/spot_wrapper/calibration/calibrate_spot_hand.py b/spot_wrapper/calibration/calibrate_spot_hand.py index bf623052..330a9fb3 100644 --- a/spot_wrapper/calibration/calibrate_spot_hand.py +++ b/spot_wrapper/calibration/calibrate_spot_hand.py @@ -80,7 +80,6 @@ def calibrate_spot_hand() -> None: logger.warning("The robot should NOT be docked, and nobody should have robot control") logger.warning(f"the ip is: {args.ip}") input("Press Enter to continue...") - # sleep(5) images, poses = get_multiple_perspective_camera_calibration_dataset( auto_cam_cal_robot=in_hand_bot, @@ -104,19 +103,24 @@ def calibrate_spot_hand() -> None: in_hand_bot.write_calibration_to_robot(calibration) in_hand_bot.shutdown() - # Send previously computed and saved calibration data to the robot + # Load and send previously computed and saved calibration data to the robot + # Assumes the user wants to send the calibration to the robot so the -send flag is not needed/checked elif args.from_yaml: try: in_hand_bot, args = create_robot(args, charuco=charuco, aruco_dict=aruco_dict) with open(args.data_path, "r") as file: calibration = yaml.safe_load(file) - logger.info(f"Loaded calibration data:\n{calibration}") - if args.save_to_robot: + send_to_robot = input( + f"Loaded calibration data:\n{calibration}\nDo you want to send this calibration to the robot?" + " (y/n): " + ) + if send_to_robot.strip().lower() == "y": logger.info("Saving calibration to robot...") in_hand_bot.write_calibration_to_robot(calibration) + else: + logger.info("Calibration not sent to robot. 
Shutting down.") except Exception as e: - raise ValueError(f"Failed to load calibration from {args.data_path}: {e}\nIs it a calibration yaml file?") - in_hand_bot.shutdown() + raise ValueError(f"Failed to load calibration from {args.data_path}:\n{e}\n") # Load previously collected data and compute calibration else: logger.info(f"Loading images from {args.data_path}") @@ -129,7 +133,6 @@ def calibrate_spot_hand() -> None: in_hand_bot, args = create_robot(args, charuco=charuco, aruco_dict=aruco_dict) logger.info("Saving calibration to robot...") in_hand_bot.write_calibration_to_robot(calibration) - in_hand_bot.shutdown() logger.info("Calibration complete!") diff --git a/spot_wrapper/calibration/calibration_helpers.py b/spot_wrapper/calibration/calibration_helpers.py index 41c3ab78..73c14ef5 100644 --- a/spot_wrapper/calibration/calibration_helpers.py +++ b/spot_wrapper/calibration/calibration_helpers.py @@ -2,19 +2,16 @@ from __future__ import annotations -# import dataclasses -import logging -from dataclasses import dataclass from typing import Optional, TypedDict import numpy as np -import numpy.typing as npt -import torch -from jaxtyping import Float, Shaped -logger = logging.getLogger(__name__) -DISTORTION_COEFFICIENT_SIZES = [0, 4, 5, 8, 12, 14] +class Intrinsics(TypedDict): + dist_coeffs: np.ndarray + camera_matrix: np.ndarray + nrows: int + ncols: int class CalibrationResults(TypedDict): @@ -29,217 +26,3 @@ class CalibrationResults(TypedDict): R_handeye: Optional[np.ndarray] T_handeye: Optional[np.ndarray] average_reprojection_error: float - - -@dataclass -class Image: - # Note: this is a jaxtyping-style annotation; - # when jaxtyping is active (currently only under pytest), - # the dimensions of the input are bound to the names provided, - # but only in the context of dynamic type-checking of the - # generated __init__ method. - # These same names are later defined as @property methods. 
- image_data: Shaped[torch.Tensor, "channels height width"] # noqa: F722 - """Image tensor with shape (C, H, W), dtype uint8, values in [0,255]""" - - def __post_init__(self) -> None: - # allow for single channel images - if len(self.image_data.shape) < 2: - raise ValueError(f"Not enough dimensions in image data: {self.image_data.shape}. Should be 2d or 3d.") - elif len(self.image_data.shape) == 2: - self.image_data = self.image_data.unsqueeze(0) - - elif len(self.image_data.shape) > 3: - raise ValueError(f"Too many dimensions in image data: {self.image_data.shape}. Should be 2d or 3d.") - - def to_image(self) -> Image: - return self - - @property - def device(self): - return self.image_data.device - - @classmethod - def from_numpy( - cls, - np_image: np.ndarray, - dtype: torch.dtype | None = None, - force_copy: bool = True, - ) -> Image: - """ - Create an Image from numpy array. - - Note that conventions differ between torch and numpy for how to index an image; - this accepts a numpy array with channels as the optional last index. - - Because this method uses `torch.Tensor.contiguous()`, it does not interact well - with multiprocessing, specifically using `fork`. See the README for details. 
- """ - if not force_copy: - logger.warning(f"Cannot pass without copy between numpy and {cls}") - - tensor = torch.as_tensor(np_image, dtype=dtype) - - if len(tensor.shape) == 2: - tensor = tensor.unsqueeze(-1) - - tensor = tensor.permute((2, 0, 1)).contiguous() - - return cls(tensor) - - @classmethod - def zeros(cls, width: int, height: int, channels: int) -> Image: - """Create an all black image.""" - image_data = torch.zeros((channels, height, width)) - return cls.from_tensor(image_data, force_copy=False) - - @property - def channels(self) -> int: - """ - Gets the number of channels in the image - """ - return self.image_data.shape[0] - - @property - def width(self) -> int: - """ - Gets the width of the image - """ - return self.image_data.shape[2] - - @property - def height(self) -> int: - """ - Gets the height of the image - """ - return self.image_data.shape[1] - - @property - def shape(self) -> tuple[int, int]: - """ - Gets a tuple of (height, width) - """ - return self.height, self.width - - @property - def pixel_count(self) -> int: - """ - Gets the number of pixels in the image. This is (height * width), _not_ (height * width * channels). - """ - return self.height * self.width - - -class CameraIntrinsics: - """This is an interface to OpenCV's camera model. It contains a 3x3 camera matrix, and a vector of distortion - coefficients, which must be of length 0, 4, 5, 8, 12 or 14, matching OpenCV's convention. If this vector is too - short, then it is padded with zeros to the next largest size. If the vector is too long, then an exception is - thrown. - (see https://docs.opencv.org/3.4/d9/d0c/group__calib3d.html). - - There are also conversions to and from Open3D's CameraPinholeIntrinsic, with two caveats: - 1. Open3d does _not_ support lens distortion, while CameraIntrinsics does. An exception is thrown if a - CameraIntrinsics object with a non-zero-length distortion vector is converted to an Open3D representation. - 2. 
Open3d _does_ support skewed pixels (i.e., camera matrices with non-zero entries in (0,1) and (1,0)). while - CameraIntrinsics does not. An exception is thrown if a CameraIntrinsics object with any skews. - """ - - # jaxtyping, but not in a dataclass, so they're not checked - camera_matrix: Float[np.ndarray, "3 3"] # noqa: F722 - distortion_coeffs: Float[np.ndarray, "*N"] # noqa: F722 F821 - - def __init__( - self, - camera_matrix: npt.ArrayLike | None = None, - distortion_coeffs: npt.ArrayLike | None = None, - width: int = -1, - height: int = -1, - ): - self.camera_matrix = np.array(camera_matrix) if camera_matrix is not None else np.eye(3) - self.distortion_coeffs = np.array(distortion_coeffs) if distortion_coeffs is not None else np.empty(0) - self.width = width - self.height = height - - if self.camera_matrix.shape != (3, 3): - raise ValueError(f"Camera matrix must be 3x3, got {self.camera_matrix}.") - elif not np.allclose(self.camera_matrix[2, :], np.array([0, 0, 1])): - raise ValueError(f"Camera matrix must be affine, got {self.camera_matrix}.") - CameraIntrinsics._check_skews(self.camera_matrix) - - if len(self.distortion_coeffs.shape) != 1: - raise ValueError(f"Distortion coefficients must be Kx1, got {self.distortion_coeffs.shape}") - elif self.distortion_coeffs.shape[0] > DISTORTION_COEFFICIENT_SIZES[-1]: - raise ValueError( - f"Distortion coefficients list too long, got {self.distortion_coeffs.shape} but the maximum length is" - f" {DISTORTION_COEFFICIENT_SIZES[-1]}." 
- ) - - if self.distortion_coeffs.shape[0] not in DISTORTION_COEFFICIENT_SIZES: - for next_greatest_dist_size in DISTORTION_COEFFICIENT_SIZES: - if next_greatest_dist_size >= self.distortion_coeffs.shape[0]: - break - - self.distortion_coeffs = np.pad( - self.distortion_coeffs, (0, next_greatest_dist_size - self.distortion_coeffs.shape[0]), mode="constant" - ) - - @staticmethod - def _check_skews(camera_matrix: Float[np.ndarray, "3 3"]): # noqa: F722 - if not np.allclose(camera_matrix[np.array([0, 1]), np.array([1, 0])], np.zeros(2)): - raise ValueError(f"Skewed pixels are not supported, got {camera_matrix}.") - - @property - def fx(self) -> float: - return self.camera_matrix[0, 0] - - @fx.setter - def fx(self, val: float) -> None: - if val <= 0.0: - raise ValueError(f"Focal lengths must be positive; got {val}.") - else: - self.camera_matrix[0, 0] = val - - @property - def fy(self) -> float: - return self.camera_matrix[1, 1] - - @fy.setter - def fy(self, val: float) -> None: - if val <= 0.0: - raise ValueError(f"Focal lengths must be positive; got {val}.") - else: - self.camera_matrix[1, 1] = val - - @property - def cx(self) -> float: - return self.camera_matrix[0, 2] - - @cx.setter - def cx(self, val: float) -> None: - if val <= 0.0: - raise ValueError(f"Principal point values lengths must be positive; got {val}.") - elif self.width != -1 and val >= self.width: - raise ValueError( - f"Principal point values lengths must be less than the corresponding image dimension; got {val} but" - f" width is {self.width}." 
- ) - else: - self.camera_matrix[0, 2] = val - - @property - def cy(self) -> float: - return self.camera_matrix[1, 2] - - @cy.setter - def cy(self, val: float) -> None: - if val <= 0.0: - raise ValueError(f"Principal point values lengths must be positive; got {val}.") - elif self.height != -1 and val >= self.width: - raise ValueError( - f"Principal point values lengths must be less than the corresponding image dimension; got {val} but" - f" height is {self.height}." - ) - else: - self.camera_matrix[1, 2] = val - - def scale(self, scale_factor: float) -> None: - self.camera_matrix = self.camera_matrix * scale_factor diff --git a/spot_wrapper/calibration/calibration_util.py b/spot_wrapper/calibration/calibration_util.py index 05c256d1..802fa657 100644 --- a/spot_wrapper/calibration/calibration_util.py +++ b/spot_wrapper/calibration/calibration_util.py @@ -6,27 +6,17 @@ import re from datetime import datetime from glob import glob -from pathlib import Path from time import sleep from typing import Any, Dict, List, Optional, Tuple, Union import cv2 import numpy as np import yaml -from cv_bridge import CvBridge -from sensor_msgs.msg import CameraInfo -from sensor_msgs.msg import Image as RosImage from spot_wrapper.calibration.automatic_camera_calibration_robot import ( AutomaticCameraCalibrationRobot, ) -from spot_wrapper.calibration.calibration_helpers import ( - CalibrationResults, - Image, -) from spot_wrapper.calibration.charuco_board_detection import ( - charuco_pose_sanity_check, - create_charuco_board, create_ideal_charuco_image, detect_charuco_corners, get_relative_viewpoints_from_board_pose_and_param, @@ -38,161 +28,6 @@ directories = ["parent", "child", "poses", "depth"] -def camera_info_to_dict(camera_info: CameraInfo, camera_name: str) -> dict[str, Any]: - return { - "image_width": camera_info.width, - "image_height": camera_info.height, - "camera_name": camera_name, - "camera_matrix": {"rows": 3, "cols": 3, "data": camera_info.k}, - "distortion_model": 
camera_info.distortion_model, - "distortion_coefficients": {"rows": 1, "cols": len(camera_info.d), "data": camera_info.d}, - "rectification_matrix": {"rows": 3, "cols": 3, "data": camera_info.r}, - "projection_matrix": {"rows": 3, "cols": 4, "data": camera_info.p}, - "binning_x": camera_info.binning_x, - "binning_y": camera_info.binning_y, - "roi": { - "x_offset": camera_info.roi.x_offset, - "y_offset": camera_info.roi.y_offset, - "height": camera_info.roi.height, - "width": camera_info.roi.width, - "do_rectify": camera_info.roi.do_rectify, - }, - } - - -# TODO -def load_CameraInfo_from_file(file_path: Path) -> CameraInfo: - """Loads a CameraInfo message from a YAML file.""" - logging.info(f"Loading CameraInfo from {file_path}") - with open(file_path, "r") as f: - cam_info_msg_dict = yaml.unsafe_load(f) - - cam_info_msg = CameraInfo() - cam_info_msg.width = cam_info_msg_dict["image_width"] - cam_info_msg.height = cam_info_msg_dict["image_height"] - cam_info_msg.header.frame_id = cam_info_msg_dict["camera_name"] - cam_info_msg.k = cam_info_msg_dict["camera_matrix"]["data"] - cam_info_msg.distortion_model = cam_info_msg_dict["distortion_model"] - cam_info_msg.d = cam_info_msg_dict["distortion_coefficients"]["data"] - cam_info_msg.r = cam_info_msg_dict["rectification_matrix"]["data"] - cam_info_msg.p = cam_info_msg_dict["projection_matrix"]["data"] - cam_info_msg.binning_x = cam_info_msg_dict["binning_x"] - cam_info_msg.binning_y = cam_info_msg_dict["binning_y"] - roi_dict = cam_info_msg_dict["roi"] - cam_info_msg.roi.x_offset = roi_dict["x_offset"] - cam_info_msg.roi.y_offset = roi_dict["y_offset"] - cam_info_msg.roi.height = roi_dict["height"] - cam_info_msg.roi.width = roi_dict["width"] - cam_info_msg.roi.do_rectify = roi_dict["do_rectify"] - - return cam_info_msg - - -def load_images_from_path(path: Path) -> Dict[str, Dict[str, np.ndarray]]: - """ - Load images dataset from path in a way that's compatible with multistereo_calibration_charuco. 
- - Also, load the poses if they are available. - - See Using the CLI Tool To Calibrate On an Existing Dataset section in the README - to see the expected folder/data structure for this method to work - - Args: - path (str): The parent path - - Raises: - ValueError: Not possible to load the images - - Returns: - dict[str, np.ndarray]: The image dataset - """ - - def alpha_numeric(x: str) -> Any: - matcha = re.search("(\\d+)(?=\\D*$)", x) - # \\d+ Matches one or more digits (0-9), - # \\D* Matches zero or more non-digit characters, - # $ asserts position at the end of the string. - if matcha: - return int(matcha.group()) - return x - - def load_images_from_dir(path: Path, suffix: str) -> Dict[str, np.ndarray]: - print(f"-----------------------Loading images from {path}") - files = sorted( - glob(os.path.join(path, f"*{suffix}")), - key=alpha_numeric, - ) - try: - return { - Path(fn).name: cv2.imread(fn, cv2.IMREAD_GRAYSCALE).astype(np.uint8) - for fn in files - if fn.lower().endswith(suffix) - } - except Exception as e: - logging.error(f"Error loading images and poses from {files}: {e}") - return {} - - # Initialize an empty dict to store images - images = dict() - - # directories we care about here - parent_path = os.path.join(path, "parent") - child_path = os.path.join(path, "child") - poses = os.path.join(path, "poses") - - # load images from both directories - images["parent"] = load_images_from_dir(Path(parent_path), ".png") - images["child"] = load_images_from_dir(Path(child_path), ".png") - images["poses"] = load_images_from_dir(Path(poses), ".npy") - - return images - - -# TODO -def load_calibration_parameters(input_path: Path) -> CalibrationResults: - """ - Load calibration parameters from a YAML file. - - Args: - input_path (Path): The path to the YAML file containing calibration parameters. - Returns: - CalibrationResults: The loaded calibration parameters. - Throws: - FileNotFoundError: If the specified file does not exist. 
- KeyError: If required keys are missing in the YAML file. - """ - with open(input_path, "r") as file: - calib_data = yaml.safe_load(file) - - parent_camera = np.array(calib_data["default"]["intrinsic"][0]["camera_matrix"]).reshape((3, 3)) - parent_dist_coeffs = np.array(calib_data["default"]["intrinsic"][0]["dist_coeffs"]).reshape((-1, 1)) - parent_image_dim = np.array(calib_data["default"]["intrinsic"][0]["image_dim"]) - child_camera = np.array(calib_data["default"]["intrinsic"][1]["camera_matrix"]).reshape((3, 3)) - child_dist_coeffs = np.array(calib_data["default"]["intrinsic"][1]["dist_coeffs"]).reshape((-1, 1)) - child_image_dim = np.array(calib_data["default"]["intrinsic"][1]["image_dim"]) - R = np.array(calib_data["default"]["extrinsic"][0][1]["R"]).reshape((3, 3)) - T = np.array(calib_data["default"]["extrinsic"][0][1]["T"]).reshape((-1, 3)) - - # saving out reproj err not supported, currently. - # does not save out reproj err. - # So we set it to 0 here. - calib_results: CalibrationResults = { - "camera_matrix_origin": parent_camera, - "dist_coeffs_origin": parent_dist_coeffs, - "image_dim_origin": parent_image_dim, - "camera_matrix_reference": child_camera, - "dist_coeffs_reference": child_dist_coeffs, - "image_dim_reference": child_image_dim, - "R": R, - "T": T, - "R_handeye": np.eye(3), - "T_handeye": np.zeros((3, 1)), - "average_reprojection_error": 0, - } - - return calib_results - - def load_dataset_from_path(path: str) -> Tuple[np.ndarray, Optional[np.ndarray]]: """ Load image dataset from path in a way that's compatible with multistereo_calibration_charuco. @@ -246,28 +81,6 @@ def alpha_numeric(x): return images, poses -def create_calibration_save_folders(path: Path) -> None: - """ - Create a folder hierarchy to record a calibration - - Args: - path (Path): The parent path - - Raises: - ValueError: Not possible to create the folders, or no path specified - """ - if path is None: - raise ValueError("No path to save to. 
you can do better than this.") - else: - for folder in directories: - cam_path = os.path.join(path, folder) - - logger.info(f"Creating image folder at {cam_path}") - os.makedirs(cam_path, exist_ok=True) - os.makedirs(os.path.join(path, "poses"), exist_ok=True) - logger.info("Done creating folders.") - - def save_calibration_parameters( data: Dict, output_path: str, @@ -525,18 +338,6 @@ def get_multiple_perspective_camera_calibration_dataset( return (np.array(calibration_images, dtype=object), poses) -def ros_image_to_image(ros_image: RosImage, cv_bridge: CvBridge | None = None, ros_encoding: str = "rgb8") -> Image: - """ - Converts from ros image to our generic image datatype - """ - if cv_bridge is None: - cv_bridge = CvBridge() - - img = cv_bridge.imgmsg_to_cv2(ros_image, desired_encoding=ros_encoding) - - return Image.from_numpy(img) - - def calibration_helper( images: Union[List[np.ndarray], np.ndarray], args: argparse.Namespace, @@ -588,49 +389,3 @@ def calibration_helper( unsafe=args.unsafe_tag_save, ) return calibration_dict - - -def setup_calibration_param( - args: argparse.Namespace, -) -> Tuple[argparse.Namespace, cv2.aruco_Dictionary, cv2.aruco_CharucoBoard]: - """Set up calibration parameters from command line arguments. - - Args: - parser (argparse.ArgumentParser): The argument parser to set up from command line. - - Raises: - ValueError: If the provided ArUco dictionary is invalid. - - Returns: - Tuple[argparse.Namespace, cv2.aruco_Dictionary, cv2.aruco_CharucoBoard]: - The parsed arguments, ArUco dictionary, and Charuco board. 
- """ - if hasattr(cv2.aruco, args.dict_size): - aruco_dict = cv2.aruco.getPredefinedDictionary(getattr(cv2.aruco, args.dict_size)) - else: - raise ValueError(f"Invalid ArUco dictionary: {args.dict_size}") - charuco = create_charuco_board( - num_checkers_width=args.num_checkers_width, - num_checkers_height=args.num_checkers_height, - checker_dim=args.checker_dim, - marker_dim=args.marker_dim, - aruco_dict=aruco_dict, - legacy=args.legacy_charuco_pattern, - ) - - if not args.allow_default_internal_corner_ordering: - logger.warning("Enforcing bottom up charuco ordering. Pre-computing correlation now...") - detect_charuco_corners( - create_ideal_charuco_image(charuco_board=charuco), - charuco_board=charuco, - aruco_dict=aruco_dict, - enforce_ascending_ids_from_bottom_left_corner=True, - ) - if args.show_board_pattern: - logger.warning("Checking board, you'll need to close a window in a sec (press any key)") - charuco_pose_sanity_check( - create_ideal_charuco_image(charuco_board=charuco, colorful=True), - charuco_board=charuco, - aruco_dict=aruco_dict, - ) - return args, aruco_dict, charuco diff --git a/spot_wrapper/calibration/spot_in_hand_camera_calibration.py b/spot_wrapper/calibration/spot_in_hand_camera_calibration.py index fc7a7322..b1195a34 100644 --- a/spot_wrapper/calibration/spot_in_hand_camera_calibration.py +++ b/spot_wrapper/calibration/spot_in_hand_camera_calibration.py @@ -181,13 +181,17 @@ def convert_pinhole_intrinsic_to_proto(intrinsic_matrix: np.ndarray) -> ImageSou rgb_intrinsics = cal["rgb_intrinsic"] rgb_t_depth = cal["rgb_to_depth"] - # hand_t_depth: depth camera expressed in hand frame (from hand-eye calibration) - # hand_t_planning: wr1 expressed in hand frame - # wr1_t_depth = wr1_t_hand @ hand_t_depth = inv(hand_t_planning) @ hand_t_depth - wr1_t_hand = np.linalg.inv(hand_t_planning) - planning_t_depth = wr1_t_hand @ cal["depth_to_hand"] - # rgb_t_depth maps depth->rgb; wr1_t_rgb = wr1_t_depth @ depth_t_rgb = wr1_t_depth @ inv(rgb_t_depth) - 
planning_t_rgb = planning_t_depth @ np.linalg.inv(rgb_t_depth) + # cal["depth_to_hand"] is rgb_M_hand: the hand frame expressed in RGB camera coords. + # hand_t_planning is hand_M_wr1 (hand->wr1 transform built above). + # rgb_M_wr1 = rgb_M_hand @ hand_M_wr1 + rgb_M_wr1 = cal["depth_to_hand"] @ hand_t_planning + # rgb_t_depth is depth_M_rgb (stereo extrinsic: maps RGB points → depth frame). + # depth_M_wr1 = depth_M_rgb @ rgb_M_wr1 + depth_M_wr1 = rgb_t_depth @ rgb_M_wr1 + + # API wants wr1_tform_sensor = wr1_M_sensor for each camera + planning_t_rgb = np.linalg.inv(rgb_M_wr1) # wr1_M_rgb → color.wr1_tform_sensor + planning_t_depth = np.linalg.inv(depth_M_wr1) # wr1_M_depth → depth.wr1_tform_sensor # Converting calibration data to protobuf format depth_intrinsics_proto = convert_pinhole_intrinsic_to_proto(depth_intrinsics) @@ -221,12 +225,18 @@ def convert_pinhole_intrinsic_to_proto(intrinsic_matrix: np.ndarray) -> ImageSou except Exception as e: raise ValueError(f"Failed to set calibration parameters on the robot: {e}") - # Optionally, verify by retrieving the parameters back with the following lines - # get_req = gripper_camera_param_pb2.GripperCameraGetParamRequest() - # cal = self.gripper_camera_client.get_camera_calib(get_req) - # logger.info(f"Post-Set Cal (get cam param req): \n {cal}") + # Optionally, verify by retrieving the parameters back with the following line + # self.get_calibration_from_robot() logger.info("Calibration parameters successfully sent to the robot.") + def get_calibration_from_robot(self) -> None: + """Utility function to retrieve and print the current gripper camera calibration + from the robot for verification. 
+ """ + get_req = gripper_camera_param_pb2.GripperCameraGetParamRequest() + cal = self.gripper_camera_client.get_camera_calib(get_req) + logger.info(f"Retrieved calibration from robot: \n{cal}\n") + def capture_images( self, encodings: Optional[List[int]] = None, From 9a85b3e8004485961ce22953b0fb284c11b6f6be Mon Sep 17 00:00:00 2001 From: Tiffany Cappellari Date: Thu, 5 Mar 2026 14:38:06 -0500 Subject: [PATCH 10/12] readme updated Signed-off-by: Tiffany Cappellari --- spot_wrapper/calibration/README.md | 354 ++++++++++-------- .../automatic_camera_calibration_robot.py | 132 ------- .../calibration/calibrate_spot_hand.py | 2 +- .../calibration/calibration_helpers.py | 128 ++++++- spot_wrapper/calibration/calibration_util.py | 2 +- .../docs/registration_qualitative_example.jpg | Bin 0 -> 47508 bytes .../docs/spot_eye_in_hand_setup.jpg | Bin 0 -> 39733 bytes .../spot_in_hand_camera_calibration.py | 2 +- 8 files changed, 322 insertions(+), 298 deletions(-) delete mode 100644 spot_wrapper/calibration/automatic_camera_calibration_robot.py create mode 100644 spot_wrapper/calibration/docs/registration_qualitative_example.jpg create mode 100644 spot_wrapper/calibration/docs/spot_eye_in_hand_setup.jpg diff --git a/spot_wrapper/calibration/README.md b/spot_wrapper/calibration/README.md index 418a28e1..2cdd7922 100644 --- a/spot_wrapper/calibration/README.md +++ b/spot_wrapper/calibration/README.md @@ -1,216 +1,246 @@ # Automatic Robotic Stereo Camera Calibration with Charuco Target -# Table of Contents +## Table of Contents -1. [***Overview***](#overview) -2. [***Quick Start***](#quick-start) -3. [***Check if you have a Legacy Charuco Board***](#check-if-you-have-a-legacy-charuco-board) -4. [***All Options***](#all-options) +1. [Overview](#overview) +2. [Setup](#setup) +3. 
[Quick Start](#quick-start) + - [Full Calibration (collect data + calibrate + optionally push to robot)](#full-calibration) + - [Calibrate from an Existing Dataset](#calibrate-from-an-existing-dataset) + - [Push an Existing Calibration YAML to the Robot](#push-an-existing-calibration-yaml-to-the-robot) +4. [Output Format](#output-format) +5. [Check if You Have a Legacy Charuco Board](#check-if-you-have-a-legacy-charuco-board) +6. [All Options](#all-options) -# Overview +--- -![side by side comparison](registration_qualitative_example.jpg) +## Overview -This utility streamlines automatic -camera calibration to **solve for the intrinsic and extrinsic parameters for two or more -cameras mounted in a fixed pose relative to each other on a robot** -based off of moving the robot to view a Charuco target from different poses. You can also calculate the calibration from an existing dataset of photos or make a new dataset from a pre-defined list of poses (if available). Additionally, the new calibrations can be directly written to the robot. +![side by side comparison](docs/registration_qualitative_example.jpg) -The CLI tool's saving capability allows to store multiple unique calibration runs in one configuration file -with calibration metadata, to document related runs with different setups or parameters. +This utility automates stereo camera calibration for cameras mounted in a fixed configuration on a robot (e.g. the RGB and depth cameras in Spot's hand). It: -For more info, see ```calibration_util.py```, where the functions -```get_multiple_perspective_camera_calibration_dataset``` and ```multistereo_calibration_charuco``` -do most of the heavy lifting. 
+- Moves the robot arm through a grid of viewpoints relative to a Charuco calibration target +- Captures synchronized RGB and depth image pairs at each pose +- Runs stereo intrinsic + extrinsic calibration (OpenCV Charuco pipeline) +- Runs hand-eye calibration (OpenCV DANIILIDIS method) to produce `wr1_tform_sensor` transforms for both cameras +- Optionally saves the dataset, the calibration YAML, and/or pushes the result directly to the robot's internal calibration storage via the Spot SDK -> [!NOTE] -> For a more in-depth explanation of this calibration tool, see ![this document](in_depth_calibration_doc.md). +For implementation details see `calibration_util.py` (`get_multiple_perspective_camera_calibration_dataset`, `multistereo_calibration_charuco`) and `spot_in_hand_camera_calibration.py` and `calibration_util.py`. + +--- + +## Setup + +**Physical setup:** -# Quick Start +![spot eye in hand cal](docs/spot_eye_in_hand_setup.jpg) -## Full Calibration +- Place the Charuco board roughly 0.5–0.6 m in front of the robot's hand, facing the hand camera +- The robot must be **sitting (not docked)** and no one else may hold the robot lease before running -To run through the full calibration script to calibrate your Spot's hand camera (depth to rgb, with rgb at default resolution), first set up your robot and charuco board like below +**Board defaults** (matching the standard Spot calibration board): -![spot eye in hand cal](spot_eye_in_hand_setup.jpg) +| Parameter | Default | +|---|---| +| Width (checkers) | 9 | +| Height (checkers) | 4 | +| ArUco dictionary | `DICT_4X4_50` | +| Checker size | 0.115 m | +| Marker size | 0.09 m | + +If your board differs, override these via CLI flags (see [All Options](#all-options)). + +> [!NOTE] +> If you are using a Spot default calibration board with OpenCV ≥ 4.7, add `--legacy_charuco_pattern`. See [Check if You Have a Legacy Charuco Board](#check-if-you-have-a-legacy-charuco-board). 
-You can then run the following command: +--- + +## Quick Start + +### Full Calibration + +Collect a new dataset, calibrate, and optionally push the result to the robot: > [!WARNING] -> The robot will move. Be sure the robot is sitting (not docked) in front of the charuco board and that no one else is holding the lease before beginning (you will have to release control from the tablet). +> The robot will move. Ensure it is standing in front of the board, nothing is within ~1 m of the arm, and no other client holds the lease. ```bash -python3 calibrate_spot_hand_camera_cli.py --ip -u -pw +# Collect data, calibrate, save result YAML +python3 spot_wrapper/calibration/calibrate_spot_hand.py \ + --ip -u -pw \ + --data_path --save_data \ + --result_path ``` -To save the collected images, add the ```--save_data``` and ```--data_path``` flags: +To also write the calibration directly to the robot's internal storage, add the `--save_to_robot` flag: ```bash -python3 calibrate_spot_hand_camera_cli.py --ip -u -pw \ ---data_path --save_data +python3 spot_wrapper/calibration/calibrate_spot_hand.py \ + --ip -u -pw \ + --data_path --save_data \ + --result_path --save_to_robot ``` -To also save the calculated calibrations, add the ```--result_path``` flag: +--- -```bash -python3 calibrate_spot_hand_camera_cli.py --ip -u -pw \ ---data_path --save_data --result_path -``` +### Calibrate from an Existing Dataset -To overwrite the robot's internal calibration with your newly calculated one, also add the ```--save_to_robot``` flag: +If you have already collected a dataset, you can skip robot movement entirely with `--from_data`. 
Robot credentials (IP, username, password) are only needed here if you also pass `--save_to_robot`: ```bash -python3 calibrate_spot_hand_camera_cli.py --ip -u -pw \ ---data_path --save_data --result_path --save_to_robot +# Calibrate only — no robot needed +python3 spot_wrapper/calibration/calibrate_spot_hand.py \ + --from_data --data_path \ + --result_path --legacy_charuco_pattern + +# Calibrate and push to robot +python3 spot_wrapper/calibration/calibrate_spot_hand.py \ + --ip -u -pw \ + --from_data --data_path \ + --result_path --legacy_charuco_pattern --save_to_robot ``` -> [!NOTE] -> If you are using a legacy charuco board, add ```--legacy_charuco_pattern True```. If you're not sure, go to [Check if you have a Legacy Charuco Board](#check-if-you-have-a-legacy-charuco-board). +**Expected dataset folder structure** (produced by `--save_data`): -Other options are listed in [All Options](#all-options) below. +``` +/ + 0/ # RGB images: 1.png, 2.png, ... + 1/ # Depth images: 1.png, 2.png, ... + poses/ # Robot hand poses: 1.npy, 2.npy, ... (4×4 body_T_hand transforms) +``` -## Calibration with Existing Dataset +--- -If you already have a folder of images to calibrate on, then add the ```--from_data``` flag and remove the ```--save_data``` flag. This will *not* move the robot: +### Push an Existing Calibration YAML to the Robot + +If you have a previously computed calibration YAML and only want to upload it to the robot: ```bash -python3 calibrate_spot_hand_camera_cli.py --ip -u -pw \ ---data_path --from_data +python3 spot_wrapper/calibration/calibrate_spot_hand.py \ + --ip -u -pw \ + --from_data --from_yaml --data_path --save_to_robot ``` -You can still optionally add the ```--result_path ``` and/or ```--save_to_robot``` flags to save the calculated calibration to a yaml file and/or overwrite the robot's internally saved calibrations, respectively. 
+> [!NOTE] If you are using a legacy charuco board (Only relevant for OpenCV ≥ 4.7) you should also be adding the `--legacy_charuco_pattern`. More information and how to check is below in [Check if You Have a Legacy Charuco Board](#check-if-you-have-a-legacy-charuco-board). + +--- + +## Output Format + +Calibration results are saved as a YAML file with one or more named **tags** (default tag: `default`). This allows multiple calibration runs to coexist in a single file. Password and username are never written to the file. + +```yaml +default: + intrinsic: + 0: # RGB camera (origin / parent) + camera_matrix: [fx, 0, cx, 0, fy, cy, 0, 0, 1] + dist_coeffs: [k1, k2, p1, p2, k3] + image_dim: [width, height] + 1: # Depth camera (reference / child) + camera_matrix: [...] + dist_coeffs: [...] + image_dim: [...] + extrinsic: + 0: # Keyed by origin camera index + 1: # Stereo extrinsic: depth expressed in RGB frame + R: [...] # 3×3 rotation, row-major + T: [...] # 3-element translation (metres) + planning_frame: # Hand-eye result: hand frame expressed in RGB frame + R: [...] + T: [...] + run_params: # All CLI flags used (for reproducibility) + num_images: 135 + timestamp: '2026-03-04 11:19:12' + ... + camera_t_body: [...] # 4×4 homogeneous transform, row-major (16 values) + # = RGB camera frame expressed in body/hand frame +``` -## Overwrite Robot Calibration with Existing Yaml +The `camera_t_body` (or `camera_t_hand`) 4×4 matrix at the top level is the hand-eye calibration result in homogeneous form, provided as a convenience for downstream consumers. 
-If you have previously run the calibration script and saved the results, you can send those directly to the robot to be saved on its internal hardware without needing to run the entire calibration script by including the ```--from_yaml``` and ```--data_path``` flags and ensuring the `````` goes to the correct calibration yaml file: +--- -```bash -python3 calibrate_spot_hand_camera_cli.py --ip -u -pw \ ---from_yaml --data_path --save_to_robot -``` +## Check if You Have a Legacy Charuco Board -## All Options - -Below are all the various flags available to you to further fine-tune your calibration. You can also see them all in your terminal by running: +Only relevant for OpenCV ≥ 4.7 — You can check your OpenCV version with: ```bash -python3 calibrate_spot_hand_camera_cli.py -h +python3 -c "import cv2; print(cv2.__version__)" ``` +Run the calibration script with `--show_board_pattern` to display the virtual board and compare it visually to your physical board: + ```bash -options: - -h, --help show this help message and exit - --stereo_pairs STEREO_PAIRS, -sp STEREO_PAIRS - Capture images returns a list of images. Stereo pairs correspond towhat - index in the list of images' corresponding camerato calibrate to what - camera. Say capture_images returns [rgb_img, depth_img]and you want to - register depth to rgb, then the desired stereo pairis "[(1,0)]". If you - want to register more than one pair, you can do it like "[(1, 0), - (2,0)]."Make sure to put the stereo pairs in quotes so bash doesn't - complain - --legacy_charuco_pattern LEGACY_CHARUCO_PATTERN, -l LEGACY_CHARUCO_PATTERN - Whether to use the legacy charuco pattern. For Spot Default board, this - should be True.If you aren't sure if your board is legacy, try supplying - the --check_board_patternarg to verify that the cv2 board matches your - board. - --check_board_pattern - Whether to visually verify the board pattern (to check legacy and internal - corner ordering. 
- --allow_default_internal_corner_ordering - Whether to allow default internal corner ordering. For new versions of - OpenCV, it is recommended to NOT set this parameter to make sure that - corners are ordered in a known pattern. Try supplying the - --check_board_pattern flag to check whether you should enable this flag - When checking, Z-Axis should point out of board. - --photo_utilization_ratio PHOTO_UTILIZATION_RATIO, -pur PHOTO_UTILIZATION_RATIO - Photos that are collected/loaded vs. used for calibration are in a 1 - tophoto utilization ratio. For getting a rough guess on cheaper - hardwarewithout losing collection fidelity. For example, set to 2 to only - use half the photos. - --num_checkers_width NUM_CHECKERS_WIDTH, -ncw NUM_CHECKERS_WIDTH - How many checkers wide is your board - --num_checkers_height NUM_CHECKERS_HEIGHT, -nch NUM_CHECKERS_HEIGHT - How many checkers tall is your board - --dict_size {DICT_4X4_50,DICT_4X4_100,DICT_4X4_250,DICT_4X4_1000,DICT_5X5_50,DICT_5X5_100,DICT_5X5_250,DICT_5X5_1000,DICT_6X6_50,DICT_6X6_100,DICT_6X6_250,DICT_6X6_1000,DICT_7X7_50,DICT_7X7_100,DICT_7X7_250,DICT_7X7_1000,DICT_ARUCO_ORIGINAL} - Choose the ArUco dictionary size. - --checker_dim CHECKER_DIM, -cd CHECKER_DIM - Checker size in meters - --marker_dim MARKER_DIM, -md MARKER_DIM - Aruco Marker size in meters - --data_path DATA_PATH, -dp DATA_PATH - The path in which to save images - --result_path RESULT_PATH, -rp RESULT_PATH - Where to store calibration result as yaml file - --tag TAG, -t TAG What heading to put for the calibration in the config file.If this is your - first time running, the tag should be set to default for the sake of - interoperability with other functionality.If this is a shared config file - with other people, perhaps puta unique identifier, or default, if you'd - like to overridefor everyone. - --unsafe_tag_save If set, skips safety checks for tagging calibration. 
- --dist_from_board_viewpoint_range DIST_FROM_BOARD_VIEWPOINT_RANGE [DIST_FROM_BOARD_VIEWPOINT_RANGE ...], -dfbvr DIST_FROM_BOARD_VIEWPOINT_RANGE [DIST_FROM_BOARD_VIEWPOINT_RANGE ...] - What distances to conduct calibrations at relative to the board. (along the - normal vector) Three value array arg defines the [Start, Stop), step. for - the viewpoint sweep. These are used to sample viewpoints for automatic - collection. - --degrees, -d Use degrees for rotation ranges (default) - --radians, -r Use radians for rotation ranges - --x_axis_rot_viewpoint_range X_AXIS_ROT_VIEWPOINT_RANGE [X_AXIS_ROT_VIEWPOINT_RANGE ...], -xarvr X_AXIS_ROT_VIEWPOINT_RANGE [X_AXIS_ROT_VIEWPOINT_RANGE ...] - What range of viewpoints around x-axis to sample relative to boards normal - vector. Three value array arg defines the [Start, Stop), step. for the - viewpoint sweep These are used to sample viewpoints for automatic - collection. Assuming that the camera pose is in opencv/ROS format. - --y_axis_rot_viewpoint_range Y_AXIS_ROT_VIEWPOINT_RANGE [Y_AXIS_ROT_VIEWPOINT_RANGE ...], -yarvr Y_AXIS_ROT_VIEWPOINT_RANGE [Y_AXIS_ROT_VIEWPOINT_RANGE ...] - What range of viewpoints around y-axis to sample relative to boards normal - vector. Three value array arg defines the [Start, Stop), step. for the - viewpoint sweep These are used to sample viewpoints for automatic - collection. Assuming that the camera pose is in opencv/ROS format. - --z_axis_rot_viewpoint_range Z_AXIS_ROT_VIEWPOINT_RANGE [Z_AXIS_ROT_VIEWPOINT_RANGE ...], -zarvr Z_AXIS_ROT_VIEWPOINT_RANGE [Z_AXIS_ROT_VIEWPOINT_RANGE ...] - What range of viewpoints around z-axis to sample relative to boards normal - vector. Three value array arg defines the [Start, Stop), step. for the - viewpoint sweep These are used to sample viewpoints for automatic - collection. Assuming that the camera pose is in opencv/ROS format. 
- --max_num_images MAX_NUM_IMAGES - The maximum number of images - --settle_time SETTLE_TIME, -st SETTLE_TIME - How long to wait after movement to take a picture; don't want motion blur - --save_data, -sd whether to save the images to file - --from_data, -fd Whether to only calibrate from recorded dataset on file. - --save_to_robot, -send - Whether to save the calibration to the robot. - --from_yaml, -yaml Whether the data is from a yaml file. Use this and the '--from_data' and ' - --send' args to send a previously saved calibration yaml to the robot - --ip IP, -i IP, -ip IP - The IP address of the Robot to calibrate - --user USERNAME, -u USERNAME, --username USERNAME - Robot Username - --pass PASSWORD, -pw PASSWORD, --password PASSWORD - Robot Password - --spot_rgb_photo_width SPOT_RGB_PHOTO_WIDTH, -dpw SPOT_RGB_PHOTO_WIDTH - What resolution use for Spot's RGB Hand Camera (width). Currently, only 640 - and 1920 are supported - --spot_rgb_photo_height SPOT_RGB_PHOTO_HEIGHT, -dph SPOT_RGB_PHOTO_HEIGHT - What resolution use for Spot's RGB Hand Camera (width). Currently, only 480 - and 1080 are supported +python3 spot_wrapper/calibration/calibrate_spot_hand.py \ + --show_board_pattern --legacy_charuco_pattern ``` -# Check if you have a Legacy Charuco Board +Press any key to dismiss the window. The rendered board should match your physical board exactly: +- ArUco tags must match between virtual and physical boards +- The displayed axis should have **Y (green) pointing up**, **X (red) pointing right**, and **Z pointing out of the board** +- If the match looks correct with `--legacy_charuco_pattern`, your board is legacy; if it only matches without it, do not pass the flag + +The Spot default calibration board has an ArUco marker in the **top-left corner** — this is a legacy board. 
-You only need to do this if using an opencv version after ```4.7```( -check with```python3 -c "import cv2; print(cv2.__version__)"```) +--- -Through using the CLI tool (```python3 calibrate_multistereo_cameras_with_charuco_cli.py -h```), you can check if you have a legacy board through visually comparing the generated drawn virtual board to your physical charuco board target. Some legacy boards have an aruco tag in the top -left corner, whether as some non-legacy boards have a checker in the top left corner. -Also, check to see that the aruco tags match between virtual and physical boards. -It is important that the virtual board matches the physical board, otherwise this calibration -will not work. +## All Options + +Print all calibration script options: ``` -python3 calibrate_multistereo_cameras_with_charuco_cli.py --check_board_pattern --legacy_charuco_pattern t +python3 spot_wrapper/calibration/calibrate_spot_hand.py -h ``` -There should be an axis at the center of the board, where the Y axis (green) -points upwards, the X axis (red) points to the right, and the figure should be labelled -as Z-axis out of board. If it isn't then try without legacy (```--legacy_charuco_pattern f```). - -If you are using the default Spot Calibation board, and there is an aruco marker -in the top left corner, then it legacy (so supply true argument to legacy.) 
+### Calibration target + +| Flag | Short | Default | Description | +|---|---|---|---| +| `--legacy_charuco_pattern` | `-l` | off | Use legacy Charuco pattern (required for Spot default board on OpenCV ≥ 4.7) | +| `--show_board_pattern` | | off | Display the virtual board for visual verification before running | +| `--allow_default_internal_corner_ordering` | | off | Skip bottom-left-to-top-right corner ID enforcement (not recommended) | +| `--num_checkers_width` | `-ncw` | `9` | Number of checkers along the board width | +| `--num_checkers_height` | `-nch` | `4` | Number of checkers along the board height | +| `--dict_size` | | `DICT_4X4_50` | ArUco dictionary (see choices in `calibration_clis.py`) | +| `--checker_dim_meters` | `-cd` | `0.115` | Physical checker square size in metres | +| `--marker_dim_meters` | `-md` | `0.09` | Physical ArUco marker size in metres | + +### Data collection / viewpoint sweep + +| Flag | Short | Default | Description | +|---|---|---|---| +| `--dist_from_board_viewpoint_range` | `-dfbvr` | `0.5 0.6 0.1` | `[start, stop, step]` distances along board normal (metres) | +| `--dist_along_board_width` | `-dabw` | `-0.2 0.3 0.1` | `[start, stop, step]` lateral offsets along board width (metres) | +| `--x_axis_rot_viewpoint_range` | `-xarvr` | `-10 11 10` | `[start, stop, step]` rotations around X axis | +| `--y_axis_rot_viewpoint_range` | `-yarvr` | `-10 11 10` | `[start, stop, step]` rotations around Y axis | +| `--z_axis_rot_viewpoint_range` | `-zarvr` | `-10 11 10` | `[start, stop, step]` rotations around Z axis | +| `--degrees` | `-d` | on | Interpret rotation ranges in degrees (default) | +| `--radians` | `-r` | off | Interpret rotation ranges in radians | +| `--max_num_images` | | `10000` | Cap on total viewpoints visited | +| `--settle_time` | `-st` | `1.0` | Seconds to wait after each move before capturing | +| `--stereo_pairs` | `-sp` | `0,1` | Camera index pairs to calibrate (e.g. 
`0,1`) | +| `--photo_utilization_ratio` | `-pur` | `1` | Use every Nth collected image for calibration (e.g. `2` = use half) | + +### I/O and workflow + +| Flag | Short | Default | Description | +|---|---|---|---| +| `--data_path` | `-dp` | `None` | Directory for dataset images/poses, or YAML path when `--from_yaml` | +| `--save_data` | `-sd` | off | Save captured images and poses to `--data_path` | +| `--from_data` | `-fd` | off | Skip data collection; calibrate from existing dataset at `--data_path` | +| `--from_yaml` | `-yaml` | off | Used with `--from_data` and `--save_to_robot` to push a saved YAML to the robot | +| `--result_path` | `-rp` | `calibration_result.yaml` | Where to write the calibration YAML | +| `--tag` | `-t` | `default` | Tag name for this run within the YAML file | +| `--unsafe_tag_save` | | off | Skip overwrite confirmation prompts | + +### Robot connection (required when connecting to the robot) + +| Flag | Short | Description | +|---|---|---| +| `--ip` | `-i` / `-ip` | Robot IP address | +| `--user` | `-u` | Robot username | +| `--pass` | `-pw` | Robot password | +| `--spot_rgb_photo_width` | `-dpw` | RGB image width: `640` (default) or `1920` | +| `--spot_rgb_photo_height` | `-dph` | RGB image height: `480` (default) or `1080` | +| `--save_to_robot` | `-save` | Push the computed calibration to the robot's internal storage via Spot SDK | diff --git a/spot_wrapper/calibration/automatic_camera_calibration_robot.py b/spot_wrapper/calibration/automatic_camera_calibration_robot.py deleted file mode 100644 index 2900993a..00000000 --- a/spot_wrapper/calibration/automatic_camera_calibration_robot.py +++ /dev/null @@ -1,132 +0,0 @@ -# Copyright (c) 2025-2026 Robotics and AI Institute LLC dba RAI Institute. All rights reserved. 
- -# What's below can be used in standalone tool -from abc import ABC, abstractmethod -from typing import List, Optional, Tuple, Union - -import numpy as np - - -class AutomaticCameraCalibrationRobot(ABC): - @abstractmethod - def capture_images(self) -> Union[List, np.ndarray]: - """ - Capture images from the cameras you wish to calibrate at the same moment in time - (time-synchronized). Then, construct either a list or a NumPy array - of these images. This will be called every time that your robot reaches a new - calibration viewpoint, so ensure that image order to camera correlation is consistent - across all calls. Also, it is important to ensure that the photos are as time-synchronized - as possible, with as little motion blur as possible. - - Returns: - Union[List, np.ndarray]: The time-synchronized images from all relevant cameras. - """ - pass - - @abstractmethod - def localize_target_to_principal_camera(self, images: Union[List, np.ndarray]) -> Tuple[np.ndarray, np.ndarray]: - """ - Localize the charuco board relative to what is considered - the principal camera for the calibration. This could be done - either visually, or via kinematics and fixing the board relative - to the robot. Visually is more flexible, although this - implies some knowledge of the principal cameras intrinsics. - - In this case, principal camera could be an actual camera, or could also be a "virtual" - camera. The principal camera's frame is just what frame is used to sample viewpoints in. - - The board pose's translation should be at the center of the board, with the orientation - in OpenCV format, where the +Z points out of the board with - the other axis being parallel to the sides of the board. 
- - Your camera pose should be in OpenCV/ROS convention, where - # +x should point to the right in the image - # +y should point down in the image - # +z should point into to plane of the image - - If you'd like to do this through visual localization, you can use - - def est_camera_t_charuco_board_center( - img: np.ndarray, - charuco_board: cv2.aruco_CharucoBoard, - aruco_dict: cv2.aruco_Dictionary, - camera_matrix: np.ndarray, - dist_coeffs: np.ndarray, - ) - from calibration_util.py - - Args: - images (Union[List, np.ndarray]): the images that could be used to determine - how far the target is from the "principal" camera - - Returns: - Tuple[np.ndarray, np.ndarray]: _description_ - """ - pass - - @abstractmethod - def move_cameras_to_see_calibration_target(self) -> np.ndarray: - """ - This is a robot specific sequence to start the calibration. This - could be as simple as a heuristic to move the robot to a known pose where - it is generally looking at the checkerboard, or this could be a more sophisticated - sequence to look around in the scene until a board is found - (for example, grabbing images with self.capture_images(), - and checking if a board is found with detect_charuco_corners from calibration_util.py) - - Returns: - np.ndarray: the 4x4 homogenous transform of the starting pose where the calibration - board is visible. - """ - pass - - @abstractmethod - def offset_cameras_from_current_view( - self, - transform_offset: np.ndarray, - origin_t_planning_frame: Optional[np.ndarray] = None, - duration_sec: float = 1.0, - ) -> Tuple[np.ndarray, np.ndarray]: - """ - Move the robot to a desired position, such that the cameras move by transform_offset. - This is how the robot visits desired viewpoints. - - IMPORTANT: You likely have to convert transform_offset from the "principal" - camera frame into the planning frame (if the camera frame isn't the planning frame). 
- This can be done by calling - - convert_camera_t_viewpoint_to_origin_t_planning_frame( - origin_t_planning_frame=origin_t_planning_frame, - planning_frame_t_opencv_camera=HOMOGENOUS_TRANSFORM_FROM_PLANNING_FRAME_TO_OPENCV_CAM, - opencv_camera_t_viewpoint=transform_offset, - ) - from calibration_util.py - - Your camera pose should be in OpenCV/ROS convention, where - +x should point to the right in the image - +y should point down in the image - +z should point into to plane of the image - - Args: - transform_offset (np.ndarray): the 4x4 homogenous transform of - how far you'd like to shift your cameras - origin_t_planning_frame (Optional[np.ndarray], optional): the 4x4 homogenous - transform of your robot origin to planning frame. Could be hard-coded - or read off of the robot. Defaults to None. - duration_sec (float, optional): How many seconds provided to execute - the move. Defaults to 1.0. - - Returns: - Tuple[np.ndarray, np.ndarray]: the 4x4 homogenous transform of origin_t_planning_frame - before the move, and the 4x4 homogenous transform of origin_t_planning_frame - after the move. - """ - pass - - @abstractmethod - def shutdown(self) -> None: - """ - Cleanup the robot connection, and disconnect so that other programs can resume - control of the robot while this program continues to solve the calibration parameters. 
- """ - pass diff --git a/spot_wrapper/calibration/calibrate_spot_hand.py b/spot_wrapper/calibration/calibrate_spot_hand.py index 330a9fb3..6d1e8ff3 100644 --- a/spot_wrapper/calibration/calibrate_spot_hand.py +++ b/spot_wrapper/calibration/calibrate_spot_hand.py @@ -8,12 +8,12 @@ import numpy as np import yaml -from spot_wrapper.calibration.automatic_camera_calibration_robot import AutomaticCameraCalibrationRobot from spot_wrapper.calibration.calibration_clis import ( calibrate_robot_cli, setup_calibration_param, spot_cli, ) +from spot_wrapper.calibration.calibration_helpers import AutomaticCameraCalibrationRobot from spot_wrapper.calibration.calibration_util import ( calibration_helper, get_multiple_perspective_camera_calibration_dataset, diff --git a/spot_wrapper/calibration/calibration_helpers.py b/spot_wrapper/calibration/calibration_helpers.py index 73c14ef5..8c27b426 100644 --- a/spot_wrapper/calibration/calibration_helpers.py +++ b/spot_wrapper/calibration/calibration_helpers.py @@ -2,7 +2,8 @@ from __future__ import annotations -from typing import Optional, TypedDict +from abc import ABC, abstractmethod +from typing import List, Optional, Tuple, TypedDict, Union import numpy as np @@ -26,3 +27,128 @@ class CalibrationResults(TypedDict): R_handeye: Optional[np.ndarray] T_handeye: Optional[np.ndarray] average_reprojection_error: float + + +class AutomaticCameraCalibrationRobot(ABC): + @abstractmethod + def capture_images(self) -> Union[List, np.ndarray]: + """ + Capture images from the cameras you wish to calibrate at the same moment in time + (time-synchronized). Then, construct either a list or a NumPy array + of these images. This will be called every time that your robot reaches a new + calibration viewpoint, so ensure that image order to camera correlation is consistent + across all calls. Also, it is important to ensure that the photos are as time-synchronized + as possible, with as little motion blur as possible. 
+ + Returns: + Union[List, np.ndarray]: The time-synchronized images from all relevant cameras. + """ + pass + + @abstractmethod + def localize_target_to_principal_camera(self, images: Union[List, np.ndarray]) -> Tuple[np.ndarray, np.ndarray]: + """ + Localize the charuco board relative to what is considered + the principal camera for the calibration. This could be done + either visually, or via kinematics and fixing the board relative + to the robot. Visually is more flexible, although this + implies some knowledge of the principal cameras intrinsics. + + In this case, principal camera could be an actual camera, or could also be a "virtual" + camera. The principal camera's frame is just what frame is used to sample viewpoints in. + + The board pose's translation should be at the center of the board, with the orientation + in OpenCV format, where the +Z points out of the board with + the other axis being parallel to the sides of the board. + + Your camera pose should be in OpenCV/ROS convention, where + # +x should point to the right in the image + # +y should point down in the image + # +z should point into to plane of the image + + If you'd like to do this through visual localization, you can use + + def est_camera_t_charuco_board_center( + img: np.ndarray, + charuco_board: cv2.aruco_CharucoBoard, + aruco_dict: cv2.aruco_Dictionary, + camera_matrix: np.ndarray, + dist_coeffs: np.ndarray, + ) + from calibration_util.py + + Args: + images (Union[List, np.ndarray]): the images that could be used to determine + how far the target is from the "principal" camera + + Returns: + Tuple[np.ndarray, np.ndarray]: _description_ + """ + pass + + @abstractmethod + def move_cameras_to_see_calibration_target(self) -> np.ndarray: + """ + This is a robot specific sequence to start the calibration. 
This + could be as simple as a heuristic to move the robot to a known pose where + it is generally looking at the checkerboard, or this could be a more sophisticated + sequence to look around in the scene until a board is found + (for example, grabbing images with self.capture_images(), + and checking if a board is found with detect_charuco_corners from calibration_util.py) + + Returns: + np.ndarray: the 4x4 homogenous transform of the starting pose where the calibration + board is visible. + """ + pass + + @abstractmethod + def offset_cameras_from_current_view( + self, + transform_offset: np.ndarray, + origin_t_planning_frame: Optional[np.ndarray] = None, + duration_sec: float = 1.0, + ) -> Tuple[np.ndarray, np.ndarray]: + """ + Move the robot to a desired position, such that the cameras move by transform_offset. + This is how the robot visits desired viewpoints. + + IMPORTANT: You likely have to convert transform_offset from the "principal" + camera frame into the planning frame (if the camera frame isn't the planning frame). + This can be done by calling + + convert_camera_t_viewpoint_to_origin_t_planning_frame( + origin_t_planning_frame=origin_t_planning_frame, + planning_frame_t_opencv_camera=HOMOGENOUS_TRANSFORM_FROM_PLANNING_FRAME_TO_OPENCV_CAM, + opencv_camera_t_viewpoint=transform_offset, + ) + from calibration_util.py + + Your camera pose should be in OpenCV/ROS convention, where + +x should point to the right in the image + +y should point down in the image + +z should point into to plane of the image + + Args: + transform_offset (np.ndarray): the 4x4 homogenous transform of + how far you'd like to shift your cameras + origin_t_planning_frame (Optional[np.ndarray], optional): the 4x4 homogenous + transform of your robot origin to planning frame. Could be hard-coded + or read off of the robot. Defaults to None. + duration_sec (float, optional): How many seconds provided to execute + the move. Defaults to 1.0. 
+ + Returns: + Tuple[np.ndarray, np.ndarray]: the 4x4 homogenous transform of origin_t_planning_frame + before the move, and the 4x4 homogenous transform of origin_t_planning_frame + after the move. + """ + pass + + @abstractmethod + def shutdown(self) -> None: + """ + Cleanup the robot connection, and disconnect so that other programs can resume + control of the robot while this program continues to solve the calibration parameters. + """ + pass diff --git a/spot_wrapper/calibration/calibration_util.py b/spot_wrapper/calibration/calibration_util.py index 802fa657..9a6b269d 100644 --- a/spot_wrapper/calibration/calibration_util.py +++ b/spot_wrapper/calibration/calibration_util.py @@ -13,7 +13,7 @@ import numpy as np import yaml -from spot_wrapper.calibration.automatic_camera_calibration_robot import ( +from spot_wrapper.calibration.calibration_helpers import ( AutomaticCameraCalibrationRobot, ) from spot_wrapper.calibration.charuco_board_detection import ( diff --git a/spot_wrapper/calibration/docs/registration_qualitative_example.jpg b/spot_wrapper/calibration/docs/registration_qualitative_example.jpg new file mode 100644 index 0000000000000000000000000000000000000000..d091dc88b56b3c4189613d7abc22837c1da83da5 GIT binary patch literal 47508 zcmb5VWmKD8^Di7+ij^Y8p+K<$MbaWgiYIu`5~QV22<~o0Tbw|kc+n8tT?#E;+}$NO z#Y&K#-sk_Ev+n0TpWgYf*EQE(*Pgld+G}RdZ)X0?{#gZ3fR#W>04yxP|InWWfII*P z`ycve#Q96O__+TNAwE7HJ`o`?F%cmV5itol3GoB62Sh}q4@t?$DJULN5R*_oqNI5A z_nG3KMX>()5(k&??}ro*h#vfX`~Q>vd;>fr#QK4Cj)V0Cfc+2)=ONag9{^^+Urhq@cg>#=bt_mqeasD1(zUSk2uVWJ z)c#$b@vmSkynjmv;QkeWM}SZGclPx|05%p5HZBf64jw)(!9P3tkMxIlZ1|KyuODeA z{3H8gj&SD~HH}==8Uecq1b!l4J$}j|3e+)o$^A1AAi=@<`?EL?0Wts#(kNR;q98{? 
zs=&fFHgAu{E?KO+`g;u%td}x{$xt5l9R_LA#%aj3M{LKT@hfeZKnKUdWMAsckQ!E6 zvz&)jvc~;GqEnp(jr;1Hyo>o9JbwAH;_E@^wx^fTyDP@|#(8cvMlSj8>i<%|{ zuGs5tuLiBX2iy6(fm(+&Q7g4QrBlQ&u4>0C=HGa{Nhr7c^<^(|L+B02`dL`tDV8jZ zfYmBpBMsWT_(MAB`_zl}0(PkUUd+;PE-2ynK^*7Q!q#sHdnur^A#oL7&Z$cuTZ5<{ zXX(m!sJ_<@;ngzqfr{{1xf%*W#Q+P}f)_N_@nP`*C_#sY41zl68eb=$p>2WJO*7&z z^nnlfpAUu!K(kDHvdQdc<>KD8c!>9iSEOSkMaImCbn)(R z$@$iO4Qz<;4?tT6^$R-ok}Jt!f++<^)^FJMWQp_Q=qiPQ^w_>2Br!BdH)%%wlB@A| zrrv&A*6?zDNa3M@)T^P`sS=hiP071^4JaF>ey~m%w#I5$<_-8IR)VaDzJF<3*2*5s zrS6Ju8JA9aa2e=x-RL+HCxl9c`$Tp1bS>;?eQoze$Zq)@8CaN zE#W1j)Fmt@K$c}%w}Ym2*sV?T7gDF%L}s*(JxZcz=R!@Q_?~Y_J$?5K1XuC{D#Ged zDnpO;od`yG+Q&0#8sq{tUK&>}8jjhWF?ws85t1zF)nk903t2A#yidBXpGUu2+YZ}g z^q;>@nDob+_N`E_^GLZbvYktSjx%v8Hm{I9gRfgCdp~tC=ZHc1x8?olAyxtDiHd9eQ% zI1cl)7VZBQJt_V$uc?Gh0%a5mQ@WewxiyIdw~Z-QdWZpbJg~iRgPl#u_oXxT1Ty!N zR7}Lir9i>U5tH;9q`Br6W=Fx6F}#+-EB2EWi|KD2AG# z1kx=wX^*rTYC5jcXhNgy{CpEO_4$M3GVia6s$n8`7?))FDVeGl&TbbAINYg}d4*Ji zh6sn4?&5JpW+r>Bd)7DFlu|152AR&fWdA=*(g|x5I`YS ze`V%ly`0~Oep>O!!sHny96u%Fhp!s4^+EKs{Mcv#TXlT!9G=F`cV$Z+uE-I$GgR}- zVjpiKq=E^xQ+=*mi%XW!qSfsakpauiLnlr{)lU%DXy%P4tyTf(Y(ooom$Ra>`Ip+c z{3GS`#F)+SH0bMnV_(Qap^Me%>If{=)INfcwDD%4vEh6}V12|5OFsRT`rd3NsD0@I zKR;#trAHC#_Vk5gxem0A2WE~x z!HEtHrP=iI+Fh!{oUBesVMvaRV}P*j&QiqJ`5N$zu-jbJh+-XZMcW37$(m73b1>j} z&R{ub{v@*j$dkv`Rw5@44S_8pN-6Q5-%0M88I-}BiBNT#>h5WEQsKQN=d+Fd^25A? 
zlFN&Fh&e-dkb?x}dJ^qvo&y#ikp=+c&=9QTQE_KHT|th*$OUSZOYz4^O`z;wG|r?t z{j|c?CcUCr&>$f8;N!D@Z>`Nvz-ln8k$!G4?<1q;{!^XbZl>ASpby=A=;>kE=|L<= z`(237D1jecK_~J+PF5{B-M%^k4p#y&&CH1)iznL0jCu`WS<$v8czFU6PloW9mkaEF z@I&VOyD$(&_ilEEh%S|WV#CL#D$Jv&_#z?K`l-b}-4l1zZp{lolS7Xji>g4wBg!p# zZ;Fj?r@GS@poKNZW%w)B+@1Fgxb!C#dd<*7<$Rx(;)+axN;8LMPo%+Bb4&rdsIieC zygXJxq2%px{I^Hb6q1fAy$O|jFRMrNm4Z6l4$DM}E-2G?Up-YF@SmYiEe=EXkK_;p zP_t5(y?(VNsmy8~bak1v*Ap>CKYloI!@1<*n5^o?P`Z1+w}+!$i1&DZIX|?-Nv@p& z94?Ms)?6`dLOy2uKqA|;!_9p5rbi|Udq`kCpZf)p$mmJ^dVEk4H=+nP`fZk?m*ATl zakC2x=3<*np>Zjgovw5;EZmLJ{bmv5e3USLv^^f$sEZn+2Q&sz#WH<%o{2$}%1J+iU6VM=O)jI*T8 z5m?+hVE8x=ik0OvLxkNG^=K3taLfC6qao7^-%1VmlljZdOQ=u5c&2-CEq?l#YQvsC z5Au8a9rMM};Zu=n_WFHWXF{w=5|)oPxR~k2_9XXQflTy(RGx(8Wb9Y;Cwk#y(kSZS z_06cT!e5(Lj18$_$ao}^8s#8Qyl#GdE#vB~1$ltto_<2Yq_@;l#UU$Qg&JU>&%@YN zqTr1IOERx?55~qY(i~{Gu+no|7y3@nAMk?UMf zUXa8YVMFFM{jyPqvT@mB7bvu>?fky%?tLK)HqT{>?xWqO?&nb}dX#ob=V}uScixnP zBd`fsgI@QQda5_ePqfF+TfC8QE_gThL@nNOfCN$2FG%rdU1B3rCzj6|hgbU>fDF$; zoVoTZ2UIo)i=9AhggRtA?Gb3#Ya$0NNd<+{@+GCW)+*x?6snkBW$A138Z`kcb%nW! 
zUUA%>o;@v`eXax=8UCxdT6JP{;z>rVuTH0i>&S$2@0K2qtO^vw9Qd&t=dE}4FtPxP zE8n9EB;jV!2Lxu~ zz=jW8|JfGM8VBRq^<@za+~43fO zeYL+E%kb#z7F`7dauZvIGH>j<_7#O|0UjTo{ap>5#L4xf@c(g7U@U!4)eOm3Tt^0U zMCPJy$L`(Oa>^Cd-uh=b3XMuGjJv!35s7s@)4~XHIc^fs82{W{=`Ry?{>dVPvd;W6 zQiY2=?5u~7kDIRZ`O4a&I3cb+JDS{n<V zhOeYNW(jjr)%9%)UYtnShOQZH7qu{<)!*4_1n_^wX<@uP!$yu*oxUyH3Rbhvm=V>i zB`9jfCCPL~e!E_r{R2otkrle3!Q2|{6E<2$c+=uC$1~se!=b!qMIlLk z;>Yn~Kxc=_6Zvo0NT$GJABZXf$K95oF_c&40H}DuFP@#BBxn)1( zuxig!gF&0joj{%AexQ7;ljX1VfuO5)ZsaCd{s2;9{s5%>fC$ui_Z~F)@HHM10Ez!x zrbGw{I>_tM+|Yl?kFO9|`wTBCRKs;GhtnE^yr(XYQR)7*=o@$7Gk;4hQOh)(qGu2c zUGp8>VeT-ESgLPMpGa9$Rog#&u|74&)1u6AkL8v)8G}U;+CM|@o%@vvC*=jDiqxlp z7K5EbHX%m2arngNO)6Gw!gC;6x{a5*YcS_1SCVVlt@VkDJx3AJ1vD&PG#@rnsU%{{ zlh6naU3qB6$D8Q3FyhF*|I^<+kE8DR;e~&~bX8_w)_4$$R&lbjmK%o^jxez7Ba)Dq z(4;@9gkS{Eq-1S5bZ@!|9lZ%2!-W0-e|`|Y4|5U8y9DbkoK)yr$99|tmZ8ItJ{<*z z9#LIHYM$A0S`(4q`e9=+kz9RAw>1#%!5`69f?Y#2O$EAR<$Fdif3Dy8#pMp{Jq?iy zHYh74;_bm|dM=W+Qf9zqh*r3ooc`)BbWwpZVW3KN&vKJhDIHwgOwV;L{#`7H_FVw_+Zx#A24&At~I8A=p*6s#c z?@xtw4d>M;W~2ZtZ=&(jBAlRsm30MpWFHp^D7?>5iFw?;d?+*gH?ubyF}8V&vPM9Y zFZnV3Ph1UIuj8WXRNR*%RTkaDqDeOX0LDpT1E(vp?j$bLCTM5JO6$d^qdsa(exZ%S zUvc8W$t>czvPyatOa=EBf!d+kfKx9Op{by{#Q~iA-J)Vk-$2JsXV!#lQ3UbCqUlUS z7N;iP>d*5s)YL*c$(4KcpNHqyoUd8LXh@ zeSg09KDGuWw_@0f11#_+)su9~x(rURQDr>Q^)zRD?fGZeVpbvAo#e2uStJ_ce{_-O z`%W%0?!!^Ongibx`Fe9i(^|{`){nQ37UK|pU+NY%7q_Y$gSI)_*m$D}H+pb_0`h2x`s(`M`nT5mn7ni>~ zw`9JjDXDwAPMK1WU{rzrPEiLH{TBRM;159l@R!8aSc9>ux;qraZzD!T>p>**y1Bg2 z-Lv5GmN!tV_WUhKMbMZ*mcx@V`&r~~t`{@Tb)b_E%EsQLSzBW4qdV7;$1|!KQ7bk2 zQ3>K({Q*)vTTH!7Zy>R{a3Ze6ZskdV94kfr!pX$vyNrBN2D;(t&Rpt)_S4tbBKxJ% z$n&E$mK9c8=(9qZeZ%C4hxgSQyvxc4K4zq6lxA9P`{ZP*4vP$Oe*jNh&8S3`4S`o` z4%iyeQR81Mv}xwv6pXrKeTTX`+p6clm4F)YAJh9q}kPmm&Cc{pi`YEFD=) zYtu;vM?By-(G$K`NYmNQRny4hn z{=>L!8-Da8Lcr!m=Ho@Tlo2{Hx>(id{#&6|ZkU(=X%eSt=nmk{MrOW3y;VO|V!L-; zI`fcgMLXruURD6xc||Y=`o`9T7gIRz&QVnCFWWrqS$!RCMGhe`$D#3auEj`Lw6ff5y*4kc9_AX*iqi^}B=&(_Iw!tD}DrKA84!+qo=Y z4U|CH?=gRBS%lWkU)bjw_XccwSmtQ(?Z&&B@=y4CjhiTiukzHIKC$buk96)=&vK!C 
z2qUj2DE5q;$H+6Ns_%VBZig*2=huHgNiu@Cq;W#%wD36I$2$e+A;T^#pte}!v*!2TH4D*TpD^Cg5dLJDv-GE zr(};hIT)>`#p715mO1tmgX~cgQ=pWAB{AONEu?G?nq{ceQfgOxoQcx9N=1H)YGyTN`)DFbyJ5 zdX(F(C~=U+3LYaPrQ}peH_z~RkaMsQ+`G6!RTf-Nb@Xb}b)>+sjWa)3_a!%y0Ek%Z zDee+XcAe~5WCL&ZcQf}OX0 zID=OXFCSqq8aq(8WCU&Msnw99lwk?U+ZvJ2u{M@3*{2;vWD-h#DqK97_Sk37gr!S1 zPm@ktc+S0rK48FE4V5r~MEt0jBSD`De*G8q(7|U$pf{+_Y3pT>U%5i5EImcamW1p zgNSz32seXq*>A5OGgNDAX3)h8T>+J#6m`b_%JPP8db4d-$xzzSYd=F9ru zs`ILX0^QLlpiV(cZ-LADxcCW!ot%+IqLNM4Ga`1PF%Q8@LuzAZ~l_zpFFqO(iFVQgNzk z$bk;imP9*@Bt91Ay|h;BJ+xI-RSjNog=AW?dd#}e%pDKtN+W_3Y@ngxmrj{~0GgTa zr%3I&QInjq8F(Ci9qx8c#r$Kjy53j)m$VpZcZfSuE_;%&L6V@S9sK%Tzb%|1zo7NN z=$=1ym=e@$H&a;B&ocH2aA~7O+(#ej#dw=uVXP}{YLY;yN&-pBdHsK4IqejlPuhzq zoRgx1JbcdGMV#C!bi5oV007*YcoL;mxlXyR!Y0oek`bBqFZ4@`7pG0BsyTSPWvN`T zuBrTo&h9*mwV!=PNW!%iBXWV_58%4mohmwdZYm`1E|+i9hf4e}WCr5m0>o+9?!BzT zkMUH!5xxM z$VHZ`38Ncn;AYR$!IYnVWM}6Zo)G>eU!rNei3U zMNlGQofAfY6@7KQ;;N&@MvMfW-#bf7W+*CEcsLlHTy^izyh_Ak>>W?a5MINA3}4vv zJ$~tN_oD=2Jgy(To+;LK`)iq-UJ{~K#j3*cw#l7)3*5x#dzHTL+A&BqZ>~qZM6TFK zB&Nt~h(0g%MzS-MB>#+|zVvv@&FwklHk@q8ZxjXUz{ey{(F^G+5mQIhmr|By%O}JY zSOXu}8bWrwRrz?_B*@A3F~d#18uM|_O86)9Q-phan{ut8584!b&L{P=RT>l(3gHuq zmNo*o@TwJS4ZU6&()Z=lQRS(g!s)PWD&2gE#HN(`45p?{mW0mo0;K)-JWsx*V+EV* zhmPE;;0U7GwdsPY#ElnqK5=Y4{_hB%dmo+{G!Fhi&VuAq%zHWsWo!&? 
zy+NMu)4W>~5YmY~#QLQ~C9>jar*ANqhoreL+i!_Y&MsSn)s@d8*#zE#6NFW?h^&Zv zyto=(Ilqa`wN^J7^VdI$oYLN?|CLVueB|?nLef~@;yB`z!>aEWsTx9&&ZW2H)qzVv z%4oma&0I)Po3V3`8(ve>2gv~TqvoC9?*T!LIWcp;=*peTef&`vdsHEs31;iDukn4-dBG&kSRB@`7Q2g%PsXlHjkY1(qpd2MGc7u%^DDC z1n{*O;IAJK3jMF^JTC-mSdr#y0^Anb>Q1XX`$62XWjQ^DEf+f-LY)y$h=Y@{XyW$OJ%OSRUmWUjQ`IGWNFP57RSRO$HY8|{J+QbYp5r>ddcDkW;$S4*&nEVb ztP5uN2k@cV#Zo!3q(Ytwd8A2nK4-Yk;R#X~cC3qRuz<|9F>qlpaFwvE{&Zk!x(s()6Z62{&}Z zrmr}X>*vmn(X35M?aMHOJ;Rboe@(`vjy(B7;j7}_av&pixf~O}bi#E${g`0(ScIR_ z1$wze!HdG_bMhAZRgO~I-8$dn-Yp#hq5dKDf*+Uy{we)?OFwTn7>nSs*;xNfBeBef z#q*`^4%Sp#7#M@cbv>Q=GIx1g>bb@22lVdIi~c)Gb`y5~w~4d)MT`d14at4%0F3jZ z-~NuHp%1izb`m=I^uiABhi-RKImF}?f?&J^gmjk2biNc>Pdm#A)FI{*16ja zu9e)W9{x2@$jJAe_b%04e_S*vk4QAjFPe4+;ja4GWD#^WQju^G({F%QAuVJiq0~h&N<_nOl@J9ZEwnSeQSYf<4+t^}$itF7arVt?)PUwUTd*64u ztzml*n}a4-1xn-RqK~p0-%^Dy2r35^+()tGz^c4%jQbDx8}*bMs>LcAZ}I1)uK8YSl24a0ze!ZE)F18Ah|^tf zSI$@q^H)gXUDm()h$v8a>(`0V=M_MCZ&CHdE_{~L*sK`bN^1(gjjB@RZQy(umE(XQ z|7^`PPVV>6wD{HU?WozV0?yhL`905b6Nw1_)am-ECN7BabN_%XA581e9gFK>19O;U z4*ff}Fa3~{;t^)&q6i113vsg5gTCqOV)gfZ10?>|qW3ftEj4i+mr~8!ZmrK-nswkC zR9+_3=^K{zDk0@5uxl3XCRaoEk$!LZW+yzT9t|MKp3DNB0gaB`W?qsCxzVSq^)%!N zD3MaBJXnUGENYJ8W(00pz^TG0Aii0)Urx8Yizm@mOFhn~oD20|$IrOH;CM)R?_ise z=7a}BNdVKppkq?VlAw}Y=Pe=7FD#lR0%*X)QnHeDvBqYk?X8Zf-qE+s6JOey+s{q8 zTd_TFP;YAymtGCczLYJaJUie!nDgc}auY7w=MoPCPJhFYB$%8tW?j5cLFu|Pq-j1! 
zab8$bzd=r(-ko7YF;dT-&DFgqtbYU-#xN7fps~8WuZo%bo1ytB28S<(8wb>eK0EK0 z8+qb7&}m$7zO`bUnV8A=#_^tg#}sr~feue>w>3!L@|r($k= z3^rjR(6GS&;#@8|#l%py-{>>@IwX#lYjQ@-v>TAwDL?om&o%?(TwtSj$Q?ikDnNuV zbsw5Q0w&xAya!8bCV!)+k3UGGU}XxQ=~eSFaf=VwrXI)Z)>gOm25G|sln0tVlq>67 z^@VP?H`p|^4_PZ)dJ|eOMd;Nf^9#3(^AKt7}9AFOnF^3n~dC{r+!d<4J$vFqi5N6m~YmId{BJ7{rjSi z`@~=qZ|g3c&em*aAm|h7A9>sR`kl=*aXw8eJ}IsWxBeN=XXi=JsRtnJMrO(dHdHms+~An*+HJ2Atb*Cs)#cU1SBJ@Vgx8pYo*+MEN;59Hr5 zOEeEYiz_YbZv4gxX2k|we4|d`AiE%mnH|y&$xjaF@m%>{9VzewtE-N zQ!+(5#z_(8WtGcLU*k`3jxD0S4GMJSbfOazV-=mtD^gYbuamFLiP(4u1;6syG|^GF zuO0EE5Sx`uss2!ZPqbEJRthtUt+GA9STxyKG6;lrbcf`!sWc_Y#XaT;pRS47C0Nrs z6UX(pIaax#qfh<=kkl+(-lL}VE%IksN3aATT!+Ci-aGz1{rEhSwAq3F4&foOEwx^; zeATa4NwSv=Fpj3MaQDMxwC!XHmH(HG%FXJ*&pN_dYk-J{E3DDc9u+#UuB%o9NZTE^ z%T^OGE9N-FvHXS{A69=31RpMrJT@Hh|K*d=&3L9e_~i`v#vZ#gxGdj+fNop(vP z*^yK34-rPbB(j^Z9Zdk2Nqh!NG1J)@6LVNK43r8SG(6=iMX@|U@fY2G>MVulwvvxh5I4x2YHHjOO{HG-t)$@dzDGIpL&7* zW&AzkUEBVtYYo(lF$Fx2S6yDU7aL{xj7-+5w?^quXExu-_UMhb&$)eNo2cU#AkZ>@& zvHuuy!P%)yAX8=9QXvwkd{;jBVb?F+J;W<=3S;)I8a$et{3|hA$TYR$y+&>c9&qt& zLPHo92|;*U{|6AB!Gy*=lCydG2jDOvB$NK2EVvTWU3rw)_Cm=Mx*6?nq=#ic(fi5- zr!e!bMS(NG?nyoU6vnWFw$KN(xZ@wWTzdDu0jl^fAM{mLs#~d4(DMs@!2I3v?(YvA z7Dw(~L_`|0!+2Pq_5e7bpYrz_m4GCTG-o!l++2fBUn(}|M{$4X3lf?xb8QCK& z7A8F+4fn&MH&V=*w7fOFtt)l~7kfT!ZwyC<%chC_dGG8q5ZK=LKsI^9u8G9+a!oxk zQ1`3V5seMIFL!(FX~EZ`gB^m=jo&jR<@Dfwuh-@lO1e}|`P_cEAuG(vJ6nda znQotlL}r*oO87m+@#(iwf{NbIaq^bNc&(gUa@4Vjnda0Eq?B4Y%batomdaw?SBSl8 z1fdSeJBr5WP92aw|F6skZ;Qu}wNdcYR4Q%1<1CQBUaIOH^TA+g@H%17K_;>zrI5UW z;SMZ2@}vlZlRgl~y0<@GAQnN^=L^;jA_y;y%N7?eNvfq((cIEKdazJ(Os6lSNG`L- zTW-|V1n%HnTU1D&+?V-WTet{&@?n0XPIEOep+_2_2e2n#c7K7zv0dBx`9L|-*a7;R zITDCHpSbO8V6EZWt!TgbD6+5U70vG+VX(2PqKY=IL3Z=eugwL^o{vE1a-!EdHdf3} z511N9nC@)Hwt^vEy#>w=)o$$@PG647YdV^Kh=w+Hz7}JT`Oea+PoHtQ{{zYVp@kun zOD7YdsQ+MDIdt9n&RG(&VB!8qdyi;YbdvK~Yw*0$-$e3pbS&Sr6cF8>dS5Hw0UkSD zXo`64Bm0^V^7-cOG9v+mQa8hw$DtXw zCSl-GZ{1TPXMPT`EZO3)oCXeEDkRs4bVbF_Aq_BmGfn~O=RH71UiSR#6VwK<`7o_w 
zjrIDWCMmN*i2l%#X)$}Gt-2=Co_?MtEz`|hYIuu$`Mr|cp>Ak?Qc<2%pV&s5N$sE} z$4ky!=M#k01I_gS(^i9wj{L%&dVUOxUS&c3=9}@H6etHj100cGAYX+C+`&1tnx;BG z`VjO|Kff3(&%b0sd{zI2Yu(XlbbT^jrz0yq4%YM;dDx#! zC#QiE;$)QxhT8;9FP9^8z@ihw5A#tkAsT9sq|&Y6|AY%HMGx0!%SrDSl@qUX^x{jP zX>zM7uUWhc;x5&(O3%Fm1VL^(*>y(p*}WZj8QfnJaVoM+KKi(09PqNXLD{&G!LH2L@uP_Ni2!b| z_D(|&;I<^qxsDid1pDng+|x52x&s<=Q#O;AC7r`BoR{J=soD3J^sy7SYJ(zKe^E1o z8T)RDN*7%7iq;fj{q{SD1+pfhG}ohewMyn_*@&erwCY7hfVdL#9RoPOr*v@}m8P)$ z`JTbLe4l=r%kaq!PFEuH+yCr7XTWD;p3^ws!kGW;)h>vQ@p7YHIny(!>nuJvAh^__ z{!5BsEObYCc@RBYo3N;71Di@>KqAgLp(ex2Hn7!GM$>R;`SACI{G9>Hq+gcFle_vC z$n{RCZ+!>0O}nW6Bn8+_I}v+p!vE40W@K29WZ zznQPhNJqatWuiqrkV66hFo1!3q^mZ-m(!t zhvRF_zK zsc!gg!PML%J9wQWpYn~x+fxyQrYy? z9nCYdpMIzZ-HN3B{#vvLCIjKFV)arxP}UD5jVCQH%faO|XCgz)Zlr2`$4WTQ6eJKi z{97d1-t0@c(lJ94VO*E+ugcrJTc}fgIMvS9ju~+olgi%H3}@m>+mdtA4$p5{t2n#D zvbQDWZXFAOrOhu+XiI^LBieJ5aiq@6M)E`1TokP?$~mjswJEP?eQgOOM09!`qPq~S zn|TxKBbD~WB44o1T^F9_7VV1@5@}r<42pG(zn^;iQk?lkQnfLEvAbfZpsyzW*-01= z7oOpfvk8mZ9{^Y*LrV@QsD4cG`KVEN^jCl%AAU|7*`3bMT_34|X3?6XNvYd(A9m!q zGt{w3lHyUK_5lH|LGt;?`DZt5Owv<_^6U?g#_lf4Z!CITPnz3PFC)+b6+{xACgk#?AoL*-UTtemc@&E3G=)R|P2>HwZ1)OP=>_AqEqI?7hk zbP=T=!J;GstELe;TW{Cv4a;&3Eou0^W%4M*VYAFiQ6M_oRKV>$@R#(LK&3%xf|`~X zkM4U|FS2XNr@FQ6j;ZeiLx_24MSJ~4X1BpKr#ytnRZ#>g@E0jEHTnwH~F_L@ljTcwf2pK!+iFDFu|{~`C8L(77WuEY-FkGyYl8N%Roc% z-vrM8P5~7;FxL)S6Hbe0znKcG8K`}bG|PNG=eYc?k>V^+6i!CI_C`XBIbeSav7GEH zvp=LxdFIyAM2zDC|B1#eP;7vBYS^Yvc)>etQOQ<)OMmRz5`W_#do{sT=T}d zc^X=AN=Dz)UF-Py>NF7~$cwx_9py6+b*tfB4hCHiEZwsg{?(p8h;pIkmD~H9a-6iu z%!ikj!VGV?u&_YinCm{&HpKg{i4Th;ZJ?pVmoMrjc0$d@XeG+|h%zy~cdYlL(;PXE zcHTI0YXqP>2HTzzY3Rr7)|{#uBY{I<^inXAq)b#E{ldVCYk!=?fqOq)9m_3 z9z?0#{J5g5kKawj7DuoU|KbJpNn5;+T&ZoS@2I&f;m;+|v$Jnpdu-!!H)oOkOpE|{ z>Qz>$S9h71_3fE?n0nbeUic^n^FC7vUh?~ zo!>aaBa6L@HGbr6QYy$F*PoGiy^c16-CO5G57PS`=f~GSfCz#G9RVR6a*{+HPMCou z`Ea`~ED%k%-<|SZ{QzCE5BsJaAQ)6fo|dL{dpYBT_B@XPmQ|1%9y9Sn9gBjHWiz5; zNq3M+^5Y6g@9z?s#qQ!ds{IJ90=;!|*CjyW3bp!=f;WA@?hEodL6vj4w|r0=5p$P~ 
z_(8{egt6!1!xn31WL%~lz3cp=ym=Hmre%s>L>$~nC=x&a2f#^TJ2F|{?6i$|8TX17 zJY2xz;7x#0tZk$p1ldrD26mMl%R_28NOPk%%4eTv1 zVa^oxIMA6dKXk}7nXsw^s3nq~OC&>$l)DPBl|W22S$r8ckAJ_PS60k^$P39rT&eu6 z7E*GpbGxT1(RN+9`;{*hZ$u_c>T!F&^pZmGUoFgxXU^NnliL37WMIa1X|=@6!37;mg` zH#e{IF7&%-1X!d^4Q;;nJ1eKLC9b@16y;X}M867LR(k{^q8Ig;#)AMaLufuiVB@gY$dv z{2WCEKrR_kZ?dzIVuacSGt>67h2chpS^MmXUR7CIX~y@G%2;piU#0!nZE@Ux zWjuU*kp2b_=OlaYw&UK!|760+k;3jYcljvb4**yje-QMX#>Kgp*^IZZ&wi`d{o&Hd z5`>DPT9~rb$$2MaOCQAZ?0LEgd1#ua=8Yn3hDvlfb59MgCfI+RWb>d8*OE=_z@M-3 z{HCORv3~#ii;X)^kLR6lT3|EFvVhA)BZ7wE_Dr;i;AD%*{HqvH#hAyZQFF`i zyF30;9oRd5J9i`Fn5g0eX0*ch%)s{MJj~*I6}8bN(_xKswPs-8&6R4Y)e}6miE5~W1Wg8yLXwM< z2B3mQ3B9AGq5~T>%2c%oEqWlDf#m5M>Fjkjv#Jo<>8~G%u}wXKFTJxif6Wl0@$jU? zF^e3CM8^nCLjBiY4ZSWmYU~D6u~?Ge(k5vKRWu6)WA*sOp-|I|;*Nz#deWm`KV4c- z+(nn-R+Z|xXO_EBV}i-4(`eqd0Vkk<#M~PY7@!6ICblu&#`aecR#zQ;Ooll7fRdx& zlfN!k>p$H>Dm$zs8(h><;10o}ucD@*a37ick4NcHp4(~}gd<`EgZ~;$mfCqpg8b!d z-i-Gj42*^{(OGzdmoNIMm{HrN7=k*Hw7!orwNTF%YDW7KY*?2NC_b;y*pwlLiE=LB0RkAxt!nq?*sjC-;3=u z*m-ZaBoh=6LwxfC^wc}^xN2fZELLO*9@*#Nr^M&W-?XCoRvZ#*9UBRwE|&M{$Yl}> zACT2yMq5U%C*=(USBFGXdST2P2=5rsjCd*U+!|Dr&C=4Ux?kUZcW+wt{8lu@k(B&c z4DO>96<%mLxTAz|e}d!in>e{e*HY$aq564nV#u$ZL8jrC3g@bMjRZGO$1+@G&T%zl zj`qF&?%v^i*~wb;ySEN48MX#)0ezF(Z^rK^(Ld4yLsMv)^c#mtU(t{);@=+;IBAYv zmUSq(&8WP2^U?c$M6|5%_W+U3^o_1Z@_g*;`C4{%p5u^hC3S~cf<~%uB}wM>Akw~Z z#5-Z*Z7z9psX7Q<{2u(Ka}&7)r&Q4fZN1SV>hj*#Y^({TNJs3VKDKsb*pUft;5K=|ga+knhtjv=m1CWS`{q+2MyD~^-uIZZ4Z z4o*%pGQSvi(^Pj=u)B_0&!mr$&Ex_Kg zdC$%k_A>d>?SeSo2BB8eKEl9IAeRRou>FJ;8S}qOrzyUj7u$GqvvPk-j4UyluC7`Z ze9m-3y#2|5#>jOs6N1m+l@Lb#wulRmANmWT$+|kqK(9;NcTdfz&!Y0JbwTv79Y3e> zrNkDqFav_K@@s?%&N{>sUdodCv_=&BzlyT+oi-e=!Zab0bL_Wkx7K8Vqj(Nz{)pR2 z73D3?vD=cI8Hvk41|MxJ4e9+vooh8e1I2%VIkiplF2;01v&jcm%rI;Dp$%U!4=ca; z2J33{b>u{a2h)eUY)zxLD~!6k1=#+h;;RUIOBKkVyyU>ZXQCUF=@f-*x1&4Xr{&j& zk+ez<#s#7|QpxX!-}L{K+$O?fbc zO)JJS&9l!Ce&qa(cHNML$ibbwS@S~@dV$~%5#!ZtybR4QE31U%-w8=LIG(QuC(i<$ z;9sefQ|T%sUSGc5n~s_^O1qdCE*1V>lhgfS=IPEu!B<`BZYcGW=gHsS@vpt&=7wWY 
zTWoRXkH65beO8Vh>?v6r;;+egnJwL0tW_<$5%LQa`RUmB1(zUFtVzLYepk$x) zRIzKt$IQY42`I%a@j7`j6RX{1Y18tG0E_pG(oTJQVc$J!s~J_h)}^IZ28=lQ#g!*qv; z*>PA8+3Dup$;6+v;Z<_5CWq4B?EPiS#m>>d#}agoQ45|wTVs)e(a#j}ojI8gzlgiM z#(b}9A=+*(yV6f5em8va>Q1O=YPR92ai^WM&0SGz-Or_mdt^C#(8%a=@4wv;kF+Ab z9vSAYAK&JwcLf$&>r#3j(@Z!GEqApNLcO=;M{At2C1*_Xzr6SswGl;r=MiCptk3fU z`typ90M?&YX%P=SYAzXeMJl8>EJ1|Lt@3^(03q%FTX0lR@De6mAYugV(Esahx90jg z)6yrO$hYU^eDK_Oe`xog5t}g4llZO1sNnHlp-tWY=C{QoEI-f4pUD8yyXhOPFS&SL zUY~H_mMju!>3GUO07(i20k8{6%<_nVWY1RLq*KEmEF4sS*hs+;w%_oMc zcKK1$U_;<4NV^YqNvf-Z13ePZfAb~hhIx;-|8!I{xcNE%U_xo@P$`!~Q2F3Oz+RO7 zGi9}JN#Q*HnGbOkc{c)%B?F0qweU@DPyRJvHbWnn{d!2{edh`I@PbgiiG)V`~UzRga%%hIc}{h)px=IJWf zlD+l#{s-Ct^*+aMwT#AHTDhnZJ@AS>o~*=5EBm~fx#N3vliPoQNuyieH>%oG=e;yX zkmR-ZC$y`=`OyHz)RBikI<1}nwS_uU*{Ltcolo;zr1uwI$N7z&cErR7RRkD*PG?>R z-O&TSYfQ3Mjuy|`)+TEo93WC~j4GAijl2#+)iVkWA^*^}I{6sr>G#AQOGJjN4k* zhRWHmNJy{H{Z?F;T1il?mE4imqEON%w63J)0mCGROw599gIMVk_hf9*GgzGkxqGpj zDU#jpSQ9Z3)9p=)VJb*zr)s_fk0K$S-#fSAg9CLF>FPFsq9FYq;eV2!nu#jkOk~dJ+&UQdH z?FP4%z`^YCda1`)sLr;Z=B5XnH~7Crd2Ck7T9~Y)`%~p94K(cm@fotB3Eo1>U}JCd zL~oD-h1cuIE$iVw_;7$Vu`lbaVYXP>pvj_vDwt_zw|YKeAwD~RpA*UAL*NGNqk^yX zEBMu`f;sZ!t8}O*E&l;dD)fQMMc+k@Lr@+i1FeRRmv9|3-QSl|p*GB8OD{vNrgANi z#iI#C7ZT0R8an1aoIK9d`-4mxf?%yh{S)Ttg^G}|4#8%VLbcp7!$xUtcVFSUn*MSP z`IAsf%G3rG98>EK@9FyVc#2<%(<44^io#FI+Zuw?-As)Rnwd_lT~vMidriR%27Qc) zQZiD^cSw)MwE4hX4ZXvEdNMW*Nw={M#nC5jpwkd!_O)$!{(S-@kS&f#-=u^ z3@MKLzKm7Fzv%Z?{)JAr1H?wJ|f>UW-03Db4Q`+ zCx4`#hU_1|B}t(di~m5vkb8XHzZ~+62#^;PKLeH`o)+fT7S7iykanM>h1&e78?;hm z>!vmI;7CA2zzYlPQz0S^xZ+z)j<<|6MLBls()pe+D0XV^8S?8PKWP3jrg!Y852wk1 z)}S0CT-oES$6OxH+3O0?s&}K~F@ME+gIGyP`8iFUmaWvxU;eSy$QGy#1>IpE8c7$)gY=xT&+Ckab}5JNSCKw4 zc^TdEbk|;|Y)2>h@c6JNTU79ZWPldU_~cJ7eSF9NZdL=~+ZAIc19m7`nh$)a(gn5m zhF$P#V%3{SeUzNT{6ev`$nHbX^t=pBXOAS)bgR7L$LFrc5gBkIh#z`&MEr($oqlxg z>ubZ-0mMZ^vAUVIos|M|_=CMO@_p@`JkO`I=pRTiR)3db&*=yaD)W44bbrWFQ)N~4 za4Gn=w4&_?moHrzdnX9CEd?M@ zO=eA!Kk}*_9~vVA^`zEo$tRvLIY*1GrG@qs8g-?mz$^U$sq;-$0nTX%iMf*pCS9@J 
zGT2GvFf^}qr%kJ2ehJQ2L&LW70zyD+vd@_8-^uQOkUA>X6PR6+)p#%^O@%yynsC_e zI?;3}&mNtc+QxqK5U@?h=eT35rcMSAzF)BxA)dMFLxEgelMK4wvHW2k$z1|^DVxaV z3FV{0FLRlKC$qACl?VFSbnIr!#&v&g{OvgwLuV(UXR458z-tl*UD3^N#UVa0Fne_Q zekVznKM-rn*I+MsC6V>YsWSCcOA?uYGjpNK=?D_0`Ru63Y7H?nN@3)^}RezgO zXlJlM~IK_*AIkD!~Vs{CHXx<3I z9mBD&X74^Q@Su%FJ8&JPTVOf3v;0}D38VE|5{4L%gYoVX^YcyPg=LJravc$T3A^qY zgynhUZPn^*sDJwFQFzNfV_u{Z{jN>rP5+4lMqil@zSd72d@mSi-ba;|PQj5fW~R(~L$pdW|M16>rSVXFk}yb6iu{ zP<7~8mR==dNUhmNf<~xu@X3fO+~cD346GCb0we_W(&G^ufpJ+4OPs)e0I%Q=+L=#Z z@olKM*w{KpyVsxfOXY8X?scz3?fL}f<1NE>kr;?UTs#N0Gr)3VTU<(ic%?8EX>5E* zYP#Aa5(VV(;vldV8y&3t{a+U+&9i?1gFjuRDB7RFN;t*mCadFQudqk&I*LzQ1~7;9 z=82@O<{Ptr%}c%(pSFTAyZ_Dd@_$B+4=weGagLzSZnK4xo|)J7VEEdn=fVj5n*4F3 z$%n;i$&|2Q0*f*FD(y-aqk5&&4HU8MoA3UGRtiSY6}TK^It(l2aO$aj@nmypW65UO zqGdOr%ubPNUX8CmaoibYsNq|v2y-v+lwa{OUzoX-7_L13@s}^yx;NUxp?=qYJYC}U35h6lWko8^v!e_ zz1u}!J~?#wjMy&mb_$Dhj$m@@Ya?wTtA)!Et%mx??~85KNb~#-`MlFU?h)>!OUw0C zkC03BH}2X1+gsO0vBlJHqdmWJ=!=7YkGDHGH56+-UpxdhYvj5n4^Cx9m>HS4U{sE* z{n(@K*zrW3&7_r$Yh@1!w`-$}VYi4w7xp3b^IN}Ap?L4TN?q?r+FX^HLSFUGX4A!V z0(5MG_)95;w8H9SNS6GcUr!!rK7BfhI(OL}@T=*~v~D)ue0fUJ3Ejkd12Qvca^y(#i(f0K$fO(osT~rgRJ|;6t)`KNB8C&~3JY$F z38~hQwW`Fx5qDKqM(arZnJr3<$CHe+A8!d}{~RLs@J$SiEJbkl#04`Qyn1Zadlu6k z`3~0PE|rFp~77l#DBJJ@9CY1l%H2x*InajM9>6s9%+rO@KrsizfPA-{)B9+=w6#tepg z@UVdl*ZoGB_1RPBB8EWCtua%zHsM9LTBSfF;`cGQ2FRFjaIM1yaXK z(GAtjX2i#96(dQH{!gR2B3&iLf$f+{l8LtyXp%m9fxozhro%1m)jrVt7Y*@4hS7(_ z8q0o2k!6rhvWCMIDEp&2i*O)#zh?HZUY|QD*?90n!4}J#~2-7qbR9~ z(@*|Rd6aQM5 z`#{xw<#2$l(Q*u3*x1yx2h%(j;Q`Xa26%WhF}V+QUA0Z>0#GMh#b}*qlB~~B5}Zx5 zTLI6m_T6O58h4I;o|L7R9-O$IaqavJVelg&_v5wXP2SO&nk)pm*HQy`S!eO5>-eQ9 zrVoBzw6q!d^q``fU{k(Z=C0FTI>`v+TLhdknWm0-FczQw_HDJj>_{27>2-zxi1!7V zJENSrP?Gj3jSmFoFI?_FmnXOr+u&OMhl}$o8v5Sg{gE?dX3B!}ZO8jAXtXT|Xb6S8 z9}(*D#z>B~NmO&Sjhwy3Q8O@mODyh1s~EN`L2<>f);4}pB^W$Pw>x!@Ti?S6Sf4)UA> z#yo+69;m05>BR3Rdz``gBzpBrnBgZITr)kL=^AzrPnIaXcQyq#;t>zU`)GP^`zl1p zvg;Q2XFo2N$GW|4JopM@i&2=#RA~>|BT-QP2Y84(U=yA!YnT^IIS*g+<4!TvowqK| 
z`i`1;s?!=A2zyY_kO}YWP&H1U_74!4W!4Sq|&bv|Du;*48C!6zlK2O8AhhJJu|Sj$bCVNz|wia zE6l1;@e6v~KQ8GaF%06goxwRamo}Z#g%rhGnEZ)?=WJ;bVacf8F;7qrceDh6(lC(g28<*56SD~EZNrW%t(;z(eT<+uAlD6C_=fb$fGxCrY>OoGI}``vpOG{ zdF^3lo2eMb%xmL05hKUC;&;|#S0Ht_VEqCdx36OE2C}trb56O^5bM7y!_&P3Z#^UT zjnO&0clY!N?>j?}&$k?D`gj=(MKu0y9Cm##T70(T|?0ubxUx)@z$ zR9+V*z9j?F5Fv~Nt^F^l>|f`xy#N0G zS3l99>L~N)QN|N<%Bkh4!1cmhaCFhQCHC!AKi@3kpT(R|-`a>;6>3PtzM^sAm5kX( ziFYM&bsVO-J~i09*3Zc%%`$O)#Xm{9Au;~e!6OO#X61vG<=Di9ABY)m;6i;Ans;?l<0k=6}V=L*VbgN=fyH zwd`?*k~vaIQN$2Q6Gg({6E_V@l`ws%UVSjI`b;NTbH|j3Xi>$|ozU67WviANY5L!i zarL7U`yG9dN@x!p+rcXCBfl_~wKrG}eHT%+ZT6t$9{?;SF~CxTuC!=x#Gc^(-*WN) z`gQ*!tjOJDCGb3={;Nh$bH7l^6z-v$Dco9uc=fbWg2bAWUwn>OM((P|tn(vZERtVo zc!wwXDsJ#e6yjw(&ME?dt1K@rb!I42vSY5&zB2Z`x-}^vn^t!GofJ%WW3)kV#ulGI5_NO6Rfwo zYa&qWO+I+w7e+gq#>Hk94L@AA5Z`mY5qKa(3$&^qRN9 zYieUwUe?#^jQ!20;_P|u3q9LF+lXND76jx+O?eW?-_#M&t4=LJUzV>4SKh2(;vcHvjn!}xJDLb%(h8!svJ=y zh7?uhf=Gd2IxYP0-FN6uJLrR@HCN8dfxqI&&9hmH zm=oOll8j8<2S1Dwa7y#m1uMH*Zh2bnqfsGTLXKEXm{ZV)gS7~AJbr`j9zD{lhyS+v z?`_WluS5_|P>e?sMptr$qW_|7Do#gkKF#t~!6=m8sY zMwhp&+&#`HaiA(OWWxQSzAM;pA$8MKXLD)@+B7*dJ?|=sG4+1;D=3g*XMydZvMw8v z!C(kx!je@mn! 
zsGk46cbI#X1PM0|^5tsy9+U-{S^zJq`u0qrYi3zKj=KRdd!?Im`0`?ltVnBKB(}SV&*0MD|wo`{Lzix)Am7^t(k^q z1kxmoTl&wGe(z;%*2l>2f`6#z1^oIwD?5xPn;xc?D2Z12!yxp2e2}*;R5ji2G8^Qs z2%fcCi2;bG-hyL6_dk-C895D&m^zuzu8eDqce0@e=jHltGG~25HMSFKnIsHU6KPcn zO8=ER=p{ZXh|5EI9WrXs6GHrRE3+A0rYB{C1g5dF=vGp%`;3#oU8>L;bl%_Nx4i!V zG~UGjTfX@J1>`9{E7tmUj-?yMs9@wU?5)u_B8u!YpP3tC+jkwFm>^50fxIuwZ9u}o zNB{vRfeji&_*6p-e|xa(Q4bb!m5vU?g3BId#h4xcjH&K|9M(ST^)zjve4?S%I}MO}Nvzbj7(>l)(v<^9o%a zwdkq-z`)wy`bcvF`J0ClRD!;HsF!gL7bMJN~&%q=qHe{|(uG&~V`&scC-n zzUc*27-_-S%lN95vJd`^2IX{`4GgN^M5p22)m^9{At>natreAeULlB`hxGO zKM#1Qo2IXKbGvHIF>)L*e-hvBa7D2qEUzl&pJSGa66$_HwO!^>Yj&lk&jNMQo7n#g zY9wz~zHDje+&p(ZAt*Fo`EVFg@R0Q;9!h_{Wb7H?KL~2HYer2VMQS{8AIo}sdTi>w zGti@LIHc-b?J*sVTYMFfOg5dGghId6icefFYVIvH-;K6cUeo+6E2v<)Q1te=!1f2u z2q7%h-cyx(dz9ImpQ8dm#BBA^T>m&Sp5g+dSNh`&{^ec`E$RHsy&uO`mI$j9>+>Oh z@*uAr?|9+pc%~}ZFNmdQW7_?;7z0vx=pmH^DXnDhC=(`KCHMQ&WrezYDSndm_nE+N zKh_YRaerB@Qr-ZguiZ@+G=E77^C^Wg->j_6IK&$VS6^(l%q;$q*s+V`Q+%tYtT>$9 zI3NXt5b1ni*}65*-YH+a$22(jKuj(THjHOe(y6Q0k|Xzq-quR>v);@!%{Yo~8BUn# zg&{@HXV=(pfCR)O@;)G)q=J6tmNKE@^W32_G!>F6=BE4j#C@uKtU4QN zIAxq$Bx|*gWhn;?bYA7l`bB)bD$YuCWU~52dI8yAWokEZrn=t)S0?

-b`QR&tlPeuhysY0de?uIJdNS%i7aek}vbuDNZ@KY7K+ zQF!ky4!$R&FO@tEwK}rLQamXB#~*e7l_ZfZdo%2){tw`ULC|U>1sHCy2`+4J!ppON z1(y9Jfbt1nUGcwYx-%B>6XYp-HC|=$hao>yg2!WE*fFld2TX`XF+^( ztzc3j`ujq&km?_x_AAojNI0uFOUc?r+&e9Fn&xiKcJfWJt`Dm<-t9uNOe5Ow&Z)qF zZfriuP-!YyPFGqo1hP0)<}vlkCyoRr+RyIRU6@Bn7UF-38I~9_3Qgb9Ut(EmkvBN@ zEi0uRlXS$?ex0Vh$2(>ymg|!+lP(t&NXow6B;;DcfXtPCNL8wH`HjRk)-8)*AUt|SMJqU=vQm7H#ppz zUP4yxSbVh(d-C_4y12XQ-~A~~{d0xTJ~M<6_TSSq;KpJ$9Irp8^kvxOE@^BLWo)!Z zsLrr?;N;?jEyh=|_%jFM%2}v{^P#BIM<8L-wXu?Tijs@bKLFmj-2!B0aw;z$oq}=p zzZrE1rP6Jhl74vF*if<0EAyeHI`l;Mb3j@;am}5!)9>M!^bhuWJ9CVEUro&JH+n~p zMFJOpSDkieWXS$7)cQNMrIZ6dLIRPgeT@Z_CZ`I&(6^f9kQ3b;PoXlJse z|D0BXwDo{j&Eka+r&^@QpZZK&-d?l*0Z1}uEX^QISjkjMGCb@74$TU zG`KjYWfNhw`LunO;a#ynne@tJNM`4VDyG3uihlM5h2Ho3XS%l{UipexZe9HOFUqWr zl2+#*Q>6(XIW{N>=~(=)%6QyW&4WQK098DWBpH|$JSdU(bIas$gU7fHC?lb*Y9 zOC@sf(w+8ryD0K?2$fknrB#TyG_yTv>q;WBapjWSm90%jsh8bwjs6=%wp4-nQJltF2B2lOhrB(yGOn4~i~c$6lm z7g}xrIczAd>9+#UP!d9DU+}H-YaU&5a!=t|%3q0HIX{_|CsctR1RRXk?x!Nzzt7uN zC;4|e6s-ft`M(`U#V~2<#JfL^Dk@NgYEgQf(QI93#!7*F&|OlA`MiBO7M^oZl%vka zj1$z4x{dMXjG4N$sdC#MMqIB`^tUi-Y9cWjapOM+Ed0E=D#7E`33g4-P2R6nLy;_H zm?4dVXY-Rb8^x@5EeFPf&@E-G_04)Rjx&}rbdRXYq*MP!gjwYd{P+EpkFCyAJ4=8R zs%dJ>PlSGNt2u@c`4S{tz!t7vgq0g80 z=*MSpQUjzm;x?0O+;TJP=gCmxHTwSSp=`H)Tb6-_SSZ-Cu}b<%(fJ=>wW0%*gYK`A zY99_bz$;qbJm9pqm?2U52Ou@-nD~;TDjp>FllJdkQ0VE*0>rbgzXe*}3eJKBDosLr zXS_vnQ?PE6c#8W&(ERFzG;w3x+$4q#%ry3AVEr#m-14seqkjP6kAC(VhiNEvAHm+s zEoqS2bWZUf{_zjcjRj=WnbHg_)ZMo{)IQLqT2a&7h`q-3;&~`7scVziP~y@1X!7~y zr>7NxVR$$kaHlwnG4xw=JuE=!R4B-$&)nCET~3jR$X*ZDtCH+`{SR=)RkR#)JmtG* zW+?+}Lxmo3%>1B4gx7OFLyB`thO);c9MA|P_Kde<^1j;M(l5~azVi!nv)Jus&C~37 z+5Y%EON5~(A~;fa$3>IZ$ejNNmq!y{3o4|g0MNA30BpB2$ek!T5_3iFil>4+vSER0(nlz;z)+1+ms6fv+z6N zp)b5%jij5~OVYeq%ud{=BTowv<;HMUcQT!t8Y_f4g1~eSnmUB+?Z)k*>1I;adft+v z{09htL}Jo+Xpug$KjvtTe}L~!pZ~tzLSZ>WL5rg|?6^B~w}M1Eo{wqdqOS;(-ssVk zJ-9&kzG8R`Q9(jCN{>L+ZtNlr#3$Mg!@tk!2Hw>_+(rVlq$S$JJnzJcq zRZASF_|_y0o0*bpU&XZ?Y0Z8HiL6I5m#vxncoiRUiW?(UXU10;g@5}9`jIUo%jP+s 
zWi&S@OAsARdv!qxyfBAcau&P-RDstXrOACkt(Daxfso%Z&Hu4>C5FJ*)H-O0#F>LC zK8YW8ihNeQxX`r~Up86{%41fSmIq4ef!SIAD(OJ|DC$!#D$-A%I{pI$0HP|qlGqIq z9la0SrH0*uHF}MMrw%q3O8c`3M`{gl>ce}*F3L%Cbo&4|cEv49hD7*!NQS!PoVZ!r zOIzzYo`H;?ab>#H(JIOC`2}NKc!7T>VTAa>_Iyi8<)nMXrBj+TyNIa$g00h)*xvCt zJ7M}p{uePkSw8?7Z;|Pm#2!Z>ei^RHEY0^9^LEVD*@16**JCli3PW@c#TehdVB|pp zuT2dAE>Qo|GfZW!kCCZ@N2)6W;@0I0(U&2TR9#}VWMN?3aOFGytnfZ)cNs=-aiX@KfPfMgF6eb)?c)iY zH{YS}@r^BT9?BM_`T{V-`Smb0qE-x~dd{q}Y4%Ei|K^#shuK8J{M7sS`h9P_!<{ZU z5STR}Veu^0>`7jeeWhfn&-<3|4rj>LfPa9H7&58UNf+k|=X~Ki4ZDd!a0OFeczAW9 zRFB-@sYZytS#pCYaf}9`Fdv$`!?QQfIAWoOUdSG_-ii**%}=Jsez!b>Q=UR8ctq zsSz0Z<6>$Z;C8KIvn=KbGOUu9leavhK|hcIB8e@xM6la_HiCzg*Bs|G=0t*Tdz>-_ z-dw}oSnx<+8*8UgrdLT1KtfL8;0-(UTrx@UG&XUfh%!nBr&FV%gEe7?x0(lp^>-`{=^s#u;jA==Z2rt4J}Y98n%fNt z2cj7qYiqX+(}Mcvg{8Zi>QQM~Q6}F_M6V2YZSiXp{D1c+-?I*rG4O0!4Y^qb(1x^8SB zXb=l%?&!%UzfWZ!Mfg#T%P=|8_Qu4^nJ0NV^1X8X`$5d0uCqt%lyDiyz5Z(RsOn|8 z%z}E**xrf7*Z`48O&sZ|45QwlgG>Kth=(rsS|_T%k7gJ+{#6^yU7SpC|_BNE?YW zB5VZ-Y?~7J1Fl8?1H`z8f_`6&3V&LdJ_sINPO!1~JGqVx2LFGfX#H1Oj9^F@YrK~} zRxyf6(@WJX5aCTFj=PGkTm3U2eu{o4R`(5)u?c`Ag~@QdQ>d0_%!z=LJ*?#IHs+ml zGmg08Y`q%*6?H0hYRNuPy`7?^<8=YEM{dCOS8lY}IRc^S#sf9$Zz~wx%Cr2ms`qFm z`8sZR)SRx1Q#zX7#kHYbe`!jbInBG3(--S9IN?rG4)LiaaT&!#a`0FKqLMv}JVpU{ zeIiG;jY#UW?PHo)C%K(sKib}kH_u=i@8oiDtLB0Gnm-bGgORYTnWSM7+E2r8ODA_u zxO6*`By#mOBjxJ6<;&ifm}UkA4`^HYYw)omOU<+xa*of!{QD+a)k}V6+bFL^r>Ak? 
z)(Yf6m3sAN`FY4-5gUT35xb!heLlgV(75`RvPJ7w&7x2|eO?&siNryessx>(|qpxF^M-( z|FuoR;(lD8%W~Q(K+dJP0%t^&xHbjJ-5SkF8--L6*S7YX9BIrgp7=y#vD6{-^!R@N zi-P`)oIyM(_oFRS#9)Z+i}iGJ5wRf;d$ z)i@X;`B{sZqazHD+htHe%7sboamuhvI-VC(ZhPos`)S1{ttP}G1L(A2^)go?SBXyo zHa|4O8h8L^JPNuemMl__w&^OSzbfv0^rBd=b98ZxPfi0~05<_-0XiSJ^M!Vd0(7LP zfqc}cgE(V~X54nN@3OKIeLHB(#F1^1;Dx>Cq_}15n%FKQk1Qu^vOHr_x_W8p{KOsg z+L!1=#4o5VE`zgYT9}NSrQWwv*2k;n&X*B;c7;11)g961UnjK>)iG4a4xa1DREBha zp1808qn}YIQ>NvHsaENb>3KB7*hl}6n&-g$DHZW+KkI(wjpIyui$%k4i-gSSxjONI z&lc;7CekX91phr`lH$EVuvgHpSabR=(EkJc15|{9e(lLuC*<1C5RLw-nWM$_UB$3x zW&iPG|M7uHwZDR9;#pQuw_~kVC74;c?g9j->55T;`M7}SF^?a(J+3 zT#Qi3Rq6807=AJu{H~w6-*j{EOmOL3sui&8kks(SsRbnmmu86sMu~I=r~VdACW=T? z$xaROa~npFPGQ#}z6t5WmD-%g(RJ}G7<PvrvED4iQJ38wS za>Kai13a2#Wh-P|#gP6Ca%v=~;Qyq%xYXD|C!ah!SM@B$tVH-v&HNV&%*+MEd`1_f zV9d{Qrtp4PpGMo$KW?;t7?)JJtp3`=P<5SdSpPteZ%Il}7zxqC=`7R{ryiw4`4uGN zY+P1o6o0KN#q4ICIcaee67ZqqUJx=gMtGzoPAi|Z0aN**DT#M?Wz-KxPpgDS_8!-B z7@;g3rP&1#K$F)dDQZ<=#Zgh>@l0*S@9?cf*5PsJu#AuPND3ulIHXR{9uYMC04vmC+-@JXP& z$tQLmhJID{NKyfj`JYMnt6g4{d=Eq`MbF7aXvq4|4v%~K7zd*xXEdENuIH>wA zG=5-f_T@!%2ndfFA4b;$F;mc>_P98Zt1q()%YIg@69M-x+olE_gHqwR5A@`iZp6w< zYidlop7bE6b<*M&e0lDBW1W_*X!;QkOJ8VQVsPA&npa zYH%L(+c3j+K{#LkHqy>~6IF{&^<;4KTkayJO6bE+F6=*AM#PT&awNq+Q%@}F-X#co zISFh0GSBuSzTTZgLyS6;D&B@q-t0z1e7k0q<=X|!wpRI_-KdNj4Hd9Jm7kscYqwYx z+Y40da&KF7vFW+sKY%lA2HBb!#X?bf^;WjXS7rrqr`U{tmeOOF>AmaPJh{Ik^Wmpy zlT>5!k6}f>xaM)px76nPSlw30@k0W3z)IFfXoJK3Lz{tzK-Cz1taRF92-=oUWnx%^FpP5nGHRwHxi}Bnvgd`2V=3&=mQ)DiN{@CV3u+rY)d%- zNc5D3OT4e$nhUAQ2`94Bd3(OZY{Knn?8uvJDor6~I+Tu#FOd`SLtJkn;a~eQSH$;$s_L2ggSv zFj7tTxW!bTX4R;14>td3CZ#k{oWDAV<|0YTTBl&cen$HgV~aQB%Hi)zI(5>{r`D;LP?9qUEf`)BIHD8tjl~y-jlpLMD^zgw-=4NeoFiP$e&N@b(>gn5h`YZUJ`%+hu zUL9Yhvs34Ao0vE&y}z_Xv}L4sC^z>xhA<%0Z5rF{VX@vhj95%pFor)X&+H&ucQwCm zMbNI#0TInFw+Z5~yh)?T>n+~L@5O|eSG1dzn9sTd2n=zleJ~9mqb$%PqFWA2ejrts z56X0Op}!4kO{-p}w|M;GbKcfR?^TcOx|^H7p?R7PRNv6Pn|*1sAL$h}N6oLiZPA9QE|1AsHfQ%&LW|1+8D3GX44nA?00;vA7s$T`vzcGQb{96AwA$QSo~<49 
z_^JMWjxEF@*A5(Pu1Ej@Qw&-6!;h)xo`~|_m&Ve@fGYy84$ugRD&EpdpM^>d(-$SP z&amKIw!(GbbRW)CT0}lU#~im9SghiePGI3d3*tiL*oRP4C7*a=(Fe8g#&01i!wlkS z;z3M=mB9cZ;$Ru8%8o3F-oIbbR+@Suh>t_S4V<{eQA z+i&`?7^<8uhB(!Y&{G+Ma#_?3$EcSJ%ar->>MYrfdNfrMcz)MHyx-x~4Ix+V&FbUG z!M*{2Je?yYoRh>YIM7 z;B=qTT8Jk8%cXrKkI(E*#MCx(IaPKTVG=+>#L0opBUL;uZVV`9Li){wrv+?8oSJu~ zMfVQ56bzi&q+%9=x!Ha_%tczXZJNlcNDv_c#a*wQ)FxeD5x#pp(4r9l36#?rJgtwc z^VM7iU|Cj(EW_}(li|d4gwM*9$48uu(*Ht=QoCy1PyiQ`sO zA_(uK8+4%-y=de3Rf1+&eS1vF%a7_Mj`<=*Ooy;r%`@(s2agpsr0v=}cd{Ai{6$;EKJlLtbw+KPbOXtPESSYQ_jL zUzOUk@@X$yZLA-#y=E$C+|dGpSy#yYd!5o?7M9Fajz9M`$IMJ(1Cu+!8Xoe zHe828dL^S$PKRHkCCfsdqF%)OxD=faC4y0}vpAv@O8)^kkSuvW|Kwe{T;{p(*Y27s z+(1(MW#6wgC)EpGHcK?gX=XP|?0-0+Xy9Ixx8*ekGlkFPYe>!}W6q?@S{XuIzY7kP zud6ii2u>WRdfL$Ht@Mj^D)cQMY$r~47?%7|_L2C5@WHOt^eZhiVo6)4oOHetL{xCE z1>?1gHqZNS(~A4Ca4;2maaR6IrdhZHWGbJGwKo z_8d1CaC%KY7t?ir|A!@cB>omYos_!Ff>5&a$gv3}@;}L&`x5^uDS|n9H zSu~Z6C}IHKglH!@7;~zqp?eMgvP+s-!l9JClbX-KTZyh@Lt}{%Z`uJ zLBZb=4D1^BhB$9MIqMug9Y0p_l`(diVUp`NCT~EvE7SCgG&?K>Og8yIBPX8l7yG!W zNcGGMvDk2iI`rgyWh7rXDtyOEopA-As=hqM)Vn&icy4>a+l(v85LWXRSowqX?*h#q zXJ>_S*sZeU8i4hD$cu=_BCH8HslJSQWM1apyM6Uv^#Uz?6Eiw;sw5IeN(Y%gJGF{7 z%_*;?WDw&l`m?(BN~V}3X-Jn!7)(K3)K%JAtlP}ZJ*7;fcZ#2wBsK~nIJtq^-?&$~ z*|YlAzDu<%wsay>A5XsuF5Penx7w*-5}ZkE?tBi~15O^jHEQoZ`5*Uv z+|T_S$MYgD^X7VSUFYw2e!rg&p7(*_=hu?kxx&2(V?*kb`T|X;;uL^-?SRgFuQsf}acN?T-V=Sdo63VQ@ZwwPtsFgJ0{}K6CDAu1p)Uri6xFJ(>zUz zBR-60v5rOxqRIGvtx5n8mKjD)G3BWjYIux6lsQw7&FJ(+=Iu7L6g;M!@4{xSG_?8g z3gz@(BXP&m${P!=^;jOnm=E)>!UVPpYn}54!GW_On*joSZ&@hT@y~UB1O6n;RAOx> zCq4q*5A!Gn%!yIB9o;Y;;xd)#)plz7c1(;%)B_&(Z3^1WIS!^H2dYO4-V^5>*?lZz zDtLH$Yxn=MdRUy?!?LL;=yU-&h*#?uC}7h%Kpt2AqYv%inl~Hu zLHXa*M&{+dkB)tl>VM$yzNMmNNG0#MR9f#J9^yqU*g0~*vXGILiwmolH@5U_T#&C! zPoH4VCH8y5IYy6Utr~6~rJzYRemR+hTr{r#hlfHvWJ^5a5_656Nkapv*b8q|2l(r@ zZiL=u*%}xmo(hder2wjU<=W(gQhl{;3@}d|Tb0ODYQOB*-!;LeAC(SS$fP|GjZ$7! 
z!t7gKbL1jXg~<)@k0g|2AIB0gR5W)E!1Kza11-f+x|o;812#p4jUs7dxMnU~uOI59 zn@h3e9E8g(VAS2ihQ^r8&SLZGdn2ryW-}v?=A7{nEk8p$nTm-G)MF`RO;Qk{rv-Ic zW#!CXL(7&{}ftbiG_0=ILkI@n@*5PYpqTGG2V+eicde z#~ZR>xdyNS624DLnoShEr=lhDldqn^^1RZg=iN5t-pp?&7Y0-KQkaqe?YU%HvdX5e zQomDGG-7t9dJ$I4V^j3&PhB%S%J#_p{c6nlEYFhH!rE#6L(G9sM9e5_3#PD57sL4F z!ns5GX&=5kK#5)aptrBe{F?Pz!`e^a7mxdQ-TYMoMsF85rX}1#%^tJDsepO9u$1P4 zq1O393Svh)G3Q^4ohpl6TGv`@sGWE}Qg@+KoHYz0NJVuLo8f~YtWKIzdV7ETxD4FL z=GbgyQ+GUo+^ zaQl9$pEgy}4t@{Da1PPk2ZjQMhSy|!_#K5sZ&a1Sfgs$Wz4UYy4_T z&CpamsB8VpIc(_bl3<85blgmdwZzg^pDB_`!!b*|!BF4!TJ@@mRkG^xZh6Val&Zyy4??(2dN4fjPHBdH&*UlGqDy=!JtAXpw53*myPR#R;+n;3^!Hx7y9{x0YC~C~9 z4u!lXCK;`JR@$f50XFO97a{*qoP%pq|b61tXEd4u2?|qa~CX7QsA#$O&pmhR@beuytH+HkS@j}T=>;Ke|3Te z03l+YpdU5_{6||T&&^DXFY-TEk`f0Ft-Oz>#<+3wsWZ;z+eG8~8?U%Z+mCS!M~bYK z$obF6N|y2o_C#-d{eFTzYi^ZkuiCy=ZiTz8N;DIwg|kt&w)$AD@VXO!|aP6}k-#=q!AA!69X#&(X?e15`I-ZzN0_7H3SyZrGYsG;*?7+01j# zxj%0C?i624C4ERW4H|K{@!Uk)-fDiH*w3?hun=v#Sg`kNe+Ve}&c&j5a@IvE1uFX!rv{T?Z^uyeJ zE^ivnP6kI56`Ydl`3W$;yKwJ#^OZaN{i~1^K|SZc;ybhUYaWZoJu)~7GouGS12w@fjvnG&TKVHqM^H-rc=EzFvSF^Xxg#{1sxKImHyo&z>`+E%DVMf8L zc+rKtv~0ppnAoQKbwJK!%NSBtE@=uiuCcZlH(U{Os$4S#)4e0jC*<1n$8fEY=2UgI z@paxe0y3wBkhLe7dVVhG?9a=?D9Ztn^)@w;z4g`MO!HX1Yg}fvN27V0Tc4IWIZgMo z@w$9wVGh#Uw`yUCzP5rseZ+nYl2HTf^RU&;_|yF@AY{)?yhHV3OgAf%@$rVUaoJ{t zPyY!4uaAzJO0sUUet@w(JhxYnjV1_W8qR!#QKx}`? 
z@O(J*Y2JD}*9h9XtJ6?bOf0l}Lj6c+l=mPy7q0{qq5bRP8dd1qy2jTA`W}S~7EzaSDrKj4Hrz@ zjj~M6`k%^n<kGCf9248Mo4P+lYv6hn>Elt&ikYVm-=gE+0yZ&q-pBY{k-OL6qM@R0Ik zW%`0}a>QAdBVuuhj*qp8@+dx(yq#N{#2H`S`?%X~6QkF}Q{k?!_rjnqrQUBs3{cUA zG4GaQ_wDahoD-jU3F7K?Jr+3u|NgW6t66%A|D_vwqkwo8F0(V|xWD&IF+%Oa>luwr zrN~*-mW7vs4`PJOrA4@CA@#gIdG&xmfg9h*H9FH%(OWYUQNYk(>spdY;M1BH3-@fO0!VrK)$8aWF*1z6L*eF?p>Z znEV?uy+fc2%}bbLCeyTgyCa}3sch3Iev8Y+&+_T>{B>@4&`@QnUZR+Jp_h1{FcBAJ zT9sHS>i@LkhEa`gLKVJ;KpU#JA3}a`X{(gvYrJ#{b_E5}}%IXEN9;wM(PySw+c5jVX z#CPWR*fwF8;QmW*0FH#sxrP$Rk@YnhwBH~43{hF~l8H;WvF%?Qcjz%>7C`bZxBfaO z!P6!T%2t(iAK1_0Znp7?os)C~YSi1=klFG|*uL?M^qwxtwcql*_PXKDhL)fTAygfd~z=P-%DY;xX{V{Ks!k!IOWz)UJ~&pX^Pd2JIPY{B@5 zZ5=#69fsA@W=f`#JQRCre~ z-@-+2UuzS%!jEUi?Bu^Z0!Y7njb^0Ab$*wcyy6FFZN&l-x85pi)8_eSfNBV{|U6+STxi4UfF)@{p?Dkk37`xtny@%9=y zW3pGYm)<7txhHy%dMX4+gc@kIv2j^T^8{7u-rqzcgcX&^ZL-caytW8VHZDtaf%#B&01=t&N=dxaOXO~4wJK0NK&j;jnOo* z8-Ip)DjYN;G9 zLff$YMFVHz#rsxqax50uwiRtJ25o@L9f7rVaHy+j$8#WPoB@x3|=1EJ&`S#c<+C_oCkGtb9(eVGMg7 ziH=|vx1h}3HT9ctAPV|K`DE8*ZESp|%_hTrK{BW_e2W;Q`NKJS$(wBbM)~K6Rsqk| zBjL<%Eq4r`iffu08^-syx6kWV>1%>A&dsAgj_n@4a$qj1Q;bLPa0doxwF>1dlqEFY zs)K`!zio$_u3dNn*l#!NEM>X9h=Lfn*AOFJ*%DW}_n}k;p$_e<|M291M`Y@Ur$Rhm ztoR>)`ko}x<0zv9TD;Y$YtkaIJk0&1oStA9Y=kH6kQ1bOtGu1^53eP8a@b&|ptUo; z0`Y9^(&A1;`{mL7ekx~-#PP@taI0=niK`Bpw2+Ro^jxJ^op#)1vMppR-e0;4dMzg1 z*lnV9So*zE2Y-=EVdukHG~$g1Tu~Hv zH#x;ec&sB2b9c8Y2B>VhyWR6Y-{JFbA$E9nnI0%Fvzz-~>-+SSD}2>!;8y)ENjq-B zBm*pEZr4m0;{C!`HRrVaMfbNc$6{Z}?6K-Q`^mUFU>^w6nJ6kplct(K3;rHnVSdG< zHbAl{jLJIly@;pR|EmFW6^PKGeaQ6qJrB6x>YHK1j_8jgM{O;&n-+VR=buOyh=1p;Txo-& zrqD1k!RW_wT^Wg>gz&2Zn9%L)?9M;Dk7XqV#c}nX7%ZQu#HxGW1HwZcm6FWnJfNHN z{)n8`j)4YLdn!tMgu;gI<{zGio7QD{?#+t^^ayxyW>+~;+97vAaSG`WrGD^=W}V{& z;yVBj9YmoZZ50^A*I6I(&!<5J97 zJMHyS%QoU4UiqnkiN-9AS<z8jwLR@!unli`Pp z_*j&drn820OeLqu*%>r%P4&xZCC%iJViv4CfkYcV6B(`sH79-)fjt8*Zcu2^ESxa( z!z#JEwJ7Z5_Oi{x-J5(K$r$#v{>9`C_`DCDzlt5IF)F2a{vp@woYDmHKoiTuMUt%` zJyoBUnE&ZD6()2pOX&EzJ9 z3f?~}EzLI+@lfw}rkgVDMVTKfjTkM-Vi)XKfqd&vwBXZU%zvLkocYB`b^?FgYZB~X 
zrH)rSuGE1hZ?}L<7xd~&t+6~^UEf9GHZ;8%OYpaVkzuFgjdHO-n4@$^w)7UYuAr)I z$8U@GHLnAHb9F+vMzO_RJiX3?=D1s`^9jM~7G2On1#AbZ-62@Q+4u}Y>R~{Z-a!Q^ z+%u%PnmfOmQj9%%XTzTxM2)}FIMa_?)luBv+%$-Jh@o7YZjK8J2+g$$%#4)-p3N~- z=oTBl9q}5Fd*h(eilk4zflc@g^%p<`@K?ujVL{67N}5`bm6ArzR45XKE#zwCz>`H+ z8KKXzg2u9K)sK^(8b0iZUfrj8B%g5I*E9T0b0bEehocRtYjMT?L7N%9{@pHyu-wv! zZs%Tlt!7i*;43n9${C#zu}~ULv+6|cq~N+I*{3s=e)27x*pr=Khs7`l zhdQZU)YE;jI@1i}9F-iMz-vLb;xf@&XLh)8-AOaG2%IU7bT`-5S22YcCjI{Rr z9P%(JJQ>il%aUy*f1oZl=^%Sd+bPTa#_@ihJ&9;6wW6-zeUj+Q$scpA#^nwemYv&M zQFY5E`S1wx4hN+q4$RNai?3ifICk(+cq#BAfKb}EOa_9b*V0p~eJ4BhRr1aA{?_iB zC`7A$(0ylAkFSH(=a;&LL86J&uOwf_kb#BYudgL=8MZa9@bsDJX-9vnDacB^ygh%Z zFK2n{UGm<`^HvNQM0Qqc3s*4x8%m6n0DE-{WgZ(l=J|+S(|j{Q(I*)gW<_%5>u9`i zejAdw*tysfgb(3xP8!l4lZ3x^Cm2Ue;9>)w0L{9wqrIWN61(?K@zh|bJYn!Ri{r%? z7(L*n4wg!_s#!_eZ7vHj;*+1!w|#3MAX5_UPEtm}#8|V#~Kohl-%1^5lTzn1W{# zKNtxihe4lNO?}Q;a3HRg*$BD)m4vSl|bWa2(uTDia8tC4DXkd+& zY12znLrNueZ$M2{vouoqLAQQO_F*9br5qnVT^vUA^Tj|pdmHVhie z+(|CC)sqE19nY4NPR?eK=`lC{hi9;5ZP+BFW|Z{Q0;*48ZVgWD=vR{fH$d-;L$%zl zDDRK)WpQA+8!%2-sHwVNSpSvk zqu{jO%fzJER~ODh76zl*>_RI=I4tSFr{DG!GiDzpoq9?idh<6+<0d zwo_+2jtm?hEWK5GomHyN4OSzYLnej%ZU1r5Tl(@H!)W&juLFr3x#i1z74e?_+(Kq2 zCqLrXPEH@(_3u|d z8BIx0y#?f~BYqfkUwZ8l=5Z7RhV`9@2r33XAUi%TPa6Eh-o>L}+O%rP7b?0y!qsXY zQ^-NdLvCl;mBQ@_DS?IpUi&7lnC?*+|xxYncsVU)e-H6s-uGIi=W8s zQrshPhU@e$gcjl$5EZ-X2r%L;F;T2tigQ;I(wzY};ZZGaefX^SF2GY7tH#t$6d6QH z%8}CvK>#@(TAfdY{eAewKE;>7rA9*f2{bGNNQXzM;(d8m+8@+L^YGU8>kKI|s|Czl z=EyQQ(YMxU8!>87_cMOsExBsLoFPOS0ZHCw`TV@GyQZlF`=sr68+?WKi-NjC9_Irv zh;@|-H?%Cisb6ffEmtor6!P_60$rE-k+(b==Yg=yixk+d&zd3EL9t*7PJ9VqPcHL! 
z;O7cE!#aRYeV$lv_Ox#euEbBS9&X#eAlTvcsVHhZDmz;XDge#1=^P9+R~O{h;V9>h z?Kj^;5R$AoJD5`3$ID!$L3liIr*0H0U$f{c*d-Id-<6 zYCBNpeHO;`j^8B`vfVcwQ-LJY6njX`ySkaaqJP)mwlU!JVRZiYZ^x$ZL}l#CYl5B# zPJ?c>ZjW<`l_r_kG?$94ao5d~NfVRp?-Bdr=9=hbjH^!CoFk=YVU8vTP32Pu#HXmWmraj-qj>S|3=Ru zS@guJO@fbrOh5XDOG0ywdE&QQ8`&5qQev(PToC(-PS$r-U{4O-Jc;ujXc_`P?d(r& z`8*zys$^S}pL(yx#lH7IiJzFO*TEWnLs}-~lS9W*Hd|E599SyQtS%MqZt0*~N>!p8^RowWyU)U1NDMgMz^+eVJy=5z ziG4i!X?%Nc>v6Z46g?DlipK}+!G0)-&E4L*yw=%cFdzh;?^=XGbIQ)B{n7B>CQAxx zH_A$#+n%zaq5iQs1$WUCd^64J#?+4;vGv9McRSjzT5+izx6Hep`MMy{ zMo#k;VgSQuhmCKwe|&tlsQK$3*ymP{#L-Yt$&f-M%0wN~w~CpZd!K$rl*=>74u3h3 z&%E5TvC-P#@$I-UMDfEm|Kai6KGYYn)-5`>`*rsjw+MS3a!#WOmHwc&aFm!g$N1Rr z%cz)m2yh6?r%r7p#UYKw{=-|WN8+DAN*MHY^Fr^>WVgEzt@-Yu>`NZKsoRiq_ptr- zmvv=IN4HerfUc@i|KvJNIV6R0*UW%RVpG-#yKPfbLIC_LASiQrM1sYI@f6X zi)%kiy+h#niLs!ddW}hpE4z88uV2TtteCxk%02RgV*Mv7WqdV}^hZeT{Tvs=`m#yW z@k^1bwV9qU3A&NV(w0Ku*+eRpjWIo;9f>ULI^R|zmy8r?1WrhlxWJ) z4?N$qZB}yE8>piLL8;?QM)H1T`V^+e(0<;}kH?ggMQHLluJuW()U}S6KgY!X>E%vG zTV)-ruY9uNXX$Top8OPV`wXLwn$_F+%ROs(-P7C=j^R+VPt*&kasq=a)g>g_e;hqK z{SHnIJNb~N*l>tQSgjhcowa9;`%yTVHx*@E`Pb#Ob(NPZw*PfNU3`kDP!NT;bV3}` zz3uS?&;;UURL!5mavn^+H^IceKGonA>7UIq-REXzfkB%1lYv@aZ6l-d~d<2*7)NdE3?+}AbMa7if*k0@O5p+c`-v`Z71Ail3L4*Ae^@kBsE0OyJX#)BeE9s!6p&^SW?thK5HJ;9vxgd0sNt zPc_%B2ax_uE++sTO;|CRZnU%J_p*0+jxnIqh@Ho7Vaqu7ILQcW+|5myqkmjOVQA1j z=jf@JO}Cz^s~0XPsu#O`1k2?0h$F{!U6Jgs4v4l|xgnP?Em#~k!KF+`<1Qn!pGP*dvlxC==Zg7) zTc_1?uy?`*sSB6l9|2aydr)c(wUb~)tQr8%s@}D|_|J8Nf~$@4Mul~FJmzgmJQcrz!hOEOU)Dnq{hX;ny8j=%)NrrC&J>rh@*_%@%Fl zKfG${e|W&H{}{HOap-5+r8QLM>qiS!cLvC!YvOsBe9Rnr!0|%W@EGr5?r-`{Zc|?S zQ3d=uaCJ=%tK*HL!$}nmP!MkL2oCbh^J0)FCZ2b5b z=4VUX!ng+GR8;Gu|7tzTh4g64|M>caY;JuBf!Dn^P_L`htdU(&YJisSbvOi~=#qS)K_y06Q4*h=^I4*`ode04$(*MCJ%o!4 zwVGDC>gSg)qWZh~7E8yC(MP{=mdbhJ+Vg{C75+hHM>TlAaoYq6C_L zJyXK>;$)?8#eurWR@uReOba|S8lr~r7< zUs6P0OhjiHF1%7Xg6Racq&Q*N~NI9le*OkuI~vudkMK#HN-i{S;tMj&APM z>xf)ahuI|PzMv{lVWMJ#EP7bHZKR_irXQ|u&MWqoLNpD|Ck6J zzi8q5yZ%p-Pa=tCrDdE;B8}8zjIz;+)>%Jr!ph`j@y(TC*KRGJE*Xv@m(g&+jj(4P 
zN`ZMRe0=pnnfTeDRhn`+l=~Wkxu~D;X={|_oyBZaNjy^cPk<3LQK=aga$%!N3exv+ z>w(o}dD3i+uRh-toPMv`fGj3sNTyu{e2;skn88-#5hQ;kvR}8|AzY%l;~lx6aqj=r z$7hnIdB(U)-3TC3FNut{s^93!lPq>^-D!3&8x4z3#U}ffo3a7GDSxRlZ)A&OPq(Q# zLM&JVQi(4;8JEms*EX8X?i(X;v<4Tpq!(i0Q1xnKKFOE4UXV4b zXR^5o`qm)Q8?98_`I(ZHd8MM#DLaGZN1Wtq5fX;C&hd%)RJr*%;g3V{gS(wHfmeC* zZ!ey7wdc-EQ%chyJE_Y%|GH!~YvRbWwcW0OwYUQQUpah zl$+QS--~13y4x3`kSJm@sHR{d#3NAxu!>gPEUv(m*kTqCFN;;+;ofi5%=kr(z(I-m zDFmcsc)@d&|z$=bvHIM8@~*t9+{7C;!pMy3@Q1Wtp77x z@(E|%9YzE=3z81-U)H~GB$9dYWplXvy3Rcld2;7oQ(V;aYmlQ={pDnLZAl%fSqGkl znft9&;w+^A#fJ(lk~CDBRtAa?m^&({3#QBWY-jQ%%6biXaw^D`Ej`a=6W7G2D?!%* zuwKr6&r=4seH%n}i6ca#e4VIg6ya)MXm%g-*K*{H848Y#&dCu;)uj#S7PJk@a3*=6 zaPJ#zj8ob7usvo@qh+oBwJ5_6MFROF8sqzTpM<-m(k&uuUPk+{GbE+y;hwr^f;Jbk z?)*`O17LpuL`J2lR*@BLHWinOja#r|Fpy$-;42$^F?kS{pIQ#8iaX*T=ziGmi=6=C ze!t%zMO&2Noznz0*BK@>a^bf}zpp)wYZAgyobG{T*$nxg*PaakI47m@FQkPCk_BoS zXyk$yIaYDESAn}aOUdVzFwJ1UH?c(zWzO0OA#mopIm-O;2EC0*Nd06|GK!2534)Lj zd9dnbxsJ)mw#;5lhuA zC2z=im`hdL4j`q&VPcw1gj=)EWU3wlr1 zNQzT66tIZO{bhRX43Nwn_YhB-u^bg#wj{R|M**u&ZkUgnCmekQ>|-nZ1NPl}wo$A`nCVeoQJ1dg;`&+77NA{#7%Ww#<0N%u@p{yeqVIKr zbfA9h{Z02iNkhq4GpX6b)$i23GUcjM5FeD}8wZaPBbo98>@{Qve~YW& zbRB>@92|Kg2D4XskW881&!Vkv z`3WAW!0LxD2TUB!sNt(l`sg85TmJ~B)Pw$dmp$s;lO{b%`~P+x|EB=CIWz4)mZquy z5AQ%s_mh*Z|KXGZiUc0*&R)#3hpXIGa^LKYu&iH~fBJ7^1c9<>ij|83jk(~+JtzI} zG0##?sUH`g$EvV`NwKF9!4^qm$Flp)dt*ATV`JLH>-FZH(=XNKS-ELE_11Z^r%O&z z+*5DMiS9zp0jdQ6wQOOL;u%r0fL?J)p{$9J+38a`kxq22@vka_%a-?8j_b z7$%R1G3807ldV0Wp>HhRxP#uw-y0@jOzUdc;ilcZh&0-VO5K1*zR?QEB40+f4DuL z7g5o%v-vknnr_=UZ~89o#$D*DWCYzK$o_}-*1sdCiN5O7yLVgWp9uX1RbziG@#1_= zg(!zTJttz-t6IXdY&qDY3)tENBVT82^H2KGYm?(D%gxseTl%P2*xy$JNzB_m*{32( z1O>))yu8msb)4~SxMj_0pt;IWC!MIMt@cXssgO$rnU<0uK|4+q=z#n5#PG9wB3rHr)Eqs(FGXiFJiEo_6)bk)_*P1C(9>t;e`(aoS8se&S0SX4 zNodF7;{(0#=5m}Nt@PgVfM37+d5Cvd=_9$ z?<9!HjMt2t_qj~NRqfr z(!e8LZfo5XYGaj1`A+creC3AN0)uowzHD+u8L!6)m$lV-I!w;9%72JO`*tcJ2?;PJ zid9E*HISso;_ZR|ml^s$pHyJLF&@)mjnU85;_;u okC6grLWrWzxd^>AKlifSpa#(WTZi=?!Vvv2faBU<^TWy literal 0 HcmV?d00001 
diff --git a/spot_wrapper/calibration/docs/spot_eye_in_hand_setup.jpg b/spot_wrapper/calibration/docs/spot_eye_in_hand_setup.jpg new file mode 100644 index 0000000000000000000000000000000000000000..737fd7cd29a3cd71be218a4f0e74e2830e9dbffa GIT binary patch literal 39733 zcmb4pRa6w*7w^#Br8F|c(B0h(J#>h4cS%TtbPWs*QbRWkB?8i^Ff;>5E1jZ*dcXg= zPxtZu&RVDT+VOJsIy?TY{o4VMg4HzD0BC4v0FD0!;2#R048TDDAO1IE{0B^I%>Mxw z8ygE74;LRF4;K#)pOAzQpMaPE508k9h?sHmbF{f`p^6Zbzx zQUW}J|8D<3@oxw~h6{K^H^V@C1wbc5!yrTZHws_`0021uS&R1H`hS6rfr*8UgNp{h z!~c)2Lkd7c$HYR%#K6MB#>BwE#6UyG0AON~VdIdq2r3%jQoOPi@{OisRmv)-g4nfv zrUnZ8#Vp_{L$BCG3~5CDV=LNs?H%^60|Jr%iK9mYpkw?$>i!e|e--{mRUiYPqhb7a z#{Ewd2G;-V`5%>xoCQlz5!=Al7l-0ibXNK21xlf|T~^^MY8oYo9rT*2qTSCwW*GTz z13-xJp8_%rGJrgwz;^J(x}?QWlVHC8kqKPrvy)#=fjFaKG6Gf>T~@B#OZ_R;{xR;5 zyurLy*R}wZWMNTlC6qd#RP0%yo2hZILF-Wa*B0NmVJ!ylhn*g`o{JcHCffJJTa!(( zwBkc4^ZPz$hk(=9y8Vz3igb7oahyN9rgg4unA?L(}jPmJ88a0{eFl2erwR^fph(X!Cn~GD=!Oy)(Y*BO+TzXYi8N~!= z#|%=+6;;Ima^_b$Dk&+7rB`$#_GtqAJj5~danW5<(bf4Mt$?+|-ppc{Xg@P3&x6C9 za^M7+ED+a3Bsx+~za7&RL|;;FNNBai{BCz^I!ptIi7sbwpb4j;MyIe5$xW2qFK0*L z`Wv+?BFeZNh#}0Aahjeg)+ohV#S<0_$d|RVP&^9oOvaqo~Vqj>Q(6JLn=|Y(m*@?AO?qVTBpT5jHU>+;(VdgH`nnJS} zlpt8U2k7J*V~vC_a;-Rg@gaK(y@De`Q4w^1m?!a4IKqaV%{2VyIBCyoOfCIg%=SmG ziWCW!8j2J%$s|(9r9b0SZ_KxiFNOMIfLqPt0Xi(A18u?DH6yVL^!UC~_4nwf+3#n56~xwfqv`Y&{h>Ze2|cJ^LNm|3={AUfwA$S;=;qP@9?)PnqwvHnJ$v8%B8M37H6 zzMFc+I^)qk_7a{%JCg6D4+*Xi4=J!Zg}CyIBn*p3wHexo`v+b=7SCE5N2^yqfJ9^G69!bWkS)(YfoxP%9dd>P3ADaCQt@oA zY+T^@MAl4oWz4*#7)qEItDbYZ@^Zav@o3HzogE%PN7uaVAD~o$>EZp*bI_K;JLL%P zcf)rbwHy$SFy$Hp9jY%yu_X4SMsP}4o79uzV1jZz^@G{ydu7QE#vJkjdV51p5H-eE zE~c!WME6u~|1c%T^|pSUF(JC#=Zxh3MR;66VQT6l#bhB&b8uNq(Cj;-OO3c7#S3Vq zA}q7S&Vao6Wni5~05cS3Zbe6D!D{%WZpd(6S-CORkC5}T>OrC3&;mrxFQdv_y_*zy z05uAKH4gOFibwm5x`}Jn>VC?7z(oj2S-#LN#|g$3Ne}o3{S+&7WhHph&~niB%WftgTPq*Zfpnci% z>5}nD@&}8dCMr?Cz!&j3HWQCF7kC}4?*J|LvxU$i5@R#hWmyg|aiI#En;(j~RO&@g z8vR^afD4%d;SL9hSvs@kmwwPx27xub9j;waNbED 
zw`P_yK~cq2MaTuf%t@d3!|ILs=^1=tq$dCfKH4fRk~8?mu*ef_%=z~x~A3jL!g-gqKWlIc}so=s$EF8Kg6U_M@mn>Zf&_LTypZEUPZbqWj&ukf8otUBp)dSn=`wC?bJMB7f5 zSEd8znD~JL0q`!~P0!LA9T0_C`bJBEa#h-wfHF!`z&d&ZK*~FiS>k!=`&>S!9i~9o z!+v#x0p92*-B()el*(8vK@6LHJ@?uKzbwp~4pS?0i>xspx ze`aJ^?oH%gf&UN?|((2YrRcOa{Illh5mPL>iLtOQ%h2{ zm6m@;t@;MNtvl;nxJ?o7)^umKLzE~(6KUPLj4f;#=5)lE`9TD)pruWRyh-m|81kHC1`7ognWH3jV0@QalMPzq3a?0Dp`RdCn zRiYzjRA!U$;ok*aSRtd~MfEDOs$}s@k0WK=_!=Y2a~AK+Gxqqe(dAX{X?0vn9=y{?94dhGLGzVeH0%tBm zyqqzkstnmtv_B8g-Gvm(3LaHI8OMb*pF(%P{sgWh9_OCk$@zY%oL-K|oDk~plN4-y zwhVT8p~LT_hLg3dRA^ct%QNi-G-(;llyFtg*ka$b5Ij3|Q%VGFiyGF}&{xWo*93^k zc=x~3@7<;x&;SO-jc$$=yo!0PleWv<^es#|T7%UVZ-pFhsg)cJSPD4BR#R;YcYWcP zdUoAj`esWYd)c{V(w{PP1<}(@@2h2zV+3cI;_$OmPj8bPh1C#XBmEA#`95sx88i7s zYN_~Z%4>WB)3Oc<^L(#6!%-x_V%;XzVlKMgMo%xDZ;KM&dEPZDMd#)L@t~p^IlEBL z<>t)jI%Ro$3a^Cgx@PWxq@EFCmm5T?JuzQ~M;_D-?Lr>6>2uYoq80RavXD|0Sj`do z#XNUC8&YsL@!rR=U?3it<`RpF^Jg^Lqk;kuO8nhjnUz`WaiVglXdCt7PNu!Xi4Bm| zNskIzQ%vg^8+uTVi|fzI{ZRY$mB-W~9sav%7H)HM#vFhihj*rU@=RQscn?PQygva7 z_Kq987KoDpp%gzS0@tj%wYx=>{ix80!Y{H%wq{S{-*n>~y6E@CxKKWvv8j*3II6!x zF_yW~6y5W3s8N%>SZ?A69xV3EQoMsaakU_zXHO>)QHqttg0a@pEjA27x&G`Z7uEW% zH}kRDu~<)uzkYisB_kMGKbS8O!WbBlI0n(KV(Oe)ZGf5 zA7Nw|>(o}ZWf4=}@Tz8@rDxI|)W~$@7zNZY|zeTdH_C*{*7?=iFZAsLf9#l%9w41`d9B^x3-$JYuH9A3%pe+g*Ko;s4P z>8GpM8>#ZCLKTHGPtq%C(%}j#MFVcn(KMLRJj*lybU%*>qkzEeabon%3BYMEDMlkR zcG}KSe0M*2v}f5p(iY(It@+AM9aGWDH<;WcxHk%8F#@Y>!HXZ0xpc7j1r-4MO>g=i zW{lt$bY5?NFazeo)C1|P@iDFP$;8e9*l?Ah^u0f2T}wq#6$&Q`{j{i456(^ms|7xl z{j?tQ5I`v*_?nc!O?Jmb0KoeS#b{q@7R|ANokQirJZaA?)2dEtZeK(+%c~`J@K;}8 z*z?QZD{`c0y|LS~cvQq!NcSEiy23FdgdbbI%2{N|Mz@=t{2(5I%j{pc1BKc~6^(XNqHsV!bOmbR3vRCWZ~@y0(u zUe}D0Pqme;YR;S%(j{QLQ&tQBAnVSZf7asgZ1xH**>%JrF>L)w3K4jYL1pH7z(P#4 zI+F2zLxR4ioX^EqQXU<7*lABRQ5vC);-wVFL^;5U?QD%6fh40xsO%eRGA~N%P-tr4 zSuctCrxKXVWFci=D1FWDlyPJbbonG8ojy3-c+E%l%hhq_%M$+gZXGY=DJeg_Z^M0( zK{9L+@hWS){z$iOy(0Un5Smw9t0T(6#&>)*FT5psD=>yva}A$F~Jv2sOOQP=NCHSLM+gu<*98{Vw)( zc3=)MGSiv%BxuCFA4> 
zMRoJ)2Y*PR@prI9=Dt^F`H3b|5|F}o)P{uoC3Zh$=YAtA`{lh!^~DFiF>v)P!!wMr zR{DpjLEENU$JO`zz)*ygGv110wUgtqM@Zum?njR<`9rCWR@5#Bd_s|$iq9XzeuEbr zf23*rLpa4x*RhN*uJ$JOECihJ`tu%F?UwxBqP6>pNuk#A?C%G0K1FFS7WC*rvjs-> ze#zwfURaswUvx@VRaMuAWJx~rcJ z`-xc3#bg*i5y#%^Rlx)>=Zd}rJQ!g=pnv$Lip$9iWA;T^oEZVRJx_KDf7~M7(pWe4 z6%&xCY0!%$Si@u`s;UOlnLr^>z2r4#h;U0qS?&*n8P8*nPn-`rJ*pnQ^OaD5<520?ZW_%%?MipF(&E$j0>MT4>l(~4lw!Cn@Rjjf_k+>u>UE3#>nlZz}d>hGk9b>3d=`f$-i|%+q>@oufm3db*_Uq*|=- zRk6DrYwuI5EZSI1|Jgl@+*C=le#(!XwSMs>f0x-We80QvippwZfj;%x;GACiukW^) zSUS1*HYpm!2B#A@YZgrnAnECbdlysrJ1yZmW^KAgH0S0eR#86SXk4O9k2U`XwX}K; zUX%d#keAMY?;+XT{sv{!+&=uHf-KKb_S}ww1tvwe;8M50jd)b3Zn^Z6KQ>%kjBF=H z>=lWtwDmj*Smr%a0-G3jC+ll}FffqRjqaYw%$$*wNbHr2<<95wjulJ`Sxla!Q4pwm z^)4D+^?Ws7l#pWb6*k)nQH*9(dMv&FROCXUL znwyt>lfAurIEDTJ=Ce0$ZzO%OTD9T86`$viHc6Ueu9$2yUI+@`u9LeevlpLK)g4s5 zelgq*d`emSyfph?xFeoGh)}a|i&Z3!Iasuh6x=I1@f+n-;i@tu~hgO5L%*af0 zlX%}(ON)6ef&vf!#R6A{Ba6vias4V={IlMon(!|yV??#?d6*24&bx+yGfFC zGI*SH_6`4e=d>XluG1Ip2sIow@u(2nln_fCi-5=Z*)_--zUIo)pP=xrw@p^*o9XPA z&>N=uR1(5`CZX3UKOLWDeWRyXP4jf>fF0^u_H0#&{3z(Uh)9i~6(E8^xd8}|<*y6CfRrGVyc1E|L!=g*?z z91i4D_@^AH7x64JCBNf!I0@(o(%Yb(-MOzTB#>S>Kmlu?9mfT?LntoM|h=uu+MOBG$%Vp zJ+5~1i(2toVbOq|r`QB@Jy6%`8yv5o6b@mbjjW;S}a{K*BKr65l?94~Ve zT5$@MqG?-m>yhXF2aqTPrd%@Q+gba)Ue~OSP3_p3iqRa1*JsrzJbf?b(vC^F?y{d`O4I=YzN z&sGwyBe9NdeDg7~sx-D!{qK_EI*^z(L-MGqDk{8!xKK4&2MWHo)A_Q5WlGB8FZwb- z3>9IWn;nECj|YJm&{ZdoSS9F0PJgp9@qJ{6=?F?xRabH$)`dar2j42iH{d1>vK7d|DW8qfH@&TQLLRG!FF4;}BisxD|ht>`?3GwUa>Cv|O)=jYZ*vRY6FB*z5HI+^_r63k}&gjlX}5`SBkOZykE`AGRs;vC-@{| znsL`&oTyy^W80hF%l;&0*w_;l zL<=>H{33i;9qN=!uO-y;7r~QfYJZ@UOX2sHnL`5u6-{e7@;YR8a>7qSw>Qm~*A~%r zHj8Ac?d!7g?A$EZFun&y&;3d*HcaHBa5s4zkbN!DE4%HJYrXb;^NqKvNnv$It|&Gd z-P#LWTT__`-xfhx0&KyQt1pDP!<`2R6JZKNEjP_7^P~Wb!;`M^IGo&kDv{=P}kHn zv+*CUtA@-CR`6sv?{5DCwD@M$t+VCd)>$SG8?!^_f$^Q>#%lnLela7V)0Ybn#8PXC z#cQG%c#~fPrv`%;Rui54eP!G?_fV$X*9BiNC%N|+EZ8i(lot|&T|&&GkTiZkDht6 zaM)EA%%au$3JAJ-Dg>-g1KaiV>y8xv0WOjn^O9uvEou!u42Q(WS93N5XbtLWFFJ*E 
z`WQj*kU42`4VPhPZ7C&GNi;@)k{rvF9UM|P_hUY4qeWw@^=z|FItA>LW^|C(&E9&o zR0>(@`XaVPwQ)70mHOE0OoHuhns7F*3=Z7zl}WA%G2ewL{w zivZ^Bqt)+;>%r|Fk&X>4HTwVMJe;+U^%d?ufGq#89(_cRu=hNs`D(B&vz|9?61ZW^ z>LI!_Tz^b-$tZW3{Gw{9e9`9xa@DK%e%EEQw_KvoTML0JSD~X}W<<~DnQ(eE64!?Lty`VvwT89pv^DPFi)3p=Buvr2j zUtFrGs*Xv4S#_N%o`F82VJc@GkVtbA#D0V-QYtoxs$X*vmulZU7rb@iBCo1TkWD(o z|4U^yhF16>T(Hd3G&F39yS!>KjP#zG<>~4&GHB1-cZe&hDhC3;EmLPC43aKutE(7O zcU5&KI?$2-1ij2n$df~cvn-1=!6GOn!99pk9|<15ALeQS^`}rj9k4|L-6j3c}A1@Dr25iY*e@FX%|E&Aj*(&eJavy&M! z%d@Ey*Oy+SYb82L51m?JFGxV}oF1caz?zlG^KxpTz!Z`yXp=r4XfuQ ztQp0?a;8G>`tejBf81>O4`8})^T&uXpsaFl$u*!_PsUu0oVch0M>Aq z0^Uu}6szt&5gOe(!Q>%zJCCuQk5(P75G(_<)~e2KPn!+3v>@@jD9L0FG)Dw&Uc%um zw|GebbmS81YE(^J7$5E`Pr&>S@P6#xaMZHG%kF5Xr}rO#EF?VTramZ*MZ2_Gpzwk? z3+f;;gJ(n8%k#~6OS~_M4a~(6-2K7(*1hhqD1TS8&qRea&dqnT@gQ znmazF!DKk~zR%K7_|{`;YNqe@vXP~$NE5PB+4|qBf`%#xI1!Dmo%F9TjEHnQeYfh% zm7^4jzLq%oGFL8Z)D-9X+&M(a!vKzGP#A@@Ly+Gm8=WwR<<8+0j$kBdd0<0M-g)F! zqCH#Pj!Ab%5vX5w5Ck&I0w2~}8$J2W+&a6YNku8U=>x^!AWKw$+joDD>D~l-UtL~D zumKQ82h{_XUN@wXXJ_?x-jg**+-+Ms8GXov&q=*TA_%BJ&M+*l$6n-)?5nhvgHqw@ zqquu>WL=Bi7lB<%z7#eDxMcouC7e^OR8tJ|g~k6_ZvG#Vfy}AOg*RX3mW{q{IJ7q4v+SiSchocOAfDyP?&EWg@jMzwmcd^v z7Ii@_5D10gSgBfHIIuSEM>hWY1e)|md)prB6d4G^+mYMfA2~DYPR5zh>hI1KSsvrQ zbd(<<;U2`5t>~i+-P_~|!CUk!@+ERXQg6&Jts=CDuUrU{ho->>`jlHToN$r-Wt?)UpL^9w zSgQPeZ6Kc04<5|9KTTQ$ey`)R;T3CAIXi6_Q3}{kqmDGZkJ<663`I9Lph7J2wj?f( zg<7NU-)o$x;9cfwaHdGx&5}5a^Tk#^?Wlg`dl}R~h% zvsw3JBI7gSE(7YD{G|shczsAF*rTdtJ~(F>Y5`Op1L)GKhCosQaF>bL@T94>G2h|oEhD& zudaCeJ$OaJkitM%no!gN-e3LGOlr+^8!U%?EYGKS@=8`|`mAg^l}T7WiQSPMRh-MK zv*heiaYHFb4dJ&5#oR1O+iRbk#s=k$cl6|8g4W-W(_65!pZo*(p^!tHp5ZH{$#ldu zek`0|nO|RpdVA*F($`+fSRa%KOV_#$n;q7LXx@{WDm0fj9d36M$mHg=LPGt-$*vcf zc4UlmCECkleZxi9xnQD5RHDhQ@>LW5u7>_5rypVXeuGo4@&2<)W4G0V|5q%bWm#se zqz}V!wHMysVn?0$DF0_zYo6Gz-&s5d6*(#%cCkne6>b&)PPKWBBJ8t5wNJJ~aQ2T#arBMV7ATfb$EgaS2>&fO`MF1QWv$3|$~6~xs#^n|Z}U+x6-`z(M3XyLgGS4$ z=J&C~8TzuT?##~DUrUW8=WssDH{!N6;d}1jQ;O7D?8K6%b|y6zM3U~p5x@(MD&bJy 
za1BE=i((kfT}bJuPjKRlaH78hC+9tm?_J6_M#%RoPgz5eP?`QY-es6JTpWu_n#kVI zz#fqZxJzzba{f*e%5nC969C;K?7=MhOD>t_1FdTpo>|*UyUBOzHbtK0K%tVvJ?~oO zY9f)01@ zwz?j)?v*N$!ugdwp=Ibfl|$eBH?Bd-^b>e>7fF^qM{uL&8J#V7z41^rGSdiUBHr~} z^fL)=NxS6x!nW|7w_qst&(=;w0$tx*Me!|~wr(ZaS+HA(daMtIXsC&7G5=mta+t8l zaUtcjb&l(vS7(39YgMHcq(U1oAW<3`UmYt9z`^a&kjs=-bLO16$=%K4(>8YZ7kNl_ zpY&J+DtN-NQo6oN0Y0564!X4UvJ2~?jrmJxk!j6=R%PNrytAEABpWXWj@wXXbXC7L z?%Db4yTTo|SVk*}cg_qFvj=MG4QCgj5jtIYlydcCF?)^2nrf7XV!zh;i#*w`H@nDm zoFH1WnQYRSS)(MHh^R!%QufxlOOEV->L$byS9ZpSrrhX9)!p3>jE>ZA3(h2yg|3s? zLXEStig^5;+TOMo>9(1VC!hKF;QXqmOLKj_ z$Wi+<(^|`yvNApuuF`AX=z2(iri^1&XW{;DKci`I#C+l3GiU@G9GdM{Zl@{Ft$LdF zP7b1EEFM#{r4^f`86S`J#P%-sw*TE!%b#|&8d7ZjGM_*_}?Ie5bP zDN=dGaJE>=NxwLd{llT_MSGX0C~VvEt$WBYqomtuYE6c)twwO;%&}-U-{t$pPb&Fh zX14E<_*i%f!XOM8jPYmV<& z#VRPgr^o)jw}}27y#szq-w$ff6iNCpT6{RK)tyo4Ut%G@6zd(tK8=XA(^CKH+T%ju z+Dd^!lMUgNs>$mOY0-FO_0dKMwsHNoMV@+k4kW)KGiT zoP8mN?C^>9w|{^>ISKI$r*CialU(Cb?o^zgkH`H_j0f$td9o+L8Fzv@{E9}bofp98 zioAaSC%q@g>S6T&|59kR6e0v(J=UM;_e^d_@Uvi&Y1Zg$>@c) zU4PwZFl~XTVNw2v9Cl6jJS57Wx0V-9{?UUH>wfP#-IwRWLT0*TM8rdpO&1ol-r+^p8#^~ecTHSh{2suaX{SbOgwdp3 z4)j7er&=a=CPkQ6rW%?x#U};>$OUzqJ$fF(>82^l&7FjFd2;qntR{XfdI8U9qw`h4|xDUM}5gpcwtoX)kR*6BF5XZ zT*hcKjNvv|BwqxG`9>q81jcOp zc+UR#xsH|bNb6L02ux2Wt@E*bk|waeI*s4DuDC;T)b}O`Dk;+aT`A04^!^8!08{Gy zc_jm5QvcPK+~Pdys)~SB_<25XPRL`L*Ff*9 zVsG>XwT^hDoC;DnK)1$u^~rh4E?SrGY-tt2BoB(+)ANMF`Q{U93V)INjck@kK812^ z0R-b`4Kt?K0w|Mu9g`D7D%W$lcgW|lxHlSNJ8x=S9yuy6Xg1uXPoVpvTVR89))Moj zjV;M>p>c|@y6PmU#vL#!y1 z+BxwYZA@1`(|j-NH0>Kr6R8rigq2)Yuq;}@1bU??uy$i1b%QA1BG*qi-u8iq>dvNc zqAxaG=|}|g?LZ}e!i&l&_BRB(Ck_pMw=fkmSore5Ijjy0S!oWW1MvEHfV-pKi5YX> zshU!x)UdnjnbZc${Fj#|f=m3s)Ygsu`|2~}`}E7K2%Un6g z=58x#Hu-p5jdfgl%oH>_pF$S)-0{90D_qaSgEY5i1zYYzq|xXLu9W24Y?>utE0LtTQ!Lo^tB_mOt~r;Ran`^|4;SJA zeI(fB%DI4C!kzJBj3EAeJ+ZK@(Ny)8T!u&t9s3i(md4<lV8@jLb2 zdf>of;1nXHJ-}qC{avrv9MbB~VOmT0S298pwyc7l`ULeFq+7^@o=Rg0Z)XF+;n~NH zZ+LjfgqrQmDp!ofNY7buhseab{>c1g8Q}suZRGX4*6P+p&p1(T8)XmRZMmX~mtOt2 
z5St;XG54}$@|attpscLeZs0@0Cb#bxBV3KR#)t_PURU63f@Eh8WqTl$BtAC^f5#1p?C4i;e0Ksx z#rGD}IQA-aEePQJ1AsrDel<&;wgbO@i>8fqft}e~&k|bQ5Z>}il*PH24!6p3dBg;N z8_49v@8@?KEruSof29HO8*u6LRU2I`p>Q<$*j^Vwyz~?zd}#j$?ertE3r1ezsCvTs z!A`!I(}DdvgG6pLNGS^2wZNo9(Yh?K*82Eq%Vx;maa-*+xM6+4ZQ@pw5ilPe7N4Jg ztE{;h_2~@jVORcWk2`hZE1_|IPz`6dW?6^B46^DE5gTh-P*=xe5Sg`}F~`Z`SEKsu z7M;1-7R#1_c0|+bDd~J&D?x2I4DOL)Pw36NRxX@KjJ}aWncb%MO617IiSoUd0sJb6 z+ZQw@Qsq`N*K6Z!p$FplHS~Q@cAqys`cwR_qgb7!PKjM;=@ax8GDF;c>NKqsy%tL) zS~JN(WZCSPwQv7fhg+^SYoa&u6-wf70&n?IvJ#b{D@M?Nf1;C`aIVt{Ow0X4rwn=3 z5Bb$g2V1a$Twm?JnOcygyJj0EBd*{h!8FgLiS6#)o;Z+t%T58CixZ4*Q3&wZciT5b z-0z&*zZCz=B1Z@pX3rBWEzr`?Xck%z>R=vL5I(QT%gp)*kQDpLac$YN{bZSBsR^n5 zlQ;DTzaC<$<(S0(Az|$SSP56b=%-ygb@vJS6))`Po*VLCbqNghP^4x(#-oO+M(s&! z9vpNoQg;g^gmblfqx-4koH{Q_E87q(;Q>$|T+Df;x~SZtQi0LG(VcTfl5|#}gw$P= z*iX2@US1FvS5x@N5c_Jhq~;1fs_oR>o4_=p zq?il+R3_5wmAp?%IuwJ-kKBCbHbtaG2~Zy#7EpV}5$AH!FfN4e6_?l8o8)JY%z_Q; zkB#B4`QW)mdb>WQ&*Z3CGNB5)1 z2CnRn$sEN&ZiP;D(GyG_56Mapl5%F)o7zq-p|)p9lNPP%y(H2nL%f@S#yU)Xce?aX zxTnFfkb`06?MhibimH7XO?ziNvXeEvzIIH+ zJzT+D4JxCe77F^e1Vk3DC6Ab74W;9xj)rClEN!dzhe933k|1-}B+V8f1HU=y6$Gn+ z!KbFfcy!67tlt%?3Bvrui&ZGQa${FMF~HilA7RSYGIRB+1_uUJAU~K}7?u!RTc!DYVT0zLR+bOs`ORLjx>*(+ zDWEF!+L5RLE-Q3eYFP1jx3QgB1{nCIToPy7I94mS%~khhU^#W^d8Xcb7m#P9aXFP1 z4PCQMTJH^ww<}ODpUME~@|oA*H#tP%aTyplg`xETO0^YbbPU?FagSz_hE7d5YUU={ z{c4w<>q)!V>CJ&6R14}$ih$@aIx>1blJ(M;>(l90V#)7Y=H8=Ro#|1B*{`>LPLL)) ztK6o)K?>PZcp1gssCu+AIJ)xnePqMvmBnOYjTM*?xB3}80OL9@uW~ieyY{y~W7*at zoL*|Ut$@5<50Q-8dy%VGw}U${vN3wwdP>VNg7MorrshX}3O{wybYZsWt3zF*MwzHg z^dfH6WXbU;>op~&~F{MRxKn@FW=S$`oW>z5dE#8}&;_WG%flkK z_D>0Ku~>}sttI)w-fmvRp|Z-x9oyPiUnHd=Vv?M&bjhRy7ux47tPwY>1@F>Yy6Y=D zK5BooRa=_MGlccAEn9KJmf0EHuw*mOM024zY~*Gf3lA>e3WO zCeWblfRyj8G%QR~ZP^JiY<;;oE%b0}0X0dYRmE!XwpGrPpA%>rBz=g}`=xeh(blI$ ziH&pPC2~P~4ca}bUaD*OWL7?_&B>-={4zm*;ZrlbD}pIpFcj$^U~FM5+)1ckpW22f z$uR`$?(IDWqsmjC^^l^!2+aF-H%!BFzcCQKn`!ze>;2~8?aN;g-@hJ{kghMGl8j=r zlOVx2wK}L9B9$IQItr|)SNbY5oQkrYl3}+hIK=(fG=8G 
zzumcBI_Mq818#$if@1_mB(ESlXJ{%@{bH5Y>|BphiT5RiA5nsmJq_+%o$md83I0kw zm!H9O?psg3!#2lnPB2>XVlMHx?bDCjHhp2Zwj5{YYH|yF4=*pcW zafc8Xw8{cdDawz&OC`Lz;7Qwc^>yq?;d+a_+b0Cj>W}@1i2l;bn%etQmbV&^H{edx zE>Lpf^peT#L+4Ef6;8T6k>&>Zp|yT>bBPE~j(BYooUsgUS#wMVk__2-x(#^9GtXCA z4AKWHLA@x$WxG&qDdLoSGQ25ujRMOczeP5{ImnEL zeu2d#O&^}j6_5JBf(pFcHW!{N@%M9Whqgr8qs+2WDI@uX4x97*N092P|9Ujw1fW6B z-zhbp(*y0^(WTV15@Zj$4vVtCE-}h}YRcowYJz=3R%H7@HKf#Wf(>H1gQ-KqH<=O- zi>+xJKk=E0AeFs^PDJCHxL)mc9C31b`r0G;8%cI&*aTQbaU!M+>NSi@ROjz*G94Fc zGjoli!&!~bD!l3WBpux&?IH1p{N^|VVqZ23JsU+A4jJgAvMY_jIjuu=U%6aY*%-~x z#(tRf8QTTPq13gX9}Jj0^QFz8CxhF3zFBKTb!vxo*+0!yI@dgDw@K*G5YzFqivAdU ztFd&j(%jmY>*lb!_KGwvrKo}Zwo69)`1_C1*F}kz>JJ$XaEANilGWMcO&`=pGpUTc z*jE)^0hQCji&7+@TUgZ}eoO|CEI`&qVKXS5l0^mNwh!;h@C3u8B!Bv9$A`7&|c-_*Ub(h#^HQ9zL z#9B$G-&bq4EB2nK&C9Q+%QbOFO!$9}ZzV?W=4vugMBfULKgX=EEh2wE-88-$V_E&) z!1K$~HYK}J&7MXlfb`&CqHLQ1^-F+VNRmb<5h%Xgc6ve#K)Ssd>x zdx^A$M+mG@;n4_vvpCpd5;0AyUSl7;3J%>XV&QyJ=6kcplYjLm#bs%H9XcU;D}gee zUZAbJTgeIN*(}ui-9YnGDroIE`PZnLC4JMJR1>zo8(fFe2&BsqP;Y>$12FB1>mhmR zzfDJyNODIxto*5iFyLoL_IUh+GbGgq$6mZb4V#&Y_Dg<>zb)+siyWKRee3v>wi@F^?@)b0 zEItkC++N)z<-=YOx+cq7cH5SQ+iN$M>loDqKcM^OI<7Of36)W{HYaN0yq=4aDwkd0 z)Z|E*{K_K%@zAym@rM3Rc(J&g?4Aw%?T)YY_@Qx0_x)HPP}lY!mWN^{SOIpH zXnuy4y|&x`0jWS%zt9D|JtpnsWLIsz(&yt){n6Wwu{@~c_HO6?5u;`V#xQA=aBEj(?jq3O9&MVL|b zHnDXCB%j2S>QASzWluTRglOvFp+vF4O3HFLKyx_W1QRY^3 z8U=3?)S9E=`drOIapPn%!U~o1J%_p4E}1;2%gm*gMPoZFjn7odLyY07a`m{Q)6Q5T z!@mRE!+m2usd#Fhyd#m6Bw?y*y${wOKwSLD;H;g>2VSSOqH9j3WHwP#WpSo5#G@SI zN`pz9mqHJmR8%e&5TbPd0GZQ%!YomVRHY})eg6PrU6WboLdMgtVFgg6@bPZul_OLv{Lc0Gnu72a~{g1F`BlA7Ub@!Zl*) z&vG(Oi&Ke${{Z3@;E9T2Cv(p!>49=CpR`u#^2xYpoy^N%eiO=oy3GNH(RmX9iqvWO&+?%y*k!D6V2hitAU(pwL2<;wA%dK-?S*)$2gY6rBA9L zcH~>>9)}saW76cIXi{z5N25ZTWmsn5D(GM5jH z9Q%ds0)+KS~!Ow^CuEG#ag7*8Tn5o zxsfo#>TSTwjfJI3Q2<|kHUyjOBC4(76!|RFBp*wXlNbBe+i}LyK_J|O+#8M}Dt42o zs<$0+%FfwY#!~CP7JP>)Rq4kKJUz!eF4hud#d+9@T^h*L6bdyTWf5RLv0|c5Me28z zDWP)iB%~>Yg>pFx7v3n*PoS`+Ayt1ZY4nr^)NDsLzT@Q&ROaZKM8!U(JuasCQd}r1 zJ;i`OP2%||HI*Wy 
z*A|3*=CYuo1&Al8+6fhU8XZf_s&Okwx81XUsnibe1XZcp)~O10PGV)tk*I8?A7Ve+ z9cHRWN+V7#GRT^Ss^T$3jEfS8Dp*}=pohpkfPR7`gnrz$k3053wDhVc7xSe64 z>MK&s&ej&wxU_66BZOF0Ego#T5L`~_ZLK?6J}Kz~w{^QOaY~)Ci&%M0XFoL8)b@NM zgHpPn>fAQFnt$?q(ZzVr7gj6t!x1q!q)KW^(n8kQ&~%d8io4%|ZN5x+mcm; z(JQ4Mcb@7Vl3J2dENl&~qORLU>6P%4m$d4lYL6O(+emy&31^)OXpnA@p{G)SJqh2e zTJlsHY`RKJgMF{%N1ftco2>1j81T9dYa8d;!|+>^q1n|e&b3c0Z~EZ*uQ`!2Gp#B{ zRTajkrhrpZE>qN*X4X}5hcxO}8wCMt^B3oX>>G?Q%@Vsvtx+c>#Kl1lGV9DK0HH1E zxYPy2YsqLgV8v(}(5JvVZ{{VRT{{Uz`DP8gV8Cc5NF;d3q zzKuRdQqPCN%D1hHZ_J7}g(uZx<{ke4fWcKv0@YrZH%O}wK2CXs%gi#QY_`@2Q2dNb z&LomR(q^TA_jNz-4RiRqoXA$HgoySpqy6LOv~PHens84mkQ7x>^6vLq*P^QORO!VAxk6=I9Pjo#EFTyYI{g_yHQ-D zM!*!bq6z;1J*^*DloLGEWYIU7siBQ5uyss3>1nj{euvmb+rYHx`PpncPMKSv&8}Ro z_ZK!vQMb^>Fv>dqT0f3yO-;|s&8^9n(!{dVq?g_*0Vz_H00Q>f;28QrfiU(Ws^VE1 zl}`-KQspVZOD1I@1g}W}DIA^6@5O|8&e`$XUIW8xRj*Ppo{2c7(sFp&CC%@SM_Yzl z&aFh!+bbo8B%47CC{q4$TxX=PKh`lQ%wv zXd7Gl#C;VBg#c27pdPlq{{S-`bu;f96*;48-0Uy%kJ*ttB%XF2ou5N~mmX~<*XNi@ zs;f%4LKfw{tpqVlQmu7b;`@$}a%5140a3W(+(fdG6tW?jdTM1l0*W{3)(U2v;)mY2 ztDd)qg|d)sViV5W!HqDGJxzhXUSZ`8L_o}slnGzM-0upTmTd81@`Ih13K!M_dqC6W zAtY_9pzR1xU@bC)@aF#TaZ(!uY6oaeNK)*P%z3~Cwkd2K!zKTd4z1(ItR5@hb%&SRKHGi*Hy^u{IDVUnL|Cr?)VfZYxE?x@~g| z3-iHtGiHxpui#0SG@!bQ#Goqw0F>pLm`EPW^4Ik+vpl79k0~?Wc6TFb%b0&N5U)b ze><})EvU3AE(&?7bcK3d=3cC`yw;>z?IBh#YpM zO_)RSEw|{Gl+dk7Lrsm6oveDEq{Uk7)3Xn)_uX5LIHIMkY9&f1a!EUr2Qu-#*Nx64 zIXB*P*AY;sWzURk3o3=ZE95a+WdIusl#s60SD4v70+SEMdj8(Q9rRbRU^xB_J9=$?uBkSxlR3vR{ZpYIHK6b_`?BZ zEnQfnHSr2FPc)J)+=5m=Wzp5hPM?J~JuFr1If}-E;=3{4&1!C7Vrs*a60**DOQx~t zmPD%T7TOydUfbN<+zsPAiG{3{DP=`OsYIKSPb9*Lchzt~;KW}dYsA=oFmmESPg8Th z2L!!F#7JyUEurZTDK~BHzkNF&;!nI-{vWaCP36uPqhMz_G}QBwoN6h4@Ft^o-EJb)OHI+=p9oS;bSe?n^4&NKf%Lt1UlX(9KEkpFzMs zYQ-7*jwGoR2|0zLURqt-%v2JUdQWkvSm{VA-=rM6nzGQ&w!tgLIR@ z*#0Hhb_O9h7#KkWb=QO2n8{uj+q@(S9f=X-23wlJAT?$i* z=H%F$3tww*2>f2<{vf5|rm?!G=PDFxotr^Nah^usz}`TDFj%@ZP1!LE*pWPf={LQiugv<|Ggmex=&#VJxGBb-6qgkq_cC$)^~9K0G8$=^NlzfT3eK#k$I|x;p@Yz1z3sXV%$;? 
zN7>3!>X1m>2JfMf?y^-79H+}{%EXw-IOhOSSA}UQnp4ux&aFym43qv6K><%D+i(ZG zO@0zorG3L!DM$I7B>P2Snf@xV{yc5kT^ZqrDC!j~x?X^NBREhDJK2`Fiz#V@_kt?aPb=@@bW-%X75_5Qn8CQ|76=?4rHwkT=+y z`$S7LDbfcFs-n!Z75pDsV@s`&9Bi`S-6sD4gpLnGv5ZWY*Q~8eB&ghyZb|PKw}))* zmBUY~FLAoi{u!$%wuL>qWZYkS{{YKT=ds_sQ#i(^(kB$?dr{xK`Pbr~!+g$ASN{OC zm{_?zf~yV{Q|3tZN-W%&XL}LQo=3Qh_~Jk!_vdK)L7urmBZX5?p;sxYtY92Ul7*o9 zjJlQZsVAb-$Rm(D+CEq3-bGh(XA_!y%FOK!pi9(xk~c)lJpm(yjjlei)3&VT{5qeT z^)X!0U(AOhsa3>q>}*fv1}JH`CJQ(J0BF)Hwl38&*cEcvQrTSs(oO-eDEB^kJ~_;#5=#Lan(6s5vF zTMNgcpQGYg+H;{zOsFJp&oSuB>Z-xwFJnTh2hAI2SpNXA14DxR!elHc*g=4A%ordj z1ar)HS7;<1>^Flzxc7*$r-O#)l6Qh!Pq~E(ZOd$v3n=(w5#mR_%|N2gR2|r9VUoUk zk+6zuTvE4#sai@gT-$hG7F`I+ZxLBzLA9)6%E~-DfvGYT4BO2)`Dt7$KyWk>Fg1O$ zY^LdDDCNUq4v~A@Uu&3xPT~5!KCbl=ZcRdVU0NJvwFe4S+UO(`@`(GM7Bk;6>I+=O zGOTIlWyMP}5LjUiYyyFBfE1yzu(`J8){j5h3yVvR9V&G$)fi1I%}FPv^U~YGZEU5! zGnKv#c95wWx}yzEbjM#4N`LJNYd5mr5#x%*GFvUO!jC4uv%2*;r)pIRZ6p=6mHpdj zKM2y5T$=;Qj1?`L_^M|X)EdeZ;)W<;7Dz#NV%TAtZMJ3hBQI4rM&AB{u$0N~P+1%ZzxV=5??EX#t z3_nWEoYTb2uQJFxa~rLiZFE+LlDxC#N!X5 zn3+cB*wjH+lC4M!LdCTwkaz?b*s^$I4q)nHX*gnoPozp9ST`dsmXr0;H~Pg*I@bBpWc;#1kvmU-hfTBFgh?FNqJDnSXTG(@wLsrx{1`bk<; zl6sM(0tJo5gjLkP3;D#<+tZF=wM;Hxm6W?q)RdBLbcG8fT%Ck!oClng%u6b{X^Pan zKC37KXCzlLoouea@q3h;?07#rh?`KeR}D;5UZc!a{MnWp4=km%=t^&G+)xO=uvPt{ z@9z_|+$xK+t7{p!erD9S@!M~qI=lwf>R*V2-u$p+IWGC8H>!zOY#r8h4jrLeo9!6d^p^hy0nzq8bFyruCny)VNq ze#=(QBwi$~nu^r5mz6vdVzYIfC05j;0Vk|kv|PB)$py>5P{w$bq>j*LW-^O1`($iP^)bsW2H3QJDs*jw&cVi+pafRy)mH|{{Xj-t1kZlqfk$IOXAAh zcwNU+8_nhn#s2{R0BG%?wRlUeJXB{f@`xLM=bcrDN0Aj-a?qS(3PB zzNh@^rYv5NG=}AvN9A$;Egrd;y8h*c0ZNyk5y$tPvB#RhfaoZ75N(9iW8HWD=;r&@iAGWcLHcc1! 
zC1sZBNcJiz7x#~4!8=F5_+?eaxb^gE-7Ka4B^Cad!8DuSEv)q0uL}C!y+rM;VWyks z(=W#+H=*T*2A7wXtKuq*&LC5g=v(saIHp~eeC|?1iO{s9azc&CupE&kb=*BRnR;Cl z4^7LfqU>dAf9b_-I!;<-c?b&mDelvy2Fj6B}r4LWwoBFDMEQa zIJdBtQss6B0n9OkB&FsX_$EtXZw~&oT#umzZ> z=N48=zmwiBCU9#_ykw@FdW^dQL2Z7fTsHnBq~F}f0*@a4^EegVzY?V%UqO1vHU3r2 zZ@zJ}@5gzD%qQ%$`}!Y5+00f3?#a3>@**f75H-n~jb5D0$aN1HV>Rj) z1-EUXL)%Jr_Ow`&6Lg6-()D(G>~!?qb8m#-%V8qxPy?=zex@@eS29jL#dS*jz$i@A z!i=&H{H8~xroiYDgrt2Ueu;Y}MprZAX{GAhido+-JHISHd-5*r4R}za>L9n5KP1I+ zD_)RDRn4~dhli#n63coPH(kg!CL5caXf#&ZLJ1q#SQuL*pE(-Tq}@^a4kw4|JGAHt zLS8mfyB?5s+P>_a&49n<^M>%#jF7aXpXmz5rKdS zV`u}8ask_+?H*6g+#B8|xe2_7gryd-s5qbRd#VpuKiFfgtxCV8DZ} zP)NT1`^T2)3Da(z!eu<5ej{Q5bK2UBdY;dQQY&mgzp`#+u9hL5Y-!QYEkJ`@ajrW$i?$B zH1~)P33GJ0o6O=&B`x;k5_LGIq<-!2I^A;RD)+F8CGZMJBcAbNcvi*=!a1EaMVl#9 zl~J)ehEEG9PnR%{^)jpf0HPkzYEPiEjf}r7Ed1N3W>uBCMtpE7!F z2XI_-aLNe$DY^duEN^nk$yUp8R!De&!Z`l`7QK;6!4#Y?m$;Ii{US<5goQjNR4#`f zdzHAMwfY-#%w;UX!If;g#?_n?SwRzYiM7iuR@uaZ=~(tsN>n~^dU(RcbW9zOXjx~M zSYm-ik-+5>vQxCjb<8Q}0!d4%94RV5B`UhFVovbq;+B&sn;CV2{7d1o#k0*aHY81(~{#(={@ay_sO{NgNaO1}_}CRcfg&<`wXRmw&M7jvpd_A6J^Z z!@z8!*--qF0qN;XMWRXUH{@4`U;Uk9JUd#=9Ke>t75TT)X%sm}(7D$fAluD5r3pM~ zu?LIqXr=r>r!Z;xqgkWoHEM3BQj&azpLqHf!o5wYwEQVaNgE^p4aZ1g%&Jy7%dFC> z)3IG%YG)8rXG5l<?+eK#JL5?=JQB?Gk(!i>nwn z5XjQ%AU>pY)i+{Mz2 zHQ~=7J$SaJGY=LsZ~Q(~kFiU+&@b@w`^2B&5giSU&0`$Pdlu%ehs;%kaNc5O44k~w z@)}#Rbk#Oom@aU%XxJ$MLg1^@)+pS{@o$ZBQ!Aoi#u0_6$w6%il_TvdM;0y%1P^2Z z^NTvL%?HJvPOg6s(=XP~sLW zP@rr<9SAVy+DihZ7-PkJgWjBR=N9v>DQ-AQmbEC7qI!~fFiCXtXd_=p;Z#)1@*bzH z#?T+M5O**|$&UjFGHQPhIl)QC6jfw~WjL9Z=3DY z>jC?T$l9^(|f0u2g<-wcJM|E(&FtM$zyN(E3;6*E4kZ zVvPuQ%MzcSKPvL}yw6)_z$nEhQ8~YY`<1pD@YR86+GZc6{?L36j&(&t{()ioMT;$k z=`vD(U_%n^OO%urSwKQcZDKZrGZ?02!(euX=j)RSxGrpCm{>9H$!>3HYYyRCC3@%J>-w1jz*SL$gux`Y(fRlDg@h#t`ony)a}D^q|aBIQJj z0Tuo$@dcH*M3mO?)hUMs{^_}Z^M~6*dW=_T!0xZ*qTZaRC)}HVBKgjZ75qk$@nykU zstqLxRsOLJo~S4~G+d~ifadX>F|UqXA5WEE_Dc=cYmZyfr&1*u`I4)v{{W^lZe@6> z&J28qB*jsjJxp{Is!o;)7PR(p;!4y#fdkq;k=gG>TyFKJS_+gq=gIFl6NDVb^Jrlvv`%)H8UxYAZNHgp>%!&Qmg5q`0%^6xe9 
z#%SSIYgC$voRxG0y35Ay%QKZSqrTk3-U`LI&jQp}RKe9* zs(i}RrD|yGwDwX|3RFH~KHJB=9^3I-Dp98_SEm$|-}sVAdtVKj<>GlRAKDq()t050 zNw<#d_a9K0qG4YtY_&_14Y+kUl*=Fmo_$+-zPE)&Lk_l-1xQQjX*y5~39<*WZDI5g z^R)Pq@e1P8VN>gN{b`kUC+|ls?K6Ja7 zb#kQ}0EHx6_5mr0_FCm%ifq-zGPDUiYgTEeN>?{D?Ee7W$+P&Jb4j@N9Fw$0x1&nd zbY*dScL>Hiw)J6rf*qoTX{b`B+kQsEWykRo$)dAQ{iEmY_IH>|sUx`x2E*PYG;a*~ zV=q44a-o7Luhi4IN(;YdN$=$qloRi#=@VSA%d9JuIB~iaK6+xSJ){}hq}t1m&AHT- z9uwIkVfw_O%Hv2sbkMhW_{IMKAGxJkbU0k)8hfm!6F$irKeGaKWUR>dK*akL{Zwag8u+|VKbFQ0Blr#P`?FIqxPOi=+8Dx4NVuYAa=BQ zRUi{`yTyGyiz#&E7b+E5SyYZo&!wLMZ4&k-3*hmx<_xsgI>rCNnDOdb-WVI+MGBpZ~Rbyaf zr3zAhN^qz7MQ4aV;u0NNnZp<)TZFkgG*12WpbzIBuE)`bXfIc@Y~r8!u^-i^@-S86 z{5p!mntogUhS|im$vJ5^=BB1wX^D2!>yE8jQiP;jsYL7m8{RUW5~}5;UpQ-7&sxk1 z0ueNXHBL*@?I0m2N0yVMo8L~OxZ6}_yun4xNxeJWbW zx&Bcdqsg@_(pzN-LT`H{SVWdhs}XID?H@_(-)F7+IcfMycG}KceA|9rNZ?=JRKKh1 zYHi?I6j{VN-iHpPk!64Qhsk%tXIeH*?agLK2XA2>gH8;C!Y2y< z0K`$Z5OY&YUYl@z;h~kKM2$8BeatFpR@exR5DMHOCsNiGbAMwT^OB)(2pmFXsbt?( zhiEI-PWJb-G=MnFYBwp{jf@Y?+kpuzK?DomZeV(d+jH#$0DfV=uz)Ron{NS9h}`U8 z9$~RbKKmFjBg-HdB~AhV0QVem&X@LrO71z{7_td&8*Bxy36$pM;^XH7QnokWXiD>f z_8h}P0`m5@*D#hszUC3cG+bcQVQQk!wKlZh{6;~usQDBX53C7F-*e0+nr$vJw5d9Q z1QG2GtgH}k4Ek}8@QoijGDfY0>7pEykqj(#}{m6l@xAO~#;g0tneW=>Gr%`HZ|tm9p4&dDSj&3%a^P zqm>q|u2}t0bdPZqMsE1flESqb?i%p-f~9b?lk<-*sa}3rB6el+qpYO{`aM^%RmkWB zU1vQ|()q22J-6r?Oz`QH`57eMGVxK28k(y-48*<@roHCA`nmhn1F2U${3!0D4b?v! 
z6}g;ELXg6^N|lE2B^G&yzhW^i+RVc!SNz3V*S6p!3n&9}MdK^g@daBDRG+KX>T^@` z(@NI##LKF2#}s<0MBKqlxoTh; z)`t*_**efoz~rF!i^m1!9%E22wqMb5+c@>%NiY0LdDo>tPFRKmyq01e+s{uKcYxfS9sHL$)5UPs}~ zwVuRy!ihNaoU)uJ$dqI#-7f*S(7ixhq*~kbirQ>xym<&bePTQTTgO!C3L0kr0JSu9 zDpIVYFQJmj0c|Hy-Ae?U5G|!#!+(MNQHiOz{$mWwP-kkme!j_;s-dO}jZ5rwIdsEQHuh-k@P8QinDvhR%J;FgXm6*M2dnnB{$O0&^0dCwv-Q=q7#lhYqv$EoEhQs z2T9>7D?>sjR>D5evBspfJt!yllvJ;jHx}6QnBfiB{{X@+Vl$hy93g>dF3r*1P@I#U zaX}L7OE^uC;?BWIAxXC6fCwAJk*o*d&%^0MnU7oWSZuJT z8t4K=mge2`Bib#@1>uV+uoX4Qbx@OBotSA%zd2G!acT+{R)qX5>`wyz@eRui^Ul0q zBU{Y6?pq|_wrTD^AuS~0e$chHZ*&!%PV5vEYyh^vor{$Z#Vn+@Urpu)Vc@F85?s#x zo}AlK%W*TwQb|ZdiU40@a%?+ah%np97UcF^yb2ZZU+|Tk!Rd6q{RV96NF>^Bo~3S+ z)68uCsU6|}0L6w?Q>qeDx$S@gCTR7fHswdi_)@hGL*m^P@T;=EW+r*-kMdVHvN~H3 z&SGggZCPks%_d!H^JdzaPRn6UqUTDjVy%BsRpls1{7XYHKRNcSY(@0PYS8$~dHP4U<5dh5wyiQ}xB{PpP?7dNpI?~ci zpaNN>AzUm=gQ%cwbtoQ0NmE{8%%6c`D;4-(@cs8VRz+Cf5;M2jijwjhSnK8y<)eOx z3O?~+WLJqPEZv@*px31(X_b_zPf4=U$wTZ8QVO>#Pf^rvI5CSbR#)Tv!r#Z2z&A=1q6Z^`!Q4XjpCT?X zmfcNxoq0tgnfP^RZ3$+vD1xaui&)>CB=$d~eBJR8k}qbRI%&701fF1F#4QR+lc)BB zC>JSF{qf(e^>tKzBgnkvre_8dMB2QzD{=uXRHXs6mu;L#{l#)S8;FT`h{<;`M6dSS z3Nl)+)5n;vC|OeoPX7S)4xPosX`@q;>(lIKYd&Yd>V%xcjN6iva_%zo3sX+EwAn~d zHYp(RL4>yiTnO4eCbB4FdVJ1iV9H=-yC5xGrnZ6$$shBZPgA!Z#Bv4ki-`d3)3j_- zQ&~n?M;w9_Epksiqk`t!z=-MI0#pZGy1*m^k`2hTdIoGo>?6l%|x zsstg|QZMB#H`1~PDcpnk#odi_a|lpyyv=q>vnf&X*;3h+Vf6X4E&=I6MeZ(ba(EYy z%s!TFyF7IKD~+o9UD*_=v9Iuz1C_urlSh}FZr2Fh`a@9vs!nv8jW@^9!uw`$=Q z)hK*l)Au)BSH;FfNR=XA9y3u|KMaE|J^ujJV4v0@ScAsq2b4m;V&UqX@fWdvVVP8) z%&A|bWyvhH!IEkUMJOu9>DIL>-%bsvnd zzwJr<&Q)&H-qe4?#C_j z(*sI30zfv}lAhNjpR6~TDMcSdiWGbKLJM+|mE8W$Xde0iB0Vj)Vf#B|*QvCI-RoEW zSuszH-&OqeAG>eRM_gLdV@Pt1?xVSJijN_?dz=GFnlY6|uQ@P2m@=;>O}w~mS_MRz2U_$(Nu zR&^d_zr2KE$7UuaOA@l~IF)R4q@U15eK{#Gg#^1S>wDYQf<+AGe>R6`-j`g+y&g)o8zoV|sHNd1F$x@YYhR{~ETGj?0rdkxGttd!ZQR?#)FQ|V=T}_!Y7P0^X zs$sgCmPViytTL%(B`*N5q~BHTe)b%Xt)YCpgG_)DkO}PyP&VS%{NY1LHwr1dQv6E; zDOzMFWoJV{W?kgFr=3%xNL`!a9i`#Xwy>z8 z)Ux8<6t?R3<XD?_ISdSA_nUj%|P2F|FuqT9oH&Th= 
zBXb#^t^WWJTq1`Snid1CR1lSEOO7cG1&`hd8q82)i5mrq~y3rxExJT}SW6Jhx(H(`gx|bPVva9H%)K`X98-4HoUrUH~ zp0w&|wAwx^c514Rk34%eR=OrxQy67CYnz{5vD)aeKb%G?zBY4)Y+beCigo%Svgse0 zRA+{%6Z0F1MSy=2lWW4Ju$2416_fLib#K@n&f9MbNPF`y{F^xSINkkDbsF;6v+8~- zvx=q)AIy<1lgN~kE!*T$kYXuP%}iN{#+uGz)L#RTsrlBg^8k?%(Ss)xKvRJu*g=TF zE4_n?UjG0n?%VKA9c%oG5Ra51zJ@v8+*JJaB>n7h6bv(3(&Wx93vqmxXTpvD0QsB3 zhVqJ&IIk{G5-Dot%^?o${*n*r4NAdF0_6%n>VnmTCS{#V(&}}Ur__}o=}NeM|U*v$S2uTUo2YH@@(heo70(YR0s+#3<~i7hglI)o)kZrb>{wW3i~#^2j{ zVD(qcYP(N$QgiGtK~0b>g{4UrxUoqa@3`g~sP3u9Ju50r&Aw3vn%YE`m%1KlQc6Gy zbzp!v5g?NZN`X((@YySMSdMM9ILg>Mo}XAqnJc9f01`$20Nx+lBp@67;xSHH8cp}O zF(N~UNIl^)Sr;^+eL(yC;ZjxEM$mzmEN#8LpzGNBZ@eLj%bVNjXyJ5q2Z$$HfZ1P> z>jF0-_rH5U0F<=qHvFx`1gs5{ZCxT1YDBPOZCI0w5M`*Q{|w4S@#W9gX=Hg4VBocY@U5+-^yRkR5kz?grgqE0*Vd;MJ0D zN&3QdsGY7Y+`~%%6tjLngw82jfd{xq+7p|bg8^H4WwGHG9JzzDmApRu+NI<%b01_U zqGuVqj3|4XEkEjruZ^6#P6f`n%FKkZYL|3$3UvKbaQ##<}rBElAIg4U!q3hucfDxO6dDvUgCEnE?-{{R;q| z)-dS5S6e+L`mflV#Nm$P0gd>D4P5)g$1L(&GBawbDR>hLPE?9i66@(snRb2Lbox{I z-`X?;zE1dX@b20s^Vb=t8)BA*v4yA$yDhJxUvl8vzM_%*COUb^opU1o3)sy09Nwj3 zEDMDy6J71*u zsqpbXmsp+j8ucNR%PHj>tt&wO?J_#_(yx*4A354lNd_&y4%xXqlo^pt#JbjLj;rjo zHi6m6WsnuObnKz-J>m{H-3O4Rd7YfOWlpN)6L^|_jVUQU>L%qGvKE&|rD+aZP(iiq z2nP4-7c{Oe@iXFU1W8~TeiM{#hTBZDJ5_y;EfY#?i#VrT*(YFZ(7o@z_KTM`s~!?C z#wr<~j(=v>Ytu5!xXha$-j?VIyKWW%(iBohsmp8xP}!B^&m318T^lUu^4LJGH3UJd^swagIaVo7~XZSK^->P%|-{Q;snl^5{?qHq(V|%*m)~^AMgjBpdCy7wsQE(D57-8$-cP zdABs*{>L#~;5verMRC$BM=O>$`jn5D?G>elJz8G05#Xy6W#Xr1@TE%*(^|zBRK$%* z#=@SNZ}B#gtLajC8f|gzB5&g3Fmqo6Q>EI&t;scqDyTBkGlEz=_n1MuOV@OzDjX0u z+(jwkV=1cHBbXAn`vfW3T14#7nv->qf4I$qsI~r6xlgepq)l;mjJ&_a`CKsFa|BM+ zYV|2kCZQ~)1|^wvaN-=@M{pD@HsF|+G`tgAO+IOzlkvwZL%@{kO&2fPO?9RNQv&H? 
zo`sFIwub#i{M*xRFA&OG9wB^LzcR<|##2ON}))NxIg0f51kfr>ioB3UT)g-CAB{JcOWpHeXNvHM$9h>*`+Hmr-R`s3vKq&Q!i1LHr9(D%G{)Y zV{zbYb3n{&DFS_ zjLIp>1xZ<&sE^$}dmA2+^GC%_L(0(HR%+lGRT>3J!fF!8NCrXs zjYNU4Nhf}QkVeYe?rh|v6LRiK%(j&+L5yLZ8Agn}tW7DJX`jM1@TQ-Z zDAbnqUg_U!i<@7Oan?3bp1(EoUvinPvV3Sz^$;u|7O)_57hj1Sgq_L^MBM!;3Y)_f zl3k}wChALf-^o$#IZskO!HQUb1;8fTaT<2UQJ!AO&Mh8+`F8-3?;eCKo46>aec-o( z7j;-(40UT=-BG_nVGFLP-$^69WpT+Q&pl?rDz&}O0ve|u4g!#%FVt@Wo(plc5Knk( zDk<;z#F7T`Eo`*d)U^d4;xK9|GU!kNln_5yXi}7}vA~_RazrMR<}}CiRFz6WEdlfp znd;1{R6%6z)OChsU1rz$L1mSaciX&EC?riyj;_|F)GhXNzq}=OsL4*BvXUVR?YdLM z1f(RSs9anU0kvCv|X<-+nqn{We4F?4wX2R<}10_$3=`XhISb`=D5UQQ+EBzyi@uj27w=HrNWf zhVXCjgZjbR2OvP3Yf;ok=?TP=c?m;jasltW07xa}A2CE+^oK+?ked|ldl)eit}N~e z9U?kLGa59q2vUNSPg}vFS)>gpX#jf-;o(&GoZVIy+vfxsH6-b<;tU`V{ge~wH@pZ+ zZ_z(qVR4pIXKmmq2KNMuZ_G#pYLrPJm2Cq|vI4A?8v(Y^sJ4|0Aslrv)iq)?jt1f) z0XjzoBEzQ;6-%_CxUshIpt7JxHc-{aFd)$z695eJbaYIO1t5YuLp6GXS*$fLF1e)? z%Muc#k6MRI_qDb+;wEWe5L9|>ec<+`ljW#^?Rb=Af+;{{{X0Qm#3|1Lcy|c`NQOlgt)MEH#Xdp`b1?dTd~Ds zWH&H`ga8ssxY%67y*iIH`n7pwDfc#q2}as%Sq%YiZjj)I+erx^YzIy~q6MMSl;-8r z>rmRr*vd)lOazb|G1rEt{D0^xVp4DPE<8F{VA6a_} zWBP4PnT0VgQ9rY06gJbWg3|v0(&$0@m}@I^7*A$wKOPf!`zi1i9H!-UMB%I(G^Z)^ zvXq8XEK=>YHVcRWK`Ka4up;F<-tmI4dvr>cE~QH1*~z9UGgGZ1eqn+OsA)<8YZ|sB z1AXJ_t~a4E;zz?ASVqS#@&mwx-=}MGHaYDjN`e zYuMP<1MJI+3lX6)?YrI)X_8!o=+q zK1(UT5gr)vK1NNt+{PrqpZJvo>bX(@Q9@FCYS!P$#Kv!un$KMNa+!nW<}n^CMJbk@Z;fw5qNs7Gn^Tvn6!*C)UysW^niIPLyavvq1RMG zM&J+;sN4HpymSbgOLu^D|S;#tWQfR$J9ZwQBU}mzg4LT_K80M+ zB>w=KAy{fx8u*3Ixs0{(8<^F4G}MorRUn{*G9GMdQz%GKX|ty43bL*TSgLcg1>tN1 znRR?Of$=o8c84ycs#0m0guGCd4cc1MvX+x@NZW{7a!Hy4#=ct8=aEOx%nXFPM8wRr zCre2IDSb)1PMdM9BT4=w`^HWLj!(`uPlLQ;N6hTMulQ@nj(Lns%R2Qetoo9Jy3(8e zBreV;{ZW-O0|eG`=NMIRU3rp;%3R8rS+>q6@VKAhQb}(W1qA}~f7xRHw)}e!GxLRgt$Ua*$ z#?8v92XwE@RfUMLu(|e$KZOb|a?r87o+iRLN~1AHe$w?K@bV?zmnVN(>$O1Y3&1lW-4#SqX10cQP?Sm|n^WB|M>X{*XR#qVRTGU`zon zCGdtAk*6D3Pa(ZeFZ|9b>OY)F@4O+b)Y+v{rl;MiQ=|h(I%X%G31JJi+Ret-=Em_9 zJh!mDS1?+D8+D9tG2_1&NYo}Ip1^o+Z8GmIuQ~zp84c!Mb^Q)$Bx*^yx=pSRQ5r`n 
zF{Uug94yu<#W+e=54AV~q?kj?amQQ#0Mt-W8d7==NZumfKyh*AOq350%Zc#iI}}mo zD3w`o?7YefPTQpPQ1=k{s|Y1UB-?Aj7UJ@RVSLrfT3bxjGPfQ`srn^ND*H6R8?vHq zhmcRgL9zBe#NXxiZ)FxE$_!;UgmD8ZDM2ntO#~2$wh201PtSrn5JY|R%*+KLkEn~M z>9q48u2s~g_E+woV;KWI{4~|`M+CiuaYYH~O06_4$~8@G>q~B0-ddEK-3r(d+Q*dd zsy7ge-KCzNQnyL9j)%!zD;pj7k3-fb!*z@$Tb;wz`31>TDbK1}VTJOAq>x9;vFZ$Z zHST45JTqiF=^O0`T_O}Xr5>Q zL@Q$n=4IBU6(Iy)eS{p>8(8(@cyg6&x{q1(j$tquQ!ce8#V2k^5K85hdxR5fbct`5 zEJ+AJ`Ns+d`(CqYtEN(lzD|fGPQ8(ZOW5vfw`M^ec<=>Q+j9~_cxAe4YD?F1=-dsqS? zfPoAZC~bQk;bSuCKqpAw^Ds8iqSrP(EeE9=5&^ZjfDW_i+mQ;JSsb5!VJa`i!}-C; z3Lsc>ye6^(gAX`>q@?^@{I3d|bbvJjbe0iRv@$CnqY0W9ueO5n|HGog5XwiR z**ic;aBa^p+V+D|=?CJuFx=Lpt56~ox9aC}wV(lWZj0Q2E$zvJLx@)b+w*uviw(H2 zgVGhI<88Zh00t-mFYm+_uu10DPTO$;301aG=ZNtKat6ZPAOJS!i~B z%j9edTQ>TWv`I3`tA}Y*^R8E)X_YOchF0r((hdDw17pVj063jaW`+-v1uaZYPAjL7 z!3p{mSVyVhd>e;cTPvH(r3~lU+dF&u`5X)WC3{A@qiaFS{art=c}!L*GgNwm^vS~C zpDsdN4XaQ}tO*zHFJtd`NK{Wx%)+UsRQu07qO}(krCny~H$0Qn_k_wDeB;d2(&qGX z>Ew4|sYNMs3nJ#{^n%<@_WYph2n~kg{_PMONr_8W3%tH1JmTH)nyY$YT>>F-dY}@6NC%)Y;*=q#r6}1LmZKePTD5 z-Wo}W=?TqbfR9TIJxOuJco1}uhgXZZQa)aR;yb`-q(!j;cGBn%dExwGUjwk z%grLFmdUVNDzj5a{{W<+e?t+!7FZr{0q3S7kyG!?NYtE7R+{^+DZ=Ed<61ycw;?A| zf72W7qNTFfg;{kj%w^-=n}#W;33znNdTt!0vpz6c1noMFNZ%jkmmQRN}cA+}kv_f$ZHmcbn4mN|eru zPAUnfOlm%s-@Vv#{=Z4~R9blbx4bHNwUoyaO)Z=$j8>B@}4k@=AP+C-zVv-HP9^l8(EL_W8EW9I1;%PR~F^aI;W?@Md)@ImEpGW@y zF~3mvA}O3SymVxtEbK!60OALf?4oLuow$9F?5#l!$W+wOx>T(gI{c*# zgJP*!TTD!m*q?zvnT;uc{4-?MUIyQ$VK!jkMoDspTO z6Qv;u03{;HNEQHg0t-<*+B&5Z1*t|N#s$QO5~M2jP$@PxHal!b5g(nwQWdIf_8i2C zVy8l#Y#~yvw_*q&gK=vSZbszqw^!F+YF$2K?j;EwRBR!&+({sSp)RF7Q^`oMA5##O zSkv9S5{zXp%0R0=8FGTDT%gt&!t%8CsCum#&U$8AU9gq7 zhLC^&1b_hPXolBuyuCJEQ*{cQtcwX3DqDdz`KZMqjyzmtZW&XFma4s6oItjTQLJD5 z=WobHN!_`;Q;TKl-M!}UXxHm<>IdxLxCi1p56r_Ia~M+SY$|HER+g1z15%2X5&-`I zF|>U8&957|t2s-WUS3e7WR%z?A;MjFr>O^v`wK=M%slnYJj!)$;@n$5G8BDGhg62| z*U}I3i)mX|xLKUEuK~K2^f%^xWz8@6iR9K@u=IsCwu+qdev-_Jb&_}htNPk8Zf<|X z?=>-4n5W>02BsO56rQYQ_?aqR&U-;& z4p^HKS21m3XAojKlauus#p{@ucG{d(xj#SzdKz^bSPl9TW6`-7x6WC&I5q(N1O-Ux 
zYy9BCf=%tQ>l`M-ej_v3m`Px7Xz*YS`7miB(bA+^+d>)>mhNnjK9CTeNbgz97@4WNZKpEAkq$%T#*u6P2_Rkyvj>lHN607GMvAi&T!I(CkN z4Tn7-#EtAM4Yq^DQf{GR)*{ITc|aS1>@eh zcOvI_Jll@ko5zi?ayCik$ne|S##N|R)ZTf8`}Hx?sqZQ2njuvU7GU<4p3 z+i?O)6Ky1)$`m-yUeFXZ>gLwo6GYUqb+DaQ2{!KvQK$d~`MeU5Wm*)4Y&zIlB1ke= z`iMMn3IV>v-@m*kY}nhI!iN3#x8)IFbSC>Nj$lYg-(j%)ph-wp_JdN81&>$&O60Ls zjjkZ2%U_WW$x0NF={>eRU<3r40l!aZSplR;IBYcW*3e*+aU)8FyILIEMzT}}!VXG+ zAOyn54JnZa)Nl2J{?3bW^6w6yrrU(v4zNnvy2^<=bF?53h6o10+?}l+Lrdb;J#1le z8{Wz1tO*E6xjg=`4RaD9g(REa_w5SM#;uPhV+IrhxP=Wiked|lU@Q?eDRZet;B>Se zxpWUm9QF{c2u+l9@37qQ9k8&lx!>ggatQJY2AdG8*n_vy&^Ft(jrR~`yYmOM$kTNQ z0OyxsVZUfK8oaAGtB^SmU}-u%N1e7XKP&<*U^|FSfyUaqYq=M5GDZ%Nlnf6 z=KMiUr_D$wo;HPLC0>%F#rxdBAv>MniL8vqbr@?9(y>z0si`THqG%28EWbm5{{WClCoo{b|&no|u{C*Zex! zXf->T_{&k*N|B_e{{W>KW=QP1minO-h!T`S41jVS!Y|AoBF)q$MdGfEx(uRmp&od&X%?earOsf+~ROJ>!whLh%MXBy^K+lmwn(H1$eCoXVe{ zoMu_E%AIf^ez6Ix=Pnbcl{FxzsxA&Zl<7tP0L-uG3^f@`k`t0#**jA21WirK$g*Z< zW|^00a<^6xpUlM#i}TjD!wYxq)Y=5Kt`_Rk=3C~!=$Ov<*ToiI$UNh<98EciXHrP= z*;167{{Wd>50HVupi)nfvFq8_f@vC?l$v&?@Y_tPeHZ@#I;=k{#d}`QY*94Ri<6g> zp~pQBYEu!nBDxLuTPFsXlAg{{S+$`dTWCLyqx&D4H`> z+(A1wPWD@*q zr^`|$)p4yu%yQpdhvgZew?cuw<^)-}^nWO))Kku;viwQ<>GDLfznD0~7`$dNCL^7l zQm^T8(xi*_90+C$BEsenG?g?i5Box<2vO2{h@T=Yxo*WxEFD0b9)rAB*@u?@0JM_r zdV>c7BC^|+>KwkA)D2ItivaNb;rw3ResM`TbQL8@AwceuEJSqaR81QDj&^LU9ghV)C^ zoBnanv)xw*Q0Ohk&K!dcCdWx6j-!vP2g|nMJZ*l^1BieytDEg;5M%&Ay}De&(#n@} z*Sv5`*;dnYz`Oya2+X?9!;hSF%B?B9IHijZhnPJQ=~8X+6UNXF2ySjsKmrB*;N-Jy zh__#mh0142t*36sqy=dzR>J-p+QI-Z)3E3Nz;D4?Gnfd zRN5Aj6rH<>d&r9H))CEBO3OLr3Yt+nCIPVfWU;i9&=$_&JY zDx{@GdW|&P^LFJy2I^1MHy>EGjPy-1s?l?UNcwe1$-JAa*PYE|V2XV0TM%fUa$;>h za6;682|^X=Nx0nF_qmG^IvPS*Z7EA=7D`eql#gIIinqhIFO$T$FYL0@0mj%$Cgl}- z>Xm$JO}<2t^@~Je~ zDRlQAweI7`%Rv6W$}XjAUcU6={)8)DP@%O+B>rUdkRcU9u}r0-T|%J5xc>n476CsY z$&B5Yz9=xx7HPwYvHXQJ0Drs6xC+m<#GlkgWye490KyVZy50|<&Q@kPOvJ*95Pqj~ z7ybei_pdeNw`PCF)T^}`Z_VtD{{U3YN<6NlQ)Qo>Sm9RCkJo?FD*SKa1NJLUIatK> z+7k&IfZ+2j^EW?`5%X&q_`J?+WL#!atcpCU{{ZDlqe-`IQBzc^)w<*6VzX44opDCx 
zPr6oj`NMO!q_IjFeediF73T9+e(3GqH{{UE^gs!2wZ}Nq5%#@IlEz5 z7Cl!mS(Z~}F#yFL!v(H}90z$VNxV4Vm z%z2w5;@bT$0%^eQ({X*^E+*Uo01H}i1O*iWIr+epu%v5hL?yaELwR3RKp1t4~uS7{9n%9;>1hp7XS-EZY=gUy~Iir zaCt;zl$IHlT}G9ZDK}D0jmF|V7Y1bI*sC_W_8yl4e%hX4I|u#8@qa&@dOC2c8LXou zsm?D5(1dpuya_e~ad_$f01y381^)n16p{o;z2F5sL>P_8zj)#65gGtM0Nx4#I)nB3PCK zFJb@&-^+LsU2t7iy|&z1$GmYJesHN=rXt2C1#T+l zKp4*@H@4huybS;n<-ph*eozClk_Q|A07yV|Bnxa!zgQRr zM3oWOj*;R0KdcEmC|k5J1E+g-*p89qB-~!&`&tik;t|wE1kBuok!@ zpym{ylgaJ;pp+6_wa&&E1gQil>EHT6Gpu>cDW^@sl0gG&`$FeBe4w|!7k#0o1=ryB zF($Cx26mk@H!8!JVQ$E|_P0V1WgYv*4apbMbtHe5CHarzD=%>BTxzYDB5~y089GoZ zH|^>F0CO0>6g1Lj1Bqz?N=dqcMb6z!MU^hXbriDLCwqj5?AzaZxGX_AcfHPCTOU~_ zbOrhMbwP1Uhnm%+rsaF zZh({R%uLtG`9L^3MsHfn{aPc8VbvsQQQzOJDMbLH$4I|(4~Y1W{q}(?beqIn2-_lps=cDjd2(SHBth>1)GqCj9jf Yr39WyfDWld3j=FMpdEiW^Z Date: Thu, 5 Mar 2026 16:13:47 -0500 Subject: [PATCH 11/12] minor bug fix Signed-off-by: Tiffany Cappellari --- spot_wrapper/calibration/calibrate_spot_hand.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/spot_wrapper/calibration/calibrate_spot_hand.py b/spot_wrapper/calibration/calibrate_spot_hand.py index 6d1e8ff3..1e3e3be6 100644 --- a/spot_wrapper/calibration/calibrate_spot_hand.py +++ b/spot_wrapper/calibration/calibrate_spot_hand.py @@ -71,7 +71,7 @@ def calibrate_spot_hand() -> None: args, aruco_dict, charuco = setup_calibration_param(parser) # Collect new data and calibrate - if not args.from_data: + if not args.from_data and not args.from_yaml: in_hand_bot, args = create_robot(args, charuco=charuco, aruco_dict=aruco_dict) logger.warning("This script moves the robot around. !!! 
USE AT YOUR OWN RISK !!!") @@ -108,7 +108,7 @@ def calibrate_spot_hand() -> None: elif args.from_yaml: try: in_hand_bot, args = create_robot(args, charuco=charuco, aruco_dict=aruco_dict) - with open(args.data_path, "r") as file: + with open(args.result_path, "r") as file: calibration = yaml.safe_load(file) send_to_robot = input( f"Loaded calibration data:\n{calibration}\nDo you want to send this calibration to the robot?" From 7b93d600cbe10a06bcf01c6be16a3f4cc6d191a9 Mon Sep 17 00:00:00 2001 From: Tiffany Cappellari Date: Thu, 5 Mar 2026 16:15:41 -0500 Subject: [PATCH 12/12] readme typo fix Signed-off-by: Tiffany Cappellari --- spot_wrapper/calibration/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/spot_wrapper/calibration/README.md b/spot_wrapper/calibration/README.md index 2cdd7922..54e103ba 100644 --- a/spot_wrapper/calibration/README.md +++ b/spot_wrapper/calibration/README.md @@ -119,7 +119,7 @@ If you have a previously computed calibration YAML and only want to upload it to ```bash python3 spot_wrapper/calibration/calibrate_spot_hand.py \ --ip -u -pw \ - --from_data --from_yaml --data_path --save_to_robot + --from_data --from_yaml --result_path --save_to_robot ``` > [!NOTE] If you are using a legacy charuco board (Only relevant for OpenCV ≥ 4.7) you should also be adding the `--legacy_charuco_pattern`. More information and how to check is below in [Check if You Have a Legacy Charuco Board](#check-if-you-have-a-legacy-charuco-board). 
@@ -229,13 +229,13 @@ python3 spot_wrapper/calibration/calibrate_spot_hand.py -h | `--data_path` | `-dp` | `None` | Directory for dataset images/poses, or YAML path when `--from_yaml` | | `--save_data` | `-sd` | off | Save captured images and poses to `--data_path` | | `--from_data` | `-fd` | off | Skip data collection; calibrate from existing dataset at `--data_path` | -| `--from_yaml` | `-yaml` | off | Used with `--from_data` and `--save_to_robot` to push a saved YAML to the robot | +| `--from_yaml` | `-yaml` | off | Used with `--result_path` to push a saved YAML to the robot | | `--result_path` | `-rp` | `calibration_result.yaml` | Where to write the calibration YAML | | `--tag` | `-t` | `default` | Tag name for this run within the YAML file | | `--unsafe_tag_save` | | off | Skip overwrite confirmation prompts | ### Robot connection (required when connecting to the robot) - +and `--save_to_robot` | Flag | Short | Description | |---|---|---| | `--ip` | `-i` / `-ip` | Robot IP address |