From dbe777ddd7b4934b8c75a228a6a19903e9521528 Mon Sep 17 00:00:00 2001
From: clairefollett
Date: Tue, 19 Nov 2024 04:57:09 +0000
Subject: [PATCH] testing the lint checker

---
 main.py                                   |  17 +--
 pybullet_tree_sim/pruning_environment.py  |  21 ++-
 pybullet_tree_sim/robot.py                | 168 ++++++++++++----------
 pybullet_tree_sim/tree.py                 |   2 +-
 pybullet_tree_sim/utils/camera_helpers.py |   2 +-
 pybullet_tree_sim/utils/ur5_utils.py      |  10 +-
 6 files changed, 120 insertions(+), 100 deletions(-)

diff --git a/main.py b/main.py
index cd10c02..c12d8f6 100644
--- a/main.py
+++ b/main.py
@@ -12,23 +12,24 @@
 from zenlog import log
 
 
-def main():
+def main():
     pbutils = PyBUtils(renders=False)
 
     # Init sensors
     sensor_config = {
         "camera0": {
-            'sensor': Camera(pbutils, sensor_name="realsense_d435i"),
-            'tf_frame': "mock_pruner__camera0",
+            "sensor": Camera(pbutils, sensor_name="realsense_d435i"),
+            "tf_frame": "mock_pruner__camera0",
         }
     }
-
     penv = PruningEnv(
-        pbutils=pbutils, load_robot=True, sensor_config=sensor_config, robot_pos=[0, 2, 0], verbose=True,
+        pbutils=pbutils,
+        load_robot=True,
+        sensor_config=sensor_config,
+        robot_pos=[0, 2, 0],
+        verbose=True,
     )
-
     penv.activate_shape(shape="cylinder", radius=2 * 0.0254, height=2.0, orientation=[0, np.pi / 2, 0])
 
     # penv.load_tree(
@@ -51,7 +52,7 @@ def main():
     # Simulation loop
     while True:
         try:
-            tof0_view_matrix = penv.robot.get_view_mat_at_curr_pose(camera=penv.robot.sensors['mock_pruner__camera0'])
+            tof0_view_matrix = penv.robot.get_view_mat_at_curr_pose(camera=penv.robot.sensors["mock_pruner__camera0"])
             rgbd = penv.pbutils.get_rgbd_at_cur_pose(type="robot", view_matrix=view_matrix)
             keys_pressed = penv.get_key_pressed()
             action = penv.get_key_action(keys_pressed=keys_pressed)
diff --git a/pybullet_tree_sim/pruning_environment.py b/pybullet_tree_sim/pruning_environment.py
index 4f58b39..62d7a22 100644
--- a/pybullet_tree_sim/pruning_environment.py
+++ b/pybullet_tree_sim/pruning_environment.py
@@ -2,6 +2,7 @@
 from pybullet_tree_sim import CONFIG_PATH, MESHES_PATH, URDF_PATH, RGB_LABEL, ROBOT_URDF_PATH
 from pybullet_tree_sim.robot import Robot
 from pybullet_tree_sim.tree import Tree, TreeException
+
 # from pybullet_tree_sim.utils.ur5_utils import UR5
 from pybullet_tree_sim.utils.pyb_utils import PyBUtils
 import pybullet_tree_sim.utils.xacro_utils as xutils
@@ -107,8 +108,6 @@ def __init__(
         self.tree_count = tree_count
         self.is_goal_state = False
-
-
         # self.cam_width = cam_width
         # self.cam_height = cam_height
         # self.cam_pan = 0
@@ -144,15 +143,15 @@ def __init__(
         # self.ur5 = self.load_robot(
         #     type=robot_type, robot_pos=robot_pos, robot_orientation=robot_orientation, randomize_pose=False
         # )
-
+
         # Load all sensor attributes.
         # TODO: Load only the required sensor attributes
         self._load_sensor_attributes()
-
+
         self.sensor_config = sensor_config
         self._assign_tf_frame_to_sensors(self.sensor_config)
         # log.warning(self.sensor_attributes)
         return
-
+
     def _load_sensor_attributes(self):
         self.sensor_attributes = {}
         camera_configs_path = os.path.join(CONFIG_PATH, "camera")
@@ -168,13 +167,13 @@ def _load_sensor_attributes(self):
             for key, value in yamlcontent.items():
                 self.sensor_attributes[key] = value
         return
-
+
     def _assign_tf_frame_to_sensors(self, sensor_config: dict):
         for sensor_name, conf in sensor_config.items():
-            sensor = conf['sensor']
-            sensor.tf_frame = conf['tf_frame']
+            sensor = conf["sensor"]
+            sensor.tf_frame = conf["tf_frame"]
             log.warn(f"{sensor.params}")
-            sensor.tf_frame_index = self.robot.robot_conf['joint_info']['mock_pruner__base--camera0']['id']
+            sensor.tf_frame_index = self.robot.robot_conf["joint_info"]["mock_pruner__base--camera0"]["id"]
         return
 
     def load_robot(self, type: str, robot_pos: ArrayLike, robot_orientation: ArrayLike, randomize_pose: bool = False):
@@ -191,7 +190,7 @@ def load_robot(self, type: str, robot_pos: ArrayLike, robot_orientation: ArrayLi
             #     verbose=self.verbose,
             # )
             robot = Robot(pbclient=self.pbutils.pbclient)
-
+
         else:
             raise NotImplementedError(f"Robot type {type} not implemented")
         return robot
@@ -589,7 +588,7 @@ def main():
     # data = data.reshape((cam_width * cam_height, 1), order="F")
     # log.warning(f"joint angles: {penv.ur5.get_joint_angles()}")
-
+
     return
diff --git a/pybullet_tree_sim/robot.py b/pybullet_tree_sim/robot.py
index 18070ca..ff67ff6 100644
--- a/pybullet_tree_sim/robot.py
+++ b/pybullet_tree_sim/robot.py
@@ -13,6 +13,7 @@
 from zenlog import log
 
 
+
 class Robot:
     _robot_configs_path = os.path.join(CONFIG_PATH, "robot")
 
@@ -34,7 +35,7 @@ def __init__(
         position=(0, 0, 0),
         orientation=(0, 0, 0, 1),
         randomize_pose=False,
-        verbose=1
+        verbose=1,
         # **kwargs
     ) -> None:
         self.pbclient = pbclient
@@ -62,7 +63,7 @@ def __init__(
         self.robot = None
         # self.robot_conf["control_joints"] = control_joints
         # self.robot_collision_filter_idxs = robot_collision_filter_idxs
-        #
+        #
         self.init_joint_angles = (
             -np.pi / 2,
             -np.pi * 2 / 3,
@@ -70,15 +71,14 @@ def __init__(
             -np.pi,
             -np.pi / 2,
             np.pi,
-        )
-
+        )
         self.robot_conf = {}
         self.robot_conf["joint_info"] = {}
         self._generate_robot_urdf()
         self._assign_control_joints()
         self.setup_robot()
-
+
         self.action = None
         return
 
@@ -86,37 +86,37 @@ def __init__(
     def _generate_robot_urdf(self) -> None:
         # Get robot params
         self.robot_conf.update(yutils.load_yaml(os.path.join(self._robot_configs_path, "robot.yaml")))
-        self.robot_conf['robot_stack_qty'] = str(len(self.robot_conf["robot_stack"]))
-
+        self.robot_conf["robot_stack_qty"] = str(len(self.robot_conf["robot_stack"]))
+
         # Add the required urdf args from each element of the robot_stack config
         for i, robot_part in enumerate(self.robot_conf["robot_stack"]):
             robot_part = robot_part.strip().lower()
             log.warn(i)
             # Assign parent frames
             if i == 0:
-                self.robot_conf.update({f'parent{i}': 'world'})
+                self.robot_conf.update({f"parent{i}": "world"})
             else:
-                self.robot_conf.update({f'parent{i}': self.robot_conf["robot_stack"][i-1]})
+                self.robot_conf.update({f"parent{i}": self.robot_conf["robot_stack"][i - 1]})
             # Assign part frames
-            self.robot_conf.update({f'robot_part{i}': self.robot_conf['robot_stack'][i]})
+            self.robot_conf.update({f"robot_part{i}": self.robot_conf["robot_stack"][i]})
             # Add each robot part's config to the robot_conf
             self.robot_conf.update(yutils.load_yaml(os.path.join(self._robot_configs_path, f"{robot_part}.yaml")))
-
+
         import pprint as pp
+
         pp.pprint(self.robot_conf)
-
+
         # Generate URDF from mappings
         robot_urdf = xutils.load_urdf_from_xacro(
             xacro_path=self._robot_xacro_path,
-            mappings=self.robot_conf  # for some reason, this adds in the rest of the args from the xacro.
+            mappings=self.robot_conf,  # for some reason, this adds in the rest of the args from the xacro.
         )
-
+
         # UR_description uses filename="package://<>" for meshes, and this doesn't work with pybullet
-        if self.robot_conf['arm_type'].startswith('ur'):
-            ur_absolute_mesh_path = '/opt/ros/humble/share/ur_description/meshes'
+        if self.robot_conf["arm_type"].startswith("ur"):
+            ur_absolute_mesh_path = "/opt/ros/humble/share/ur_description/meshes"
             robot_urdf = robot_urdf.toprettyxml().replace(
-                f'filename="package://ur_description/meshes',
-                f'filename="{ur_absolute_mesh_path}'
+                f'filename="package://ur_description/meshes', f'filename="{ur_absolute_mesh_path}'
             )
         else:
             robot_urdf = robot_urdf.toprettyxml()
@@ -124,12 +124,14 @@ def _generate_robot_urdf(self) -> None:
         # Save the generated URDF
         self.robot_urdf_path = os.path.join(self._urdf_tmp_path, "robot.urdf")
         xutils.save_urdf(robot_urdf, urdf_path=self.robot_urdf_path)
-
+
         import pprint as pp
+
         pp.pprint(self.robot_conf)
         pp.pprint(robot_urdf)
         log.warn("hello")
         import sys
+
         sys.exit(0)
         return
 
@@ -138,10 +140,10 @@ def _assign_control_joints(self) -> None:
         with open(self.robot_urdf_path) as f:
             for line in f:
                 line = line.strip()
-                if line.startswith('
 None:
         assert len(joint_angles) == len(self.robot_conf["control_joints"])
         for i, name in enumerate(self.robot_conf["control_joints"]):
             joint = self.robot_conf["joint_info"][name]
-            self.pbclient.resetJointState(self.robot, joint['id'], joint_angles[i], targetVelocity=0)
+            self.pbclient.resetJointState(self.robot, joint["id"], joint_angles[i], targetVelocity=0)
         return
 
     def set_joint_angles(self, joint_angles) -> None:
@@ -328,16 +324,17 @@ def set_joint_angles(self, joint_angles) -> None:
         for i, name in enumerate(self.robot_conf["control_joints"]):
             joint = self.robot_conf["joint_info"][name]
             poses.append(joint_angles[i])
-            indexes.append(joint['id'])
-            forces.append(joint['max_force'])
+            indexes.append(joint["id"])
+            forces.append(joint["max_force"])
 
         self.pbclient.setJointMotorControlArray(
-            self.robot, indexes,
+            self.robot,
+            indexes,
             self.pbclient.POSITION_CONTROL,
             targetPositions=joint_angles,
             targetVelocities=[0] * len(poses),
             positionGains=[0.05] * len(poses),
-            forces=forces
+            forces=forces,
         )
         return
@@ -351,14 +348,15 @@ def set_joint_angles(self, joint_angles) -> None:
         for i, name in enumerate(self.robot_conf["control_joints"]):
             joint = self.robot_conf["joint_info"][name]
             velocities.append(joint_velocities[i])
-            indexes.append(joint['id'])
-            forces.append(joint['max_force'])
-
-        self.pbclient.setJointMotorControlArray(self.robot,
-                                                indexes,
-                                                controlMode=self.pbclient.VELOCITY_CONTROL,
-                                                targetVelocities=joint_velocities,
-                                                )
+            indexes.append(joint["id"])
+            forces.append(joint["max_force"])
+
+        self.pbclient.setJointMotorControlArray(
+            self.robot,
+            indexes,
+            controlMode=self.pbclient.VELOCITY_CONTROL,
+            targetVelocities=joint_velocities,
+        )
         return
 
     # TODO: Use proprty decorator for getters?
@@ -382,8 +380,9 @@ def get_current_pose(self, index):
 
     def get_current_vel(self, index):
         """Returns current pose of the index."""
-        link_state = self.pbclient.getLinkState(self.robot, index, computeLinkVelocity=True,
-                                                computeForwardKinematics=True)
+        link_state = self.pbclient.getLinkState(
+            self.robot, index, computeLinkVelocity=True, computeForwardKinematics=True
+        )
         trans, ang = link_state[6], link_state[7]
         return trans, ang
@@ -397,16 +396,26 @@ def calculate_ik(self, position, orientation):
         """Calculates joint angles from end effector position and orientation using inverse kinematics"""
 
         joint_angles = self.pbclient.calculateInverseKinematics(
-            self.robot, self.end_effector_index, position, orientation,
-            jointDamping=[0.01] * len(self.robot_conf["control_joints"]), upperLimits=self.joint_upper_limits,
-            lowerLimits=self.joint_lower_limits, jointRanges=self.joint_ranges  # , restPoses=self.init_joint_angles
+            self.robot,
+            self.end_effector_index,
+            position,
+            orientation,
+            jointDamping=[0.01] * len(self.robot_conf["control_joints"]),
+            upperLimits=self.joint_upper_limits,
+            lowerLimits=self.joint_lower_limits,
+            jointRanges=self.joint_ranges,  # , restPoses=self.init_joint_angles
         )
         return joint_angles
 
     def calculate_jacobian(self):
-        jacobian = self.pbclient.calculateJacobian(self.robot, self.tool0_link_index, [0, 0, 0],
-                                                   self.get_joint_angles(),
-                                                   [0]*len(self.robot_conf["control_joints"]), [0]*len(self.robot_conf["control_joints"]))
+        jacobian = self.pbclient.calculateJacobian(
+            self.robot,
+            self.tool0_link_index,
+            [0, 0, 0],
+            self.get_joint_angles(),
+            [0] * len(self.robot_conf["control_joints"]),
+            [0] * len(self.robot_conf["control_joints"]),
+        )
         jacobian = np.vstack(jacobian)
         return jacobian
@@ -417,13 +426,11 @@ def calculate_joint_velocities_from_ee_velocity(self, end_effector_velocity):
         joint_velocities = np.matmul(inv_jacobian, end_effector_velocity).astype(np.float32)
         return joint_velocities, jacobian
 
-    def calculate_joint_velocities_from_ee_velocity_dls(self,
-                                                        end_effector_velocity,
-                                                        damping_factor: float = 0.05):
+    def calculate_joint_velocities_from_ee_velocity_dls(self, end_effector_velocity, damping_factor: float = 0.05):
         """Calculate joint velocities from end effector velocity using damped least squares"""
         jacobian = self.calculate_jacobian()
         identity_matrix = np.eye(jacobian.shape[0])
-        damped_matrix = jacobian @ jacobian.T + (damping_factor ** 2) * identity_matrix
+        damped_matrix = jacobian @ jacobian.T + (damping_factor**2) * identity_matrix
         damped_matrix_inv = np.linalg.inv(damped_matrix)
         dls_inv_jacobian = jacobian.T @ damped_matrix_inv
         joint_velocities = dls_inv_jacobian @ end_effector_velocity
@@ -443,11 +450,15 @@ def create_camera_transform(self, world_position, world_orientation, camera: "Ca
         ee_transform[:3, 3] = world_position
 
         tilt_tf = np.identity(4)
-        tilt_rot = np.array([[1, 0, 0], [0, np.cos(camera.tilt), -np.sin(camera.tilt)], [0, np.sin(camera.tilt), np.cos(camera.tilt)]])
+        tilt_rot = np.array(
+            [[1, 0, 0], [0, np.cos(camera.tilt), -np.sin(camera.tilt)], [0, np.sin(camera.tilt), np.cos(camera.tilt)]]
+        )
         tilt_tf[:3, :3] = tilt_rot
 
         pan_tf = np.identity(4)
-        pan_rot = np.array([[np.cos(camera.pan), 0, np.sin(camera.pan)], [0, 1, 0], [-np.sin(camera.pan), 0, np.cos(camera.pan)]])
+        pan_rot = np.array(
+            [[np.cos(camera.pan), 0, np.sin(camera.pan)], [0, 1, 0], [-np.sin(camera.pan), 0, np.cos(camera.pan)]]
+        )
         pan_tf[:3, :3] = pan_rot
 
         tf = ee_transform @ pan_tf @ tilt_tf @ base_offset_tf
@@ -463,14 +474,18 @@ def get_view_mat_at_curr_pose(camera: "Camera") -> np.ndarray:
         # Initial vectors
         camera_vector = np.array([0, 0, 1]) @ camera_tf[:3, :3].T  #
-        up_vector = np.array([0, 1, 0]) @ camera_tf[:3, :3].T  #
+        up_vector = np.array([0, 1, 0]) @ camera_tf[:3, :3].T  #
         # log.debug(f"cam vec, up vec:\n{camera_vector}, {up_vector}")
-        view_matrix = self.pbclient.computeViewMatrix(camera_tf[:3, 3], camera_tf[:3, 3] + 0.1 * camera_vector, up_vector)
+        view_matrix = self.pbclient.computeViewMatrix(
+            camera_tf[:3, 3], camera_tf[:3, 3] + 0.1 * camera_vector, up_vector
+        )
         return view_matrix
 
-    def get_camera_location(self, camera: "Camera"):  # TODO: get transform from dictionary. choose between rgb or tof frames
+    def get_camera_location(
+        self, camera: "Camera"
+    ):  # TODO: get transform from dictionary. choose between rgb or tof frames
         pose, orientation = self.get_current_pose(camera.tf_frame_index)
         tilt = np.pi / 180 * 8
@@ -511,8 +526,8 @@ def check_collisions(self, collision_objects) -> Tuple[bool, dict]:
         """
         collision_info = {"collisions_acceptable": False, "collisions_unacceptable": False}
 
-        collision_acceptable_list = ['SPUR', 'WATER_BRANCH']
-        collision_unacceptable_list = ['TRUNK', 'BRANCH', 'SUPPORT']
+        collision_acceptable_list = ["SPUR", "WATER_BRANCH"]
+        collision_unacceptable_list = ["TRUNK", "BRANCH", "SUPPORT"]
         for type in collision_acceptable_list:
             collisions_acceptable = self.pbclient.getContactPoints(bodyA=self.robot, bodyB=collision_objects[type])
             if collisions_acceptable:
@@ -551,8 +566,9 @@ def check_success_collision(self, body_b) -> bool:
         """Check if there are any collisions between the robot and the environment
         Returns: Boolw
         """
-        collisions_success = self.pbclient.getContactPoints(bodyA=self.robot, bodyB=body_b,
-                                                            linkIndexA=self.success_link_index)
+        collisions_success = self.pbclient.getContactPoints(
+            bodyA=self.robot, bodyB=body_b, linkIndexA=self.success_link_index
+        )
         for i in range(len(collisions_success)):
             if collisions_success[i][-6] < 0.05:
                 if self.verbose > 1:
@@ -573,20 +589,20 @@ def unset_collision_filter_tree(self, collision_objects):
                 self.pbclient.setCollisionFilterPair(self.robot, i, j, 0, 1)
         return
 
+
 def main():
     from pybullet_tree_sim.utils.pyb_utils import PyBUtils
     import time
+
     pbutils = PyBUtils(renders=True)
     robot = Robot(
-        pbclient = pbutils.pbclient,
+        pbclient=pbutils.pbclient,
         # robot_type="ur5e",
         # base_link_type="linear_slider",
         # end_effector_type="mock_pruner",
-    )
-
+    )
     time.sleep(10)
     return
diff --git a/pybullet_tree_sim/tree.py b/pybullet_tree_sim/tree.py
index bc9d05f..ae5f265 100644
--- a/pybullet_tree_sim/tree.py
+++ b/pybullet_tree_sim/tree.py
@@ -356,7 +356,7 @@ def get_all_points(self, tree_obj):
 
         self.projection_mean = self.projection_sum_x / len(self.vertex_and_projection)
         self.projection_std = np.sqrt(
-            self.projection_sum_x2 / len(self.vertex_and_projection) - self.projection_mean ** 2
+            self.projection_sum_x2 / len(self.vertex_and_projection) - self.projection_mean**2
         )
 
         return
diff --git a/pybullet_tree_sim/utils/camera_helpers.py b/pybullet_tree_sim/utils/camera_helpers.py
index ce374a4..8fda57e 100644
--- a/pybullet_tree_sim/utils/camera_helpers.py
+++ b/pybullet_tree_sim/utils/camera_helpers.py
@@ -34,7 +34,7 @@ def get_fov_from_dfov(camera_width: int, camera_height: int, dFoV: Union[int, fl
         _dfov = np.deg2rad(dFoV)
     else:
         _dfov = dFoV
-    camera_diag = np.sqrt(camera_width ** 2 + camera_height ** 2)
+    camera_diag = np.sqrt(camera_width**2 + camera_height**2)
     fov_h = 2 * np.arctan(np.tan(_dfov / 2) * camera_height / camera_diag)
     fov_w = 2 * np.arctan(np.tan(_dfov / 2) * camera_width / camera_diag)
     return (np.rad2deg(fov_w), np.rad2deg(fov_h))
diff --git a/pybullet_tree_sim/utils/ur5_utils.py b/pybullet_tree_sim/utils/ur5_utils.py
index 844ef82..48202d9 100644
--- a/pybullet_tree_sim/utils/ur5_utils.py
+++ b/pybullet_tree_sim/utils/ur5_utils.py
@@ -308,7 +308,7 @@ def calculate_joint_velocities_from_ee_velocity_dls(
         """Calculate joint velocities from end effector velocity using damped least squares"""
         jacobian = self.calculate_jacobian()
         identity_matrix = np.eye(jacobian.shape[0])
-        damped_matrix = jacobian @ jacobian.T + (damping_factor ** 2) * identity_matrix
+        damped_matrix = jacobian @ jacobian.T + (damping_factor**2) * identity_matrix
         damped_matrix_inv = np.linalg.inv(damped_matrix)
         dls_inv_jacobian = jacobian.T @ damped_matrix_inv
         joint_velocities = dls_inv_jacobian @ end_effector_velocity
@@ -424,11 +424,15 @@ def create_camera_transform(self, world_position, world_orientation, camera: "Ca
         ee_transform[:3, 3] = world_position
 
         tilt_tf = np.identity(4)
-        tilt_rot = np.array([[1, 0, 0], [0, np.cos(camera.tilt), -np.sin(camera.tilt)], [0, np.sin(camera.tilt), np.cos(camera.tilt)]])
+        tilt_rot = np.array(
+            [[1, 0, 0], [0, np.cos(camera.tilt), -np.sin(camera.tilt)], [0, np.sin(camera.tilt), np.cos(camera.tilt)]]
+        )
         tilt_tf[:3, :3] = tilt_rot
 
         pan_tf = np.identity(4)
-        pan_rot = np.array([[np.cos(camera.pan), 0, np.sin(camera.pan)], [0, 1, 0], [-np.sin(camera.pan), 0, np.cos(camera.pan)]])
+        pan_rot = np.array(
+            [[np.cos(camera.pan), 0, np.sin(camera.pan)], [0, 1, 0], [-np.sin(camera.pan), 0, np.cos(camera.pan)]]
+        )
         pan_tf[:3, :3] = pan_rot
 
         tf = ee_transform @ pan_tf @ tilt_tf @ base_offset_tf
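
The robot.py and ur5_utils.py hunks above only re-wrap calculate_joint_velocities_from_ee_velocity_dls; the damped-least-squares mapping itself, q_dot = J^T (J J^T + lambda^2 I)^-1 v, is unchanged. For reference, a minimal standalone sketch of that mapping in plain NumPy; the function and variable names below are illustrative and are not part of the patched modules.

import numpy as np


def dls_joint_velocities(jacobian, ee_velocity, damping_factor=0.05):
    # Damped least squares: q_dot = J^T (J J^T + lambda^2 I)^-1 v
    identity_matrix = np.eye(jacobian.shape[0])
    damped_matrix = jacobian @ jacobian.T + (damping_factor**2) * identity_matrix
    return jacobian.T @ np.linalg.inv(damped_matrix) @ ee_velocity


# Example: a random 6x6 Jacobian and a 0.1 m/s end-effector velocity along x.
rng = np.random.default_rng(0)
print(dls_joint_velocities(rng.standard_normal((6, 6)), np.array([0.1, 0, 0, 0, 0, 0])))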
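
Likewise, the camera_helpers.py hunk only normalizes exponent spacing in get_fov_from_dfov; the diagonal-to-horizontal/vertical field-of-view conversion is unchanged. A standalone sketch of that conversion (illustrative names, not the patched function), with one worked example under an assumed 69-degree diagonal FoV on a 640x480 image.

import numpy as np


def fov_from_dfov(width_px, height_px, dfov_deg):
    # Split a diagonal FoV into horizontal/vertical FoV via the image diagonal.
    dfov = np.deg2rad(dfov_deg)
    diag = np.sqrt(width_px**2 + height_px**2)
    fov_w = 2 * np.arctan(np.tan(dfov / 2) * width_px / diag)
    fov_h = 2 * np.arctan(np.tan(dfov / 2) * height_px / diag)
    return np.rad2deg(fov_w), np.rad2deg(fov_h)


# 640x480 with an assumed 69-degree diagonal FoV -> roughly (57.6, 44.8) degrees.
print(fov_from_dfov(640, 480, 69.0))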